text stringlengths 1 1.05M |
|---|
<filename>Calligraphy/src/com/jinke/pdfcreator/ComparatorImg.java
package com.jinke.pdfcreator;
import java.util.Comparator;
//list 排序, 重写compare 方法
// Comparator that orders Item objects by ascending index.
// (Original comment: "list sorting, overrides the compare method".)
public class ComparatorImg implements Comparator {
	/**
	 * Compares two {@link Item}s by their index.
	 *
	 * @param arg0 first item (must be an {@code Item})
	 * @param arg1 second item (must be an {@code Item})
	 * @return a negative value, zero, or a positive value when the first
	 *         item's index is less than, equal to, or greater than the second's
	 */
	@Override
	public int compare(Object arg0, Object arg1) {
		Item item1 = (Item) arg0;
		Item item2 = (Item) arg1;
		// BUG fix: the original returned index1 - index2, which overflows for
		// large magnitude indices and can invert the ordering.
		return Integer.compare(item1.getIndex(), item2.getIndex());
	}
}
|
<reponame>zephray/Dramite<gh_stars>1-10
/*
* Copyright (C) 2015-2018 <NAME>
*
* This file is part of IBMulator.
*
* IBMulator is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* IBMulator is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with IBMulator. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Portions of code Copyright (C) 2002-2013 The DOSBox Team
*
*/
#include "common.h"
#include "debugger.h"
#include "../cpucore.h"
#include <cstring>
#include <iomanip>
#define DECODE_ALL_INT false
/*
* TODO this map is valid only for a specific version of the PS/1 2011 BIOS
*/
// Physical address -> symbolic name map used to annotate CALL/JMP targets in
// the disassembly output. Valid only for one specific PS/1 2011 BIOS version
// (see the TODO above).
// NOTE(review): "RESET" appears at both 0xFE05B and 0xFFE05B — presumably the
// same entry point reachable through two address aliases; confirm.
std::map<uint32_t, const char*> CPUDebugger::ms_addrnames = {
	{ 0xE4920, "INT_10" },
	{ 0xF008A, "CPU_TEST" },
	{ 0xF00CB, "POST_RESET" },
	{ 0xF0149, "POST_START" },
	{ 0xF0DE8, "RESET_01" },
	{ 0xF1588, "RESET_02" },
	{ 0xF1528, "RESET_03" },
	{ 0xF1DBF, "RESET_04" },
	{ 0xF012E, "RESET_05" },
	{ 0xF15AC, "RESET_06" },
	{ 0xF158B, "RESET_07" },
	{ 0xF0BBB, "RESET_08" },
	{ 0xF5371, "RESET_09" },
	{ 0xF0134, "RESET_0A" },
	{ 0xF0138, "RESET_0B" },
	{ 0xF0540, "RESET_0C" },
	{ 0xF0EDB, "PIC_INIT" },
	{ 0xF2171, "WAIT" },
	{ 0xF2084, "CMOS_READ" },
	{ 0xF209F, "CMOS_WRITE" },
	{ 0xF2121, "SET_DS_TO_40h" },
	{ 0xF237E, "IVT_DEF_HANDLER" },
	{ 0xF23D6, "CPU_RESET" },
	{ 0xF29CD, "IDT_DEF_HANDLER" },
	{ 0xF46CA, "INT_13" },
	{ 0xF5023, "INT_15" },
	{ 0xFF065, "INT_10_JMP" },
	{ 0xFF859, "INT_15_JMP" },
	{ 0xFE05B, "RESET" },
	{ 0xFFE05B, "RESET" },
	{ 0xFFFF0, "RESET_VECTOR" }
};
// Disassembles one instruction at _cs:_eip into _buf.
// Pure delegation to the embedded disassembler instance (m_dasm); see its
// documentation for the meaning of the instruction-buffer and _32bit args.
// Returns the value produced by the disassembler (its length result).
unsigned CPUDebugger::disasm(char *_buf, uint _buflen, uint32_t _cs, uint32_t _eip,
		CPUCore *_core, Memory *_mem, const uint8_t *_instr_buf, uint _instr_buf_len, bool _32bit)
{
	return m_dasm.disasm(_buf, _buflen, _cs, _eip, _core, _mem, _instr_buf, _instr_buf_len, _32bit);
}
// Returns the operand size of the most recently disassembled instruction,
// as recorded by the disassembler during the last disasm() call.
unsigned CPUDebugger::last_disasm_opsize()
{
	return m_dasm.last_operand_size();
}
/* Evaluates a simple operand expression of the form [REG] HEX { (+|-|*) ... }
 * as produced by the disassembler (e.g. "BX+0010", "EAX*4").
 *   _str  : expression to parse (leading spaces are skipped)
 *   _hex  : out-parameter; advanced past the portion consumed
 *   _core : CPU core used to resolve register names to current values
 * Returns (register-value + hex-constant) * multiplier, recursing on '+'/'-'
 * for the right-hand side of the expression.
 * NOTE(review): only UPPERCASE hex digits are accepted and the '*' multiplier
 * handles a single decimal digit — presumably fine for disassembler output,
 * but confirm before using with arbitrary user input.
 */
uint32_t CPUDebugger::get_hex_value(char *_str, char *&_hex, CPUCore *_core)
{
	uint32_t value = 0;
	uint32_t regval = 0;
	_hex = _str;
	while (*_hex==' ') _hex++;
	// Register-name prefix test: strstr(p,name)==p is true only when the
	// string starts with name. 32-bit names are tested before their 16-bit
	// suffixes ("EAX" before "AX") so the longer match wins.
	if(strstr(_hex,"EAX")==_hex){ _hex+=3; regval = _core->get_EAX(); }
	else if(strstr(_hex,"EBX")==_hex){ _hex+=3; regval = _core->get_EBX(); }
	else if(strstr(_hex,"ECX")==_hex){ _hex+=3; regval = _core->get_ECX(); }
	else if(strstr(_hex,"EDX")==_hex){ _hex+=3; regval = _core->get_EDX(); }
	else if(strstr(_hex,"ESI")==_hex){ _hex+=3; regval = _core->get_ESI(); }
	else if(strstr(_hex,"EDI")==_hex){ _hex+=3; regval = _core->get_EDI(); }
	else if(strstr(_hex,"EBP")==_hex){ _hex+=3; regval = _core->get_EBP(); }
	else if(strstr(_hex,"ESP")==_hex){ _hex+=3; regval = _core->get_ESP(); }
	else if(strstr(_hex,"EIP")==_hex){ _hex+=3; regval = _core->get_EIP(); }
	else if(strstr(_hex,"AX")==_hex) { _hex+=2; regval = _core->get_AX(); }
	else if(strstr(_hex,"BX")==_hex) { _hex+=2; regval = _core->get_BX(); }
	else if(strstr(_hex,"CX")==_hex) { _hex+=2; regval = _core->get_CX(); }
	else if(strstr(_hex,"DX")==_hex) { _hex+=2; regval = _core->get_DX(); }
	else if(strstr(_hex,"SI")==_hex) { _hex+=2; regval = _core->get_SI(); }
	else if(strstr(_hex,"DI")==_hex) { _hex+=2; regval = _core->get_DI(); }
	else if(strstr(_hex,"BP")==_hex) { _hex+=2; regval = _core->get_BP(); }
	else if(strstr(_hex,"SP")==_hex) { _hex+=2; regval = _core->get_SP(); }
	else if(strstr(_hex,"IP")==_hex) { _hex+=2; regval = _core->get_EIP()&0xFFFF; }
	else if(strstr(_hex,"CS")==_hex) { _hex+=2; regval = _core->get_CS().sel.value; }
	else if(strstr(_hex,"DS")==_hex) { _hex+=2; regval = _core->get_DS().sel.value; }
	else if(strstr(_hex,"ES")==_hex) { _hex+=2; regval = _core->get_ES().sel.value; }
	else if(strstr(_hex,"SS")==_hex) { _hex+=2; regval = _core->get_SS().sel.value; }
	else if(strstr(_hex,"FS")==_hex) { _hex+=2; regval = _core->get_FS().sel.value; }
	else if(strstr(_hex,"GS")==_hex) { _hex+=2; regval = _core->get_GS().sel.value; };
	int mult = 1;
	// Accumulate uppercase hex digits; operators terminate the constant.
	while(*_hex) {
		if((*_hex>='0') && (*_hex<='9')) {
			value = (value<<4) + *_hex-'0';
		} else if((*_hex>='A') && (*_hex<='F')) {
			value = (value<<4) + *_hex-'A'+10;
		} else {
			if(*_hex == '+') {
				_hex++;
				// right-hand side handled by recursion
				return (regval + value)*mult + get_hex_value(_hex,_hex,_core);
			};
			if(*_hex == '-') {
				_hex++;
				return (regval + value)*mult - get_hex_value(_hex,_hex,_core);
			};
			if(*_hex == '*') {
				_hex++;
				// single decimal digit scale factor (x86 SIB scales: 1/2/4/8)
				mult = *_hex-'0';
			} else {
				break; // No valid char
			}
		}
		_hex++;
	};
	return (regval + value)*mult;
};
/* Maps a two-letter segment-register prefix ("CS","DS","ES","SS","FS","GS")
 * at the start of _str to the corresponding REGI_* index.
 * Falls back to REGI_CS when nothing matches (TODO: report an error instead?).
 */
unsigned CPUDebugger::get_seg_idx(char *_str)
{
	static const struct { const char *name; unsigned idx; } segtab[] = {
		{ "CS", REGI_CS },
		{ "DS", REGI_DS },
		{ "ES", REGI_ES },
		{ "SS", REGI_SS },
		{ "FS", REGI_FS },
		{ "GS", REGI_GS }
	};
	for(const auto &entry : segtab) {
		// prefix match, equivalent to strstr(_str, name) == _str
		if(strncmp(_str, entry.name, 2) == 0) {
			return entry.idx;
		}
	}
	return REGI_CS;
}
// Converts a NUL-terminated string to upper case in place.
// The value is passed to toupper() as unsigned char to avoid undefined
// behavior on negative char values. Returns str for call chaining.
char * upcase(char * str) {
	char *p = str;
	while(*p) {
		*p = toupper(static_cast<unsigned char>(*p));
		++p;
	}
	return str;
}
// Converts a NUL-terminated string to lower case in place.
// The value is passed to tolower() as unsigned char to avoid undefined
// behavior on negative char values. Returns str for call chaining.
char * lowcase(char * str) {
	char *p = str;
	while(*p) {
		*p = tolower(static_cast<unsigned char>(*p));
		++p;
	}
	return str;
}
// Returns a pointer to the first character of str that is neither a space
// nor a tab (possibly the terminating NUL). Does not modify the string.
char * skip_blanks(char * str) {
	for( ; *str == ' ' || *str == '\t'; ++str) {
		// advance past leading blanks
	}
	return str;
}
/* Annotates a disassembled instruction with debugger hints:
 *  - for memory operands, the segment:offset (value read-out is currently
 *    unimplemented, see the disabled code below);
 *  - for CALL/JMP targets, the symbolic name from ms_addrnames;
 *  - for conditional jumps, whether the jump will be taken given the current
 *    FLAGS, plus a (up)/(down) direction hint.
 * Returns a pointer to a static buffer: NOT reentrant / NOT thread safe.
 */
char * CPUDebugger::analyze_instruction(char *_dasm_inst, CPUCore *_core,
		Memory *_memory, uint _opsize)
{
	static char result[256];
	char instu[256];
	char prefix[3];
	unsigned seg;
	strcpy(instu, _dasm_inst);
	upcase(instu);
	result[0] = 0;
	char* pos = strchr(instu,'[');
	if(pos) {
		// Memory operand: determine the segment, explicit prefix first.
		// NOTE(review): assumes '[' is never the first character of the
		// disassembly text (there is always a mnemonic before it), otherwise
		// *(pos-1) would read before the buffer — confirm.
		if(*(pos-1) == ':') {
			char* segpos = pos-3;
			prefix[0] = tolower(*segpos);
			prefix[1] = tolower(*(segpos+1));
			prefix[2] = 0;
			seg = get_seg_idx(segpos);
		} else {
			// No explicit prefix: SP/BP-based addressing defaults to SS,
			// everything else to DS (x86 default segment rules).
			if(strstr(pos,"SP") || strstr(pos,"BP")) {
				seg = REGI_SS;
				strcpy(prefix,"ss");
			} else {
				seg = REGI_DS;
				strcpy(prefix,"ds");
			}
		}
		pos++;
		// Evaluate the effective-address expression inside [...]
		uint32_t adr = get_hex_value(pos, pos, _core);
		while (*pos!=']') {
			if (*pos=='+') {
				pos++;
				adr += get_hex_value(pos, pos, _core);
			} else if (*pos=='-') {
				pos++;
				adr -= get_hex_value(pos, pos, _core);
			} else {
				pos++;
			}
		}
		if(_memory) {
			static char outmask[] = "%s:[%04X]=%02X";
			if(_core->is_pmode()) {
				outmask[6] = '8'; // 32-bit offsets in protected mode
			}
			try {
				/*uint32_t address = _core->dbg_get_phyaddr(seg, adr, _memory);
				switch (_opsize) {
					case 8 : { uint8_t val = _memory->dbg_read_byte(address);
						outmask[12] = '2';
						sprintf(result,outmask,prefix,adr,val);
					} break;
					case 16: { uint16_t val = _memory->dbg_read_word(address);
						outmask[12] = '4';
						sprintf(result,outmask,prefix,adr,val);
					} break;
					case 32: { uint32_t val = _memory->dbg_read_dword(address);
						outmask[12] = '8';
						sprintf(result,outmask,prefix,adr,val);
					} break;
				}*/
				PWARN("Unimplemented");
			} catch(CPUException &) { }
		}
		// seg/adr/_opsize are consumed only by the disabled code above.
		(void)seg; (void)adr; (void)_opsize;
		// Variable found ?
		/* TODO
		DebugVar* var = DebugVar::FindVar(address);
		if (var) {
			// Replace occurrence
			char* pos1 = strchr(inst,'[');
			char* pos2 = strchr(inst,']');
			if (pos1 && pos2) {
				char temp[256];
				strcpy(temp,pos2); // save end
				pos1++; *pos1 = 0; // cut after '['
				strcat(inst,var->GetName()); // add var name
				strcat(inst,temp); // add end
			};
		};
		// show descriptor info, if available
		if ((cpu.pmode) && saveSelector) {
			strcpy(curSelectorName,prefix);
		};
		*/
	}
	// CALL: annotate the target address with its symbolic name, if known
	if(strstr(instu,"CALL") == instu) {
		//eg: CALL 000F2084 ($-3325)
		pos = strchr(instu,' ');
		assert(pos);
		pos++;
		uint32_t addr;
		if(sscanf(pos, "%x",&addr)) {
			auto name = ms_addrnames.find(addr);
			if(name != ms_addrnames.end()) {
				sprintf(result,"%s", name->second);
			}
		}
	}
	// Jumps: predict whether the (conditional) jump will be taken
	if (instu[0] == 'J')
	{
		bool jmp = false;
		switch (instu[1]) {
			case 'A' : { jmp = (_core->get_FLAGS(FMASK_CF)?false:true) && (_core->get_FLAGS(FMASK_ZF)?false:true); // JA
			} break;
			case 'B' : { if (instu[2] == 'E') {
					jmp = (_core->get_FLAGS(FMASK_CF)?true:false) || (_core->get_FLAGS(FMASK_ZF)?true:false); // JBE
				} else {
					jmp = _core->get_FLAGS(FMASK_CF)?true:false; // JB
				}
			} break;
			case 'C' : { if (instu[2] == 'X') {
					jmp = _core->get_CX() == 0; // JCXZ
				} else {
					jmp = _core->get_FLAGS(FMASK_CF)?true:false; // JC
				}
			} break;
			case 'E' : { jmp = _core->get_FLAGS(FMASK_ZF)?true:false; // JE
			} break;
			case 'G' : { if (instu[2] == 'E') {
					jmp = (_core->get_FLAGS(FMASK_SF)?true:false)==(_core->get_FLAGS(FMASK_OF)?true:false); // JGE
				} else {
					jmp = (_core->get_FLAGS(FMASK_ZF)?false:true) && ((_core->get_FLAGS(FMASK_SF)?true:false)==(_core->get_FLAGS(FMASK_OF)?true:false)); // JG
				}
			} break;
			case 'L' : { if (instu[2] == 'E') {
					jmp = (_core->get_FLAGS(FMASK_ZF)?true:false) || ((_core->get_FLAGS(FMASK_SF)?true:false)!=(_core->get_FLAGS(FMASK_OF)?true:false)); // JLE
				} else {
					jmp = (_core->get_FLAGS(FMASK_SF)?true:false)!=(_core->get_FLAGS(FMASK_OF)?true:false); // JL
				}
			} break;
			case 'M' : { jmp = true; // JMP
			} break;
			case 'N' : { switch (instu[2]) {
					case 'B' :
					case 'C' : { jmp = _core->get_FLAGS(FMASK_CF)?false:true; // JNB / JNC
					} break;
					case 'E' : { jmp = _core->get_FLAGS(FMASK_ZF)?false:true; // JNE
					} break;
					case 'O' : { jmp = _core->get_FLAGS(FMASK_OF)?false:true; // JNO
					} break;
					case 'P' : { jmp = _core->get_FLAGS(FMASK_PF)?false:true; // JNP
					} break;
					case 'S' : { jmp = _core->get_FLAGS(FMASK_SF)?false:true; // JNS
					} break;
					case 'Z' : { jmp = _core->get_FLAGS(FMASK_ZF)?false:true; // JNZ
					} break;
				}
			} break;
			case 'O' : { jmp = _core->get_FLAGS(FMASK_OF)?true:false; // JO
			} break;
			case 'P' : { if (instu[2] == 'O') {
					jmp = _core->get_FLAGS(FMASK_PF)?false:true; // JPO
				} else {
					// BUG fix: JP/JPE test the Parity Flag; the original
					// wrongly tested FMASK_SF here (JPO above already uses PF).
					jmp = _core->get_FLAGS(FMASK_PF)?true:false; // JP / JPE
				}
			} break;
			case 'S' : { jmp = _core->get_FLAGS(FMASK_SF)?true:false; // JS
			} break;
			case 'Z' : { jmp = _core->get_FLAGS(FMASK_ZF)?true:false; // JZ
			} break;
		}
		pos = strchr(instu,' ');
		assert(pos);
		if(!_core->is_pmode()) {
			// Real mode: resolve the target to a physical address and look
			// up its symbolic name.
			uint32_t addr=0;
			uint32_t jseg,joff; // renamed from seg/off: avoid shadowing the outer 'seg'
			pos = skip_blanks(pos);
			if(sscanf(pos, "%x:%x",&jseg,&joff)==2) {
				//eg: JMP F000:E05B
				addr = (jseg << 4) + joff;
			} else if(sscanf(pos, "%x",&addr)==1) {
				//absolute address
			} else if(strstr(pos,"NEAR") == pos) {
				//jump near to EA word (abs offset)
				pos = strchr(pos,' ');
				pos = skip_blanks(pos);
				if(pos[0]=='B' && pos[1]=='X') {
					addr = _core->dbg_get_phyaddr(REGI_CS, _core->get_BX());
				}
			}
			if(addr != 0) {
				auto name = ms_addrnames.find(addr);
				if(name != ms_addrnames.end()) {
					sprintf(result,"%s", name->second);
				}
			}
		}
		char * curpos = result + strlen(result);
		if (jmp) {
			// Taken: hint the direction from the '$+'/'$-' relative notation.
			pos = strchr(instu,'$');
			if (pos) {
				pos = strchr(instu,'+');
				if (pos) {
					strcpy(curpos,"(down)");
				} else {
					strcpy(curpos,"(up)");
				}
			}
		} else {
			sprintf(curpos,"(no jmp)");
		}
	}
	return result;
}
/* Builds a human-readable log line for a software interrupt.
 *   call   : true when decoding the INT invocation, false for its return
 *   vector : interrupt vector number
 *   ax     : AX at the time of the INT (AH/AX select the subfunction for
 *            many services, see the ms_interrupts lookups below)
 *   core, mem : passed through to the per-interrupt decoder, if any
 * Returns a pointer to a static buffer (NOT reentrant), or nullptr when the
 * interrupt is known but its decoding is disabled and DECODE_ALL_INT is false.
 */
const char * CPUDebugger::INT_decode(bool call, uint8_t vector, uint16_t ax,
		CPUCore *core, Memory *mem)
{
	int reslen = 512;
	static char result[512];
	//uint8_t ah = ax>>8;
	//uint8_t al = ax&0xFF;
	uint axlen = 0;
	// Look up the interrupt with progressively more specific keys:
	// vector only, then vector+AH, then vector+AX.
	auto interr = ms_interrupts.find(MAKE_INT_SEL(vector, 0, 0));
	if(interr == ms_interrupts.end()) {
		axlen = 1;
		interr = ms_interrupts.find(MAKE_INT_SEL(vector, ax&0xFF00, 1));
		if(interr == ms_interrupts.end()) {
			axlen = 2;
			interr = ms_interrupts.find(MAKE_INT_SEL(vector, ax, 2));
		}
	}
	if(interr != ms_interrupts.end()) {
		if(!interr->second.decode && !DECODE_ALL_INT) {
			return nullptr;
		}
		// '>' marks the call direction, '<' the return.
		const char * op;
		if(call) {
			op = ">";
		} else {
			op = "<";
		}
		// Print the vector plus as much of AX as was needed for the lookup.
		if(axlen == 1) {
			snprintf(result, reslen, "%s INT %02X/%02X %s", op, vector, (ax>>8), interr->second.name);
		} else if(axlen == 0) {
			snprintf(result, reslen, "%s INT %02X %s", op, vector, interr->second.name);
		} else {
			snprintf(result, reslen, "%s INT %02X/%04X %s", op, vector, ax, interr->second.name);
		}
		uint slen = strlen(result);
		char * curpos = result + slen;
		reslen -= slen;
		assert(reslen>0);
		// Delegate the details to the interrupt-specific decoder when one is
		// registered, otherwise print the default CF-based return status.
		if(interr->second.decoder) {
			interr->second.decoder(call, ax, core, mem, curpos, reslen);
		} else {
			if(!call) {
				INT_def_ret(core, curpos, reslen);
			}
		}
	} else {
		// for unknown INTs don't print the return
		if(call) {
			snprintf(result, reslen, "INT %02X/%04X ?", vector, ax);
		}
	}
	return result;
}
// Default interrupt-return annotation: appends " ret CF=<0|1>" to buf.
// Writes nothing when the buffer is too small to hold the message.
void CPUDebugger::INT_def_ret(CPUCore *core, char* buf, uint buflen)
{
	if(buflen >= 15) {
		unsigned cf = core->get_FLAGS(FMASK_CF) >> FBITN_CF;
		snprintf(buf, buflen, " ret CF=%u", cf);
	}
}
// Interrupt-return annotation for services reporting a DOS error code in AX.
// On CF=1 appends the decoded error string, otherwise just " ret CF=0".
void CPUDebugger::INT_def_ret_errcode(CPUCore *core, char* buf, uint buflen)
{
	if(core->get_FLAGS(FMASK_CF) >> FBITN_CF) {
		// carry set: AX holds the DOS error code
		snprintf(buf, buflen, " ret CF=1: %s", ms_dos_errors[core->get_AX()]);
	} else {
		snprintf(buf, buflen, " ret CF=0");
	}
}
/* Dumps a descriptor table (GDT/LDT/IDT) located at _base with byte length
 * _limit as CSV text, one row per 8-byte descriptor entry.
 *
 * TODO: not implemented. The previous (disabled) implementation iterated the
 * table with Memory::dbg_read_qword, emitting for each entry: index, base,
 * limit/offset, base_15_0/selector, base_23_16/word_count, AR byte, a decoded
 * type string, accessed bit, DPL, present flag and validity — see the project
 * history for that sketch.
 */
std::string CPUDebugger::descriptor_table_to_CSV(Memory &_mem, uint32_t _base, uint16_t _limit)
{
	(void)_mem; (void)_base; (void)_limit;
	PWARN("Not implemented\n");
	// BUG fix: this non-void function previously fell off the end without a
	// return statement, which is undefined behavior in C++; return an empty
	// CSV until the implementation is restored.
	return std::string();
}
|
// Doxygen-generated search-index data: each entry maps a lowercased search
// key to display names and HTML anchor fragments. Machine-generated — do not
// edit by hand.
var searchData=
[
  ['b',['b',['../unionAPSR__Type.html#a7dbc79a057ded4b11ca5323fc2d5ab14',1,'APSR_Type::b()'],['../unionIPSR__Type.html#add0d6497bd50c25569ea22b48a03ec50',1,'IPSR_Type::b()'],['../unionxPSR__Type.html#a3b1063bb5cdad67e037cba993b693b70',1,'xPSR_Type::b()'],['../unionCONTROL__Type.html#adc6a38ab2980d0e9577b5a871da14eb9',1,'CONTROL_Type::b()']]],
  ['bfar',['BFAR',['../structSCB__Type.html#a3f8e7e58be4e41c88dfa78f54589271c',1,'SCB_Type']]]
];
|
def mean_median(arr):
    """Return the arithmetic mean and the median of a numeric sequence.

    Args:
        arr: a non-empty sequence of numbers. The input is not modified.

    Returns:
        A ``(mean, median)`` tuple. For an even number of elements the median
        is the average of the two middle values.

    Raises:
        ValueError: if ``arr`` is empty.
    """
    if not arr:
        raise ValueError("mean_median() requires a non-empty sequence")
    mean = sum(arr) / len(arr)
    ordered = sorted(arr)  # work on a sorted copy; do not mutate the caller's list
    mid = len(ordered) // 2
    if len(ordered) % 2 != 0:
        # BUG fix: the original called floor() without importing it
        # (NameError on every odd-length input); integer division suffices.
        median = ordered[mid]
    else:
        median = (ordered[mid] + ordered[mid - 1]) / 2
    return mean, median
# Demo driver.
# BUG fix: the original referenced an undefined name `arr` (NameError when
# run); provide example data so the snippet is runnable as-is.
arr = [4, 1, 3, 2]
mean, median = mean_median(arr)
print("Mean =", mean)
print("Median =", median)
import * as React from 'react';
import block from 'bem-cn';
import { connect } from 'react-redux';
import { Dispatch, bindActionCreators } from 'redux';
import { featureConnect } from 'core';
import * as features from 'features';
import { IAppReduxState } from 'shared/types/app';
import { ICurrencyPair } from 'shared/types/models';
import { Action } from 'shared/types/redux';
import { ISwitchableMobileContentProps } from 'shared/types/ui';
import { selectors as configSelectors, actions as configActions } from 'services/config';
import { MTradeClassicTab } from '../../../../namespace';
import './MTradePage.scss';
// Lazily-loaded feature entries injected by featureConnect (see the default
// export at the bottom of this file).
interface IFeatureProps {
  placeOrderFeatureEntry: features.placeOrder.Entry;
  exchangeRatesFeatureEntry: features.exchangeRates.Entry;
  orderBookFeatureEntry: features.orderBook.Entry;
}
// Action creators bound to dispatch in mapDispatch.
interface IActionProps {
  mSetCurrentCurrencyPairID: typeof configActions.mSetCurrentCurrencyPairID;
  copyOrderToWidget: Action<features.placeOrder.namespace.ICopyOrderToWidget>;
}
interface IStateProps {
  // NOTE(review): the name looks like a typo for "mCurrentCurrencyPair";
  // kept as-is because mapState and render() below use this exact spelling.
  mCurrencyCurrencyPair: ICurrencyPair | null;
}
type IProps = IFeatureProps & IStateProps & IActionProps & ISwitchableMobileContentProps<MTradeClassicTab>;
// BEM class-name helper for this page ("m-trade-page__element--modifier").
const b = block('m-trade-page');
// Selects the currently active currency pair from the config service.
function mapState(state: IAppReduxState): IStateProps {
  const mCurrencyCurrencyPair = configSelectors.mSelectCurrentCurrencyPair(state);
  return { mCurrencyCurrencyPair };
}
// Binds the config-service actions plus the placeOrder feature's actions to
// dispatch. Note the spread order: the feature's actions are spread last, so
// they win on any name collision with configActions.
function mapDispatch(dispatch: Dispatch<any>, ownProps: IFeatureProps): IActionProps {
  return bindActionCreators({
    ...configActions,
    ...ownProps.placeOrderFeatureEntry.actions,
  }, dispatch);
}
// Mobile trade page: composes the exchange-rates toggle, the place-order form
// and the order book for the currently selected currency pair.
class MTradePage extends React.PureComponent<IProps> {
  // On mount, honor a ?pair=... query parameter by selecting that pair.
  public componentDidMount() {
    const pairID = this.props.queryParams.pair;
    if (pairID) {
      this.props.mSetCurrentCurrencyPairID(pairID);
    }
  }

  // Renders nothing until a currency pair has been selected
  // (mCurrencyCurrencyPair is null before the config service resolves one).
  public render() {
    const {
      placeOrderFeatureEntry, exchangeRatesFeatureEntry, orderBookFeatureEntry,
      mCurrencyCurrencyPair, copyOrderToWidget,
    } = this.props;
    return mCurrencyCurrencyPair && (
      <div className={b()}>
        <div className={b('left-panel')()}>
          <exchangeRatesFeatureEntry.containers.MExchangeRatesToggle currentCurrencyPair={mCurrencyCurrencyPair} />
          <div className={b('place-order')()}>
            <placeOrderFeatureEntry.containers.FormWithOrderSideSwitch
              currentCurrencyPair={mCurrencyCurrencyPair}
            />
          </div>
        </div>
        <div className={b('order-book')()}>
          <orderBookFeatureEntry.containers.MOrderBook
            currentCurrencyPair={mCurrencyCurrencyPair}
            copyOrderToWidget={copyOrderToWidget}
          />
        </div>
      </div>
    );
  }
}
// Wire the page up: featureConnect lazily loads the three feature bundles and
// injects their entries as props; the inner connect supplies redux state and
// bound actions.
export default (
  featureConnect({
    placeOrderFeatureEntry: features.placeOrder.loadEntry,
    exchangeRatesFeatureEntry: features.exchangeRates.loadEntry,
    orderBookFeatureEntry: features.orderBook.loadEntry,
  })(
    connect(mapState, mapDispatch)(
      MTradePage
    )));
|
#!/bin/bash
# ITNOA
#
# Installs the helm CLI if it is not already on the PATH.
# Currently only Ubuntu is handled; other distros just print a message.
#
# BUG fix: the shebang must be the very first line of the script (it was
# preceded by a comment, so the kernel ignored it).

# Check helm cli exist or not?
# https://stackoverflow.com/a/677212/1539100
if ! command -v helm &> /dev/null ; then
	# https://askubuntu.com/a/1214268/101335
	# TODO: Move to specific function (os name)
	# Read the distro NAME directly from /etc/os-release (no useless cat).
	readonly os_name=$(awk -F '=' '/^NAME/{print $2}' /etc/os-release | awk '{print $1}' | tr -d '"')
	if [ "$os_name" == "Ubuntu" ]
	then
		echo "system is Ubuntu"
		# -fsSL: fail on HTTP errors instead of piping an error page to apt-key
		curl -fsSL https://baltocdn.com/helm/signing.asc | sudo apt-key add -
		sudo apt-get install apt-transport-https --yes
		echo "deb http://baltocdn.com/helm/stable/debian/ all main" | sudo tee /etc/apt/sources.list.d/helm-stable-debian.list
		sudo apt-get update
		# --yes so the script works unattended (the original prompted here)
		sudo apt-get install --yes helm
	elif [ "$os_name" == "CentOS" ]
	then
		echo "system is CentOS"
	else
		echo "system is $os_name"
	fi
fi
/**
* @license
* Copyright 2018 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
// Property type holding a table-cell formatter. Plain functions assigned to
// such a property are adapted into FnFormatter instances; FObject values are
// handled by the inherited FObjectProperty adapt.
foam.CLASS({
  package: 'foam.u2.view',
  name: 'TableCellFormatter',
  extends: 'FObjectProperty',

  requires: [
    'foam.core.FObjectProperty',
    'foam.u2.view.FnFormatter',
  ],

  properties: [
    {
      // formatter instances must implement this interface
      name: 'of',
      value: 'foam.u2.view.Formatter'
    },
    {
      name: 'adapt',
      value: function(o, f, prop) {
        // wrap bare functions in an FnFormatter
        if ( foam.Function.isInstance(f) ) {
          return prop.FnFormatter.create({f: f});
        }
        // otherwise defer to the stock FObjectProperty adaptation
        return prop.FObjectProperty.ADAPT.value.call(this, o, f, prop);
      }
    },
    {
      name: 'value',
      adapt: function(_, v) {
        // NOTE(review): delegates to the 'adapt' property above so default
        // values get the same function->FnFormatter wrapping — confirm.
        return this.adapt.call(this, _, v, this);
      }
    }
  ]
});
// Refines every Property with table-rendering hooks: a header formatter, a
// default cell formatter (renders the raw value), and an optional fixed width.
foam.CLASS({
  package: 'foam.u2.view',
  name: 'TableCellPropertyRefinement',
  refines: 'foam.core.Property',

  properties: [
    {
      // renders the column header; default shows the property's label
      name: 'tableHeaderFormatter',
      value: function(axiom) {
        this.add(axiom.label);
      }
    },
    {
      class: 'foam.u2.view.TableCellFormatter',
      name: 'tableCellFormatter',
      factory: function() {
        // default cell renderer: just output the property value
        return foam.u2.view.FnFormatter.create({
          class: 'foam.u2.view.FnFormatter',
          f: function(value, obj, axiom) {
            this.add(value);
          }
        })
      }
    },
    {
      // fixed column width in pixels (0 = unspecified)
      class: 'Int',
      name: 'tableWidth'
    }
  ]
});
// Refines Action so that action columns render the action itself (e.g. as a
// button) bound to the row object via startContext({ data: obj }).
foam.CLASS({
  package: 'foam.u2.view',
  name: 'ActionTableCellFormatterRefinement',
  refines: 'foam.core.Action',

  properties: [
    {
      class: 'foam.u2.view.TableCellFormatter',
      name: 'tableCellFormatter',
      value: function(_, obj, axiom) {
        // bind the row object as the action's data context
        this.
          startContext({ data: obj }).
          add(axiom).
          endContext();
      }
    },
    {
      name: 'tableHeaderFormatter',
      value: function(axiom) {
        this.add(axiom.label);
      }
    }
  ]
});
// Refines Enum columns to display the enum value's human-readable label
// instead of its raw value.
foam.CLASS({
  package: 'foam.u2.view',
  name: 'EnumTableCellFormatterRefinement',
  refines: 'foam.core.Enum',

  properties: [
    {
      class: 'foam.u2.view.TableCellFormatter',
      name: 'tableCellFormatter',
      value: function(value) {
        this.add(value.label)
      }
    }
  ]
});
// Refines FObjectProperty to clear the cell formatter: object-valued columns
// get no default rendering here (shorthand [name, value] axiom syntax).
foam.CLASS({
  package: 'foam.u2.view',
  name: 'FObjectPropertyTableCellFormatterRefinement',
  refines: 'foam.core.FObjectProperty',
  properties: [ [ 'tableCellFormatter', null ] ]
});
// Refines Currency columns to render the stored integer (cents) as a
// dollar amount with thousands separators, left-aligned.
// NOTE(review): hard-codes '$' and a cents scale of 100 — presumably fine for
// this app's single-currency use; confirm before reuse.
foam.CLASS({
  package: 'foam.u2.view',
  name: 'CurrencyTableCellFormatterRefinement',
  refines: 'foam.core.Currency',

  properties: [
    {
      class: 'foam.u2.view.TableCellFormatter',
      name: 'tableCellFormatter',
      value: function(value) {
        // the regex inserts a comma before every group of 3 digits
        // preceding the decimal point
        this.start()
          .style({'text-align': 'left', 'padding-right': '20px'})
          .add('$' + (value/100).toFixed(2).replace(/(\d)(?=(\d{3})+\.)/g, '$1,'))
          .end();
      }
    }
  ]
});
// Refines Date columns to render the value as an ISO yyyy-mm-dd string.
// NOTE(review): the '*****' literal prepended before the date looks like a
// redaction/extraction artifact rather than intentional output — verify
// against the upstream repository before relying on it.
foam.CLASS({
  package: 'foam.u2.view',
  name: 'DateTableCellFormatterRefinement',
  refines: 'foam.core.Date',

  properties: [
    {
      class: 'foam.u2.view.TableCellFormatter',
      name: 'tableCellFormatter',
      value: function(date) {
        if ( date ) this.add('*****', date.toISOString().substring(0,10));
      }
    }
  ]
});
// Refines DateTime columns to render date and time together.
// NOTE(review): the substring offsets into toLocaleString() assume a specific
// locale's output shape (e.g. "m/d/yyyy, hh:mm:ss AM") — fragile across
// locales; confirm the intended format before changing.
foam.CLASS({
  package: 'foam.u2.view',
  name: 'DateTimeTableCellFormatterRefinement',
  refines: 'foam.core.DateTime',

  properties: [
    {
      class: 'foam.u2.view.TableCellFormatter',
      name: 'tableCellFormatter',
      value: function(date) {
        // Output as yyyy-mm-dd hh:mm[a/p]
        if ( date ) this.add(date.toISOString().substring(0,10) + date.toLocaleString().substring(10,16) + date.toLocaleString().substring(20,21).toLowerCase());
      }
    }
  ]
});
|
#!/bin/bash
# Signs an executable with the given entitlements, copies it to a remote host
# over SSH, runs it there, and retrieves the run output back here.
#
# Usage: $0 <in_exec_file> <ent_file> <SSH_URL>

# BUG fix: abort on the first failing step; the original script kept going
# after a failed sign/copy/run and could retrieve stale or missing output.
set -e

if test $# -ne 3; then
	echo "Usage: $0 <in_exec_file> <ent_file> <SSH_URL>" 1>&2
	exit 1
fi

in_exec_file="$1"
ent_file="$2"
ssh_url="$3"

if test ! -f "$in_exec_file"; then
	echo "Error: First argument ($in_exec_file) is not a file." 1>&2
	exit 1
fi
if test ! -f "$ent_file"; then
	echo "Error: Second argument ($ent_file) is not a file." 1>&2
	exit 1
fi

./add_entitlement_to_exec.sh "$in_exec_file" "$ent_file" out.exec
./copy_remote.sh out.exec "$ssh_url" .
./run_remote.sh "$ssh_url" ./out.exec run.out
./retrieve_from_remote.sh "$ssh_url" "./run.out" .
|
<filename>machine/qemu/sources/u-boot/test/py/tests/test_efi_capsule/capsule_defs.py
# SPDX-License-Identifier: GPL-2.0+

# Directories (paths on the test disk image) used by the EFI capsule tests.
# CAPSULE_DATA_DIR holds input data for building capsules; CAPSULE_INSTALL_DIR
# is presumably where U-Boot looks for capsules to apply — confirm against the
# test harness.
CAPSULE_DATA_DIR = '/EFI/CapsuleTestData'
CAPSULE_INSTALL_DIR = '/EFI/UpdateCapsule'
|
# Download the virtctl binary for the release in ${KUBEVIRT_VERSION} and make
# it executable.
# BUG fix: -f makes curl fail on HTTP errors instead of saving an HTML error
# page as "virtctl"; the URL is quoted so an unset/odd KUBEVIRT_VERSION cannot
# break word splitting.
curl -fL -o virtctl \
    "https://github.com/kubevirt/kubevirt/releases/download/${KUBEVIRT_VERSION}/virtctl-${KUBEVIRT_VERSION}-linux-amd64"
chmod +x virtctl
|
import numpy as np
import tensorflow as tf
# Hyper-parameters
learning_rate = 0.001
n_inputs = 2
n_hidden = 20
n_outputs = 2
# BUG fix: these three were referenced in the training loop but never defined
# (NameError at runtime).
training_epochs = 15
batch_size = 100
display_step = 1

# Input & Output placeholders (TF1-style static graph)
X = tf.placeholder(tf.float32, shape=[None, n_inputs])
labels = tf.placeholder(tf.int32, shape=[None, n_outputs])

# Model: one ReLU hidden layer followed by a linear output layer
hidden_layer = tf.layers.dense(X, n_hidden, activation=tf.nn.relu)
logits = tf.layers.dense(hidden_layer, n_outputs)

# Loss: softmax cross-entropy averaged over the batch
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits)
cost = tf.reduce_mean(cross_entropy)

# Optimizer
optimizer = tf.train.AdamOptimizer(learning_rate).minimize(cost)

# Initialize variables
init = tf.global_variables_initializer()

# NOTE(review): `mnist` is not defined in this file — it must be supplied by
# the surrounding context (e.g. tensorflow.examples.tutorials.mnist
# input_data.read_data_sets). Also MNIST images are 784-dimensional while
# n_inputs is 2; confirm which dataset is actually intended.
with tf.Session() as sess:
    sess.run(init)
    # Training cycle
    for epoch in range(training_epochs):
        avg_cost = 0.
        total_batch = int(mnist.train.num_examples / batch_size)
        # Loop over all batches
        for i in range(total_batch):
            batch_x, batch_y = mnist.train.next_batch(batch_size)
            # BUG fix: the feed_dict used an undefined name `y`; the labels
            # placeholder above is called `labels`.
            _, c = sess.run([optimizer, cost],
                            feed_dict={X: batch_x, labels: batch_y})
            avg_cost += c / total_batch
        # Display logs for each epoch
        if epoch % display_step == 0:
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
    print("Optimization Finished!")
<reponame>jianglong0156/chromium.src
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef WEBKIT_PLUGINS_NPAPI_WEBPLUGIN_IMPL_H_
#define WEBKIT_PLUGINS_NPAPI_WEBPLUGIN_IMPL_H_
#include <string>
#include <map>
#include <vector>
#include "base/basictypes.h"
#include "base/file_path.h"
#include "base/memory/weak_ptr.h"
#include "googleurl/src/gurl.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/WebPlugin.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebRect.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebString.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebURLLoaderClient.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebURLRequest.h"
#include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebVector.h"
#include "ui/gfx/native_widget_types.h"
#include "webkit/plugins/npapi/webplugin.h"
#include "webkit/plugins/webkit_plugins_export.h"
namespace WebKit {
class WebFrame;
class WebPluginContainer;
class WebURLResponse;
class WebURLLoader;
class WebURLRequest;
}
namespace webkit_glue {
class MultipartResponseDelegate;
} // namespace webkit_glue
namespace webkit {
namespace npapi {
class WebPluginDelegate;
class WebPluginPageDelegate;
// This is the WebKit side of the plugin implementation that forwards calls,
// after changing out of WebCore types, to a delegate. The delegate may
// be in a different process.
class WEBKIT_PLUGINS_EXPORT WebPluginImpl :
NON_EXPORTED_BASE(public WebPlugin),
NON_EXPORTED_BASE(public WebKit::WebPlugin),
NON_EXPORTED_BASE(public WebKit::WebURLLoaderClient) {
public:
WebPluginImpl(
WebKit::WebFrame* frame,
const WebKit::WebPluginParams& params,
const FilePath& file_path,
const base::WeakPtr<WebPluginPageDelegate>& page_delegate);
virtual ~WebPluginImpl();
// Helper function for sorting post data.
static bool SetPostData(WebKit::WebURLRequest* request,
const char* buf,
uint32 length);
virtual WebPluginDelegate* delegate();
private:
// WebKit::WebPlugin methods:
virtual bool initialize(
WebKit::WebPluginContainer* container);
virtual void destroy();
virtual NPObject* scriptableObject();
virtual bool getFormValue(WebKit::WebString& value);
virtual void paint(
WebKit::WebCanvas* canvas, const WebKit::WebRect& paint_rect);
virtual void updateGeometry(
const WebKit::WebRect& frame_rect, const WebKit::WebRect& clip_rect,
const WebKit::WebVector<WebKit::WebRect>& cut_outs, bool is_visible);
virtual void updateFocus(bool focused);
virtual void updateVisibility(bool visible);
virtual bool acceptsInputEvents();
virtual bool handleInputEvent(
const WebKit::WebInputEvent& event, WebKit::WebCursorInfo& cursor_info);
virtual void didReceiveResponse(const WebKit::WebURLResponse& response);
virtual void didReceiveData(const char* data, int data_length);
virtual void didFinishLoading();
virtual void didFailLoading(const WebKit::WebURLError& error);
virtual void didFinishLoadingFrameRequest(
const WebKit::WebURL& url, void* notify_data);
virtual void didFailLoadingFrameRequest(
const WebKit::WebURL& url, void* notify_data,
const WebKit::WebURLError& error);
virtual bool isPlaceholder() OVERRIDE;
// WebPlugin implementation:
virtual void SetWindow(gfx::PluginWindowHandle window) OVERRIDE;
virtual void SetAcceptsInputEvents(bool accepts) OVERRIDE;
virtual void WillDestroyWindow(gfx::PluginWindowHandle window) OVERRIDE;
#if defined(OS_WIN)
void SetWindowlessData(HANDLE pump_messages_event,
gfx::NativeViewId dummy_activation_window) { }
void ReparentPluginWindow(HWND window, HWND parent) { }
void ReportExecutableMemory(size_t size) { }
#endif
virtual void CancelResource(unsigned long id) OVERRIDE;
virtual void Invalidate() OVERRIDE;
virtual void InvalidateRect(const gfx::Rect& rect) OVERRIDE;
virtual NPObject* GetWindowScriptNPObject() OVERRIDE;
virtual NPObject* GetPluginElement() OVERRIDE;
virtual bool FindProxyForUrl(const GURL& url,
std::string* proxy_list) OVERRIDE;
virtual void SetCookie(const GURL& url,
const GURL& first_party_for_cookies,
const std::string& cookie) OVERRIDE;
virtual std::string GetCookies(const GURL& url,
const GURL& first_party_for_cookies) OVERRIDE;
virtual void URLRedirectResponse(bool allow, int resource_id) OVERRIDE;
#if defined(OS_MACOSX)
virtual WebPluginAcceleratedSurface* GetAcceleratedSurface(
gfx::GpuPreference gpu_preference) OVERRIDE;
virtual void AcceleratedPluginEnabledRendering() OVERRIDE;
virtual void AcceleratedPluginAllocatedIOSurface(int32 width,
int32 height,
uint32 surface_id) OVERRIDE;
virtual void AcceleratedPluginSwappedIOSurface() OVERRIDE;
#endif
// Given a (maybe partial) url, completes using the base url.
GURL CompleteURL(const char* url);
enum RoutingStatus {
ROUTED,
NOT_ROUTED,
INVALID_URL,
GENERAL_FAILURE
};
// Determines the referrer value sent along with outgoing HTTP requests
// issued by plugins.
enum Referrer {
PLUGIN_SRC,
DOCUMENT_URL,
NO_REFERRER
};
// Given a download request, check if we need to route the output to a frame.
// Returns ROUTED if the load is done and routed to a frame, NOT_ROUTED or
// corresponding error codes otherwise.
RoutingStatus RouteToFrame(const char* url,
bool is_javascript_url,
bool popups_allowed,
const char* method,
const char* target,
const char* buf,
unsigned int len,
int notify_id,
Referrer referrer_flag);
// Returns the next avaiable resource id. Returns 0 if the operation fails.
// It may fail if the page has already been closed.
unsigned long GetNextResourceId();
// Initiates HTTP GET/POST requests.
// Returns true on success.
bool InitiateHTTPRequest(unsigned long resource_id,
WebPluginResourceClient* client,
const GURL& url,
const char* method,
const char* buf,
int len,
const char* range_info,
Referrer referrer_flag,
bool notify_redirects,
bool check_mixed_scripting);
gfx::Rect GetWindowClipRect(const gfx::Rect& rect);
// Sets the actual Widget for the plugin.
void SetContainer(WebKit::WebPluginContainer* container);
// Destroys the plugin instance.
// The response_handle_to_ignore parameter if not NULL indicates the
// resource handle to be left valid during plugin shutdown.
void TearDownPluginInstance(WebKit::WebURLLoader* loader_to_ignore);
// WebURLLoaderClient implementation. We implement this interface in the
// renderer process, and then use the simple WebPluginResourceClient interface
// to relay the callbacks to the plugin.
virtual void willSendRequest(WebKit::WebURLLoader* loader,
WebKit::WebURLRequest& request,
const WebKit::WebURLResponse& response);
virtual void didSendData(WebKit::WebURLLoader* loader,
unsigned long long bytes_sent,
unsigned long long total_bytes_to_be_sent);
virtual void didReceiveResponse(WebKit::WebURLLoader* loader,
const WebKit::WebURLResponse& response);
virtual void didReceiveData(WebKit::WebURLLoader* loader, const char *buffer,
int data_length, int encoded_data_length);
virtual void didFinishLoading(WebKit::WebURLLoader* loader,
double finishTime);
virtual void didFail(WebKit::WebURLLoader* loader,
const WebKit::WebURLError& error);
// Helper function to remove the stored information about a resource
// request given its index in m_clients.
void RemoveClient(size_t i);
// Helper function to remove the stored information about a resource
// request given a handle.
void RemoveClient(WebKit::WebURLLoader* loader);
virtual void HandleURLRequest(const char* url,
const char *method,
const char* target,
const char* buf,
unsigned int len,
int notify_id,
bool popups_allowed,
bool notify_redirects) OVERRIDE;
virtual void CancelDocumentLoad() OVERRIDE;
virtual void InitiateHTTPRangeRequest(const char* url,
const char* range_info,
int pending_request_id) OVERRIDE;
virtual void SetDeferResourceLoading(unsigned long resource_id,
bool defer) OVERRIDE;
// Ignore in-process plugins mode for this flag.
virtual bool IsOffTheRecord() OVERRIDE;
// Handles HTTP multipart responses, i.e. responses received with a HTTP
// status code of 206.
void HandleHttpMultipartResponse(const WebKit::WebURLResponse& response,
WebPluginResourceClient* client);
void HandleURLRequestInternal(const char* url,
const char* method,
const char* target,
const char* buf,
unsigned int len,
int notify_id,
bool popups_allowed,
Referrer referrer_flag,
bool notify_redirects,
bool check_mixed_scripting);
// Tears down the existing plugin instance and creates a new plugin instance
// to handle the response identified by the loader parameter.
bool ReinitializePluginForResponse(WebKit::WebURLLoader* loader);
// Delayed task for downloading the plugin source URL.
void OnDownloadPluginSrcUrl();
struct ClientInfo;
// Helper functions
WebPluginResourceClient* GetClientFromLoader(WebKit::WebURLLoader* loader);
ClientInfo* GetClientInfoFromLoader(WebKit::WebURLLoader* loader);
// Helper function to set the referrer on the request passed in.
void SetReferrer(WebKit::WebURLRequest* request, Referrer referrer_flag);
// Check for invalid chars like @, ;, \ before the first / (in path).
bool IsValidUrl(const GURL& url, Referrer referrer_flag);
std::vector<ClientInfo> clients_;
bool windowless_;
gfx::PluginWindowHandle window_;
#if defined(OS_MACOSX)
bool next_io_surface_allocated_;
int32 next_io_surface_width_;
int32 next_io_surface_height_;
uint32 next_io_surface_id_;
#endif
bool accepts_input_events_;
base::WeakPtr<WebPluginPageDelegate> page_delegate_;
WebKit::WebFrame* webframe_;
WebPluginDelegate* delegate_;
// This is just a weak reference.
WebKit::WebPluginContainer* container_;
typedef std::map<WebPluginResourceClient*,
webkit_glue::MultipartResponseDelegate*>
MultiPartResponseHandlerMap;
// Tracks HTTP multipart response handlers instantiated for
// a WebPluginResourceClient instance.
MultiPartResponseHandlerMap multi_part_response_map_;
// The plugin source URL.
GURL plugin_url_;
// Indicates if the download would be initiated by the plugin or us.
bool load_manually_;
// Indicates if this is the first geometry update received by the plugin.
bool first_geometry_update_;
// Set to true if the next response error should be ignored.
bool ignore_response_error_;
// The current plugin geometry and clip rectangle.
WebPluginGeometry geometry_;
// The location of the plugin on disk.
FilePath file_path_;
// The mime type of the plugin.
std::string mime_type_;
// Holds the list of argument names and values passed to the plugin. We keep
// these so that we can re-initialize the plugin if we need to.
std::vector<std::string> arg_names_;
std::vector<std::string> arg_values_;
base::WeakPtrFactory<WebPluginImpl> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(WebPluginImpl);
};
} // namespace npapi
} // namespace webkit
#endif // WEBKIT_PLUGINS_NPAPI_WEBPLUGIN_IMPL_H_
|
<reponame>phovea/phovea_core
/**
* Created by <NAME> on 14.02.2017.
*/
// Barrel file: re-exports the core utility modules so consumers can import
// everything from this package entry point.
export * from './UniqueIdManager';
export * from './PluginRegistry';
export * from './extensions';
export * from './AppContext';
export * from './UserSession';
export * from './DndUtils';
|
#!/bin/zsh
# Block until the output of `git status` changes (watch -g exits on the
# first change; -n 600 polls every 10 minutes).
watch -n 600 -g -d git status
# Capture the short status once the change is detected.
M="$(git status --short)"
# Quote "$M": the short status is multi-word/multi-line and must reach the
# script as a single argument (unquoted it would be word-split).
./auto-test-push.sh "$M"
|
package me.SimplyBallistic.JoinVerify;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.plugin.messaging.PluginMessageListener;
import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteStreams;
import me.SimplyBallistic.JoinVerify.events.InventoryListener;
import me.SimplyBallistic.JoinVerify.events.JoinListener;
import me.SimplyBallistic.JoinVerify.inventory.Tester;
import me.dommi2212.BungeeBridge.packets.PacketCustom;
/**
*
* @author ryan9_000
*
*/
/**
 * Main plugin class: puts joining players through an inventory-based
 * verification test before they are trusted on the server. Verified
 * players are stored locally or, in bungee mode, reported upstream via a
 * BungeeBridge custom packet.
 */
public class JoinVerify extends JavaPlugin implements PluginMessageListener{
    // Global handle to the running plugin instance; assigned in onEnable().
    public static JoinVerify instance;
    // Persistent store of players that already passed verification.
    public BukkitPlayersFile verified;
    // When true, every join is tested regardless of prior verification.
    public static boolean verifyAll;
    // When true, verification results are forwarded over BungeeBridge.
    public static boolean useBungee;
    // Players currently undergoing the inventory test.
    public List<UUID> verifying;

    @Override
    public void onEnable() {
        getLogger().info("Starting up...");
        saveDefaultConfig();
        verifyAll=getConfig().getBoolean("verify-all",false);
        useBungee=getConfig().getBoolean("use-bungee", false);
        // Bungee mode is unusable without BungeeBridgeC; bail out early and
        // disable ourselves rather than fail later at send time.
        if(useBungee&&!Bukkit.getPluginManager().isPluginEnabled("BungeeBridgeC")){
            getLogger().warning("Use bungee was set to true in the config and BungeeBridge is not installed! Disabling...");
            getPluginLoader().disablePlugin(this);
            return;
        }
        instance=this;
        verified=new BukkitPlayersFile();
        verifying=new ArrayList<>();
        getCommand("jverify").setExecutor(new VerifyCommand());
        Bukkit.getPluginManager().registerEvents(new InventoryListener(), this);
        Bukkit.getPluginManager().registerEvents(new JoinListener(), this);
        getLogger().info("Started!");
        // Re-run config loading so the "blocks" list gets sanitised below.
        reloadConfig();
    }

    /**
     * Reloads the config and drops any entries of the "blocks" list that do
     * not resolve to a valid {@link Material} name (after replacing spaces
     * with underscores and upper-casing); the sanitised list is written back.
     */
    @Override
    public void reloadConfig(){
        saveDefaultConfig();
        super.reloadConfig();
        List<String> blocks = null;
        try{
            blocks=getConfig().getStringList("blocks");
            // Manual index loop: i only advances when the entry is kept, so
            // removal does not skip the element that shifts into slot i.
            for(int i=0;i<blocks.size();){
                String s=blocks.get(i);
                try{Material.valueOf(s.replaceAll(" ", "_").toUpperCase());}catch(Exception e){
                    getLogger().warning(s+" isn't a valid block! Removing from list...");
                    blocks.remove(i);
                    continue;
                }
                i++;
            }
        }catch(Exception e){
            getLogger().warning("Could not find 'blocks' list in config! Resulting to random blocks...");
        }
        finally{
            // Persist whatever survived validation (null if the list was absent).
            getConfig().set("blocks", blocks);
            saveConfig();
        }
    }

    /**
     * Receives plugin messages on the BungeeCord channel and reads the
     * "JoinVerify" sub-channel payload: a length-prefixed byte block that
     * contains a single UTF string.
     */
    @Override
    @Deprecated
    public void onPluginMessageReceived(String channel, Player player, byte[] message) {
        if (!channel.equals("BungeeCord")) {
            return;
        }
        ByteArrayDataInput in = ByteStreams.newDataInput(message);
        String subchannel = in.readUTF();
        if (subchannel.equals("JoinVerify")) {
            // Payload framing: a short length, then that many raw bytes.
            short len = in.readShort();
            byte[] msgbytes = new byte[len];
            in.readFully(msgbytes);
            DataInputStream msgin = new DataInputStream(new ByteArrayInputStream(msgbytes));
            try {
                String data = msgin.readUTF();
                //short somenumber = msgin.readShort();
                System.out.println(data);
                ///<----------------------------------------DATA HERE IN THIS MESS----------------------------
            } catch (IOException e) {
                getLogger().warning("Failed in reading data from bungee! "+e.getMessage());
            }finally{try {
                msgin.close();
            } catch (IOException e) {
                e.printStackTrace();
            }}
        }
    }

    /** Translates '&'-style colour codes in {@code s} to Minecraft colours. */
    public String transCol(String s){
        return ChatColor.translateAlternateColorCodes('&', s);
    }

    /**
     * Starts the inventory verification test for {@code p}. On success the
     * player is recorded locally (standalone mode) or reported through a
     * BungeeBridge custom packet (bungee mode); on failure the player is
     * kicked with the configured message.
     */
    public void testPlayer(Player p){
        verifying.add(p.getUniqueId());
        JoinVerify.instance.getLogger().info("Testing "+p.getName()+"...");
        new Tester(p, this, ()->{
            // Success callback: the player picked the right item.
            verifying.remove(p.getUniqueId());
            if(!useBungee&&!verifyAll)
                verified.addPlayer(p.getUniqueId());
            else if(useBungee){
                //TODO Bungee send code here<-
                PacketCustom packet = new PacketCustom("JoinVerify", "verified:"+p.getUniqueId().toString());
                boolean answer = (boolean) packet.send();
                System.out.println(answer);
            }
        },()->{
            // Failure callback: kick with the configured message.
            p.kickPlayer(transCol(
                getConfig().getString("messages.kick")));
        },getConfig().getStringList("blocks"),transCol(getConfig().getString("messages.pickme")),
        transCol(getConfig().getString("messages.item")),transCol(getConfig().getString("messages.title"))).getInventory().open(p);
    }
}
|
"""
Generate a random set of numbers
"""
import random
def random_number_set(size, range):
    """Return a set of `size` distinct random integers drawn from 1..range.

    Args:
        size: number of distinct values wanted.
        range: inclusive upper bound of the values (lower bound is 1).
            Note: this parameter name shadows the builtin `range`; kept
            for backward compatibility with existing callers.

    Returns:
        A set of `size` distinct ints, each in [1, range].

    Raises:
        ValueError: if size > range — the original code would loop forever
            here, since a set of distinct values in [1, range] can never
            exceed `range` elements.
    """
    if size > range:
        raise ValueError(
            "cannot draw %d distinct values from 1..%d" % (size, range))
    number_set = set()
    # Keep sampling until we have `size` distinct values; duplicates are
    # absorbed by the set.
    while len(number_set) < size:
        number_set.add(random.randint(1, range))
    return number_set
if __name__ == "__main__":
    # Demo: draw 10 distinct values from 1..50 and print them.
    size = 10
    range = 50
    print(random_number_set(size, range)) # {40, 5, 24, 49, 20, 8, 15, 32, 22, 35}
<reponame>maksimandrianov/cdstructures
// The MIT License (MIT)
// Copyright (c) 2018 <NAME>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
/**
* @file
* @author <NAME> <<EMAIL>>
* @brief The cdc_avl_tree is a struct and functions that provide an avl tree.
*/
#ifndef CDCONTAINERS_INCLUDE_CDCONTAINERS_avl_tree_H
#define CDCONTAINERS_INCLUDE_CDCONTAINERS_avl_tree_H
#include <cdcontainers/common.h>
#include <cdcontainers/status.h>
#include <assert.h>
#include <stdarg.h>
#include <stdbool.h>
/**
* @defgroup cdc_avl_tree
* @brief The cdc_avl_tree is a struct and functions that provide an avl tree.
* @{
*/
/**
* @brief The cdc_avl_tree_node is service struct.
* @warning To avoid problems, do not change the structure fields in the code.
* Use only special functions to access and change structure fields.
*/
struct cdc_avl_tree_node {
struct cdc_avl_tree_node *parent;
struct cdc_avl_tree_node *left;
struct cdc_avl_tree_node *right;
void *key;
void *value;
unsigned char height;
};
/**
* @brief The cdc_avl_tree is service struct.
* @warning To avoid problems, do not change the structure fields in the code.
* Use only special functions to access and change structure fields.
*/
struct cdc_avl_tree {
struct cdc_avl_tree_node *root;
size_t size;
struct cdc_data_info *dinfo;
};
/**
* @brief The cdc_avl_tree_iter is service struct.
* @warning To avoid problems, do not change the structure fields in the code.
* Use only special functions to access and change structure fields.
*/
struct cdc_avl_tree_iter {
struct cdc_avl_tree *container;
struct cdc_avl_tree_node *prev;
struct cdc_avl_tree_node *current;
};
struct cdc_pair_avl_tree_iter {
struct cdc_avl_tree_iter first;
struct cdc_avl_tree_iter second;
};
struct cdc_pair_avl_tree_iter_bool {
struct cdc_avl_tree_iter first;
bool second;
};
// Base
/**
* @defgroup cdc_avl_tree_base Base
* @{
*/
/**
* @brief Constructs an empty avl tree.
* @param[out] t - cdc_avl_tree
* @param[in] info - cdc_data_info
* @return CDC_STATUS_OK in a successful case or other value indicating
* an error.
*/
enum cdc_stat cdc_avl_tree_ctor(struct cdc_avl_tree **t, struct cdc_data_info *info);
/**
* @brief Constructs an avl tree, initialized by an variable number of
* pointers on cdc_pair's(first - key, and the second - value). The last item
* must be CDC_END.
* @param[out] t - cdc_avl_tree
* @param[in] info - cdc_data_info
* @return CDC_STATUS_OK in a successful case or other value indicating
* an error.
*
* Example:
* @code{.c}
* struct cdc_avl_tree *tree = NULL;
* cdc_pair value1 = {CDC_FROM_INT(1), CDC_FROM_INT(2)};
* cdc_pair value2 = {CDC_FROM_INT(3), CDC_FROM_INT(4)};
* ...
* if (cdc_avl_tree_ctorl(&tree, info, &value1, &value2, CDC_END) != CDC_STATUS_OK) {
* // handle error
* }
* @endcode
*/
enum cdc_stat cdc_avl_tree_ctorl(struct cdc_avl_tree **t, struct cdc_data_info *info, ...);
/**
* @brief Constructs an avl tree, initialized by args. The last item must be
* CDC_END.
* @param[out] t - cdc_avl_tree
* @param[in] info - cdc_data_info
* @return CDC_STATUS_OK in a successful case or other value indicating
* an error.
*/
enum cdc_stat cdc_avl_tree_ctorv(struct cdc_avl_tree **t, struct cdc_data_info *info, va_list args);
/**
* @brief Destroys the avl tree.
* @param[in] t - cdc_avl_tree
*/
void cdc_avl_tree_dtor(struct cdc_avl_tree *t);
/** @} */
// Lookup
/**
* @defgroup cdc_avl_tree_lookup Lookup
* @{
*/
/**
* @brief Returns a value that is mapped to a key. If the key does
* not exist, then NULL will return.
* @param[in] t - cdc_avl_tree
* @param[in] key - key of the element to find
* @param[out] value - pinter to the value that is mapped to a key.
* @return CDC_STATUS_OK if the key is found, CDC_STATUS_NOT_FOUND otherwise.
*/
enum cdc_stat cdc_avl_tree_get(struct cdc_avl_tree *t, void *key, void **value);
/**
* @brief Returns the number of elements with key that compares equal to the
* specified argument key, which is either 1 or 0 since this container does not
* allow duplicates.
* @param[in] t - cdc_avl_tree
* @param[in] key - key value of the elements to count
* @return number of elements with key key, that is either 1 or 0.
*/
size_t cdc_avl_tree_count(struct cdc_avl_tree *t, void *key);
/**
* @brief Finds an element with key equivalent to key.
* @param[in] t - cdc_avl_tree
* @param[in] key - key value of the element to search for
* @param[out] it - pointer will be recorded iterator to an element with key
* equivalent to key. If no such element is found, past-the-end iterator is
* returned.
*/
void cdc_avl_tree_find(struct cdc_avl_tree *t, void *key, struct cdc_avl_tree_iter *it);
/** @} */
// Capacity
/**
* @defgroup cdc_avl_tree_capacity Capacity
* @{
*/
/**
* @brief Returns the number of items in the avl_tree.
* @param[in] t - cdc_avl_tree
* @return the number of items in the avl_tree.
*/
static inline size_t cdc_avl_tree_size(struct cdc_avl_tree *t)
{
assert(t != NULL);
return t->size;
}
/**
* @brief Checks if the avl tree has no elements.
* @param[in] t - cdc_avl_tree
* @return true if the avl tree is empty, false otherwise.
*/
static inline bool cdc_avl_tree_empty(struct cdc_avl_tree *t)
{
assert(t != NULL);
return t->size == 0;
}
/** @} */
// Modifiers
/**
* @defgroup cdc_avl_tree_modifiers Modifiers
* @{
*/
/**
* @brief Removes all the elements from the avl_tree.
* @param[in] t - cdc_avl_tree
*/
void cdc_avl_tree_clear(struct cdc_avl_tree *t);
/**
* @brief Inserts an element into the container, if the container doesn't already
* contain an element with an equivalent key.
* @param[in] t - cdc_avl_tree
* @param[in] key - key of the element
* @param[in] value - value of the element
* @param[out] ret - pair consisting of an iterator to the inserted element (or to
* the element that prevented the insertion) and a bool denoting whether the
* insertion took place. The pointer can be equal to NULL.
* @return CDC_STATUS_OK in a successful case or other value indicating
* an error.
*/
enum cdc_stat cdc_avl_tree_insert(struct cdc_avl_tree *t, void *key, void *value,
struct cdc_pair_avl_tree_iter_bool *ret);
/**
* @brief Inserts an element into the container, if the container doesn't already
* contain an element with an equivalent key.
* @param[in] t - cdc_avl_tree
* @param[in] key - key of the element
* @param[in] value - value of the element
* @param[out] it - iterator to the inserted element (or to the element that
* prevented the insertion). The pointer can be equal to NULL.
* @param[out] inserted - bool denoting whether the insertion
* took place. The pointer can be equal to NULL.
* @return CDC_STATUS_OK in a successful case or other value indicating
* an error.
*/
enum cdc_stat cdc_avl_tree_insert1(struct cdc_avl_tree *t, void *key, void *value,
struct cdc_avl_tree_iter *it, bool *inserted);
/**
* @brief Inserts an element or assigns to the current element if the key
* already exists.
* @param[in] t - cdc_avl_tree
* @param[in] key - key of the element
* @param[in] value - value of the element
* @param[out] ret - pair. The bool component is true if the insertion took place and
* false if the assignment took place. The iterator component is pointing at the
* element that was inserted or updated.
* @return CDC_STATUS_OK in a successful case or other value indicating
* an error.
*/
enum cdc_stat cdc_avl_tree_insert_or_assign(struct cdc_avl_tree *t, void *key, void *value,
struct cdc_pair_avl_tree_iter_bool *ret);
/**
* @brief Inserts an element or assigns to the current element if the key
* already exists.
* @param[in] t - cdc_avl_tree
* @param[in] key - key of the element
* @param[in] value - value of the element
* @param[out] it - iterator is pointing at the element that was inserted or updated.
* The pointer can be equal to NULL
* @param[out] inserted - bool is true if the insertion took place and false if the
* assignment took place. The pointer can be equal to NULL
* @return CDC_STATUS_OK in a successful case or other value indicating
* an error.
*/
enum cdc_stat cdc_avl_tree_insert_or_assign1(struct cdc_avl_tree *t, void *key, void *value,
struct cdc_avl_tree_iter *it, bool *inserted);
/**
* @brief Removes the element (if one exists) with the key equivalent to key.
* @param[in] t - cdc_avl_tree
* @param[in] key - key value of the elements to remove
* @return number of elements removed.
*/
size_t cdc_avl_tree_erase(struct cdc_avl_tree *t, void *key);
/**
* @brief Swaps avl_trees a and b. This operation is very fast and never fails.
* @param[in, out] a - cdc_avl_tree
* @param[in, out] b - cdc_avl_tree
*/
void cdc_avl_tree_swap(struct cdc_avl_tree *a, struct cdc_avl_tree *b);
/** @} */
// Iterators
/**
* @defgroup cdc_avl_tree_iterators Iterators
* @{
*/
/**
* @brief Initializes the iterator to the beginning.
* @param[in] t - cdc_avl_tree
* @param[out] it - cdc_avl_tree_iter
*/
void cdc_avl_tree_begin(struct cdc_avl_tree *t, struct cdc_avl_tree_iter *it);
/**
* @brief Initializes the iterator to the end.
* @param[in] t - cdc_avl_tree
* @param[out] it - cdc_avl_tree_iter
*/
void cdc_avl_tree_end(struct cdc_avl_tree *t, struct cdc_avl_tree_iter *it);
/** @} */
// Iterators
/**
* @defgroup cdc_avl_tree_iter
* @brief The cdc_avl_tree_iter is a struct and functions that provide an avl tree iterator.
* @{
*/
/**
* @brief Advances the iterator to the next element in the avl tree.
* @param[in] it - iterator
*/
void cdc_avl_tree_iter_next(struct cdc_avl_tree_iter *it);
/**
* @brief Advances the iterator to the previous element in the avl tree.
* @param[in] it - iterator
*/
void cdc_avl_tree_iter_prev(struct cdc_avl_tree_iter *it);
/**
* @brief Returns true if there is at least one element ahead of the iterator, i.e.
* the iterator is not at the back of the container; otherwise returns false.
* @param[in] it - iterator
* @return true if there is at least one element ahead of the iterator, i.e.
* the iterator is not at the back of the container; otherwise returns false.
*/
static inline bool cdc_avl_tree_iter_has_next(struct cdc_avl_tree_iter *it)
{
assert(it != NULL);
return it->current != NULL;
}
/**
* @brief Returns true if there is at least one element behind the iterator, i.e.
* the iterator is not at the front of the container; otherwise returns false.
* @param[in] it - iterator
* @return true if there is at least one element behind the iterator, i.e.
* the iterator is not at the front of the container; otherwise returns false.
*/
static inline bool cdc_avl_tree_iter_has_prev(struct cdc_avl_tree_iter *it)
{
assert(it != NULL);
return it->prev != NULL;
}
/**
* @brief Returns an item's key.
* @param[in] it - iterator
* @return the item's key.
*/
static inline void *cdc_avl_tree_iter_key(struct cdc_avl_tree_iter *it)
{
assert(it != NULL);
return it->current->key;
}
/**
* @brief Returns an item's value.
* @param[in] it - iterator
* @return the item's value.
*/
static inline void *cdc_avl_tree_iter_value(struct cdc_avl_tree_iter *it)
{
assert(it != NULL);
return it->current->value;
}
/**
 * @brief Returns a pair, where first - key, second - value.
 * @param[in] it - iterator
 * @return pair, where first - key, second - value.
 */
static inline struct cdc_pair cdc_avl_tree_iter_key_value(struct cdc_avl_tree_iter *it)
{
  assert(it != NULL);
  /* Read from |current| — the node the iterator points at — matching
   * cdc_avl_tree_iter_key()/cdc_avl_tree_iter_value(). The previous code
   * read |prev|, which is a different node and NULL at begin(). */
  struct cdc_pair pair = {it->current->key, it->current->value};
  return pair;
}
/**
 * @brief Returns true if the iterator |it1| is equal to the iterator |it2|,
 * otherwise returns false.
 * @param[in] it1 - iterator
 * @param[in] it2 - iterator
 * @return true if the iterator |it1| is equal to the iterator |it2|,
 * otherwise returns false.
 */
static inline bool cdc_avl_tree_iter_is_eq(struct cdc_avl_tree_iter *it1,
                                           struct cdc_avl_tree_iter *it2)
{
  assert(it1 != NULL);
  assert(it2 != NULL);
  return it1->container == it2->container && it1->prev == it2->prev && it1->current == it2->current;
}
/** @} */
// Short names
#ifdef CDC_USE_SHORT_NAMES
typedef struct cdc_avl_tree_node avl_tree_node_t;
typedef struct cdc_avl_tree avl_tree_t;
typedef struct cdc_avl_tree_iter avl_tree_iter_t;
typedef struct cdc_pair_avl_tree_iter pair_avl_tree_iter_t;
typedef struct cdc_pair_avl_tree_iter_bool pair_avl_tree_iter_bool_t;
// Base
#define avl_tree_ctor(...) cdc_avl_tree_ctor(__VA_ARGS__)
#define avl_tree_ctorv(...) cdc_avl_tree_ctorv(__VA_ARGS__)
#define avl_tree_ctorl(...) cdc_avl_tree_ctorl(__VA_ARGS__)
#define avl_tree_dtor(...) cdc_avl_tree_dtor(__VA_ARGS__)
// Lookup
#define avl_tree_get(...) cdc_avl_tree_get(__VA_ARGS__)
#define avl_tree_count(...) cdc_avl_tree_count(__VA_ARGS__)
#define avl_tree_find(...) cdc_avl_tree_find(__VA_ARGS__)
// Capacity
#define avl_tree_size(...) cdc_avl_tree_size(__VA_ARGS__)
#define avl_tree_empty(...) cdc_avl_tree_empty(__VA_ARGS__)
// Modifiers
#define avl_tree_clear(...) cdc_avl_tree_clear(__VA_ARGS__)
#define avl_tree_insert(...) cdc_avl_tree_insert(__VA_ARGS__)
#define avl_tree_insert1(...) cdc_avl_tree_insert1(__VA_ARGS__)
#define avl_tree_insert_or_assign(...) cdc_avl_tree_insert_or_assign(__VA_ARGS__)
#define avl_tree_insert_or_assign1(...) cdc_avl_tree_insert_or_assign1(__VA_ARGS__)
#define avl_tree_erase(...) cdc_avl_tree_erase(__VA_ARGS__)
#define avl_tree_swap(...) cdc_avl_tree_swap(__VA_ARGS__)
// Iterators
#define avl_tree_begin(...) cdc_avl_tree_begin(__VA_ARGS__)
#define avl_tree_end(...) cdc_avl_tree_end(__VA_ARGS__)
// Iterators
#define avl_tree_iter_next(...) cdc_avl_tree_iter_next(__VA_ARGS__)
#define avl_tree_iter_has_next(...) cdc_avl_tree_iter_has_next(__VA_ARGS__)
#define avl_tree_iter_has_prev(...) cdc_avl_tree_iter_has_prev(__VA_ARGS__)
#define avl_tree_iter_prev(...) cdc_avl_tree_iter_prev(__VA_ARGS__)
#define avl_tree_iter_key(...) cdc_avl_tree_iter_key(__VA_ARGS__)
#define avl_tree_iter_value(...) cdc_avl_tree_iter_value(__VA_ARGS__)
#define avl_tree_iter_key_value(...) cdc_avl_tree_iter_key_value(__VA_ARGS__)
#define avl_tree_iter_is_eq(...) cdc_avl_tree_iter_is_eq(__VA_ARGS__)
#endif
/** @} */
#endif // CDCONTAINERS_INCLUDE_CDCONTAINERS_avl_tree_H
|
#!/bin/bash
print_cmds=true
execute_cmds=true
# Echo and/or execute a command string, gated by the two flags above.
# Uses "$*" in the echo (single joined word is what we want for display)
# and "$@" for eval (preserves the caller's argument boundaries).
cmd() {
  if ${print_cmds}; then echo "+ $*"; fi
  if ${execute_cmds}; then eval "$@"; fi
}
cmd "nice -n 19 ionice -c 3 chmod -R a+rX,go-w /nopt/nrel/ecom/hpacf"
cmd "nice -n 19 ionice -c 3 chgrp -R n-ecom /nopt/nrel/ecom/hpacf"
|
def max_profit(prices):
    """Return the best profit from one buy followed by one sell.

    Scans the price sequence once, tracking the lowest price seen so far
    and the best gain achievable by selling at the current price.

    Args:
        prices: iterable of prices in chronological order.

    Returns:
        The maximum non-negative profit; 0 if no profitable trade exists
        (including an empty sequence).
    """
    lowest = float('inf')
    best = 0
    for price in prices:
        if price < lowest:
            lowest = price
        gain = price - lowest
        if gain > best:
            best = gain
    return best
<gh_stars>1-10
import * as React from 'react';
import { render, waitFor, screen } from '@testing-library/react';
import { setupNock, serverGet401, serverGet, serverGetAuth } from '../../test-utils';
import { APIContextActionNoRequireKey, APIContextState } from './api-context';
import { APIProvider, useAPIProviderStatus, useUpdateAPIProvider, useAPIProvider, apiReducer } from './index';
import { BASE_URL, VERSION_URL } from '../config';
import { VersionInfo } from '../api/models';
import { clearApikey, saveApikey } from '../../utils';
import userEvent from '@testing-library/user-event';
const versionInfo: VersionInfo = { version: 'CEP 1' };
// Test harness component: renders the APIProvider status flags as plain
// text nodes so the tests below can assert on them, and exposes an input
// plus buttons to set or invalidate the api key.
const TestComponent: React.FC<{}> = props => {
    const { showLoading, showNoService, showLogin, requireApikey, invalidReason, apiKey, version } = useAPIProviderStatus();
    const { setApiKey, invalidateApiKey } = useUpdateAPIProvider();
    const inputApiKey = React.createRef<HTMLInputElement>();
    const inputRequiredToken = React.createRef<HTMLInputElement>();
    // Invalidate the current key; the checkbox ref decides whether a key
    // is still required afterwards.
    const invalidate = React.useCallback(() => {
        const reqApikey = !!inputRequiredToken.current?.checked;
        invalidateApiKey(reqApikey);
    }, [inputRequiredToken, invalidateApiKey]);
    // Push the typed key into the provider (no-op for an empty input).
    const setApikey = React.useCallback(() => {
        const apiKeyString = inputApiKey.current?.value;
        apiKeyString && setApiKey(apiKeyString);
    }, [inputApiKey, setApiKey]);
    return (
        <>
            <div data-testid='loading'>{showLoading ? 'true' : 'false'}</div>
            <div data-testid='noservice'>{showNoService ? 'true' : 'false'}</div>
            <div data-testid='login'>{showLogin ? 'true' : 'false'}</div>
            <div data-testid='apikey'>{requireApikey === undefined ? 'undefined' : requireApikey === false ? 'false' : 'true'}</div>
            <div data-testid='invalidkey'>{invalidReason}</div>
            <div data-testid='version'>{version && `${apiKey}-${version}`}</div>
            <div>
                <input type='text' ref={inputApiKey} data-testid='inputkey' />
                <button onClick={setApikey}>setApiKey</button>
            </div>
            <div>
                <button onClick={invalidate}>invalidate</button>
            </div>
        </>
    );
};
// With no mocked server at all, the provider should finish its check in the
// "no service" state. console.error is silenced because the failed fetch is
// expected to log.
test('ApiProvider should start checking and show login when server do not response', async () => {
    const spy = jest.spyOn(console, 'error').mockImplementation();
    render(<TestComponent />, { wrapper: APIProvider });
    // Initial state: loading, not yet showing login or an error reason.
    expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
    expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
    expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
    await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
    await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('true'));
    await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('false'));
    await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('undefined'));
    await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent(BASE_URL + ' not found'));
    spy.mockRestore();
});
test('ApiProvider should start checking and show login when server require apikey and no apikey stored', async () => {
serverGet401(setupNock(BASE_URL), VERSION_URL);
render(<TestComponent />, { wrapper: APIProvider });
expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('apiKey not found'));
});
test('ApiProvider should start checking and show app when server NO require apikey', async () => {
serverGet(setupNock(BASE_URL), VERSION_URL, 200, versionInfo);
render(<TestComponent />, { wrapper: APIProvider });
expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent(''));
await waitFor(() => expect(screen.getByTestId(/version/i)).toHaveTextContent('-' + versionInfo.version));
});
test('ApiProvider should start checking and show app when apiKey is stored and valid', async () => {
clearApikey();
const apikey = '1234567890';
saveApikey(apikey);
serverGet401(setupNock(BASE_URL), VERSION_URL);
serverGetAuth(setupNock(BASE_URL), VERSION_URL, apikey, 200, versionInfo);
render(<TestComponent />, { wrapper: APIProvider });
expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent(''));
await waitFor(() => expect(screen.getByTestId(/version/i)).toHaveTextContent(apikey + '-' + versionInfo.version));
});
test('ApiProvider should start checking and show login when apiKey is stored and invalid', async () => {
clearApikey();
const apikey = '1234567890';
saveApikey(apikey);
serverGet401(setupNock(BASE_URL), VERSION_URL);
serverGet401(setupNock(BASE_URL), VERSION_URL);
render(<TestComponent />, { wrapper: APIProvider });
expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('ApiKey 1234567890 is NOT valid'));
await waitFor(() => expect(screen.getByTestId(/version/i)).toHaveTextContent(''));
});
test('ApiProvider should login with new apikey', async () => {
clearApikey();
serverGet401(setupNock(BASE_URL), VERSION_URL);
render(<TestComponent />, { wrapper: APIProvider });
expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('apiKey not found'));
const apikey = '1234567891';
serverGetAuth(setupNock(BASE_URL), VERSION_URL, apikey, 200, versionInfo);
await userEvent.type(await screen.findByTestId(/inputkey/), apikey);
const inputApikey = (await screen.findByTestId(/inputkey/)) as HTMLInputElement;
expect(inputApikey.value).toEqual(apikey);
userEvent.click(await screen.findByText(/setapikey/i));
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent(''));
await waitFor(() => expect(screen.getByTestId(/version/i)).toHaveTextContent(apikey + '-' + versionInfo.version));
});
test('ApiProvider should fails login with invalid apikey', async () => {
clearApikey();
serverGet401(setupNock(BASE_URL), VERSION_URL);
render(<TestComponent />, { wrapper: APIProvider });
expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('apiKey not found'));
const invalidApikey = '12w345t6';
serverGet401(setupNock(BASE_URL), VERSION_URL);
await userEvent.type(await screen.findByTestId(/inputkey/), invalidApikey);
const inputApikey = (await screen.findByTestId(/inputkey/)) as HTMLInputElement;
expect(inputApikey.value).toEqual(invalidApikey);
userEvent.click(await screen.findByText(/setapikey/i));
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('ApiKey ' + invalidApikey + ' is NOT valid'));
await waitFor(() => expect(screen.getByTestId(/version/i)).toHaveTextContent(''));
});
test('ApiProvider should invalidate an apikey and show login', async () => {
clearApikey();
const apikey = '1234567890';
saveApikey(apikey);
serverGet401(setupNock(BASE_URL), VERSION_URL);
serverGetAuth(setupNock(BASE_URL), VERSION_URL, apikey, 200, versionInfo);
render(<TestComponent />, { wrapper: APIProvider });
expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent(''));
await waitFor(() => expect(screen.getByTestId(/version/i)).toHaveTextContent(apikey + '-' + versionInfo.version));
serverGet401(setupNock(BASE_URL), VERSION_URL);
userEvent.click(await screen.findByText(/invalidate/i));
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('apiKey not found'));
});
test('ApiProvider should invalidate an apikey and show App', async () => {
clearApikey();
const apikey = '1234567890';
saveApikey(apikey);
serverGet401(setupNock(BASE_URL), VERSION_URL);
serverGetAuth(setupNock(BASE_URL), VERSION_URL, apikey, 200, versionInfo);
render(<TestComponent />, { wrapper: APIProvider });
expect(screen.getByTestId(/loading/i)).toHaveTextContent('true');
expect(screen.getByTestId(/login/i)).toHaveTextContent('false');
expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent('');
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent(''));
await waitFor(() => expect(screen.getByTestId(/version/i)).toHaveTextContent(apikey + '-' + versionInfo.version));
serverGet(setupNock(BASE_URL), VERSION_URL, 200, versionInfo);
userEvent.click(await screen.findByText(/invalidate/i));
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('true'));
await waitFor(() => expect(screen.getByTestId(/loading/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/noservice/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/login/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/apikey/i)).toHaveTextContent('false'));
await waitFor(() => expect(screen.getByTestId(/invalidkey/i)).toHaveTextContent(''));
});
// Probe component: renders the values exposed by useAPIProvider so tests
// can assert on them via data-testids.
const TestUseAPIComponent: React.FC<{}> = props => {
    const { api, apiKey, version } = useAPIProvider();
    return (
        <>
            {/* "<apiKey>-<version>" once a version is known, empty before that */}
            <div data-testid='version'>{version && `${apiKey}-${version}`}</div>
            {/* 'true' when the api object is available */}
            <div data-testid='api'>{!!api ? 'true' : 'false'}</div>
        </>
    );
};
// Probe component: while the provider is still loading / unreachable /
// asking for login it renders a placeholder; otherwise it renders the
// api probe above.
const TestProviderComponent: React.FC<{}> = props => {
    const { showLoading, showNoService, showLogin } = useAPIProviderStatus();
    if (showLoading || showNoService || showLogin) {
        return <div data-testid='provider'></div>;
    }
    return <TestUseAPIComponent />;
};
// Anonymous access: the version endpoint answers 200 without a key, so the
// provider exposes the api with an empty apiKey prefix.
test('ApiProvider should load api when it is loaded correctly without apikey', async () => {
    serverGet(setupNock(BASE_URL), VERSION_URL, 200, versionInfo);
    render(<TestProviderComponent />, { wrapper: APIProvider });
    await screen.findByTestId(/provider/);
    expect(await screen.findByTestId(/version/)).toHaveTextContent('-' + versionInfo.version);
    expect(await screen.findByTestId(/api/)).toHaveTextContent('true');
});
// Stored-key access: first unauthenticated call 401s, the stored key then
// authenticates and the api becomes available.
test('ApiProvider should load api when it is loaded correctly with apikey', async () => {
    clearApikey();
    const apikey = '1234567890';
    saveApikey(apikey);
    serverGet401(setupNock(BASE_URL), VERSION_URL);
    serverGetAuth(setupNock(BASE_URL), VERSION_URL, apikey, 200, versionInfo);
    render(<TestProviderComponent />, { wrapper: APIProvider });
    await screen.findByTestId(/provider/);
    expect(await screen.findByTestId(/version/)).toHaveTextContent(apikey + '-' + versionInfo.version);
    expect(await screen.findByTestId(/api/)).toHaveTextContent('true');
});
// Using useAPIProvider outside a ready provider must throw; console.error is
// silenced because React logs the thrown error during render.
test('ApiProvider should throw an error when no api available', async () => {
    const spy = jest.spyOn(console, 'error').mockImplementation();
    clearApikey();
    serverGet401(setupNock(BASE_URL), VERSION_URL);
    expect(() => render(<TestUseAPIComponent />, { wrapper: APIProvider })).toThrow('no API available');
    spy.mockRestore();
});
// The reducer must ignore unknown action types and hand back the state
// object unchanged.
test('apiReducer return current state when receive an invalid action', () => {
    const state = {} as APIContextState;
    const action = ({
        type: 'UNSUPPORTED'
    } as unknown) as APIContextActionNoRequireKey;
    expect(apiReducer(state, action)).toEqual(state);
});
|
// (C) 2007-2020 GoodData Corporation
import { Attribute, DateDataSet } from "../../base/types";
import { AttributeResourceSchema, DatasetResourceSchema, ITigerClient } from "@gooddata/api-client-tiger";
import { DefaultGetOptions } from "./tigerClient";
import {
convertAttribute,
convertTags,
createDatasetMap,
createLabelMap,
createTagMap,
DatasetMap,
getReferencedDataset,
LabelMap,
TagMap,
} from "./tigerCommon";
type DatasetWithAttributes = {
dataset: DatasetResourceSchema;
attributes: AttributeResourceSchema[];
};
/**
 * Groups date attributes (those carrying a `granularity`) by the dataset
 * they reference. Attributes whose dataset cannot be resolved are dropped.
 */
function findDateDatasetsWithAttributes(
    attributes: AttributeResourceSchema[],
    datasetsMap: DatasetMap,
): DatasetWithAttributes[] {
    const grouped: { [id: string]: DatasetWithAttributes } = {};
    /*
     * TODO: this can be replaced with server-side filtering, need to figure out the query
     */
    for (const attribute of attributes) {
        // Only date attributes define a granularity.
        if (attribute.attributes.granularity === undefined) {
            continue;
        }
        const dataset = getReferencedDataset(attribute.relationships, datasetsMap);
        if (!dataset) {
            continue;
        }
        const existing = grouped[dataset.id];
        if (existing) {
            existing.attributes.push(attribute);
        } else {
            grouped[dataset.id] = { dataset, attributes: [attribute] };
        }
    }
    return Object.values(grouped);
}
/**
 * Converts grouped date datasets into the exportable DateDataSet shape.
 * Attributes that fail conversion (convertAttribute returns undefined)
 * are filtered out.
 */
function convertToExportableFormat(
    dateDatasets: DatasetWithAttributes[],
    labelsMap: LabelMap,
    tagsMap: TagMap,
): DateDataSet[] {
    return dateDatasets.map(({ dataset, attributes }) => {
        return {
            dateDataSet: {
                meta: {
                    // Fall back to the id when the dataset has no title.
                    title: dataset.attributes.title ?? dataset.id,
                    identifier: dataset.id,
                    tags: convertTags(dataset.relationships, tagsMap),
                },
                content: {
                    attributes: attributes
                        .map((attribute) => convertAttribute(attribute, labelsMap, tagsMap))
                        .filter((a): a is Attribute => a !== undefined),
                },
            },
        };
    });
}
/**
 * Loads all date datasets (with their attributes) from the tiger backend.
 *
 * @param _projectId - unused; kept for signature compatibility with callers
 * @param tigerClient - client used to query attribute metadata
 * @returns date datasets converted to the exportable format
 */
export async function loadDateDataSets(
    _projectId: string,
    tigerClient: ITigerClient,
): Promise<DateDataSet[]> {
    // Side-load labels, tags and datasets so they can be resolved locally.
    const result = await tigerClient.metadata.attributesGet({
        ...DefaultGetOptions,
        include: "labels,tags,dataset",
    });
    const tagsMap = createTagMap(result.data.included);
    const labelsMap = createLabelMap(result.data.included);
    const datasetsMap = createDatasetMap(result.data.included);
    const dateDatasets = findDateDatasetsWithAttributes(result.data.data, datasetsMap);
    return convertToExportableFormat(dateDatasets, labelsMap, tagsMap);
}
|
"""
Generate a random 8 character password that contain at least 1 uppercase letter, 1 lowercase letter and 1 number
"""
import string
import random
def generate_password():
    """Generate a random 8-character alphanumeric password containing at
    least one uppercase letter, one lowercase letter and one digit.

    Returns:
        str: the 8-character password.
    """
    # Guarantee one character from each required class, then fill the
    # remaining five positions from the full alphanumeric pool.
    # Building by construction removes the original's unbounded retry
    # loop, and random.choice (with replacement) drops random.sample's
    # hidden restriction that no character may appear twice, which
    # shrank the password space.
    required = [
        random.choice(string.ascii_uppercase),
        random.choice(string.ascii_lowercase),
        random.choice(string.digits),
    ]
    pool = string.ascii_letters + string.digits
    password = required + [random.choice(pool) for _ in range(5)]
    # Shuffle so the guaranteed characters are not always in front.
    random.shuffle(password)
    return ''.join(password)
# Allow quick manual testing from the command line.
if __name__ == '__main__':
    print(generate_password())
package chylex.hee.world.biome;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.entity.EnumCreatureType;
import net.minecraft.entity.monster.EntityEnderman;
import net.minecraft.world.biome.BiomeDecorator;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraft.world.biome.BiomeGenEnd;
import chylex.hee.entity.mob.EntityMobEnderman;
// Biome for the Hardcore End dimension: blocks all natural spawning inside
// itself and rebalances Enderman spawning across every registered biome.
public final class BiomeGenHardcoreEnd extends BiomeGenEnd{
    // Multiplier applied when recomputing Enderman spawn weight in overworld
    // biomes; set externally before overrideMobLists() runs (static, global).
    public static float overworldEndermanMultiplier;
    // Shared empty list returned from getSpawnableList so nothing spawns
    // naturally in this biome; spawning is handled elsewhere by the mod.
    private List emptyList = new ArrayList();

    public BiomeGenHardcoreEnd(int id){
        super(id);
        // Swap in the Hardcore End decorator instead of the vanilla one.
        theBiomeDecorator = createBiomeDecorator();
    }

    /**
     * Clears this biome's own spawn lists, then rewrites the monster spawn
     * list of every registered biome: the vanilla Enderman entry is repointed
     * at EntityMobEnderman with a weight proportional to the biome's total
     * monster weight and {@link #overworldEndermanMultiplier}, and a second
     * lower-weight entry is added for groups of 2-3.
     */
    public void overrideMobLists(){
        spawnableMonsterList.clear();
        spawnableCreatureList.clear();
        spawnableWaterCreatureList.clear();
        spawnableCaveCreatureList.clear();

        for(BiomeGenBase biome:BiomeGenBase.getBiomeGenArray()){
            if (biome == null)continue;

            SpawnListEntry endermanEntry = null;
            int totalWeight = 0;

            // Find the vanilla Enderman entry and sum the other weights.
            for(SpawnListEntry spawnEntry:(List<SpawnListEntry>)biome.getSpawnableList(EnumCreatureType.monster)){
                if (spawnEntry.entityClass == EntityEnderman.class)endermanEntry = spawnEntry;
                else totalWeight += spawnEntry.itemWeight;
            }

            if (endermanEntry != null){
                int baseWeight = Math.round(totalWeight*overworldEndermanMultiplier*0.052F); // ~2.6x of vanilla weight; totalWeight is 505 in most vanilla biomes
                // update existing entry
                endermanEntry.entityClass = EntityMobEnderman.class;
                endermanEntry.itemWeight = baseWeight;
                endermanEntry.minGroupCount = endermanEntry.maxGroupCount = 1;
                // add another entry for grouped Endermen
                biome.getSpawnableList(EnumCreatureType.monster).add(new SpawnListEntry(EntityMobEnderman.class, baseWeight/5, 1, 3));
                // should end up with 26+5 weight, with high chance for lone Endermen and small chance for groups of 2-3
            }
        }
    }

    @Override
    public List getSpawnableList(EnumCreatureType type){
        // Disable natural spawning entirely for this biome.
        return emptyList;
    }

    @Override
    public BiomeDecorator createBiomeDecorator(){
        return new BiomeDecoratorHardcoreEnd();
    }
}
package main
import (
"context"
"log"
"net/http"
"os"
"strconv"
"testing"
"time"
)
// newTTG wires up a full bot instance for integration tests, reading
// Twitch/Telegram credentials from the environment. Any sub-component
// failure calls log.Fatalln, terminating the test binary.
// NOTE(review): the Atoi error for TelegramOwner is deliberately ignored
// (owner defaults to 0 when unset) — confirm that is acceptable.
func newTTG() *TTG {
	owner, _ := strconv.Atoi(os.Getenv("TelegramOwner"))
	cfg := &config{
		TwitchAppID:       os.Getenv("TwitchAppID"),
		TwitchSecCode:     os.Getenv("TwitchSecCode"),
		TwitchChannelName: "leporel",
		TelegramBotToken:  os.Getenv("TelegramBotToken"),
		TelegramGroup:     -1001575283959,
		TelegramOwner:     owner,
		Init:              false,
		Host:              "localhost",
	}
	bot := &TTG{
		cfg: cfg,
	}
	// Storage layer.
	db, err := NewStorage()
	if err != nil {
		log.Fatalln(err)
	}
	bot.db = db
	// Twitch API client.
	app, err := NewTwitchClient(cfg.TwitchChannelName, cfg.TwitchAppID, cfg.TwitchSecCode, cfg.Host)
	if err != nil {
		log.Fatalln(err)
	}
	bot.app = app
	// Telegram bot, with the TTG command handler attached.
	tg, err := NewTgBot(cfg.TelegramBotToken, cfg.TelegramGroup, cfg.TelegramOwner, cfg.Host, bot.commandHandler)
	if err != nil {
		log.Fatalln(err)
	}
	bot.tg = tg
	return bot
}
// TestTTG_fetchFollowers fetches the channel's followers and logs up to
// the first ~100 of them. Integration test: requires live credentials.
func TestTTG_fetchFollowers(t *testing.T) {
	ttg := newTTG()
	fwls, err := ttg.app.getFollowers()
	if err != nil {
		t.Fatal(err)
	}
	// Cap the logged output; map iteration order is unspecified.
	var i = 0
	for id, name := range fwls {
		if i > 100 {
			break
		}
		t.Log(id, name)
		i++
	}
}
// TestTTG_fetchTwitchUserInfo drives the Twitch OAuth flow interactively:
// it logs an auth link, serves the callback on :8444, exchanges the code
// for a user token, and logs the user's info and followed channels.
// Integration test: requires live credentials and a human to open the link.
func TestTTG_fetchTwitchUserInfo(t *testing.T) {
	ttg := newTTG()
	srv := &http.Server{Addr: ":8444"}
	link, err := ttg.app.getAuthLink("test")
	if err != nil {
		t.Fatal(err)
	}
	log.Println(link)
	hnd := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/html; charset=utf-8")
		// The state must round-trip unchanged through the OAuth redirect.
		state := r.FormValue("state")
		if state != "test" {
			t.Fatal("wrong state")
		}
		accessToken, err := ttg.app.getUserToken(r.FormValue("code"))
		if err != nil {
			t.Fatal(err)
		}
		user, err := ttg.app.getUser(accessToken)
		if err != nil {
			t.Fatal(err)
		}
		t.Log(user)
		channels, err := ttg.app.getFollows(user.ID)
		// Fix: the original silently ignored this error, so a failed
		// getFollows just logged nothing instead of failing the test.
		if err != nil {
			t.Fatal(err)
		}
		for s, s2 := range channels {
			t.Log(s, s2)
		}
		w.WriteHeader(http.StatusOK)
		if _, err := w.Write([]byte(`<html><body>Authorization successful, bot will soon give to you rights</body></html>`)); err != nil {
			log.Println("ERROR: ", err)
		}
		// Let the response flush before shutting the server down.
		go func() {
			time.Sleep(1 * time.Second)
			srv.Shutdown(context.Background())
		}()
		return
	})
	log.Printf("Started running on http://localhost:8444 \n")
	http.Handle("/auth/callback", hnd)
	log.Println(srv.ListenAndServe())
}
|
#ifndef INCLUDED_MAP_MAP_ELEMENT_SYSTEM_H
#define INCLUDED_MAP_MAP_ELEMENT_SYSTEM_H

#include "core/scene.h"
#include "engine/system.h"
#include "map_system.h"

namespace map {

// Engine system tied to the scene and the MapSystem.
// NOTE(review): its concrete per-frame responsibilities live in the .cpp —
// confirm there before relying on this summary.
class MapElementSystem : public engine::System
{
public:
    MapElementSystem();
protected:
    virtual void Init();
    virtual void Update( double DeltaTime );
    // Scene reference held by this system.
    Scene& mScene;
    // Handle to the MapSystem; optional, so it may be unset.
    Opt<MapSystem> mMapSystem;
};

} // namespace map

#endif//INCLUDED_MAP_MAP_ELEMENT_SYSTEM_H
|
def debug(x):
    """Print a value to stdout as a lightweight debugging aid."""
    print(x)
def displayContinuousStdPlots(series):
    """Display standard plots for a continuous series (not yet implemented)."""
    # TODO: Add bar plot
    # TODO: Add box plot
    # TODO: Add histogram
    pass


def displayCategoricalStdPlots(series):
    """Display standard plots for a categorical series (not yet implemented)."""
    # TODO: Add bar plot
    pass


def displayScatterPlotMatrix(df):
    """Display a scatter-plot matrix for a dataframe (not yet implemented)."""
    # TODO: Add scatter plot
    pass


def displaySmallMultiples(df):
    """
    Function for visualizing pairs of categorical features
    """
    # TODO: ADD small multiples plot
    pass


def displayHistogramPerCategoricalLevel(df):
    """
    Function for visualizing continuous features relative to categorical features
    """
    # TODO: ADD plot
    pass


def displayStackedBarPlot(df):
    """
    Function for visualizing pairs of categorical features
    """
    # TODO: ADD plot
    pass
require "faraday"
require "codelocks/version"
require "codelocks/client"
require "codelocks/collection_proxy"
require "codelocks/model"
require "codelocks/request"
require "codelocks/response"
require "codelocks/lock"
require "codelocks/net_code"
# Top-level namespace for the Codelocks API client gem.
module Codelocks
  # Base error class for Codelocks-specific failures.
  class CodelocksError < StandardError; end
end
|
<gh_stars>0
package com.govnomarket.market;
import com.govnomarket.market.dto.PersonDTO;
import com.govnomarket.market.entity.Person;
import com.govnomarket.market.passwordhashing.SHA512Hasher;
import org.aspectj.lang.annotation.Before;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import java.security.SecureRandom;
import static org.junit.jupiter.api.Assertions.assertEquals;
@SpringBootTest
@Disabled
@SpringBootTest
@Disabled
class MarketApplicationTests {

    // Collaborators recreated per hash so each test gets a fresh salt source.
    private SHA512Hasher hasher;
    private SecureRandom secureRandom;

    @Test
    void contextLoads() {
    }

    /**
     * Hashes the fixed test password with a fresh 16-byte random salt.
     * Extracted because both conversion tests repeated this setup verbatim.
     */
    private String newPasswordHash() {
        hasher = new SHA512Hasher();
        secureRandom = new SecureRandom();
        byte[] salt = new byte[16];
        secureRandom.nextBytes(salt);
        return hasher.hash("password", salt);
    }

    /**
     * Asserts field-by-field equality between a DTO and an entity.
     * Extracted because both tests repeated the same seven assertions.
     */
    private void assertSameFields(PersonDTO personDto, Person person) {
        assertEquals(personDto.getId(), person.getId());
        assertEquals(personDto.getName(), person.getName());
        assertEquals(personDto.getFullname(), person.getFullname());
        assertEquals(personDto.getRole(), person.getRole());
        assertEquals(personDto.getEmail(), person.getEmail());
        assertEquals(personDto.getPasswordHash(), person.getPasswordHash());
        assertEquals(personDto.getLogin(), person.getLogin());
    }

    /** Entity -> DTO conversion preserves every field. */
    @Test
    public void whenConvertPersonEntityToPersonDto_thenCorrect() {
        Person person = new Person();
        person.setId(1L);
        person.setName("Иван");
        person.setFullname("<NAME>");
        person.setRole("client");
        person.setEmail("<EMAIL>");
        person.setPasswordHash(newPasswordHash());
        person.setLogin("ivan");

        PersonDTO personDto = PersonDTO.personToPersonDto(person);

        assertSameFields(personDto, person);
    }

    /** DTO -> entity conversion preserves every field. */
    @Test
    public void whenConvertPersonDtoToPersonEntity_thenCorrect() {
        hasher = new SHA512Hasher();
        PersonDTO personDto = new PersonDTO();
        personDto.setId(1L);
        personDto.setName("Иван");
        personDto.setFullname("<NAME>");
        personDto.setRole("client");
        personDto.setEmail("<EMAIL>");
        personDto.setPasswordHash(newPasswordHash());
        personDto.setLogin("ivan");

        Person person = PersonDTO.personDtoToPerson(personDto);

        assertSameFields(personDto, person);
    }
}
|
#!/usr/bin/env bash

# Docker repository the image is pushed to.
repository="myroslavseliverstov/porter"

# Pseudo "commit" tag: 8 random lowercase alphanumeric characters.
commit=$(cat /dev/urandom | env LC_CTYPE=C tr -dc 'a-zA-Z0-9' | fold -w 8 | head -n 1 | awk '{print tolower($0)}')

# Monotonic build number persisted in build_numb.txt between runs.
build_num=( $(<build_numb.txt) )
build_num=$(( build_num + 1 ))
echo ${build_num} > build_numb.txt

# Build once, tag twice (dev-<commit> and 0.1.<build>), push both tags.
echo ">>>> Building image <<<<"
docker build --build-arg BUNDLE_DIR=/cnab/app -t ${repository}:dev-${commit} .
docker tag ${repository}:dev-${commit} ${repository}:0.1.${build_num}
echo ">>>> Push image ${repository}:dev-${commit} <<<<"
docker push ${repository}:dev-${commit}
echo ">>>> Push image ${repository}:0.1.${build_num} <<<<"
docker push ${repository}:0.1.${build_num}
|
#!/bin/sh
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/JMProject/JMProject.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/JMProject/JMProject.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { DevinfosPage } from './devinfos.page';
// Route table: DevinfosPage is mounted at this module's root path.
const routes: Routes = [
    {
        path: '',
        component: DevinfosPage
    }
];

// Child routing module, intended for lazy loading via RouterModule.forChild.
@NgModule({
    imports: [RouterModule.forChild(routes)],
    exports: [RouterModule],
})
export class DevinfosPageRoutingModule {}
|
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package com.amazon.dataprepper.plugins.processor.aggregate;
import com.amazon.dataprepper.model.event.Event;
import com.amazon.dataprepper.model.event.JacksonEvent;
import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.UUID;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
public class AggregateActionResponseTest {
    /** nullEventResponse must wrap a null event. */
    @Test
    void nullEventResponse_returns_correct_AggregateActionResponse() {
        final AggregateActionResponse emptyEventResponse = AggregateActionResponse.nullEventResponse();
        assertThat(emptyEventResponse.getEvent(), equalTo(null));
    }

    /** fromEvent must hand back the exact event instance it was given. */
    @Test
    void AggregateActionResponse_fromEvent_returns_correct_AggregateActionResponse() {
        // Random key/value so the event content is unique per run.
        final Event event = JacksonEvent.builder()
                .withEventType("event")
                .withData(Collections.singletonMap(UUID.randomUUID().toString(), UUID.randomUUID().toString()))
                .build();
        final AggregateActionResponse aggregateActionResponse = AggregateActionResponse.fromEvent(event);
        assertThat(aggregateActionResponse.getEvent(), equalTo(event));
    }
}
|
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.stargate.grpc.service;
import io.grpc.Context;
import io.grpc.Contexts;
import io.grpc.Metadata;
import io.grpc.ServerCall;
import io.grpc.ServerCall.Listener;
import io.grpc.ServerCallHandler;
import io.grpc.ServerInterceptor;
import io.stargate.db.Persistence;
/**
 * Test interceptor that attaches a fresh persistence connection to the gRPC
 * {@link Context} of every call under {@code GrpcService.CONNECTION_KEY},
 * standing in for the real authentication/connection interceptor.
 */
public class MockInterceptor implements ServerInterceptor {
    private final Persistence persistence;

    public MockInterceptor(Persistence persistence) {
        this.persistence = persistence;
    }

    @Override
    public <ReqT, RespT> Listener<ReqT> interceptCall(
            ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) {
        // New connection per intercepted call.
        Context context = Context.current();
        context = context.withValue(GrpcService.CONNECTION_KEY, persistence.newConnection());
        return Contexts.interceptCall(context, call, headers, next);
    }
}
|
<filename>scraper/lhs.js
const request = require('requestretry').defaults({json: true});
const { lhscans } = require('../config.json');
const cheerio = require('cheerio');
/**
 * Sequentially awaits callback(item, index, array) for each element,
 * unlike Array.prototype.forEach which ignores returned promises.
 */
async function asyncForEach(array, callback) {
    let position = 0;
    for (const item of array) {
        await callback(item, position, array);
        position += 1;
    }
}
// Scrapes the full manga list page and returns [{name, url, chapters: []}]
// for every "span a" link found. Single attempt, no retries.
async function getMangaTitles(){
    let mangas = [];
    let response = await request({
        url: lhscans + 'manga-list.html?listType=allABC',
        method: 'GET',
        maxAttempts: 1,
        retryDelay: 5000,
        retryStrategy: request.RetryStrategies.HTTPOrNetworkError
    });
    const $ = cheerio.load(response.body);
    // Each matching anchor is one manga entry; chapters filled in later.
    $(response.body).find("span a").each((index, elem) => {
        mangas.push(
            {
                name: $(elem).text(),
                url: $(elem).attr("href"),
                chapters: []
            });
    });
    return mangas;
}
// Visits each manga's page and fills in its chapter list.
// Returns the manga array with chapters populated.
async function addChapter(){
    let mangas = await getMangaTitles();
    // Now dirty stuff starts
    await asyncForEach(mangas, async(manga) => {
        let response = await request({
            url: lhscans + manga.url,
            method: 'GET',
            maxAttempts: 1,
            retryDelay: 5000,
            retryStrategy: request.RetryStrategies
                .HTTPOrNetworkError
        });
        const $ = cheerio.load(response.body);
        $(response.body).find("#list-chapters p .titleLink a")
            .each((i, elem) => {
                // Fix: the original wrote to mangas[index] using the inner
                // .each index, which shadowed the outer manga index and
                // attached chapters to the wrong manga entries. Push onto
                // the manga currently being processed instead.
                manga.chapters.push(
                    {
                        chapterNo: $(elem).attr("title"),
                        chapterUrl: $(elem).attr("href")
                    }
                );
                // Checking the output if its correct
                console.log($(elem).attr("title"));
            });
    });
    return mangas;
}
// Debug entry point: scrape everything and dump the result to the console.
async function printer() {
    let x = await addChapter();
    console.log(x);
}
printer();
|
<reponame>BitPaw/BitFireEngine<gh_stars>1-10
#include "UIElement.h"
|
package io.indreams.ecommerceuserinfoservice.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import javax.persistence.*;
/**
 * JPA entity backing the "users" table. Lombok generates getters/setters,
 * equals/hashCode/toString and both constructors.
 */
@Data
@NoArgsConstructor
@ToString
@AllArgsConstructor
@Entity
@Table(name = "users")
public class User {
    // Natural primary key: the login name.
    @Id
    private String username;

    // NOTE(review): stored as-is here; hashing presumably happens in the
    // service layer — confirm before exposing this field.
    @Column(nullable = false, unique = false)
    private String password;

    @Column(nullable = false, unique = false)
    private String contactNumber;

    // Soft-enable flag for the account.
    @Column(nullable = false)
    private boolean isActive;

    // NOTE(review): looks like a delimited role list in one column — verify
    // the expected format against the consumers of this field.
    @Column(nullable = false)
    private String roles;
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_directions_subway = void 0;
var ic_directions_subway = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M12 2c-4.42 0-8 .5-8 4v9.5C4 17.43 5.57 19 7.5 19L6 20.5v.5h12v-.5L16.5 19c1.93 0 3.5-1.57 3.5-3.5V6c0-3.5-3.58-4-8-4zM7.5 17c-.83 0-1.5-.67-1.5-1.5S6.67 14 7.5 14s1.5.67 1.5 1.5S8.33 17 7.5 17zm3.5-6H6V6h5v5zm5.5 6c-.83 0-1.5-.67-1.5-1.5s.67-1.5 1.5-1.5 1.5.67 1.5 1.5-.67 1.5-1.5 1.5zm1.5-6h-5V6h5v5z"
},
"children": []
}]
};
exports.ic_directions_subway = ic_directions_subway; |
import React, { Component } from 'react';
import Form from './Form';
import Post from './Post';
class App extends Component {
constructor() {
super();
this.state = {
posts: []
};
}
addPost = post => {
const newPost = [post, ...this.state.posts];
this.setState({
posts: newPost
});
};
render() {
return (
<div className="App">
<h1>React Social App</h1>
<Form addPost={this.addPost} />
<div>
{this.state.posts.map(post => (
<Post
key={post.id}
id={post.id}
author={post.author}
text={post.text}
/>
))}
</div>
</div>
);
}
}
export default App; |
#!/bin/bash
# Stop the game server: kill every node process that was started in
# "production" mode, wait, then show any node processes still alive.
cd ./game-server && kill -9 $(ps -x|grep -E 'node'|grep -E 'production'|grep -v grep|awk '{print $1}' )
echo 'server stopping... you will not see any process about node in a few seconds'
sleep 5
ps -x | grep node
echo '============ game-server stop ok ============'
const knex = require('../config/db');
const setupPaginator = require('knex-paginator');
const random = require('randomstring');
setupPaginator(knex);
exports.create = async (req,res) => {
let id = random.generate({length:32});
let name = req.body.name
let price = req.body.price
let picture = req.files
console.log(req.files);
const product = { id: `${id}`, name: `${name}`, price: `${price}`, modify_date: new Date(), create_date: new Date() };
knex('T_Product').insert(product)
.then(()=>{
res.status(200).json(product)
})
.catch((err)=>{
res.status(400).send({
message: err.message
})
});
picture.forEach(element => {
let img = element.originalname
let id = random.generate({length:32});
const image = { id: `${id}`, product_id: `${id}`, img: img, modify_date: new Date(), create_date: new Date() };
knex('AT_ProductImages').insert(image)
await .then(()=>{
res.status(200).json(image)
})
.catch((err)=>{
res.status(400).send({
message: err.message
})
});
});
}
exports.update = async (req,res) => {
let id = random.generate({length:32});
let name = req.body.name
let price = req.body.price
let picture = req.files
console.log(req.files);
const product = { id: `${id}`, name: `${name}`, price: `${price}`, modify_date: new Date(), create_date: new Date() };
knex('T_Product').update(product)
.where('T_Product.id', req.params.id)
.then(()=>{
res.status(200).send(product)
})
.catch((err)=>{
res.status(400).send({
message: err.message
})
});
picture.forEach(element => {
let img = element.originalname
let id = random.generate({length:32});
const image = { id: `${id}`, product_id: `${id}`, img: img, modify_date: new Date(), create_date: new Date() };
knex('AT_ProductImages').update(image)
await .then(()=>{
res.send(image)
})
.catch((err)=>{
res.status(400).send({
message: err.message
})
});
});
}
// GET /products — paginated listing (5 per page) with each product's image
// file names comma-joined into `image_list` via GROUP_CONCAT.
// NOTE(review): results are hard-limited to product ids < 100 — confirm
// whether this cap is intentional or leftover debug code.
exports.index = (req, res) => {
    knex.select(['T_Product.id','T_Product.name','T_Product.price', knex.raw('GROUP_CONCAT(AT_ProductImages.img) as image_list')]).
    from('AT_ProductImages').leftJoin('T_Product', 'T_Product.id','AT_ProductImages.product_id')
    .groupBy('T_Product.id')
    .where('T_Product.id', '<', 100)
    .paginate(5, req.body.page, true)
    .then((paginator) => {
        res.status(200).send({
            paginator
        })
    })
    .catch( (err)=>{
        res.status(400).send({
            message: err.message
        })
    } )
}
exports.show = (req, res) => {
knex.select(['T_Product.id','T_Product.name','T_Product.price', knex.raw('GROUP_CONCAT(AT_ProductImages.img) as image_list')]).
from('AT_ProductImages').leftJoin('T_Product', 'T_Product.id','AT_ProductImages.product_id')
.groupBy('T_Product.id')
.where('T_Product.id', req.params.id)
.then((data) =>{
res.status(200).send({
data
})
})
.catch( (err)=>{
res.status(400).send({
message: err.message
})
} )
}
exports.delete = async (req, res) => {
await knex('T_Product').where({'id':req.params.id}).del()
.then((rows) => {
res.status(200).send({
message:"Deleted success"
})
})
.catch( (err)=>{
res.status(401).send({
message: err.message
})
} )
} |
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.chevronsRight = void 0;
var chevronsRight = {
"viewBox": "0 0 24 24",
"children": [{
"name": "polyline",
"attribs": {
"points": "13 17 18 12 13 7"
},
"children": []
}, {
"name": "polyline",
"attribs": {
"points": "6 17 11 12 6 7"
},
"children": []
}],
"attribs": {
"fill": "none",
"stroke": "currentColor",
"stroke-width": "2",
"stroke-linecap": "round",
"stroke-linejoin": "round"
}
};
exports.chevronsRight = chevronsRight; |
<reponame>VoicuTomut/Qountry
"""
"""
from .fibonacci_mapping import fibonacci_vocabulary
|
#!/usr/bin/env bash
#
# Generated by: https://github.com/openapitools/openapi-generator.git
#
# Fetch NuGet, restore packages, build the solution and run the NUnit suite
# under Mono. (Removed an exact duplicate of the wget/mozroots pair — with
# `wget -nc` the second download was a no-op anyway.)
wget -nc https://dist.nuget.org/win-x86-commandline/latest/nuget.exe
mozroots --import --sync
echo "[INFO] remove bin/Debug/Org.OpenAPITools.Test.dll"
rm src/Org.OpenAPITools.Test/bin/Debug/Org.OpenAPITools.Test.dll 2> /dev/null
echo "[INFO] install NUnit runners via NuGet"
mono nuget.exe install src/Org.OpenAPITools.Test/packages.config -o packages
echo "[INFO] Install NUnit runners via NuGet"
mono nuget.exe install NUnit.Runners -Version 2.6.4 -OutputDirectory packages
echo "[INFO] Build the solution and run the unit test"
xbuild Org.OpenAPITools.sln && \
mono ./packages/NUnit.Runners.2.6.4/tools/nunit-console.exe src/Org.OpenAPITools.Test/bin/Debug/Org.OpenAPITools.Test.dll
def is_prime(n):
    """Return True if n is a prime number, False otherwise.

    Works for any integer; values <= 1 (including negatives) are not prime.
    Trial division up to sqrt(n) — written with ``i * i <= n`` so the
    function no longer depends on the ``math`` module, which was used in the
    original without a visible import.
    """
    if n <= 1:
        return False
    i = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i += 1
    return True
# Publish dhall-haskell package releases: upload each sdist to Hackage,
# download the prebuilt Hydra artifacts, and push Docker images to the
# dhallhaskell registry namespace.
# NOTE(review): with `set -x` tracing enabled, the $(< dockerPassword.txt)
# substitutions below are echoed to the trace output, which can leak the
# registry password into build logs — confirm this is acceptable.
set -x
JOBSET=master
# release NAME VERSION — package one project and push all its artifacts.
function release {
    NAME="$1"
    VERSION="$2"
    pushd "${NAME}"
    cabal v1-configure
    cabal v1-sdist
    cabal upload --publish "dist/${NAME}-${VERSION}.tar.gz"
    popd
    curl --location --output "${NAME}-${VERSION}-x86_64-linux.tar.bz2" "https://hydra.dhall-lang.org/job/dhall-haskell/${JOBSET}/tarball-${NAME}/latest/download/1/${NAME}.tar.bz2"
    curl --location --remote-name "https://hydra.dhall-lang.org/job/dhall-haskell/${JOBSET}/image-${NAME}/latest/download/1/docker-image-${NAME}.tar.gz"
    skopeo copy --dest-creds=gabriel439:$(< dockerPassword.txt) "docker-archive:docker-image-${NAME}.tar.gz" "docker://dhallhaskell/${NAME}"
    skopeo copy --dest-creds=gabriel439:$(< dockerPassword.txt) "docker-archive:docker-image-${NAME}.tar.gz" "docker://dhallhaskell/${NAME}:${VERSION}"
}
release dhall-lsp-server 1.0.3
release dhall-json 1.6.0
release dhall-yaml 1.0.0
release dhall-bash 1.0.25
release dhall-nix 1.1.10
release dhall 1.28.0
#!/bin/bash
# Build the project docs and push them to the gh-pages branch from CI.
# Taken from :
# https://gist.github.com/domenic/ec8b0fc8ab45f39403dd
set +x #
set -e # Exit with nonzero exit code if anything fails
SOURCE_BRANCH="master"
TARGET_BRANCH="gh-pages"
# Build the docs and move them into ./out (relative to the current dir).
# NOTE(review): assumes an out/ directory already exists where doCompile is
# invoked — in the deploy path that is the gh-pages clone created below.
function doCompile () {
    make doc && \
    mv doc/* out/ && \
    rm -rf doc
}
# Pull requests and commits to other branches shouldn't try to deploy, just build to verify
if [ "$CI_COMMIT_REF_NAME" != "$SOURCE_BRANCH" ]; then
    echo "Skipping deploy; just doing a build."
    mkdir out
    doCompile
    exit 0
fi
# Clone Github Repository
git clone https://guiadco:$GITHUB_ACCESS_TOKEN@github.com/GeekHomeInside/oneforall.git build
# Go inside build Folder
cd build
# Install virtualenv prerequisite
pip install --user virtualenv
# Save some useful information
REPO=`git config remote.origin.url`
SSH_REPO=${REPO/https:\/\/github.com\//git@github.com:}
SHA=`git rev-parse --verify HEAD`
# Clone the existing gh-pages for this repo into out/
# Create a new empty branch if gh-pages doesn't exist yet (should only happen on first deploy)
git clone $REPO out
cd out
git checkout $TARGET_BRANCH || git checkout --orphan $TARGET_BRANCH
git reset --hard
cd ..
# Clean out existing contents
rm -rf out/**/* || exit 0
# Run our compile script
doCompile
# Now let's go have some fun with the cloned repo
cd out
git config user.name "Gitlab CI"
git config user.email "$COMMIT_AUTHOR_EMAIL"
# If there are no changes to the compiled out (e.g. this is a README update) then just bail.
if git diff --quiet; then
    echo "No changes to the output on this push; exiting."
    exit 0
fi
# Commit the "changes", i.e. the new version.
# The delta will show diffs between new and old versions.
git add -A .
git commit -m "Build from: $CI_SERVER_NAME $CI_PIPELINE_ID & Deploy to GitHub Pages: ${SHA}"
git push $SSH_REPO $TARGET_BRANCH
#!/bin/bash
# Bootstrap Let's Encrypt certificates for a docker-compose nginx + certbot
# setup: issue a dummy self-signed cert so nginx can start, then replace it
# with a real certificate from Let's Encrypt.
# NOTE(review): "$domains" (the array name without an index) expands to only
# the FIRST element in the path/echo contexts below — that appears to be the
# intended live/ directory name, but confirm.
if ! [ -x "$(command -v docker-compose)" ]; then
  echo 'Error: docker-compose is not installed.' >&2
  exit 1
fi
domains=(alphadev.ga www.alphadev.ga)
rsa_key_size=4096
data_path="./data/certbot"
email="eduardoamparos@gmail.com" # Adding a valid address is strongly recommended
staging=0 # Set to 1 if you're testing your setup to avoid hitting request limits
if [ -d "$data_path" ]; then
  read -p "Existing data found for $domains. Continue and replace existing certificate? (y/N) " decision
  if [ "$decision" != "Y" ] && [ "$decision" != "y" ]; then
    exit
  fi
fi
if [ ! -e "$data_path/conf/options-ssl-nginx.conf" ] || [ ! -e "$data_path/conf/ssl-dhparams.pem" ]; then
  echo "### Downloading recommended TLS parameters ..."
  mkdir -p "$data_path/conf"
  curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot-nginx/certbot_nginx/_internal/tls_configs/options-ssl-nginx.conf > "$data_path/conf/options-ssl-nginx.conf"
  curl -s https://raw.githubusercontent.com/certbot/certbot/master/certbot/certbot/ssl-dhparams.pem > "$data_path/conf/ssl-dhparams.pem"
  echo
fi
echo "### Creating dummy certificate for $domains ..."
path="/etc/letsencrypt/live/$domains"
mkdir -p "$data_path/conf/live/$domains"
docker-compose run --rm --entrypoint "\
  openssl req -x509 -nodes -newkey rsa:4096 -days 1\
    -keyout '$path/privkey.pem' \
    -out '$path/fullchain.pem' \
    -subj '/CN=localhost'" certbot
echo
echo "### Starting nginx ..."
docker-compose up --force-recreate -d nginx
echo
echo "### Deleting dummy certificate for $domains ..."
docker-compose run --rm --entrypoint "\
  rm -Rf /etc/letsencrypt/live/$domains && \
  rm -Rf /etc/letsencrypt/archive/$domains && \
  rm -Rf /etc/letsencrypt/renewal/$domains.conf" certbot
echo
echo "### Requesting Let's Encrypt certificate for $domains ..."
#Join $domains to -d args
domain_args=""
for domain in "${domains[@]}"; do
  domain_args="$domain_args -d $domain"
done
# Select appropriate email arg
case "$email" in
  "") email_arg="--register-unsafely-without-email" ;;
  *) email_arg="--email $email" ;;
esac
# Enable staging mode if needed
if [ $staging != "0" ]; then staging_arg="--staging"; fi
docker-compose run --rm --entrypoint "\
  certbot certonly --webroot -w /var/www/certbot \
    $staging_arg \
    $email_arg \
    $domain_args \
    --rsa-key-size $rsa_key_size \
    --agree-tos \
    --force-renewal" certbot
echo
echo "### Reloading nginx ..."
docker-compose exec nginx nginx -s reload
#include "Logic.h"
void Logic::initVariables() {
this->window = nullptr;
this->spawnTimerMax = 1000.f;
this->spawnTimer = this->spawnTimerMax;
}
void Logic::initWindow() {
this->window = new sf::RenderWindow(sf::VideoMode(800, 600), "Game Mode", sf::Style::Titlebar | sf::Style::Close | sf::Style::Resize);
this->window->setFramerateLimit(60);
}
// Main constructor: set up state and window, then create the player square
// and the first enemy.
Logic::Logic() {
    initVariables();
    initWindow();
    drawGreen();
    drawEnemies();
}
// Destructor: release the heap-allocated render window.
Logic::~Logic() {
    delete window;
}
const bool Logic::running()
{
return this->window->isOpen();
}
// Player square: green, 100x100 scaled to 50x50, placed at a random
// on-screen position that keeps the (unscaled) square fully inside the window.
void Logic::drawGreen() {
    this->green.setSize(sf::Vector2f(100.f, 100.f));
    const int maxX = static_cast<int>(this->window->getSize().x - this->green.getSize().x);
    const int maxY = static_cast<int>(this->window->getSize().y - this->green.getSize().y);
    // FIX: cast both coordinates explicitly — the original converted the y
    // value implicitly (int -> float), triggering narrowing warnings.
    this->green.setPosition(static_cast<float>(rand() % maxX), static_cast<float>(rand() % maxY));
    this->green.setScale(sf::Vector2f(0.5f, 0.5f));
    this->green.setFillColor(sf::Color::Green);
}
void Logic::renderGreen() {
this->window->draw(this->green);
}
// Spawn one red 10x10 enemy (scaled to 5x5) at a random on-screen position
// and append it to the enemy list.
void Logic::drawEnemies() {
    this->box.setSize(sf::Vector2f(10.f, 10.f));
    this->box.setScale(sf::Vector2f(0.5f, 0.5f));
    const int maxX = static_cast<int>(this->window->getSize().x - this->box.getSize().x);
    const int maxY = static_cast<int>(this->window->getSize().y - this->box.getSize().y);
    // FIX: cast both coordinates explicitly — the original converted the y
    // value implicitly (int -> float), triggering narrowing warnings.
    this->box.setPosition(static_cast<float>(rand() % maxX), static_cast<float>(rand() % maxY));
    this->box.setFillColor(sf::Color::Red);
    this->enemies.push_back(this->box);
}
void Logic::updateEnemies() {
if (this->enemies.size() < 100) {
this->drawEnemies();
// if (this->spawnTimer >= this->spawnTimerMax) {
// this->drawEnemies();
// this->spawnTimer = 0.f;
// } else {
// this->spawnTimer += 1.f;
// }
}
// for(auto& e : this->enemies) {
// e.move(0.f, 1.f);
// }
}
void Logic::renderEnemies() {
for(auto& e : this->enemies) {
this->window->draw(e);
}
}
void Logic::update() {
this->pollEvent();
this->updateEnemies();
}
void Logic::render() {
this->window->clear();
this->renderGreen();
this->renderEnemies();
this->window->display();
}
// Collision handling between the player and enemies is not implemented yet.
// TODO(review): intended behavior unknown from this file — confirm design.
void Logic::updateCollision() {
}
void Logic::pollEvent()
{
while (this->window->pollEvent(this->ev))
{
switch (this->ev.type) {
case sf::Event::Closed:
this->window->close();
break;
case sf::Event::KeyPressed:
if (this->ev.key.code == sf::Keyboard::Escape)
this->window->close();
if (this->ev.key.code == sf::Keyboard::Up) {
this->green.move(0,-10);
}
if (this->ev.key.code == sf::Keyboard::Down) {
this->green.move(0,10);
// for (size_t i = 0; i < this->enemies.size(); i++) {
// if(this->green.getGlobalBounds().intersects(this->enemies[i]->getGlobalBounds()))
// this->green.move(0,-10);
// }
}
if (this->ev.key.code == sf::Keyboard::Left) {
this->green.move(-10,0);
}
if (this->ev.key.code == sf::Keyboard::Right) {
this->green.move(10,0);
}
if (this->ev.key.code == sf::Keyboard::Space) {
this->green.move(0,-50);
// this->green.move(0,50);
}
break;
}
}
} |
/**
* Class that handles the "business logic" of the bot and levels
*/
function BotManager(blocklyManager) {
const workspace = blocklyManager.getWorkspace();
const DEFAULT_STEP_TIMER = 150;
let currentCode = "";
let outputContainer = null;
let displayManager = null;
let storageManager = null;
let uiManager = null;
let currentLevelIndex = 0;
let stepping = false;
const MAX_TILES = 10;
const MIN_TILES = 3;
const INITIAL_WATER_SUPPLY = 2;
let forceStop = false;
let onStepClick = null;
// Intentionally a no-op: nothing needs cleaning up when an alert is dismissed.
function handleAlertDismiss() {}
function ExtinguishWithoutWaterError() {
this.message = "Tried to extinguish a flame without water";
}
ExtinguishWithoutWaterError.prototype = Error.prototype;
ExtinguishWithoutWaterError.prototype.code = "NO_WATER";
function ExtinguishNoFlameError() {
this.message = "Tried to extinguish a place that did not have a flame";
}
ExtinguishNoFlameError.prototype = Error.prototype;
ExtinguishNoFlameError.prototype.code = "NO_FLAME";
function WalkedOutsideOfTheBoundariesError() {
this.message = "You walked outside of the path";
}
WalkedOutsideOfTheBoundariesError.prototype = Error.prototype;
WalkedOutsideOfTheBoundariesError.prototype.code = "OUTSIDE_OF_PATH";
// Abort the current run: raise the kill-switch checked by the interpreter
// loop and drop out of stepping mode.
function handleStopClick() {
  forceStop = true;
  onStepClick = null;
  stepping = false;
}
// First click starts a stepped run (runCode returns the single-step driver
// when `stepping` is set); every click, including the first, advances one step.
function handleStepClick() {
  if (!stepping) {
    stepping = true;
    onStepClick = runCode();
  }
  onStepClick();
}
function randomInRange(max, min) {
return Math.floor(Math.random() * (max - min) + min);
}
// Builds the per-run state for a level: tile count, fire positions (sorted
// ascending), starting position and the initial water supply.
function getRunData(currentLevel) {
  const currentRunData = {};
  if (currentLevel.dynamicTiles) {
    currentRunData.numberOfTiles = randomInRange(MAX_TILES, MIN_TILES);
  } else {
    currentRunData.numberOfTiles = currentLevel.numberOfTiles;
  }
  if (currentLevel.dynamicFires) {
    // Place maxFires fires on distinct random tiles (fireMap dedupes).
    const fireMap = {};
    currentRunData.fires = [];
    let maxFires = INITIAL_WATER_SUPPLY;
    if (currentLevel.unlimitedFires) {
      // NOTE(review): randomInRange is (max, min); called here with
      // max < min — it still yields a value between the two, but confirm
      // the intended argument order.
      maxFires = randomInRange(
        INITIAL_WATER_SUPPLY,
        currentRunData.numberOfTiles
      );
    }
    for (let i = 0; i < maxFires; i++) {
      let newPosition = randomInRange(currentRunData.numberOfTiles, 0);
      while (fireMap[newPosition] === true) {
        newPosition = randomInRange(currentRunData.numberOfTiles, 0);
      }
      fireMap[newPosition] = true;
      currentRunData.fires.push(newPosition);
    }
  } else {
    // Static levels declare 1-based positions; convert to 0-based.
    currentRunData.fires = (currentLevel.fires || []).map(
      ({ position }) => position - 1
    );
  }
  // BUG FIX: Array#sort() without a comparator sorts numbers
  // lexicographically (e.g. [2, 10] -> [10, 2]); sort numerically.
  currentRunData.fires.sort((a, b) => a - b);
  currentRunData.currentPosition = 0;
  currentRunData.waterSupply = INITIAL_WATER_SUPPLY;
  return currentRunData;
}
// Clears the abort flag and hands the fresh run data to the renderer.
function onStartIntepreting(currentRunData) {
  forceStop = false;
  displayManager.runLevel(currentRunData);
}
// Evaluates a finished run: decides pass/fail, persists the result and
// notifies the UI. A run flagged `stopped` (user abort) is ignored entirely.
function onStopIntepreting(finalRunData, currentLevel) {
  const {
    fires,
    waterSupply,
    extinguishedFires,
    currentPosition,
    numberOfTiles,
    failed,
    message,
    stopped
  } = finalRunData;
  if (stopped) {
    return;
  }
  // FIX: corrected grammar of the user-facing failure message
  // ("did not passed" -> "did not pass").
  const errorMessage = "You did not pass this level";
  const successMessage = "You passed this level";
  let passed = false;
  stepping = false;
  onStepClick = null;
  let displayMessage = message || errorMessage;
  if (currentLevel.hasOwnProperty("checkSuccess")) {
    // The level supplies its own success predicate.
    const levelPassData = currentLevel.checkSuccess(finalRunData);
    passed = levelPassData.passed;
    if (levelPassData.message) {
      displayMessage = levelPassData.message;
    } else if (passed) {
      displayMessage = message || successMessage;
    }
  } else if (
    !failed &&
    fires.length === extinguishedFires.length &&
    currentPosition === numberOfTiles - 1
  ) {
    // Default win condition: every fire out and the bot on the last tile.
    passed = true;
    displayMessage = message || successMessage;
  }
  if (passed) {
    displayManager.setLevelAsPassed(currentLevelIndex);
    storageManager.setPassedLevel(currentLevelIndex);
    uiManager.onSuccess(displayMessage);
  } else {
    displayManager.unsetLevelAsPassed(currentLevelIndex);
    storageManager.unsetPassedLevel(currentLevelIndex);
    uiManager.onFailure(displayMessage);
  }
}
// Switch to another level: persist the selection, restore any code the
// player previously saved for it, and abort an in-progress run.
function onLevelSelected(newLevelIndex) {
  storageManager.setCurrentLevel(newLevelIndex);
  // TODO: move level selector to ui manager
  uiManager.onLevelSelected();
  const codeText = storageManager.getLevelCode(newLevelIndex);
  if (codeText) {
    blocklyManager.setCodeFromText(codeText);
  }
  currentLevelIndex = newLevelIndex;
  stepping = false;
  onStepClick = null;
  handleStopClick();
}
// Interprets the current Blockly-generated program against a fresh run of
// the selected level. In stepping mode it returns a single-step driver
// function; otherwise it schedules itself via setTimeout until done.
// NOTE(review): the interpreter executes `currentCode` (kept current by the
// workspace change listener) while `codeAsText` is only persisted — confirm
// the two always agree.
// NOTE(review): `levelInfo` is not defined in this file — presumably a
// global level table loaded elsewhere; verify before runCode is called.
function runCode() {
  const codeAsText = blocklyManager.getCurrentCodeAsText();
  if (codeAsText) {
    storageManager.saveLevelCode(currentLevelIndex, codeAsText);
  }
  const currentLevel = levelInfo[currentLevelIndex];
  const runData = getRunData(currentLevel);
  onStartIntepreting(runData);
  // Run state shared (via closure) with the interpreter's native functions.
  let { waterSupply, fires, currentPosition, numberOfTiles } = runData;
  const extinguishedFires = [];
  const getTileOnFire = () => {
    return fires.findIndex(pos => pos === currentPosition) > -1;
  };
  let tileOnFire = getTileOnFire();
  let positionThatWaterRunOut = 0;
  let tileAhead = currentPosition + 1 < numberOfTiles;
  let runTimeException = null;
  let stepTimer = DEFAULT_STEP_TIMER;
  let executedCommand = false;
  // Installs the native bot commands (moveForward/extinguishFire) and the
  // Blockly highlight hook into the sandboxed interpreter scope.
  function initIntepreter(interpreter, scope) {
    // Merges newState into the shared run state and mirrors it into the
    // interpreter scope and the on-screen HUD.
    function setState(newState = {}) {
      const oldState = { tileAhead, tileOnFire, waterSupply };
      console.log(`INITING INTERPRETER ${JSON.stringify({ oldState })}`);
      const currentState = { ...oldState, ...newState };
      ({ tileAhead, tileOnFire, waterSupply } = currentState);
      // if we don't pass this to the intepreter back the value will not be evaluated
      interpreter.setProperty(scope, "tileAhead", tileAhead);
      interpreter.setProperty(scope, "tileOnFire", tileOnFire);
      interpreter.setProperty(scope, "waterSupply", waterSupply);
      displayManager.setHud(currentState);
      if (Object.keys(newState).length) {
        executedCommand = true;
      }
    }
    setState();
    // Native command: advance one tile; walking past the end raises a
    // WalkedOutsideOfTheBoundariesError picked up by the step loop.
    function moveForward() {
      stepTimer = Math.max(500, stepTimer);
      console.log("ON MOVE FORWARD", {
        tileOnFire,
        tileAhead,
        currentPosition,
        waterSupply
      });
      currentPosition++;
      if (!tileAhead) {
        runTimeException = new WalkedOutsideOfTheBoundariesError();
        return;
      }
      tileAhead = currentPosition + 1 < numberOfTiles;
      tileOnFire = getTileOnFire();
      displayManager.moveBot(currentPosition);
      setState({ tileAhead, tileOnFire });
      console.log("AFTER FORWARD", {
        tileOnFire,
        tileAhead,
        currentPosition,
        waterSupply
      });
    }
    // Native command: put out the fire on the current tile; errors when out
    // of water or when there is no flame here.
    function extinguishFire() {
      stepTimer = Math.max(300, stepTimer);
      console.log("ON EXTINGUISH FIRE", {
        tileOnFire,
        tileAhead,
        currentPosition,
        waterSupply
      });
      console.log(`EXTINGUISHING FIRE WITH WATER SUPPLY ${waterSupply}`);
      if (waterSupply <= 0) {
        runTimeException = new ExtinguishWithoutWaterError();
        return;
      }
      if (!tileOnFire) {
        runTimeException = new ExtinguishNoFlameError();
        return;
      }
      waterSupply--;
      if (waterSupply === 0) {
        positionThatWaterRunOut = currentPosition;
      }
      extinguishedFires.push(currentPosition);
      displayManager.extinguishFlame(currentPosition);
      setState({ waterSupply, tileOnFire: false });
      console.log("AFTER EXTINGUISH FIRE", {
        tileOnFire,
        tileAhead,
        currentPosition,
        waterSupply
      });
    }
    function highlightBlock(id) {
      workspace.highlightBlock(id);
    }
    interpreter.setProperty(
      scope,
      "moveForward",
      interpreter.createNativeFunction(moveForward)
    );
    interpreter.setProperty(
      scope,
      "extinguishFire",
      interpreter.createNativeFunction(extinguishFire)
    );
    interpreter.setProperty(
      scope,
      "highlightBlock",
      interpreter.createNativeFunction(highlightBlock)
    );
  }
  try {
    const jsInterpreter = new Interpreter(currentCode, initIntepreter);
    let stepCount = -1;
    // One interpreter micro-step; odd-numbered steps are paced by stepTimer.
    const nextStep = () => {
      if (forceStop) {
        onStopIntepreting({ stopped: true }, currentLevel);
        return;
      }
      if (runTimeException) {
        onStopIntepreting(
          { failed: true, message: runTimeException.message },
          currentLevel
        );
        return;
      }
      stepCount++;
      stepTimer = DEFAULT_STEP_TIMER;
      const lastOne = jsInterpreter.step() === false;
      if (lastOne) {
        onStopIntepreting(
          {
            fires,
            waterSupply,
            extinguishedFires,
            currentPosition,
            numberOfTiles,
            positionThatWaterRunOut
          },
          currentLevel
        );
        return;
      }
      if (stepCount % 2 === 0) {
        nextStep();
        return;
      }
      const currentTimer = stepTimer;
      stepTimer = DEFAULT_STEP_TIMER;
      if (!stepping || !executedCommand) {
        // unfortunately since the blocks can be more than one code is hard to control the speed
        window.setTimeout(nextStep, currentTimer);
      } else if (stepping) {
        uiManager.onStepExecuted();
      }
    };
    if (stepping) {
      // Stepped mode: hand back a driver that runs until one bot command
      // (setState with changes) has executed.
      return () => {
        executedCommand = false;
        nextStep();
      };
    }
    nextStep();
  } catch (err) {
    alert(err.message);
  }
}
// Workspace change listener: keeps `currentCode` (what the interpreter runs)
// and the output textarea in sync with the Blockly workspace.
// (The "Worspace" spelling is kept — the name is referenced in init().)
function onWorspaceUpdate(event) {
  try {
    currentCode = Blockly.JavaScript.workspaceToCode(workspace);
    outputContainer.value = currentCode;
  } catch (e) {
    console.error(e);
    console.error(
      "HEY LISTEN!!! We couldn't convert your code, we are ignoring it but if that was unintentional you should check it out "
    );
    console.log("the data was ", event);
  }
}
function init() {
storageManager = new StorageManager();
uiManager = new UIManager({
onRun: runCode,
onStop: handleStopClick,
onDismiss: handleAlertDismiss,
onStep: handleStepClick
});
currentLevelIndex = storageManager.getCurrentLevel();
const passedLevels = storageManager.getPassedLevels();
onLevelSelected(currentLevelIndex);
displayManager = new DisplayManager({
levelIndex: currentLevelIndex,
onLevelSelected,
passedLevels
});
outputContainer = document.getElementById("output");
workspace.addChangeListener(onWorspaceUpdate);
}
init();
}
|
#!/bin/sh
# Query the ChEMBL RDF mirror for (molecule id, Ensembl gene id) pairs of
# drug mechanisms and write them as a header-less TSV to pair.tsv.
# SPARQL query
QUERY="PREFIX cco: <http://rdf.ebi.ac.uk/terms/chembl#>
SELECT ?moleculeid ?ensg
WHERE{
    ?mechanism a cco:Mechanism ;
        cco:hasTarget ?target ;
        cco:hasMolecule ?molecule .
    ?target cco:hasTargetComponent ?component .
    ?component cco:targetCmptXref ?ensg_uri .
    BIND (REPLACE(STR(?ensg_uri), 'http://identifiers.org/ensembl/', '') AS ?ensg)
    FILTER(CONTAINS(?ensg, \"ENSG\"))
    ?molecule cco:chemblId ?moleculeid .
}
ORDER BY ?moleculeid"
# curl -> format -> delete header
# NOTE(review): the sed pass strips all quotes and turns every comma into a
# tab — safe only while neither column can contain a comma; confirm.
curl -s -H "Accept: text/csv" --data-urlencode "query=$QUERY" https://integbio.jp/rdf/mirror/ebi/sparql | sed -e 's/\"//g; s/,/\t/g' | sed -e '1d' > pair.tsv
#!/bin/bash
# Configure the environment for the Oracle 12.1 instant client: ORACLE_HOME,
# library/search paths, NLS locale settings, JDBC classpath and TNS admin dir.
#unset ORACLE_HOME ORACLE_BASE LD_LIBRARY_PATH SQLPATH NLS_LANG NLS_CHARACTERSET NLS_NCHAR_CHARACTERSET NLS_DATE_FORMAT CLASSPATH TNS_ADMIN
# Oracle 32
if [ -d "/usr/lib/oracle/12.1/client" ]; then
    export ORACLE_HOME="/usr/lib/oracle/12.1/client"
fi
# Oracle 64 (takes precedence over the 32-bit client when both exist)
if [ -d "/usr/lib/oracle/12.1/client64" ]; then
    export ORACLE_HOME="/usr/lib/oracle/12.1/client64"
fi
# Oracle Home
if [ -e "$ORACLE_HOME" ]; then
    export ORACLE_BASE="$ORACLE_HOME"
    export PATH="$PATH:$ORACLE_HOME/bin"
    #if [ -z "$LD_LIBRARY_PATH" ]; then
    #    export LD_LIBRARY_PATH="$ORACLE_HOME/lib"
    #else
    #    export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ORACLE_HOME/lib"
    #fi
    export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ORACLE_HOME/lib"
    # NOTE(review): `-e "$SQLPATH"` tests whether the *value* of SQLPATH names
    # an existing file, making the inner -z branch effectively unreachable —
    # the commented LD_LIBRARY_PATH block above suggests a -z/-n emptiness
    # check was intended; confirm before changing.
    if [ -e "$SQLPATH" ]; then
        if [ -z "$SQLPATH" ]; then
            export SQLPATH="$ORACLE_HOME/lib"
        else
            export SQLPATH="$SQLPATH:$ORACLE_HOME/lib"
        fi
    else
        export SQLPATH="$ORACLE_HOME/lib"
    fi
    export NLS_LANG="BRAZILIAN PORTUGUESE_BRAZIL.UTF8"
    export NLS_CHARACTERSET="UTF8"
    export NLS_NCHAR_CHARACTERSET="AL32UTF8"
    export NLS_DATE_FORMAT="DD/MM/YYYY HH24:MI:SS"
    if [ -z "$CLASSPATH" ]; then
        export CLASSPATH="$ORACLE_HOME/lib/ojdbc7.jar"
        export CLASSPATH="$CLASSPATH:$ORACLE_HOME/lib/ojdbc6.jar"
    else
        export CLASSPATH="$CLASSPATH:$ORACLE_HOME/lib/ojdbc7.jar"
        export CLASSPATH="$CLASSPATH:$ORACLE_HOME/lib/ojdbc6.jar"
    fi
fi
# Oracle TNS
if [ -d "$HOME/oracle_client/network/admin" ]; then
    export TNS_ADMIN="$HOME/oracle_client/network/admin"
    #export ORACLE_DICT="$HOME/oracle_client/dict.sql"
fi
<gh_stars>0
import MathUtils from '../MathUtils'; // SUT
// Unit tests for MathUtils.calculatePercentageDifference (the SUT).
describe('calculatePercentageDifference', () => {
  // Identical values: zero difference.
  test('should return difference is 0', () => {
    expect(MathUtils.calculatePercentageDifference(100, 100)).toBe(0);
  });
  // From 0 to 100: full difference.
  test('should return difference is 100', () => {
    expect(MathUtils.calculatePercentageDifference(0, 100)).toBe(100);
  });
  // A zero second argument would divide by zero; the SUT must throw.
  test('should throw when trying to divide by 0', () => {
    expect(() => MathUtils.calculatePercentageDifference(100, 0)).toThrow();
  });
});
'use strict';
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const Loader = require('@structures/Loader');
const Logger = require('@util/Logger');
const i18next = require('i18next');
const translationBackend = require('i18next-node-fs-backend');
const readdir = promisify(fs.readdir);
const pathFolder = path.resolve('src', 'locales');
class LanguagesLoader extends Loader {
  constructor(client) {
    super(client, true);
  }

  /**
   * Boots i18next with the filesystem backend, preloading every locale
   * directory found under src/locales.
   * Returns true on success; on failure the error is logged and the result
   * of Logger.error (normally undefined) is returned, matching the previous
   * .then/.catch behavior.
   */
  async load() {
    try {
      await i18next
        .use(translationBackend)
        .init({
          backend: { loadPath: `${pathFolder}/{{lng}}/{{ns}}.json` },
          defaultNS: 'commands',
          fallbackLng: 'en-US',
          interpolation: { escapeValue: false },
          ns: ['categories', 'commands', 'errors', 'permissions', 'common', 'loggers', 'api_errors'],
          preload: await readdir(pathFolder),
          returnEmptyString: false,
        }, () => {
          Logger.log(Object.keys(i18next.store.data));
        });
      return true;
    } catch (error) {
      return Logger.error(error);
    }
  }
}
module.exports = LanguagesLoader;
|
package com.io.routesapp;
import android.content.Context;
import android.content.SharedPreferences;
import org.apache.tomcat.util.codec.binary.Base64;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
public class CheckAccessTokenThread extends Thread {
    // Refresh endpoint on the DP dev server (10.0.2.2 = host loopback from
    // the Android emulator); the refresh token is appended to this URL.
    String DPRefreshTokenUrl = "http://10.0.2.2:8081/api/refreshToken?clientID=2&refreshToken=";

    /**
     * Background loop: sleeps in 10-minute intervals until the access token
     * is within 10 minutes of expiry, then requests fresh tokens. Never exits.
     * NOTE(review): accessToken is read once before the loop and never
     * re-read after refreshTokens(), so expiry keeps being checked against
     * the stale token — confirm whether that is intentional.
     */
    @Override
    public void run() {
        String accessToken = MainActivity.getLoggedInUser().getCookies().get("AccessToken2");
        while (true) {
            while (!ifExpiresSoon(accessToken)) {
                try {
                    Thread.sleep(600000); // 10 minutes between checks
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
            refreshTokens();
        }
    }

    /**
     * Decodes the JWT body (second dot-separated segment, base64url) and
     * returns true when the "exp" claim is less than 10 minutes away.
     * NOTE(review): relies on "exp" being the third comma-separated field of
     * the JSON body with a fixed 6-character prefix before the value —
     * brittle against claim reordering; confirm the token format.
     */
    private Boolean ifExpiresSoon(String accessToken) {
        String[] split_string = accessToken.split("\\.");
        String base64EncodedBody = split_string[1];
        Base64 base64Url = new Base64(true);
        String body = new String(base64Url.decode(base64EncodedBody));
        String[] split = body.split(",");
        if (split[2].startsWith("exp", 1)){
            long expiresAt = Long.parseLong(split[2].substring(6));
            System.out.println(expiresAt);
            // JWT exp is in epoch seconds; convert current millis to match.
            long currentTime = System.currentTimeMillis()/1000L;
            return (expiresAt - currentTime) < 600 ;
        }
        return false;
    }

    /**
     * Calls the refresh endpoint asynchronously, stores the two tokens from
     * the Set-Cookie response headers into the logged-in user's cookie map,
     * then busy-waits (10 ms polls) until the map holds 3 entries.
     */
    private void refreshTokens() {
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder()
                .url(DPRefreshTokenUrl + MainActivity.getLoggedInUser().getCookies().get("RefreshToken2"))
                .build();
        client.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
            }
            @Override
            public void onResponse(Call call, final Response response) throws IOException {
                if (!response.isSuccessful()) {
                    throw new IOException("Error in response from DP Server :( error code: " + String.valueOf(response.code()));
                } else {
                    // Each cookie is "name=value; ..." — keep only name=value.
                    String cookie = (response.headers().values("Set-Cookie").get(0).split(";")[0]);
                    MainActivity.getLoggedInUser().getCookies().put(cookie.split("=")[0], cookie.split("=")[1]);
                    cookie = (response.headers().values("Set-Cookie").get(1).split(";")[0]);
                    MainActivity.getLoggedInUser().getCookies().put(cookie.split("=")[0], cookie.split("=")[1]);
                }
            }
        });
        while (MainActivity.getLoggedInUser().getCookies().size() != 3) {
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.aliyun.odps.datacarrier.commons;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
/**
* @author: Jon (<EMAIL>)
*
* MetaManager reads/writes metadata from/to a directory. The structure is as follows:
*
* [output directory]
* |______global.json
* |______[database name]
* |______[database name].json
* |______table_meta
* | |______[table name].json
* |______partition_meta
* |______[table name].json
*
* The format of global.json is as follows:
* {
* "datasourceType" : xxx,
* "odpsVersion" : "ODPS_V1 or ODPS_V2",
* "hiveCompatible" : xxx
* }
*
* The format of [database name].json is as follows:
* {
* "databaseName" : xxx,
* "odpsProjectName" : xxx
* }
*
* The format of [table name].json is as follows:
* {
* "tableName" : "table_1",
* "odpsTableName" : "odps_table_1",
* "lifeCycle" : 10,
* "comment" : "first table",
* "ifNotExist" : true,
* "columns" : [
* {
* "name" : "column_1",
* "odpsColumnName" : "odps_column_1",
* "type" : "bigint",
* "comment" : "xxx"
* },
* ...
* ],
* "partitionColumns" : [
* {
* "name" : "column_1",
* "odpsColumnName" : "odps_column_1",
* "type" : "bigint",
* "comment" : "xxx"
* },
* ...
* ]
* }
*
* The format of table partition spec is as follows:
* {
* "databaseName" : "db_1",
* "tableName" : "table_1",
* "partitions" : [
* {
* "partitionSpec" : "xxx",
* "location" : "xxx",
* "creationTime" : "xxx"
* },
* ...
* ]
* }
*/
public class MetaManager {
  // --- Plain Gson-serializable model objects, one per JSON file kind ---

  // Mirrors global.json (datasource type, ODPS version, Hive compatibility).
  public static class GlobalMetaModel{
    public String datasourceType;
    public String odpsVersion = "ODPS_V2";
    public Boolean hiveCompatible = false;
  }
  // Mirrors [database name].json: source database -> ODPS project mapping.
  public static class DatabaseMetaModel {
    public String databaseName;
    public String odpsProjectName;
  }
  // Mirrors table_meta/[table name].json: table mapping plus its columns.
  public static class TableMetaModel {
    public String tableName;
    public String odpsTableName;
    public Integer lifeCycle;
    public String comment;
    public Boolean ifNotExists = true;
    public List<ColumnMetaModel> columns = new ArrayList<>();
    public List<ColumnMetaModel> partitionColumns = new ArrayList<>();
  }
  // One column (regular or partition) with its ODPS name and type.
  public static class ColumnMetaModel {
    public String columnName;
    public String odpsColumnName;
    public String type;
    public String comment;
  }
  // One partition: spec string, storage location and creation time.
  public static class PartitionMetaModel {
    public String partitionSpec;
    public String location;
    public String createTime;
  }
  // Mirrors partition_meta/[table name].json: all partitions of one table.
  public static class TablePartitionMetaModel {
    public String tableName;
    public List<PartitionMetaModel> partitions = new ArrayList<>();
  }
private static final String GLOBAL = "global";
private static final String TABLE_META_DIR = "table_meta";
private static final String PARTITION_META_DIR = "partition_meta";
private static final String JSON_SUFFIX = ".json";
private String root;
public MetaManager(String root) {
this.root = root;
File outputDir = new File(root);
if (!outputDir.exists() && !outputDir.mkdirs()) {
throw new IllegalArgumentException("Output directory does not exist and cannot be created.");
}
if (!outputDir.isDirectory()) {
throw new IllegalArgumentException("Please specify an existing directory.");
}
}
public GlobalMetaModel getGlobalMeta() throws IOException {
Path filePath = Paths.get(this.root, GLOBAL + JSON_SUFFIX);
String jsonString = DirUtils.readFromFile(filePath);
return Constants.GSON.fromJson(jsonString, GlobalMetaModel.class);
}
public void setGlobalMeta(GlobalMetaModel globalMeta) throws IOException {
Path filePath = Paths.get(this.root, GLOBAL + JSON_SUFFIX);
String jsonString = Constants.GSON.toJson(globalMeta, GlobalMetaModel.class);
DirUtils.writeToFile(filePath, jsonString);
}
public DatabaseMetaModel getDatabaseMeta(String databaseName) throws IOException {
Path filePath = Paths.get(this.root, databaseName, databaseName + JSON_SUFFIX);
String jsonString = DirUtils.readFromFile(filePath);
return Constants.GSON.fromJson(jsonString, DatabaseMetaModel.class);
}
public void setDatabaseMeta(DatabaseMetaModel databaseMeta) throws IOException {
Path filePath = Paths.get(this.root, databaseMeta.databaseName,
databaseMeta.databaseName + JSON_SUFFIX);
String jsonString = Constants.GSON.toJson(databaseMeta, DatabaseMetaModel.class);
DirUtils.writeToFile(filePath, jsonString);
}
public TableMetaModel getTableMeta(String databaseName, String tableName) throws IOException {
Path filePath = Paths.get(this.root, databaseName, TABLE_META_DIR,
tableName + JSON_SUFFIX);
String jsonString = DirUtils.readFromFile(filePath);
return Constants.GSON.fromJson(jsonString, TableMetaModel.class);
}
public void setTableMeta(String databaseName, TableMetaModel tableMetaModel)
throws IOException {
Path filePath = Paths.get(this.root, databaseName, TABLE_META_DIR,
tableMetaModel.tableName + JSON_SUFFIX);
String jsonString = Constants.GSON.toJson(tableMetaModel, TableMetaModel.class);
DirUtils.writeToFile(filePath, jsonString);
}
public TablePartitionMetaModel getTablePartitionMeta(String databaseName, String tableName)
throws IOException {
Path filePath = Paths.get(this.root, databaseName, PARTITION_META_DIR,
tableName + JSON_SUFFIX);
String jsonString = DirUtils.readFromFile(filePath);
return Constants.GSON.fromJson(jsonString, TablePartitionMetaModel.class);
}
public void setTablePartitionMeta(String databaseName,
TablePartitionMetaModel tablePartitionMeta) throws IOException {
Path filePath = Paths.get(this.root, databaseName, PARTITION_META_DIR,
tablePartitionMeta.tableName + JSON_SUFFIX);
String jsonString = Constants.GSON.toJson(tablePartitionMeta, TablePartitionMetaModel.class);
DirUtils.writeToFile(filePath, jsonString);
}
public String[] listDatabases() {
Path rootDir = Paths.get(this.root);
if (!Files.exists(rootDir)) {
return new String[0];
}
return DirUtils.listDirs(rootDir);
}
/**
* Return table names in a given database, including both non-partition tables and partition
* tables
*/
public String[] listTables(String databaseName) {
Path tableMetaDir = Paths.get(this.root, databaseName, TABLE_META_DIR);
if (!Files.exists(tableMetaDir)) {
return new String[0];
}
String[] tableMetaFiles = DirUtils.listFiles(tableMetaDir);
// Remove .json
for (int i = 0; i < tableMetaFiles.length; i++) {
String tableMetaFile = tableMetaFiles[i];
if (tableMetaFile.endsWith(JSON_SUFFIX)) {
tableMetaFiles[i] =
tableMetaFile.substring(0, tableMetaFile.length() - JSON_SUFFIX.length());
} else {
throw new IllegalArgumentException(
"Table meta directory contains invalid file: " + tableMetaFile);
}
}
return tableMetaFiles;
}
/**
* Return only partition table names in a given database
*/
public String[] listPartitionTables(String databaseName) {
Path partitionMetaDir = Paths.get(this.root, databaseName, PARTITION_META_DIR);
if (!Files.exists(partitionMetaDir)) {
return new String[0];
}
String[] partitionMetaFiles = DirUtils.listFiles(partitionMetaDir);
// Remove .json
for (int i = 0; i < partitionMetaFiles.length; i++) {
String partitionMetaFile = partitionMetaFiles[i];
if (partitionMetaFile.endsWith(JSON_SUFFIX)) {
partitionMetaFiles[i] = partitionMetaFile.substring(
0, partitionMetaFile.length() - JSON_SUFFIX.length());
} else {
throw new IllegalArgumentException(
"Partition meta directory contains invalid file: " + partitionMetaFile);
}
}
return partitionMetaFiles;
}
}
|
#!/bin/sh
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Extract the release version from setup.py.src: take the line containing
# "version", strip everything up to the opening quote and after the closing
# quote of the `version = "..."` assignment.
# (Modern $() substitution instead of legacy backticks.)
version=$(grep version setup.py.src | sed -e 's/^.*= "//; s/",.*//')
echo "Uploading $version"
# Copy every built distribution artifact to the public release directory.
scp ../dist/* michaels@132.185.142.2:/srv/www/sites/www.kamaelia.org/docs/release/
|
<filename>renpy/display/video.py
# Copyright 2004-2021 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
from renpy.compat import *
import renpy.display
import renpy.audio
import collections
# The movie displayable that's currently being shown on the screen.
current_movie = None

# True if the movie that is currently displaying is in fullscreen mode,
# False if it's a smaller size.
fullscreen = False

# The size of a Movie object that hasn't had an explicit size set.
default_size = (400, 300)

# The file we allocated the surface for.
surface_file = None

# The surface to display the movie on, if not fullscreen.
# NOTE(review): surface_file and surface are not referenced elsewhere in this
# module chunk -- possibly legacy state kept for compatibility.
surface = None
def movie_stop(clear=True, only_fullscreen=False):
    """
    Stops the currently playing movie.

    `clear`
        Unused in this implementation; kept for backwards compatibility
        with existing callers.

    `only_fullscreen`
        If true, only stop when the playing movie is fullscreen.
    """

    # When asked to stop only fullscreen playback, leave windowed movies alone.
    if (not fullscreen) and only_fullscreen:
        return

    renpy.audio.music.stop(channel='movie')
def movie_start(filename, size=None, loops=0):
    """
    This starts a movie playing.

    `filename`
        The movie file to play.

    `size`
        If not None, becomes the new default size used by Movie
        displayables without an explicit size.

    `loops`
        The number of additional times to repeat the movie after the
        first play. -1 means loop forever.
    """

    if renpy.game.less_updates:
        return

    global default_size

    if size is not None:
        default_size = size

    filename = [ filename ]

    if loops == -1:
        loop = True
    else:
        loop = False
        # Queue (loops + 1) plays in total. This multiplication must stay
        # inside the else branch: with loops == -1 it would evaluate to
        # filename * 0 and queue an empty playlist instead of looping.
        filename = filename * (loops + 1)

    renpy.audio.music.play(filename, channel='movie', loop=loop)
# Backwards-compatibility aliases: fullscreen and displayable playback both
# start through the same implementation.
movie_start_fullscreen = movie_start
movie_start_displayable = movie_start

# A map from a channel name to the movie texture that is being displayed
# on that channel.
texture = { }

# The set of channels that are being displayed in Movie objects.
displayable_channels = collections.defaultdict(list)

# A map from a channel to the topmost Movie being displayed on
# that channel. (Or None if no such movie exists.)
channel_movie = { }

# Is there a video being displayed fullscreen?
# NOTE(review): this rebinds the `fullscreen` flag already defined above in
# this module -- harmless, but redundant.
fullscreen = False

# Movie channels that had a hide operation since the last interaction took
# place.
reset_channels = set()
def early_interact():
    """
    Called early in the interact process, to clear out the fullscreen
    flag.
    """

    # Forget which Movie displayables claimed channels last interaction;
    # they re-register through per_interact() if still shown.
    displayable_channels.clear()
    channel_movie.clear()
def interact():
    """
    This is called each time the screen is drawn, and should return True
    if the movie should display fullscreen.
    """

    global fullscreen

    # Drop cached textures for channels that are no longer playing.
    for i in list(texture.keys()):
        if not renpy.audio.music.get_playing(i):
            del texture[i]

    if renpy.audio.music.get_playing("movie"):

        # Fullscreen applies only when no Movie displayable has claimed the
        # "movie" channel. Note the for/else: the else runs only if the loop
        # completed without hitting break.
        for i in displayable_channels.keys():
            if i[0] == "movie":
                fullscreen = False
                break
        else:
            fullscreen = True

    else:
        fullscreen = False

    return fullscreen
def get_movie_texture(channel, mask_channel=None, side_mask=False, mipmap=None):
    """
    Returns a (texture, new) pair for the movie playing on `channel`.

    `mask_channel`
        If given, a second channel whose video frames provide the alpha
        channel for this movie.

    `side_mask`
        If true, the movie frame itself is split in half: the left half is
        color, the right half is the alpha mask.

    `mipmap`
        Passed through to the texture loader; defaults to
        config.mipmap_movies when None.

    The second element of the returned pair is True when a fresh frame was
    decoded and loaded, False when the cached texture (or None) is returned.
    """

    if not renpy.audio.music.get_playing(channel):
        return None, False

    if mipmap is None:
        mipmap = renpy.config.mipmap_movies

    c = renpy.audio.music.get_channel(channel)
    surf = c.read_video()

    if side_mask:

        if surf is not None:

            # Split the frame: left half color, right half alpha.
            w, h = surf.get_size()
            w //= 2

            mask_surf = surf.subsurface((w, 0, w, h))
            surf = surf.subsurface((0, 0, w, h))

        else:
            mask_surf = None

    elif mask_channel:
        mc = renpy.audio.music.get_channel(mask_channel)
        mask_surf = mc.read_video()
    else:
        mask_surf = None

    if mask_surf is not None:

        # Something went wrong with the mask video.
        if surf:
            # Merge the mask's alpha information into the color frame.
            renpy.display.module.alpha_munge(mask_surf, surf, renpy.display.im.identity)
        else:
            surf = None

    if surf is not None:
        # A fresh frame: upload it and cache the texture for this channel.
        renpy.display.render.mutated_surface(surf)
        tex = renpy.display.draw.load_texture(surf, True, { "mipmap" : mipmap })
        texture[channel] = tex
        new = True
    else:
        # No new frame: reuse whatever texture we last loaded, if any.
        tex = texture.get(channel, None)
        new = False

    return tex, new
def render_movie(channel, width, height):
    """
    Builds a Render showing the movie on `channel`, scaled to fit inside a
    width x height area while preserving aspect ratio, and centered within
    that area. Returns None when no frame is available yet.
    """

    frame_tex, _new = get_movie_texture(channel)

    if frame_tex is None:
        return None

    tex_w, tex_h = frame_tex.get_size()

    # Uniform scale factor that fits the frame inside the target area.
    factor = min(1.0 * width / tex_w, 1.0 * height / tex_h)
    scaled_w = factor * tex_w
    scaled_h = factor * tex_h

    rv = renpy.display.render.Render(width, height)
    rv.forward = renpy.display.render.Matrix2D(1.0 / factor, 0.0, 0.0, 1.0 / factor)
    rv.reverse = renpy.display.render.Matrix2D(factor, 0.0, 0.0, factor)

    # Center the scaled frame in the requested area.
    offset_x = int((width - scaled_w) / 2)
    offset_y = int((height - scaled_h) / 2)
    rv.blit(frame_tex, (offset_x, offset_y))

    return rv
def default_play_callback(old, new): # @UnusedVariable
    """
    Default implementation of Movie.play_callback: starts the new Movie's
    file (and, if present, its alpha-mask file) playing in sync on the
    Movie's channels. `old` is accepted for callback-signature
    compatibility but unused here.
    """

    renpy.audio.music.play(new._play, channel=new.channel, loop=new.loop, synchro_start=True)

    if new.mask:
        renpy.audio.music.play(new.mask, channel=new.mask_channel, loop=new.loop, synchro_start=True)
class Movie(renpy.display.core.Displayable):
    """
    :doc: movie

    This is a displayable that shows the current movie.

    `fps`
        The framerate that the movie should be shown at. (This is currently
        ignored, but the parameter is kept for backwards compatibility.
        The framerate is auto-detected.)

    `size`
        This should be specified as either a tuple giving the width and
        height of the movie, or None to automatically adjust to the size
        of the playing movie. (If None, the displayable will be (0, 0)
        when the movie is not playing.)

    `channel`
        The audio channel associated with this movie. When a movie file
        is played on that channel, it will be displayed in this Movie
        displayable. If this is not given, and the `play` is provided,
        a channel name is automatically selected.

    `play`
        If given, this should be the path to a movie file. The movie
        file will be automatically played on `channel` when the Movie is
        shown, and automatically stopped when the movie is hidden.

    `side_mask`
        If true, this tells Ren'Py to use the side-by-side mask mode for
        the Movie. In this case, the movie is divided in half. The left
        half is used for color information, while the right half is used
        for alpha information. The width of the displayable is half the
        width of the movie file.

        Where possible, `side_mask` should be used over `mask` as it has
        no chance of frames going out of sync.

    `mask`
        If given, this should be the path to a movie file that is used as
        the alpha channel of this displayable. The movie file will be
        automatically played on `movie_channel` when the Movie is shown,
        and automatically stopped when the movie is hidden.

    `mask_channel`
        The channel the alpha mask video is played on. If not given,
        defaults to `channel`\ _mask. (For example, if `channel` is "sprite",
        `mask_channel` defaults to "sprite_mask".)

    `start_image`
        An image that is displayed when playback has started, but the
        first frame has not yet been decoded.

    `image`
        An image that is displayed when `play` has been given, but the
        file it refers to does not exist. (For example, this can be used
        to create a slimmed-down mobile version that does not use movie
        sprites.) Users can also choose to fall back to this image as a
        preference if video is too taxing for their system. The image will
        also be used if the video plays, and then the movie ends.

    `play_callback`
        If not None, a function that's used to start the movies playing.
        (This may do things like queue a transition between sprites, if
        desired.) It's called with the following arguments:

        `old`
            The old Movie object, or None if the movie is not playing.

        `new`
            The new Movie object.

        A movie object has the `play` parameter available as ``_play``,
        while the ``channel``, ``loop``, ``mask``, and ``mask_channel`` fields
        correspond to the given parameters.

        Generally, this will want to use :func:`renpy.music.play` to start
        the movie playing on the given channel, with synchro_start=True.
        A minimal implementation is::

            def play_callback(old, new):

                renpy.music.play(new._play, channel=new.channel, loop=new.loop, synchro_start=True)

                if new.mask:
                    renpy.music.play(new.mask, channel=new.mask_channel, loop=new.loop, synchro_start=True)

    `loop`
        If False, the movie will not loop. If `image` is defined, the image
        will be displayed when the movie ends. Otherwise, the displayable will
        become transparent.
    """

    # Class-level defaults; instances override these in __init__.
    fullscreen = False
    channel = "movie"
    _play = None

    mask = None
    mask_channel = None
    side_mask = False

    image = None
    start_image = None

    play_callback = None

    loop = True

    def ensure_channel(self, name):
        # Registers the movie channel `name` if it is not already defined.
        # Framedrop is enabled only when a separate mask movie is configured.

        if name is None:
            return

        if renpy.audio.music.channel_defined(name):
            return

        if self.mask:
            framedrop = True
        else:
            framedrop = False

        renpy.audio.music.register_channel(name, renpy.config.movie_mixer, loop=True, stop_on_mute=False, movie=True, framedrop=framedrop)

    def __init__(self, fps=24, size=None, channel="movie", play=None, mask=None, mask_channel=None, image=None, play_callback=None, side_mask=False, loop=True, start_image=None, **properties):
        super(Movie, self).__init__(**properties)

        # NOTE(review): declared global but never used in this method --
        # likely leftover from an older channel-naming scheme.
        global auto_channel_serial

        if channel == "movie" and play and renpy.config.auto_movie_channel:
            # Derive a per-movie channel name so several movie sprites can
            # play at the same time.
            channel = "movie_{}_{}".format(play, mask)

        self.size = size
        self.channel = channel
        self._play = play
        self.loop = loop

        # Side-by-side masking supersedes a separate mask movie.
        if side_mask:
            mask = None

        self.mask = mask

        if mask is None:
            self.mask_channel = None
        elif mask_channel is None:
            self.mask_channel = channel + "_mask"
        else:
            self.mask_channel = mask_channel

        self.side_mask = side_mask

        self.ensure_channel(self.channel)
        self.ensure_channel(self.mask_channel)

        self.image = renpy.easy.displayable_or_none(image)
        self.start_image = renpy.easy.displayable_or_none(start_image)

        self.play_callback = play_callback

        if (self.channel == "movie") and (renpy.config.hw_video) and renpy.mobile:
            raise Exception("Movie(channel='movie') doesn't work on mobile when config.hw_video is true. (Use a different channel argument.)")

    def render(self, width, height, st, at):

        if self._play and not (renpy.game.preferences.video_image_fallback is True):
            # Claim the channel so update_playing() starts/stops us.
            channel_movie[self.channel] = self

            if st == 0:
                # First frame of a new show: mark the channel as reset.
                reset_channels.add(self.channel)

        playing = renpy.audio.music.get_playing(self.channel)

        not_playing = not playing

        # A freshly-reset channel counts as playing until it catches up.
        if self.channel in reset_channels:
            not_playing = False

        # Fallback image when the movie is not playing.
        if (self.image is not None) and not_playing:
            surf = renpy.display.render.render(self.image, width, height, st, at)
            w, h = surf.get_size()
            rv = renpy.display.render.Render(w, h)
            rv.blit(surf, (0, 0))

            return rv

        if self.size is None:

            # Auto-size: use the movie frame's own dimensions.
            tex, _ = get_movie_texture(self.channel, self.mask_channel, self.side_mask, self.style.mipmap)

            if (not not_playing) and (tex is not None):
                width, height = tex.get_size()
                rv = renpy.display.render.Render(width, height)
                rv.blit(tex, (0, 0))
            elif (not not_playing) and (self.start_image is not None):
                # Playback started, but no frame decoded yet.
                surf = renpy.display.render.render(self.start_image, width, height, st, at)
                w, h = surf.get_size()
                rv = renpy.display.render.Render(w, h)
                rv.blit(surf, (0, 0))
            else:
                # Nothing to show: collapse to (0, 0) as documented.
                rv = renpy.display.render.Render(0, 0)

        else:

            # Explicit size: scale/center the movie inside it.
            w, h = self.size

            if not playing:
                rv = None
            else:
                rv = render_movie(self.channel, w, h)

            if rv is None:
                rv = renpy.display.render.Render(w, h)

        # Usually we get redrawn when the frame is ready - but we want
        # the movie to disappear if it's ended, or if it hasn't started
        # yet.
        renpy.display.render.redraw(self, 0.1)

        return rv

    def play(self, old):
        # Starts this Movie's file playing, via play_callback when set.
        # `old` is the Movie previously shown on this channel, or None.
        if old is None:
            old_play = None
        else:
            old_play = old._play

        if (self._play != old_play) or renpy.config.replay_movie_sprites:
            if self._play:
                if self.play_callback is not None:
                    self.play_callback(old, self)
                else:
                    default_play_callback(old, self)

            else:
                # No file to play: silence the channel(s) instead.
                renpy.audio.music.stop(channel=self.channel)

                if self.mask:
                    renpy.audio.music.stop(channel=self.mask_channel)

    def stop(self):
        # Stops playback on this Movie's channel(s), if it started any.
        if self._play:
            renpy.audio.music.stop(channel=self.channel)

            if self.mask:
                renpy.audio.music.stop(channel=self.mask_channel)

    def per_interact(self):
        # Register this displayable for its channel pair each interaction.
        displayable_channels[(self.channel, self.mask_channel)].append(self)
        renpy.display.render.redraw(self, 0)

    def visit(self):
        # Child displayables reachable from this one.
        return [ self.image, self.start_image ]
def playing():
    """
    Returns True if a movie is playing on the fullscreen "movie" channel or
    on any channel claimed by a Movie displayable, and False otherwise.
    """

    if renpy.audio.music.get_playing("movie"):
        return True

    for i in displayable_channels:
        channel, _mask_channel = i

        if renpy.audio.music.get_playing(channel):
            return True

    # Previously fell off the end and returned None; return False explicitly
    # so the function always yields a boolean (both values are falsy, so
    # existing truthiness checks are unaffected).
    return False
def update_playing():
    """
    Calls play/stop on Movie displayables.
    """

    # The channel -> Movie mapping from the previous interaction.
    old_channel_movie = renpy.game.context().movie

    for c, m in channel_movie.items():
        old = old_channel_movie.get(c, None)

        if (c in reset_channels) and renpy.config.replay_movie_sprites:
            # The channel was reset: restart even if the Movie is unchanged.
            m.play(old)
        elif old is not m:
            # A different Movie took over this channel.
            m.play(old)

    # Stop movies whose channels are no longer claimed by any Movie.
    for c, m in old_channel_movie.items():
        if c not in channel_movie:
            m.stop()

    # Snapshot the current mapping for the next call.
    renpy.game.context().movie = dict(channel_movie)
    reset_channels.clear()
def frequent():
    """
    Called to update the video playback. Returns true if a video refresh is
    needed, false otherwise.
    """

    update_playing()

    renpy.audio.audio.advance_time()

    if displayable_channels:

        # Redraw the Movie displayables only once every involved channel
        # (including mask channels) has a frame ready.
        update = True

        for i in displayable_channels:
            channel, mask_channel = i

            c = renpy.audio.audio.get_channel(channel)
            if not c.video_ready():
                update = False
                break

            if mask_channel:
                c = renpy.audio.audio.get_channel(mask_channel)
                if not c.video_ready():
                    update = False
                    break

        if update:
            for v in displayable_channels.values():
                for j in v:
                    renpy.display.render.redraw(j, 0.0)

        # Movie displayables redraw themselves; no full-screen refresh.
        return False

    elif fullscreen and not ((renpy.android or renpy.ios) and renpy.config.hw_video):

        # Fullscreen software playback: refresh when a frame is ready.
        c = renpy.audio.audio.get_channel("movie")

        if c.video_ready():
            return True
        else:
            return False

    return False
|
#!/bin/bash

# Launch the locally built (debug) todo-app service against a local Postgres
# instance and a companion auth service.
#
# NOTE(review): the database URL embeds credentials (postgres:toor) --
# presumably acceptable for local development only; do not reuse this
# pattern for shared environments.
./target/debug/todo-app-service \
  --port=8080 \
  --database-url=postgres://postgres:toor@localhost/todo_app \
  --site-external-url=http://localhost:3000 \
  --auth-service-url=http://localhost:8079
|
#!/bin/sh
#
# Copyright (c) 2005 Junio C Hamano
#
test_description='Three way merge with read-tree -m

This test tries three-way merge with read-tree -m

There is one ancestor (called O for Original) and two branches A
and B derived from it. We want to do a 3-way merge between A and
B, using O as the common ancestor.

    merge A O B

Decisions are made by comparing contents of O, A and B pathname
by pathname. The result is determined by the following guiding
principle:

 - If only A does something to it and B does not touch it, take
   whatever A does.

 - If only B does something to it and A does not touch it, take
   whatever B does.

 - If both A and B does something but in the same way, take
   whatever they do.

 - If A and B does something but different things, we need a
   3-way merge:

   - We cannot do anything about the following cases:

     * O does not have it. A and B both must be adding to the
       same path independently.

     * A deletes it. B must be modifying.

   - Otherwise, A and B are modifying. Run 3-way merge.

First, the case matrix.

 - Vertical axis is for A'\''s actions.
 - Horizontal axis is for B'\''s actions.

 .----------------------------------------------------------------.
 | A        B | No Action  |   Delete   |   Modify   |    Add     |
 |------------+------------+------------+------------+------------|
 | No Action  |            |            |            |            |
 |            | select O   | delete     | select B   | select B   |
 |            |            |            |            |            |
 |------------+------------+------------+------------+------------|
 | Delete     |            |            | ********** |    can     |
 |            | delete     | delete     | merge      |    not     |
 |            |            |            |            |  happen    |
 |------------+------------+------------+------------+------------|
 | Modify     |            | ********** | ?????????? |    can     |
 |            | select A   | merge      | select A=B |    not     |
 |            |            |            | merge      |  happen    |
 |------------+------------+------------+------------+------------|
 | Add        |            |    can     |    can     | ?????????? |
 |            | select A   |    not     |    not     | select A=B |
 |            |            |  happen    |  happen    | merge      |
 .----------------------------------------------------------------.

In addition:

 SS: a special case of MM, where A and B makes the same modification.
 LL: a special case of AA, where A and B creates the same file.
 TT: a special case of MM, where A and B makes mergeable changes.
 DF: a special case, where A makes a directory and B makes a file.

'

# Pull in the test harness and the helpers that set up the O, A and B trees
# ($tree_O, $tree_A, $tree_B) plus read_tree_must_succeed/read_tree_must_fail.
. ./test-lib.sh
. "$TEST_DIRECTORY"/lib-read-tree.sh
. "$TEST_DIRECTORY"/lib-read-tree-m-3way.sh
################################################################
# Trivial "majority when 3 stages exist" merge plus #2ALT, #3ALT
# and #5ALT trivial merges.

# The expected index after the 3-way merge. Each line is:
#   mode, "X" (placeholder for the object name, substituted by
#   check_result below), stage number, then a tab and the path.
cat >expected <<\EOF
100644 X 2	AA
100644 X 3	AA
100644 X 0	AN
100644 X 1	DD
100644 X 3	DF
100644 X 2	DF/DF
100644 X 1	DM
100644 X 3	DM
100644 X 1	DN
100644 X 3	DN
100644 X 0	LL
100644 X 1	MD
100644 X 2	MD
100644 X 1	MM
100644 X 2	MM
100644 X 3	MM
100644 X 0	MN
100644 X 0	NA
100644 X 1	ND
100644 X 2	ND
100644 X 0	NM
100644 X 0	NN
100644 X 0	SS
100644 X 1	TT
100644 X 2	TT
100644 X 3	TT
100644 X 2	Z/AA
100644 X 3	Z/AA
100644 X 0	Z/AN
100644 X 1	Z/DD
100644 X 1	Z/DM
100644 X 3	Z/DM
100644 X 1	Z/DN
100644 X 3	Z/DN
100644 X 1	Z/MD
100644 X 2	Z/MD
100644 X 1	Z/MM
100644 X 2	Z/MM
100644 X 3	Z/MM
100644 X 0	Z/MN
100644 X 0	Z/NA
100644 X 1	Z/ND
100644 X 2	Z/ND
100644 X 0	Z/NM
100644 X 0	Z/NN
EOF

# Compares the current index (with the object names masked out by sed, so the
# listing is content-independent) against the expected listing above.
check_result () {
	git ls-files --stage | sed -e 's/ '"$OID_REGEX"' / X /' >current &&
	test_cmp expected current
}
# This is done on an empty work directory, which is the normal
# merge person behaviour.
test_expect_success '3-way merge with git read-tree -m, empty cache' '
	rm -fr [NDMALTS][NDMALTSF] Z &&
	rm .git/index &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

# This starts out with the first head, which is the normal
# patch submitter behaviour.
test_expect_success '3-way merge with git read-tree -m, match H' '
	rm -fr [NDMALTS][NDMALTSF] Z &&
	rm .git/index &&
	read_tree_must_succeed $tree_A &&
	git checkout-index -f -u -a &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'
# Documentation only: the no-op ":" command consumes this heredoc, so none of
# the text below is executed -- it just documents the cases tested next.
: <<\END_OF_CASE_TABLE

We have so far tested only empty index and clean-and-matching-A index
case which are trivial. Make sure index requirements are also
checked.

"git read-tree -m O A B"

     O       A       B         result      index requirements
-------------------------------------------------------------------
  1  missing missing missing   -           must not exist.
 ------------------------------------------------------------------
  2  missing missing exists    take B*     must match B, if exists.
 ------------------------------------------------------------------
  3  missing exists  missing   take A*     must match A, if exists.
 ------------------------------------------------------------------
  4  missing exists  A!=B      no merge    must match A and be
                                           up-to-date, if exists.
 ------------------------------------------------------------------
  5  missing exists  A==B      take A      must match A, if exists.
 ------------------------------------------------------------------
  6  exists  missing missing   remove      must not exist.
 ------------------------------------------------------------------
  7  exists  missing O!=B      no merge    must not exist.
 ------------------------------------------------------------------
  8  exists  missing O==B      remove      must not exist.
 ------------------------------------------------------------------
  9  exists  O!=A    missing   no merge    must match A and be
                                           up-to-date, if exists.
 ------------------------------------------------------------------
 10  exists  O==A    missing   no merge    must match A
 ------------------------------------------------------------------
 11  exists  O!=A    O!=B      no merge    must match A and be
                     A!=B                  up-to-date, if exists.
 ------------------------------------------------------------------
 12  exists  O!=A    O!=B      take A      must match A, if exists.
                     A==B
 ------------------------------------------------------------------
 13  exists  O!=A    O==B      take A      must match A, if exists.
 ------------------------------------------------------------------
 14  exists  O==A    O!=B      take B      if exists, must either (1)
                                           match A and be up-to-date,
                                           or (2) match B.
 ------------------------------------------------------------------
 15  exists  O==A    O==B      take B      must match A if exists.
 ------------------------------------------------------------------
 16  exists  O==A    O==B      barf        must match A if exists.
     *multi* in one in another
-------------------------------------------------------------------

Note: we need to be careful in case 2 and 3. The tree A may contain
DF (file) when tree B require DF to be a directory by having DF/DF
(file).

END_OF_CASE_TABLE
# The tests below walk the index-requirement table above. For each numbered
# case the index is seeded with a particular state; read-tree -m must then
# either succeed (optionally verified against "expected" via check_result)
# or refuse to merge. "(fail)" variants seed an index state the table
# forbids, and expect read-tree to reject it.

test_expect_success '1 - must not have an entry not in A.' '
	rm -f .git/index XX &&
	echo XX >XX &&
	git update-index --add XX &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '2 - must match B in !O && !A && B case.' '
	rm -f .git/index NA &&
	cp .orig-B/NA NA &&
	git update-index --add NA &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B
'

test_expect_success '2 - matching B alone is OK in !O && !A && B case.' '
	rm -f .git/index NA &&
	cp .orig-B/NA NA &&
	git update-index --add NA &&
	echo extra >>NA &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B
'

test_expect_success '3 - must match A in !O && A && !B case.' '
	rm -f .git/index AN &&
	cp .orig-A/AN AN &&
	git update-index --add AN &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '3 - matching A alone is OK in !O && A && !B case.' '
	rm -f .git/index AN &&
	cp .orig-A/AN AN &&
	git update-index --add AN &&
	echo extra >>AN &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B
'

test_expect_success '3 (fail) - must match A in !O && A && !B case.' '
	rm -f .git/index AN &&
	cp .orig-A/AN AN &&
	echo extra >>AN &&
	git update-index --add AN &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '4 - must match and be up-to-date in !O && A && B && A!=B case.' '
	rm -f .git/index AA &&
	cp .orig-A/AA AA &&
	git update-index --add AA &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '4 (fail) - must match and be up-to-date in !O && A && B && A!=B case.' '
	rm -f .git/index AA &&
	cp .orig-A/AA AA &&
	git update-index --add AA &&
	echo extra >>AA &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '4 (fail) - must match and be up-to-date in !O && A && B && A!=B case.' '
	rm -f .git/index AA &&
	cp .orig-A/AA AA &&
	echo extra >>AA &&
	git update-index --add AA &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '5 - must match in !O && A && B && A==B case.' '
	rm -f .git/index LL &&
	cp .orig-A/LL LL &&
	git update-index --add LL &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '5 - must match in !O && A && B && A==B case.' '
	rm -f .git/index LL &&
	cp .orig-A/LL LL &&
	git update-index --add LL &&
	echo extra >>LL &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '5 (fail) - must match A in !O && A && B && A==B case.' '
	rm -f .git/index LL &&
	cp .orig-A/LL LL &&
	echo extra >>LL &&
	git update-index --add LL &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '6 - must not exist in O && !A && !B case' '
	rm -f .git/index DD &&
	echo DD >DD &&
	git update-index --add DD &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '7 - must not exist in O && !A && B && O!=B case' '
	rm -f .git/index DM &&
	cp .orig-B/DM DM &&
	git update-index --add DM &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '8 - must not exist in O && !A && B && O==B case' '
	rm -f .git/index DN &&
	cp .orig-B/DN DN &&
	git update-index --add DN &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '9 - must match and be up-to-date in O && A && !B && O!=A case' '
	rm -f .git/index MD &&
	cp .orig-A/MD MD &&
	git update-index --add MD &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '9 (fail) - must match and be up-to-date in O && A && !B && O!=A case' '
	rm -f .git/index MD &&
	cp .orig-A/MD MD &&
	git update-index --add MD &&
	echo extra >>MD &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '9 (fail) - must match and be up-to-date in O && A && !B && O!=A case' '
	rm -f .git/index MD &&
	cp .orig-A/MD MD &&
	echo extra >>MD &&
	git update-index --add MD &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '10 - must match and be up-to-date in O && A && !B && O==A case' '
	rm -f .git/index ND &&
	cp .orig-A/ND ND &&
	git update-index --add ND &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '10 (fail) - must match and be up-to-date in O && A && !B && O==A case' '
	rm -f .git/index ND &&
	cp .orig-A/ND ND &&
	git update-index --add ND &&
	echo extra >>ND &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '10 (fail) - must match and be up-to-date in O && A && !B && O==A case' '
	rm -f .git/index ND &&
	cp .orig-A/ND ND &&
	echo extra >>ND &&
	git update-index --add ND &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '11 - must match and be up-to-date in O && A && B && O!=A && O!=B && A!=B case' '
	rm -f .git/index MM &&
	cp .orig-A/MM MM &&
	git update-index --add MM &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '11 (fail) - must match and be up-to-date in O && A && B && O!=A && O!=B && A!=B case' '
	rm -f .git/index MM &&
	cp .orig-A/MM MM &&
	git update-index --add MM &&
	echo extra >>MM &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '11 (fail) - must match and be up-to-date in O && A && B && O!=A && O!=B && A!=B case' '
	rm -f .git/index MM &&
	cp .orig-A/MM MM &&
	echo extra >>MM &&
	git update-index --add MM &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '12 - must match A in O && A && B && O!=A && A==B case' '
	rm -f .git/index SS &&
	cp .orig-A/SS SS &&
	git update-index --add SS &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '12 - must match A in O && A && B && O!=A && A==B case' '
	rm -f .git/index SS &&
	cp .orig-A/SS SS &&
	git update-index --add SS &&
	echo extra >>SS &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '12 (fail) - must match A in O && A && B && O!=A && A==B case' '
	rm -f .git/index SS &&
	cp .orig-A/SS SS &&
	echo extra >>SS &&
	git update-index --add SS &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '13 - must match A in O && A && B && O!=A && O==B case' '
	rm -f .git/index MN &&
	cp .orig-A/MN MN &&
	git update-index --add MN &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '13 - must match A in O && A && B && O!=A && O==B case' '
	rm -f .git/index MN &&
	cp .orig-A/MN MN &&
	git update-index --add MN &&
	echo extra >>MN &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '14 - must match and be up-to-date in O && A && B && O==A && O!=B case' '
	rm -f .git/index NM &&
	cp .orig-A/NM NM &&
	git update-index --add NM &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '14 - may match B in O && A && B && O==A && O!=B case' '
	rm -f .git/index NM &&
	cp .orig-B/NM NM &&
	git update-index --add NM &&
	echo extra >>NM &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '14 (fail) - must match and be up-to-date in O && A && B && O==A && O!=B case' '
	rm -f .git/index NM &&
	cp .orig-A/NM NM &&
	git update-index --add NM &&
	echo extra >>NM &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '14 (fail) - must match and be up-to-date in O && A && B && O==A && O!=B case' '
	rm -f .git/index NM &&
	cp .orig-A/NM NM &&
	echo extra >>NM &&
	git update-index --add NM &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

test_expect_success '15 - must match A in O && A && B && O==A && O==B case' '
	rm -f .git/index NN &&
	cp .orig-A/NN NN &&
	git update-index --add NN &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '15 - must match A in O && A && B && O==A && O==B case' '
	rm -f .git/index NN &&
	cp .orig-A/NN NN &&
	git update-index --add NN &&
	echo extra >>NN &&
	read_tree_must_succeed -m $tree_O $tree_A $tree_B &&
	check_result
'

test_expect_success '15 (fail) - must match A in O && A && B && O==A && O==B case' '
	rm -f .git/index NN &&
	cp .orig-A/NN NN &&
	echo extra >>NN &&
	git update-index --add NN &&
	read_tree_must_fail -m $tree_O $tree_A $tree_B
'

# Case 16: the same path sits at different stages matching A in one stage and
# B in another; the merge must still succeed and leave the index inspectable.
test_expect_success '16 - A matches in one and B matches in another.' '
	rm -f .git/index F16 &&
	echo F16 >F16 &&
	git update-index --add F16 &&
	tree0=$(git write-tree) &&
	echo E16 >F16 &&
	git update-index F16 &&
	tree1=$(git write-tree) &&
	read_tree_must_succeed -m $tree0 $tree1 $tree1 $tree0 &&
	git ls-files --stage
'

test_done
|
import styled from 'styled-components'
// Full-width page header: three layered diagonal gradients with a matching
// cyan glow and a subtle translucent bottom border.
export const Container = styled.header`
  width: 100%;
  background: linear-gradient(217deg, #0ccff2, rgba(255, 0, 0, 0) 70.71%),
    linear-gradient(127deg, #00e68a, rgba(0, 255, 0, 0) 70.71%),
    linear-gradient(336deg, #0cf214, rgba(0, 0, 255, 0) 70.71%);
  box-shadow: 0 0 12px 4px #0ccff2;
  padding: 25px 0;
  border-bottom: 1px solid rgb(255 255 255 / 10%);
`

// Centered header content column (max 992px), laid out edge-to-edge with
// space-between; narrow screens get extra spacing after the title.
export const Content = styled.div`
  max-width: 992px;
  width: 90%;
  margin: 0 auto;
  display: flex;
  align-items: center;
  justify-content: space-between;
  color: #fff;
  h1 {
    font-size: 1.8rem;
  }
  @media (max-width: 500px) {
    h1 {
      margin-right: 2rem;
    }
  }
`
|
#!/bin/bash
# Bootstrap a project-local virtualenv in <repo>/__envi and install deps.

# Resolve the project root: parent of the directory containing this script.
# BUG FIX: all ${__dir} expansions are now quoted so the script works when
# the checkout path contains spaces.
export __dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export __dir="$( dirname "${__dir}" )"

# Fail fast when virtualenv is not available.
command -v virtualenv >/dev/null 2>&1 || { echo >&2 "I require \`virtualenv\` but it's not installed. Aborting."; exit 1; }

# Create the environment only on first run.
if [ ! -d "${__dir}/__envi" ]
then
    virtualenv "${__dir}/__envi"
fi

. "${__dir}/__envi/bin/activate"
set -x
# NOTE(review): 'distribute' is long obsolete (merged back into setuptools);
# kept for backward compatibility — consider 'setuptools' instead.
"${__dir}/__envi/bin/pip" install --upgrade distribute
"${__dir}/__envi/bin/pip" install --upgrade -r "${__dir}/requirements.txt"
|
<reponame>filcloud/filecoin-specs<gh_stars>1-10
package chainsync
import block "github.com/filecoin-project/specs/systems/filecoin_blockchain/struct/block"
// OnNewBlock is the entry point for blocks arriving from the network.
// Stub: it panics before the commented-out validate-then-handle pipeline runs.
func (self *BlockSyncer_I) OnNewBlock(block block.Block) error {
	panic("TODO")
	// err := self.validateBlockSyntax(block)
	// if err {
	// 	return err
	// }
	// self.blockchainSubsystem_.HandleBlock(block)
	return nil // unreachable while the panic stub remains
}
// The syntactic stage may be validated without reference to additional data (see block)
// validateBlockSyntax performs the syntactic validation stage.
// Stub: the intended checks are preserved below as commented-out pseudocode
// (address syntax, tickets, election proof, parent tipset/weight, height,
// per-message syntax, timestamp sanity).
func (bs *BlockSyncer_I) validateBlockSyntax(block block.Block) error {
	panic("TODO")
	// if !block.Epoch().WithinFinality
	// if !block.MinerAddress().VerifySyntax(StorageMinerActor.Address.Protocol()) {
	// 	return ErrInvalidBlockSyntax("bad miner address syntax")
	// }
	// if !(len(block.Tickets) > 0) {
	// 	return ErrInvalidBlockSyntax("no tickets")
	// }
	// for _, tix := range block.Tickets {
	// 	if !tix.ValidateSyntax() {
	// 		return ErrInvalidBlockSyntax("bad ticket syntax")
	// 	}
	// }
	// if !block.ElectionProof.ValidateSyntax() {
	// 	return ErrInvalidBlockSyntax("bad election proof syntax")
	// }
	// if !block.ParentTipset().ValidateSyntax() {
	// 	return ErrInvalidBlockSyntax("invalid parent tipset")
	// }
	// if !block.ParentWeight() > 0 {
	// 	return ErrInvalidBlockSyntax("parent weight < 0")
	// }
	// if !block.Height() > 0 {
	// 	return ErrInvalidBlockSyntax("height < 0")
	// }
	// // if !block.StateTree().ValidateSyntax() {
	// // 	return false
	// // }
	// for _, msg := range block.Messages() {
	// 	if !msg.ValidateSyntax() {
	// 		return ErrInvalidBlockSyntax("msg syntax invalid")
	// 	}
	// }
	// // TODO msg receipts
	// if block.Timestamp_ > bs.blockchainSubsystem.Clock.Now() {
	// 	return ErrInvalidBlockSyntax("bad timestamp")
	// }
	return nil // unreachable while the panic stub remains
}
// AddBlock inserts a block into the ancestry graph. Stub: not yet implemented.
func (g *AncestryGraph_I) AddBlock(block block.Block) {
	panic("TODO")
}
// ConsiderBlock offers a block to the validation graph. Stub: the intent
// (park it as unconnected, then try to connect it to the fringe) is kept
// below as commented-out pseudocode.
func (g *BlockValidationGraph_I) ConsiderBlock(block block.Block) {
	panic("TODO")
	// g.UnconnectedBlocks.AddBlock(block)
	// g.tryConnectBlockToFringe(block)
}
// tryConnectBlockToFringe attempts to attach a block (and transitively its
// descendants) to the validation fringe. Stub: the intended breadth-first
// algorithm is preserved in the comments below.
func (g *BlockValidationGraph_I) tryConnectBlockToFringe(block block.Block) {
	panic("TODO")
	// try to connect the block, and then try connecting its descendents.
	//
	// this algorithm should be breadth-first because we need to process the fringe
	// in order. Depth-first search may consider blocks whose parents are still
	// yet to be added

	// blocks := Queue < Block >
	// blocks.Enqueue(block)
	// for block := range blocks.Dequeue() {
	// 	if !g.ValidationFringe.HasTipset(block.Parents()) {
	// 		continue // ignore this one. not all of its parents are in fringe
	// 	}
	// 	children := g.UnconnectedBlocks.Children[block]
	// 	g.UnconnectedBlocks.Parents.Remove(block)
	// 	g.UnconnectedBlocks.Children.Remove(block)
	// 	g.ValidationFringe.AddBlock(block)
	// 	blocks.EnqueueAll(children)
	// }
}
// PropagateBlock broadcasts a signed block to the network. Stub: intended to
// publish via the block pubsub channel (see comment).
func (b *BlockPropagator_I) PropagateBlock(block block.SignedBlock) {
	panic("TODO")
	// NetworkBroadcast(BlockPubSub, block)
}
|
from django.core.exceptions import ValidationError
from django.test import TestCase
from cyder.base.eav import validators as v
class TestValidators(TestCase):
    """Checks each EAV value validator against inputs it must accept/reject."""

    def _valid(self, validator, value_list):
        # Every value must pass the validator without raising.
        for value in value_list:
            validator(value)

    def _invalid(self, validator, value_list):
        # Every value must make the validator raise ValidationError.
        for value in value_list:
            self.assertRaises(ValidationError, lambda: validator(value))

    def test_flag(self):
        # Case-insensitive on/off/true/false only; no numeric or yes/no forms.
        self._valid(v.flag, ('on', 'off', 'true', 'false', 'oN', 'True'))
        self._invalid(v.flag, ('truth', 'yes', 'no', '0', '1'))

    def test_uint8(self):
        # Unsigned 8-bit range, decimal or 0x-prefixed hex.
        self._valid(v.uint8, ('0', '10', '255', '0x0', '0xa', '0xFF'))
        self._invalid(v.uint8, ('-1', 'a', '256', '-0x1', '0x-1', '0x100'))

    def test_int8(self):
        # Signed 8-bit range; hex forms accepted for non-negative values.
        self._valid(
            v.int8, ('-128', '-10', '0', '10', '127', '0x0', '0xa', '0x7F'))
        self._invalid(v.int8, ('-129', '1a', '128'))

    def test_domain(self):
        # Trailing root dot is allowed; empty labels are not.
        self._valid(v.domain, ('com', 'example.com', 'example.com.'))
        self._invalid(v.domain, (
            '.com',  # missing label
            'example.com..',  # too many dots
            'example..com',  # too many dots
        ))

    def test_host(self):
        # A host is a domain name or an IPv4 address.
        self._valid(v.host, (
            'foo', 'example.com', 'example.com.', '1.2.3.4', '127.0.0.1',
        ))
        self._invalid(v.host, (
            '.com',  # missing label
            'example.com..',  # too many dots
            'example..com',  # too many dots
        ))

    def test_domain_list(self):
        # Comma-separated list of double-quoted domains.
        self._valid(v.domain_list, (
            '"example.com"',
            '"example.com", "example.org." ,"example.edu"',
        ))
        self._invalid(v.domain_list, (
            'example.com',  # no quotes
            '"example.com" "example.org." "example.edu"',  # no commas
        ))

    def test_host_pair(self):
        # Exactly two whitespace-separated hosts.
        self._valid(v.host_pair, (
            'example.com example.org',
            '1.2.3.4 example.com',
            'example.com 1.2.3.4',  # multiple spaces
            '1.2.3.4 127.0.0.1',
        ))
        self._invalid(v.host_pair, (
            'example.com, example.org',  # comma
            'example..com example.org',  # invalid host
            'example.com',  # too few hosts
            '1.2.3.4 127.0.0.1 192.168.0.1',  # too many hosts
        ))

    def test_host_pair_list(self):
        # Comma-separated list of host pairs.
        self._valid(v.host_pair_list, (
            'example.com example.org',
            'example.com example.org, example.edu example.net',
            '1.2.3.4 127.0.0.1, 10.0.0.1 192.168.0.1',
            'example.com 1.2.3.4, example.org 127.0.0.1',
        ))
        self._invalid(v.host_pair_list, (
            'example.com',  # too few hosts in a pair
            'example.com, example.org example.edu',  # too few hosts in a pair
            '1.2.3.4 127.0.0.1 192.168.0.1',  # too many hosts in a pair
        ))

    def test_flag_host_list(self):
        # A flag followed by a comma-separated host list.
        self._valid(v.flag_host_list, (
            'true example.com, example.org',
            'false example.com, example.org, 127.0.0.1',
            'on 127.0.0.1',
        ))
        self._invalid(v.flag_host_list, (
            'true',  # no hosts
            'false example.com example.org',  # no space between hosts
            'yes, 127.0.0.1',  # comma after flag
        ))

    def test_text(self):
        # Free text; double quotes are the only rejected characters here.
        self._valid(v.text, (
            'foo', 'foo bar', "'foo bar'", '!@#$%^&*()_',
            # hex byte sequence in text is valid but not special
            '3A:B0', 'DE:AD:be:ef',
        ))
        self._invalid(v.text, (
            '"foo"',  # double quote
            'foo"bar',  # double quote
        ))

    def test_string(self):
        self._valid(v.string, (
            'foo', 'foo bar', 'foo bar baz', "'foo bar baz'",
            # hex byte sequence in string is valid and special
            '3A:B0', 'DE:AD:be:ef',
        ))
        self._invalid(v.string, (
            '"foo"',  # double quotes
            'foo"bar',  # double quote
        ))

    def test_identifier(self):
        # Identifier: letters/digits/_/- but not all digits.
        self._valid(v.identifier, (
            'f', 'foobar', 'FooBar', 'Foo_Bar_', 'Foo-Bar-', 'foo9bar',
            '9foobar', 'foobar9',
            '9-9', '9_9',  # don't ask
        ))
        self._invalid(v.identifier, (
            '99',  # all digits
            'foo:bar',  # ':'
            '"foobar"',  # double quote
            "'foobar",  # single quote
        ))

    def test_flag_optional_text(self):
        # A flag optionally followed by double-quoted text.
        self._valid(v.flag_optional_text, (
            'true',
            'off',
            'true "foobar"',
            'false "foo bar"',
            'On "!@#$%^&*_"',
        ))
        self._invalid(v.flag_optional_text, (
            'truth',  # invalid flag
            'true foobar',  # no quotes around text
            'false, "foobar"',  # comma between flag and text
        ))
|
from myproject import feature
from myproject.pkgA import A_feature
from myproject.pkgA.utils import utils
from myproject.pkgA.utils.again import again
from myproject.pkgB import B_feature
# Smoke-test that the package-level and subpackage-level modules imported
# above actually resolve and execute.
feature.nice_test()
A_feature.nice_test()
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-ST/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-ST/1024+0+512-SS-N-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_remove_all_but_nouns_first_two_thirds_sixth --eval_function penultimate_sixth_eval |
package org.hiro.things.potiontype;
import org.hiro.Potions;
import org.hiro.character.Player;
import org.hiro.character.StateEnum;
import org.hiro.things.Potion;
import org.hiro.things.PotionEnum;
/**
 * Potion that applies the confusion effect when drunk.
 */
public class ConfusePotion extends Potion {

    public ConfusePotion() {
        super();
    }

    /**
     * Triggers the confusion effect. The boolean handed to Potions.do_pot is
     * true when the player is NOT already hallucinating (ISHALU state) —
     * presumably whether the effect is newly perceived; TODO confirm against
     * Potions.do_pot.
     *
     * @param player the player drinking the potion
     */
    @Override
    public void quaff(Player player) {
        Potions.do_pot(PotionEnum.Confuse, !player.containsState(StateEnum.ISHALU));
    }
}
|
<gh_stars>0
#include "BRDFReconstructionModelWithZ.h"
|
import React from 'react';
import axios from 'axios';
class MyComponent extends React.Component {
constructor(props) {
super(props);
this.state = {
data: null
};
}
componentDidMount() {
axios.get('https://example.com/api/data')
.then(result => {
this.setState({ data: result.data });
})
}
render() {
let dataDisplay = null;
if (this.state.data) {
dataDisplay = <table>
<thead>
<tr>
<th>ID</th>
<th>Name</th>
<th>Email</th>
</tr>
</thead>
<tbody>
{
this.state.data.map(entry =>
<td>{entry.id}</td>
<td>{entry.name}</td>
<td>{entry.email}</td>
)
}
</tbody>
</table>
}
return <div>{dataDisplay}</div>;
}
} |
<reponame>ShogoAkiyama/metaworld.pytorch
from collections import deque
import numpy as np
import torch
from torch import nn
def update_params(optim, loss, retain_graph=False):
    """Run one optimizer step on the given loss.

    :param optim: torch optimizer owning the parameters to update
    :param loss: scalar loss tensor to backpropagate
    :param retain_graph: pass True when the computation graph is reused for
        another backward pass in the same iteration
    """
    optim.zero_grad()
    loss.backward(retain_graph=retain_graph)
    optim.step()
def disable_gradients(network):
    """Freeze every parameter of the given network (no grads accumulate)."""
    for parameter in network.parameters():
        parameter.requires_grad_(False)
def _soft_update(target, source, tau):
target.data.copy_(target.data * (1.0 - tau) + source.data * tau)
def soft_update(target, source, tau):
    """Polyak-average ``source`` into ``target`` in place.

    Accepts either whole modules (updated parameter-by-parameter) or bare
    tensors.

    :param target: nn.Module or torch.Tensor to be updated
    :param source: matching nn.Module or torch.Tensor supplying new values
    :param tau: interpolation factor; tau=1 copies source outright
    :raises NotImplementedError: if target is neither a module nor a tensor
    """
    if isinstance(target, nn.Module):
        # parameters() iteration order is deterministic, so zip pairs
        # corresponding tensors of the two modules.
        for t, s in zip(target.parameters(), source.parameters()):
            _soft_update(t, s, tau)
    elif isinstance(target, torch.Tensor):
        _soft_update(target, source, tau)
    else:
        # FIX: the original asserted the same condition up front, which made
        # this branch unreachable and disappeared under `python -O`.
        raise NotImplementedError
def assert_action(action):
    """Sanity-check that an action is a numpy array free of NaNs."""
    assert isinstance(action, np.ndarray)
    # Summing propagates any NaN, so a single check covers the whole array.
    total = np.sum(action)
    assert not np.isnan(total), 'Action has a Nan value.'
class RunningMeanStats:
    """Sliding window over the last ``n`` scalar values, reporting their mean."""

    def __init__(self, n=10):
        # Window length must be a positive integer.
        assert isinstance(n, int) and n > 0
        # deque(maxlen=n) evicts the oldest sample automatically on append.
        self._stats = deque(maxlen=n)

    def append(self, x):
        """Record one new sample."""
        self._stats.append(x)

    def get(self):
        """Mean over however many samples have arrived so far (at most n)."""
        return np.mean(self._stats)
|
import Head from 'next/head';
import React, { useEffect, useState } from 'react';
import Footer from './Footer';
import Nav from './Nav';
// Allow untyped access to the browser global without compiler complaints.
declare const window: any;

// Layout only consumes children; no other props are read.
type Props = {
  children: any;
};

// No local component state is used.
type State = {};
/**
 * Page shell: sets the document title and wraps every page in the shared
 * Nav / main-content / Footer scaffolding with the dark theme applied.
 */
class Layout extends React.Component<Props, State> {
  // NOTE(review): this constructor only forwards props and could be removed.
  constructor(props: Props) {
    super(props);
  }

  render() {
    return (
      <>
        <Head>
          <title>ZITADEL • B2B Demo</title>
        </Head>
        <div className="dark">
          <div className="relative flex flex-col min-h-screen w-full bg-zitadelblue-800 text-white">
            <Nav></Nav>
            {/* pt-14 clears the fixed-height navigation bar */}
            <main className="flex-grow w-full pt-14">
              {this.props.children}
            </main>
            <Footer></Footer>
          </div>
        </div>
      </>
    );
  }
}
export default Layout;
|
from sklearn import svm
import numpy as np
import pickle
from sklearn.cross_validation import train_test_split
def loadDataSet(filename):
    """Load a tab-separated data file of 16 feature columns plus one label.

    A label equal to 1.0 is remapped to 0.0 (binary relabeling); any other
    label value is kept unchanged.  The label list is echoed to stdout, as
    the original script did.

    :param filename: path to the tab-separated data file
    :return: (dataMat, labelMat) — rows of 16 floats, and float labels
    """
    dataMat = []
    labelMat = []
    with open(filename) as fr:
        for line in fr.readlines():
            lineArr = line.strip().split('\t')
            # FIX: a comprehension replaces 16 hand-written float() calls.
            dataMat.append([float(x) for x in lineArr[:16]])
            # Remap label 1.0 -> 0.0, keep everything else as-is.
            label = float(lineArr[-1])
            if label == 1.0:
                label = 0.0
            labelMat.append(label)
    # FIX: print() calls instead of Python-2-only print statements.
    print(labelMat)
    print("_______________________________________________")
    return dataMat, labelMat
if __name__ == '__main__':
    # Load the features/labels and hold out 25% of rows for testing.
    x, y = loadDataSet("./data")
    X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.25)
    # RBF-kernel SVM with hand-tuned gamma/C.
    clf = svm.SVC(gamma=0.001, C=1000)
    clf.fit(X_train, y_train)
    # FIX: print() calls instead of Python-2-only print statements.
    print("++++++++++++++++++++++++++++++++++++")
    print(clf)
    res = clf.predict(X_test)
    ans = clf.predict(X_train)
    print(res)
    # Count correct predictions on the held-out and training sets.
    correct_test = sum(1 for predicted, actual in zip(res, y_test)
                       if predicted == actual)
    correct_train = sum(1 for predicted, actual in zip(ans, y_train)
                        if predicted == actual)
    print(correct_test)
    print(len(res))
    # Test and training accuracy.
    print(float(correct_test) / len(res))
    print(float(correct_train) / len(ans))
    # Persist the trained model for later reuse.
    with open('model.pickle', 'wb') as f:
        pickle.dump(clf, f)
|
#!/usr/bin/env bash
# Build the react-router GWT webpack bundle and copy the output (and its
# source map) into the Java source tree.
# FIX: abort on the first failing command so a broken webpack build can no
# longer silently leave stale output in place.
set -e

FILE_NAME=react-router-gwt-webpack
OUTPUT_DIR=../../src/main/java/io/clickhandler/web/reactRouterGwt/resources
MAP_DIR=../../src/main/java/io/clickhandler/web/reactRouterGwt/public
OUTPUT=${OUTPUT_DIR}/${FILE_NAME}
MAP_OUTPUT=${MAP_DIR}/${FILE_NAME}

echo "*** Starting Webpack Build ***";

# remove stale artifacts from a previous build
rm -f ${OUTPUT}.js;
rm -f ${MAP_OUTPUT}.js.map;

# run webpack
webpack --config webpack.config.js --progress

# copy files into the Java tree
echo "Copying Output...";
mkdir -p ${OUTPUT_DIR}
mkdir -p ${MAP_DIR}
cp build/webpack-output.js ${OUTPUT}.js;
cp build/webpack-output.js.map ${MAP_OUTPUT}.js.map;

# cleanup
rm -rf build;
echo "*** Webpack Build Complete ***";
###############################################################################
# !/bin/python
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""State Machine Handler module"""
def convert_string_to_list(comma_delimited_list: str) -> list:
    """
    Split a comma-delimited string into a list of stripped values.

    Segments that are empty *before* stripping are dropped; a segment made
    of only whitespace survives as an empty string (matching the historical
    behaviour of this helper).

    :param comma_delimited_list: e.g. "a, b ,c"
    :return: list of stripped segments
    """
    segments = comma_delimited_list.split(',')
    return [segment.strip() for segment in segments if segment]
def get_delimiter(value) -> str:
    """Translate a descriptive delimiter label into the actual character.

    :param value: descriptive string such as 'Colon (:)'; surrounding
        whitespace is ignored
    :return: the delimiter character, or None for an unrecognised label
    """
    # Table lookup replaces the if/elif chain; .get() preserves the implicit
    # None return for unknown labels.
    label_to_char = {
        'Colon (:)': ':',
        'Dot (.)': '.',
        'Underscore (_)': '_',
        'Pipe (|)': '|',
    }
    return label_to_char.get(value.strip())
|
#!/bin/bash
# Print CPU and GPU temperatures on a Raspberry Pi.
# thermal_zone0 reports millidegrees Celsius, e.g. 47123 -> 47.1'C.
cpuTemp0=$(cat /sys/class/thermal/thermal_zone0/temp)
cpuTemp1=$((cpuTemp0/1000))          # whole degrees
# BUG FIX: the first decimal digit is (temp/100) mod 10.  The original used
# (temp/100) % (temp/1000), which is wrong whenever the decimal digit is >=
# the integer part (e.g. 5.7'C) and divides by zero below 1'C.
cpuTempM=$(( (cpuTemp0/100) % 10 ))
echo CPU temp"="$cpuTemp1"."$cpuTempM"'C"
echo GPU $(/opt/vc/bin/vcgencmd measure_temp)
|
import {Component, OnDestroy, OnInit} from '@angular/core';
import {IConfiguration, IContextMenu, ITreeData, TreeModel} from '../../../../main';
import {Observable} from 'rxjs/Observable';
import {IOuterNode} from '../../../../src/interfaces/IOuterNode';
import {TREE_ONE_ID, TreeOneNodeService} from './treeOneNode.service';
import {TreeInitializerService} from '../../../../src/service/initializer.service';
@Component({
selector: 'app-tree-one',
templateUrl: './treeOne.component.html',
})
/**
 * Demo tree component: restores persisted nodes from localStorage and
 * initializes a TreeModel bound to TREE_ONE_ID.
 */
export class TreeOneComponent implements OnInit, OnDestroy {
  public folders: Observable<ITreeData>;
  // No context-menu entries are registered for this tree.
  public contextMenu: IContextMenu[] = [];
  // Drag & drop is confined to this tree: drag zone and drop zones both
  // reference TREE_ONE_ID.
  public treeConfiguration: IConfiguration = {
    showAddButton: true,
    disableMoveNodes: false,
    treeId: TREE_ONE_ID,
    dragZone: TREE_ONE_ID,
    dropZone: [TREE_ONE_ID],
    isAnimation: true
  };
  public treeModel: TreeModel;

  public constructor(private treeInitializerService: TreeInitializerService,
                     private treeOneNodeService: TreeOneNodeService) {
  }

  public ngOnInit(): void {
    // Previously saved nodes, or an empty tree on first run.
    const nodes: IOuterNode[] = JSON.parse(localStorage.getItem('treeOne')) || [];
    this.treeModel = this.treeInitializerService.init(this.treeConfiguration, this.treeOneNodeService, nodes);
    // this.treeModel.initPath(['e95569ed-afd4-bdf3-1b3f-448afad49d04', '69d2d75d-c0e7-ee94-2067-8554efaa7a1c', '8ba1210b-e503-f9a8-4b85-c315ca828844']);
  }

  public ngOnDestroy(): void {
    // Release the model's resources when the component is torn down.
    this.treeModel.destroy();
  }
}
|
// using System;
// using System.Collections.Generic;
// using System.Linq;
// using Microsoft.Msagl.Core.DataStructures;
// using Microsoft.Msagl.Core.Geometry.Curves;
// using Microsoft.Msagl.Routing.Visibility;
// namespace Microsoft.Msagl.Routing {
// internal class MultipleSourceMultipleTargetsShortestPathOnVisibilityGraph {
// //we are not using the A* algorithm since it does not make much sense for muliple targets
// //but we use the upper bound heuristic
// readonly IEnumerable<VisibilityVertex> sources;
// readonly Set<VisibilityVertex> targets;
// VisibilityVertex _current;
// VisibilityVertex closestTarget;
// double upperBound = Number.POSITIVE_INFINITY;
// VisibilityGraph _visGraph;
// internal MultipleSourceMultipleTargetsShortestPathOnVisibilityGraph(IEnumerable<VisibilityVertex> sourceVisVertices,
// IEnumerable < VisibilityVertex > targetVisVertices, VisibilityGraph visibilityGraph)
// {
// _visGraph = visibilityGraph;
// visibilityGraph.ClearPrevEdgesTable();
// foreach(var v of visibilityGraph.Vertices())
// {
// v.Distance = Double.PositiveInfinity;
// }
// sources = sourceVisVertices;
// targets = new Set<VisibilityVertex>(targetVisVertices);
// }
// // Returns a path
// // <
// internal IEnumerable < VisibilityVertex > GetPath(){
// var pq = new GenericBinaryHeapPriorityQueue<VisibilityVertex>();
// foreach(var v of sources) {
// v.Distance = 0;
// pq.Enqueue(v, 0);
// }
// while (!pq.IsEmpty()) {
// _current = pq.Dequeue();
// if (targets.Contains(_current))
// break;
// foreach(var e of _current.OutEdges.Where(PassableOutEdge))
// ProcessNeighbor(pq, e, e.Target);
// foreach(var e of _current.InEdges.Where(PassableInEdge))
// ProcessNeighbor(pq, e, e.Source);
// }
// return _visGraph.PreviosVertex(_current) == null ? null : CalculatePath();
// }
// bool PassableOutEdge(VisibilityEdge e) {
// return e.Source == sources ||
// targets.Contains(e.Target) ||
// !IsForbidden(e);
// }
// bool PassableInEdge(VisibilityEdge e) {
// return targets.Contains(e.Source) || e.Target == sources || !IsForbidden(e);
// }
// internal static bool IsForbidden(VisibilityEdge e) {
// return e.IsPassable != null && !e.IsPassable() || e is TollFreeVisibilityEdge;
// }
// void ProcessNeighbor(GenericBinaryHeapPriorityQueue < VisibilityVertex > pq, VisibilityEdge l,
// VisibilityVertex v)
// {
// var len = l.length;
// var c = _current.Distance + len;
// if (c >= upperBound)
// return;
// if (targets.Contains(v)) {
// upperBound = c;
// closestTarget = v;
// }
// if (v != sources && _visGraph.PreviosVertex(v) == null) {
// v.Distance = c;
// _visGraph.SetPreviousEdge(v, l);
// pq.Enqueue(v, c);
// }
// else if (c < v.Distance) {
// //This condition should never hold for the dequeued nodes.
// //However because of a very rare case of an epsilon error it might!
// //In this case DecreasePriority will fail to find "v" and the algorithm will continue working.
// //Since v is not in the queue changing its .Distance will not mess up the queue.
// //Changing v.Prev is fine since we come up with a path with an insignificantly
// //smaller distance.
// v.Distance = c;
// _visGraph.SetPreviousEdge(v, l);
// pq.DecreasePriority(v, c);
// }
// }
// IEnumerable < VisibilityVertex > CalculatePath() {
// if (closestTarget == null)
// return null;
// var ret = new Array<VisibilityVertex>();
// var v = closestTarget;
// do {
// ret.Add(v);
// v = _visGraph.PreviosVertex(v);
// } while (v.Distance > 0);
// ret.Add(v);
// ret.Reverse();
// return ret;
// }
// }
// }
|
// Source : https://leetcode.com/problems/shuffle-an-array/
// Author : <NAME>
/**
* @param {number[]} nums
*/
/**
 * Stores the original array so it can be returned untouched or shuffled.
 * @param {number[]} nums
 */
var Solution = function(nums) {
    this.nums = nums;
};

/**
 * Resets the array to its original configuration and returns it.
 * @return {number[]}
 */
Solution.prototype.reset = function() {
    return this.nums;
};

/**
 * Returns a uniformly random permutation without mutating the original.
 * Inside-out Fisher-Yates: each element lands in a random slot of the
 * output, displacing the previous occupant into the current slot.
 * @return {number[]}
 */
Solution.prototype.shuffle = function() {
    var source = this.nums;
    var count = source.length;
    var result = Array(count);
    for (var i = 0; i < count; i++) {
        var slot = Math.floor(Math.random() * (i + 1));
        if (slot !== i) {
            result[i] = result[slot];
        }
        result[slot] = source[i];
    }
    return result;
};
/**
* Your Solution object will be instantiated and called as such:
* var obj = Object.create(Solution).createNew(nums)
* var param_1 = obj.reset()
* var param_2 = obj.shuffle()
*/
|
#!/usr/bin/env python3
import rospy
import setup_path
import airsim
import cv2
import numpy as np
import os
import sys
import math
import setup_path
import argparse
import pprint
import time
from geometry_msgs.msg import Point
from visualization_msgs.msg import Marker
# Use below in settings.json with blocks environment
# Overall gains and parameters:
TimeStep=0.01
TotalRunTime=60
# Simple Velocity Tracker, Proportional only
Kp=0.3
TargetVel=5 # m/s
AccelScale=4.0
BrakeScale=2.0
binSize=0.2 # in meters, binning size for occupancy grid, for use on lidar data
rospy.init_node('ramp_merge', anonymous=True)
RdrMarkerPub = rospy.Publisher("radar_markers_rviz",Marker,queue_size=100)
def Plant(CaccAccel, PlantID):
    """Map a commanded acceleration onto throttle/brake for one vehicle.

    The command is clamped to [-BrakeScale, AccelScale]; non-negative
    commands become a throttle fraction of AccelScale, negative commands a
    brake fraction of BrakeScale.  Always returns 0.

    :param CaccAccel: requested longitudinal acceleration (m/s^2)
    :param PlantID: AirSim vehicle name to receive the controls
    """
    # Saturate the command into the achievable actuator range.
    CaccAccel = min(max(CaccAccel, -BrakeScale), AccelScale)
    # Rescale to [0, 1] throttle / brake and apply.
    if CaccAccel >= 0:
        car_controls.throttle = float(CaccAccel / AccelScale)
        car_controls.brake = 0
    else:
        car_controls.throttle = 0
        car_controls.brake = -1.0 * float(CaccAccel / BrakeScale)
    client.setCarControls(car_controls, PlantID)
    return 0
def parse_lidarData(data):
    """Convert AirSim's flat point cloud into an (N, 3) array of XYZ rows."""
    flat = np.array(data.point_cloud, dtype=np.dtype('f4'))
    return np.reshape(flat, (flat.shape[0] // 3, 3))
def filter_and_bin(points):
    """Drop out-of-range points, then snap survivors onto a coarse grid.

    Keeps points with -2 < z < 0.8 (the point-cloud Z-axis is inverted),
    x < 30 and y < 30, then floors each coordinate to a multiple of the
    module-level binSize.

    :param points: (N, 3) float array of XYZ lidar returns
    :return: filtered (M, 3) array with coordinates quantized to binSize
    """
    # FIX: one vectorized boolean mask replaces the original per-point
    # Python loop plus np.delete (O(N) python-level work per point).
    discard = ((points[:, 2] <= -2) | (points[:, 2] >= 0.8) |
               (points[:, 0] >= 30) | (points[:, 1] >= 30))
    points = points[~discard]
    # Quantize: scale, floor, rescale — same arithmetic as the original.
    scaleFactor = 1 / binSize
    return np.floor(points * scaleFactor) / scaleFactor
def RadarMarkerPublisher(InputList, RadarPublisher):
    """Publish the binned points as a single RViz CUBE_LIST marker."""
    # MarkerArrayIn=visualization_msgs.msg.MarkerArray()
    markerTemp = Marker()
    markerTemp.header.frame_id = "map"
    markerTemp.type = markerTemp.CUBE_LIST
    markerTemp.action = markerTemp.ADD
    # One cube per point: 0.4 m footprint, 0.2 m tall.
    markerTemp.scale.x = 0.4
    markerTemp.scale.y = 0.4
    markerTemp.scale.z = 0.2
    # Fully opaque cyan.
    markerTemp.color.r = 0.0
    markerTemp.color.g = 1.0
    markerTemp.color.b = 1.0
    markerTemp.color.a = 1.0
    for itemDxj in InputList:
        tempPoint = Point()
        tempPoint.x = itemDxj[0]
        # y is negated — presumably a frame-handedness conversion for RViz;
        # TODO confirm against the lidar frame convention.
        tempPoint.y = -itemDxj[1]
        tempPoint.z = 0
        # Skip points with any NaN coordinate; only finite points are added.
        if any(np.isnan([itemDxj[0], itemDxj[1], itemDxj[2]])):
            # print('found NaN')
            pass
        else:
            markerTemp.points.append(tempPoint)
    RadarPublisher.publish(markerTemp)
# Create all cars and set up API control.
client = airsim.CarClient()
client.confirmConnection()
# FIX: the ten copy-pasted enableApiControl / getCarState / Plant sequences
# for CarR1..CarR10 are replaced by loops over one name list.
# client.enableApiControl(True, "CarRLV")
CAR_NAMES = ["CarR%d" % i for i in range(1, 11)]
for car_name in CAR_NAMES:
    client.enableApiControl(True, car_name)

car_controls = airsim.CarControls()
car_controls.is_manual_gear = False

startTime = time.time()
RunTime = time.time() - startTime
while RunTime < TotalRunTime:  # Max run time
    RunTime = time.time() - startTime
    # Ego vehicle state (note: queried as "CarFPV", not "CarRLV").
    StateRLV = client.getCarState("CarFPV")
    # print(StateRLV)
    # Proportional velocity tracking for every background vehicle.
    for car_name in CAR_NAMES:
        state = client.getCarState(car_name)
        accelReq = Kp * (TargetVel - state.speed)
        Plant(accelReq, car_name)
    # Now just sleep so the cars are allowed to move.
    time.sleep(TimeStep)
    # Get lidar data from the ego vehicle and publish it for RViz.
    lidarData = client.getLidarData(lidar_name='LidarSensor1', vehicle_name='CarFPV')
    if (len(lidarData.point_cloud) < 3):
        print("\tNo points received from Lidar data")
    else:
        points = parse_lidarData(lidarData)
        points = filter_and_bin(points)
        RadarMarkerPublisher(points, RdrMarkerPub)
        # print("\tTime_stamp: %d number_of_points: %d" % (lidarData.time_stamp, len(points)))

# Restore the simulator to its original state.
client.reset()
client.enableApiControl(False)
print('done')
|
def isDuplicatedOffer(self, name, categoryId):
    """Return True when an ACTIVE offer with this exact name and category
    already exists among the caller's offers."""
    candidates = self.filterMyOffers(name, 'ACTIVE', 1, categoryId)
    return any(
        offer['name'] == name and offer['category']['id'] == categoryId
        for offer in candidates
    )
<reponame>shreyass27/indecision_app<gh_stars>0
// Root component: owns the option list, persists it to localStorage, and
// passes handlers down to the presentational children.
class IndecisionApp extends React.Component {
    constructor(props) {
        super(props);
        this.handleRemoveAll = this.handleRemoveAll.bind(this);
        this.handlePick = this.handlePick.bind(this);
        this.handleAddOption = this.handleAddOption.bind(this);
        this.handleDeletSingleOpton = this.handleDeletSingleOpton.bind(this);
        this.state = {
            options: props.options
        }
    }
    // Rehydrate the saved options from localStorage after the first render.
    componentDidMount() {
        console.log('fetching Data');
        try {
            const json = localStorage.getItem('options');
            this.setState(() => ({ options : json ? JSON.parse(json) : [] }));
        } catch(error) {
            console.log(error)
        }
    }
    // Persist only when the option count actually changed.
    componentDidUpdate(prevProps, prevState) {
        console.log('saving data');
        if(prevState.options.length !== this.state.options.length) {
            const json = JSON.stringify(this.state.options);
            localStorage.setItem('options', json);
        }
    }
    componentWillUnmount() {
        console.log('componentWillUnmount');
    }
    handleRemoveAll() {
        this.setState(() => ({ options: [] }));
    }
    // Alert a uniformly random option.
    handlePick() {
        const randomInt = Math.floor(Math.random() * this.state.options.length);
        alert(this.state.options[randomInt]);
    }
    // Returns an error string for empty/duplicate input, undefined on success.
    handleAddOption(singleOption) {
        if (!singleOption) {
            return 'Enter a valid option';
        } else if (this.state.options.indexOf(singleOption) > -1) {
            return 'Entered Option Already Exists';
        }
        this.setState((prevState) => ({options: [...prevState.options, singleOption] }));
    }
    handleDeletSingleOpton(optionToRemove) {
        console.log('handleDeletSingleOpton option', optionToRemove);
        this.setState((prevState) => ({
            options: prevState.options.filter((option) => optionToRemove !== option)
        }))
    }
    render() {
        const title = 'Indecision';
        const subtitle = 'Put you life in a computer. Bro!!';
        return (
            <div>
                <Header title={title} subtitle={subtitle} />
                <Actions
                    hasOptions={this.state.options.length > 0}
                    handlePick={this.handlePick}
                />
                <Options
                    options={this.state.options}
                    handleRemoveAll={this.handleRemoveAll}
                    handleDeletSingleOpton={this.handleDeletSingleOpton}
                />
                <AddOptions handleAddOption={this.handleAddOption} />
            </div>
        )
    }
}
IndecisionApp.defaultProps = {
    options: []
}
const Header = (props) => {
return (
<div>
<h1>{props.title}</h1>
{ props.subtitle && <h3>{props.subtitle}</h3>}
</div>
);
};
Header.defaultProps = {
title: 'Indecision'
};
const Actions = (props) => {
return (
<div>
<button onClick={props.handlePick} disabled={!props.hasOptions}> What should I do ?</button>
</div>
);
}
const Options = (props) => {
return (
<div>
<button onClick={props.handleRemoveAll} >Remove All</button>
{ props.options.length < 1 && <p>Please Add An option to get Started</p>}
{
props.options.map(
option => <Option handleDeletSingleOpton={props.handleDeletSingleOpton} key={option} option={option}/>
)
}
</div>
);
}
const Option = (props) => {
return (
<div>
{props.option}
<button onClick={() => props.handleDeletSingleOpton(props.option)}>Remove</button>
</div>
);
}
// Form for adding a new option; displays the validation error string
// returned by the parent's handleAddOption.
class AddOptions extends React.Component {
    constructor(props) {
        super(props);
        this.handleAddOption = this.handleAddOption.bind(this);
        this.state = { error: undefined };
    }
    handleAddOption(e) {
        e.preventDefault();
        const option = e.target.elements.option.value.trim();
        // The parent returns an error message, or undefined on success.
        const error = this.props.handleAddOption(option);
        this.setState(() => ( { error }) );
        // Clear the input only when the option was accepted.
        if (!error) {
            e.target.elements.option.value = '';
        }
    }
    render() {
        return (
            <div>
                { this.state.error && <p>{this.state.error }</p> }
                <form onSubmit={this.handleAddOption}>
                    <input name="option" type="text"></input>
                    <button type="submit" >Add options</button>
                </form>
            </div>
        )
    }
}
ReactDOM.render(<IndecisionApp />, document.getElementById('app')) |
package org.insightcentre.nlp.saffron.benchmarks;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.insightcentre.nlp.saffron.data.Taxonomy;
/**
*
* @author <NAME>
*/
/**
 * Depth-weighted Fowlkes–Mallows similarity between two taxonomies:
 * each tree level is flattened into a clustering of topic strings and the
 * per-level clusterings are compared.
 *
 * @author <NAME>
 */
public class FowlkesMallows {

    /**
     * Flattens the taxonomy into a clustering at the given depth.
     * Level 0 yields a single cluster holding every topic; level 1 yields
     * one cluster per immediate child subtree (each including that child's
     * root); deeper levels recurse into the grandchildren and below.
     */
    public static List<Set<String>> clusteringAtLevel(Taxonomy t, int level) {
        if(level == 0) {
            Set<String> s = new HashSet<>();
            flattenTaxonomy(t, s);
            return Collections.singletonList(s);
        } else if(level == 1) {
            List<Set<String>> clustering = new ArrayList<>();
            for(Taxonomy t2 : t.children) {
                Set<String> s = new HashSet<>();
                for(Set<String> s2 : clusteringAtLevel(t2, level - 1)) {
                    s.addAll(s2);
                }
                s.add(t2.root);
                clustering.add(s);
            }
            return clustering;
        } else {
            List<Set<String>> clustering = new ArrayList<>();
            for(Taxonomy t2 : t.children) {
                clustering.addAll(clusteringAtLevel(t2, level -1));
            }
            return clustering;
        }
    }

    /**
     * Compares the taxonomies level by level, weighting level k by k.
     * NOTE(review): the weights 1..depth sum to depth*(depth+1)/2 but the
     * result is normalized by depth*(depth-1)/2, and depth == 1 divides by
     * zero — confirm both are intended.
     */
    public static double fowlkesMallows(Taxonomy t1, Taxonomy t2) {
        int depth = Math.max(t1.depth(), t2.depth());
        double score = 0.0;
        for(int level = 1; level <= depth; level++) {
            score += compareClustering(clusteringAtLevel(t1, level),
                    clusteringAtLevel(t2, level)) * (level);
        }
        return score * 2.0 / depth / (depth - 1);
    }

    /** Collects every topic (root plus all descendants) into s. */
    private static void flattenTaxonomy(Taxonomy t, Set<String> s) {
        s.add(t.root);
        for(Taxonomy t2 : t.children) {
            flattenTaxonomy(t2, s);
        }
    }

    /**
     * Per-level Fowlkes–Mallows index: n11 / sqrt(pairs1 * pairs2), where
     * n11 counts object pairs co-clustered in both clusterings and pairsX
     * is the number of co-clustered pairs within clustering X alone.
     * Returns 0 when either clustering contains no pairs at all.
     */
    private static double compareClustering(List<Set<String>> c1, List<Set<String>> c2) {
        int n11 = 0; // the number of object pairs that are in the same cluster in both
        //int total = 0;
        int c1pairs = 0;
        for(Set<String> s1 : c1) {
            c1pairs += s1.size() * (s1.size() - 1) / 2;
            //total += s1.size();
        }
        //total = total * (total - 1) / 2;
        int c2pairs = 0;
        for(Set<String> s2 : c2) {
            c2pairs += s2.size() * (s2.size() - 1) / 2;
        }
        // n11: size of each pairwise cluster intersection, counted as pairs.
        for(Set<String> s1 : c1) {
            for(Set<String> s2: c2) {
                Set<String> s3 = new HashSet<>(s1);
                s3.retainAll(s2);
                if(s3.size() > 1)
                    n11 += s3.size() * (s3.size() - 1) / 2;
            }
        }
        //int n10 = c1pairs - n11; // the number of object pairs that are in the same cluster in c1 but not c2
        //int n01 = c2pairs - n11; // the number of object pairs that are in the same cluster in c2 but not c1
        //int n00 = total - n10 - n01 - n11; // the number of object pairs that are in different clusters in both
        if(c1pairs > 0 && c2pairs > 0)
            return (double)n11 / Math.sqrt((double)c1pairs * c2pairs);
        else
            return 0.0;
    }
}
|
<filename>frontend/src/staff/upselectdataset.js<gh_stars>0
import {Selecteddataset} from '../components/messages';
import {EventAggregator} from 'aurelia-event-aggregator';
import {Router} from 'aurelia-router';
import {ProjectApi} from '../components/projectapi';
/**
 * Wizard step that waits for a dataset selection event and then advances
 * the user to the "upload data" step.
 */
export class Upselectdataset {
    static inject = [EventAggregator, Router, ProjectApi];

    /**
     * @param eventAggregator aurelia event bus used to receive dataset selections
     * @param router          router used to navigate to the next step
     * @param projectApi      shared project state holder
     */
    constructor(eventAggregator, router, projectApi) {
        this.ea = eventAggregator;
        this.router = router;
        this.pa = projectApi;
        // Forward every Selecteddataset message to the handler below.
        this.ea.subscribe(Selecteddataset, (msg) => this.selectedDataset(msg.dataset));
    }

    // Remember the chosen dataset on the shared API object and move on.
    selectedDataset(dataset) {
        console.log('Upselectdataset.selectedDataset()');
        this.pa.selectedDataset = dataset;
        this.router.navigate("upselectdata");
    }
}
|
package io.opensphere.csvcommon.common.datetime;
import java.util.Collection;
import java.util.List;
import io.opensphere.core.common.configuration.date.DateFormat;
import io.opensphere.core.common.configuration.date.DateFormatsConfig;
/**
 * Interface to an object that provides the configuration of known date formats,
 * and the regular expressions that represent them.
 */
public interface ConfigurationProvider
{
    /**
     * Gets the collection of configured known date formats.
     *
     * @return The collection of known date formats.
     */
    DateFormatsConfig getDateFormats();

    /**
     * Saves a new format to the configuration.
     *
     * @param format The format to save.
     */
    void saveFormat(DateFormat format);

    /**
     * Saves the new formats to the configuration.
     *
     * @param formats The formats to save.
     */
    void saveFormats(Collection<DateFormat> formats);

    /**
     * Gets the column names to exclude.
     *
     * @return The list of partial column names to exclude.
     */
    List<String> getExcludeColumns();
}
|
<gh_stars>100-1000
// Doxygen-generated search index entry: maps the lowercased search key to its
// display title and target page. Do not edit by hand; regenerate with doxygen.
var searchData=
[
  ['normalized_20lms_20filters',['Normalized LMS Filters',['../group__LMS__NORM.html',1,'']]]
];
|
package com.watayouxiang.mediaplayer.core;
import android.content.Context;
import android.util.AttributeSet;
import java.util.Locale;
/**
 * Base player that layers gesture handling (brightness / volume / seek /
 * play-pause toggle), orientation management, and background-play policy on
 * top of the Ali player SDK. Subclasses can override the on*() hooks to
 * update their UI.
 */
public class BasePlayer extends AliSDK implements BasePlayerOperation {
    // ============================================================================
    // Parameters with defaults (assigned in onInitEvent)
    // ============================================================================
    private boolean mAutoRotation;      // auto-rotate the player with the device orientation
    private Orientation mOrientation;   // current player orientation
    private boolean mGestureBrightness; // gesture-controlled brightness enabled
    private boolean mGestureVolume;     // gesture-controlled volume enabled
    private boolean mGestureSeekTo;     // gesture-controlled seeking enabled
    private boolean mGestureToggle;     // double-tap play/pause toggle enabled
    private boolean mBackgroundPlay;    // keep playing while the app is in the background
    // ============================================================================
    // Transient gesture state
    // ============================================================================
    private float mStartScrollBrightnessPercent; // brightness when the scroll gesture started
    private float mStartScrollVolumePercent;     // volume when the scroll gesture started
    private long mStartScrollVideoPosition;      // playback position when the scroll gesture started
    private long mStartScrollVideoDuration;      // video duration when the scroll gesture started
    private long mSeekPosition;                  // pending seek target, applied on gesture end
    private boolean mResumePlay;                 // resume playback when returning to the foreground

    public BasePlayer(Context context) {
        super(context);
    }

    public BasePlayer(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public BasePlayer(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    @Override
    protected void onInitEvent(Context context) {
        super.onInitEvent(context);
        // Default configuration
        setOrientation(Orientation.Portrait);
        setAutoRotation(true);
        setGestureBrightness(true);
        setGestureVolume(true);
        setGestureSeekTo(true);
        setGestureToggle(true);
        setBackgroundPlay(false);
    }

    // ---- Logging hooks: subclasses override these to reflect gesture state in UI ----

    protected void onOrientationChange(Orientation orientation) {
        addLog("onOrientationChange:" + orientation);
    }

    protected void onBrightnessStart() {
        addLog("onBrightnessStart");
    }

    protected void onBrightnessUpdate(float brightnessPercent) {
        addLog("onBrightnessUpdate: brightnessPercent=" + brightnessPercent);
    }

    protected void onBrightnessEnd() {
        addLog("onBrightnessEnd");
    }

    protected void onVolumeStart() {
        addLog("onVolumeStart");
    }

    protected void onVolumeUpdate(float volumePercent) {
        addLog("onVolumeUpdate: volumePercent=" + volumePercent);
    }

    protected void onVolumeEnd() {
        addLog("onVolumeEnd");
    }

    protected void onSeekStart() {
        addLog("onSeekStart");
    }

    protected void onSeekUpdate(long videoDuration, long videoPosition, long seekPosition) {
        addLog(String.format(Locale.getDefault(),
                "onSeekUpdate: videoDuration=%d, videoPosition=%d, seekPosition=%d",
                videoDuration, videoPosition, seekPosition));
    }

    protected void onSeekEnd(long seekPosition) {
        addLog("onSeekEnd: seekPosition=" + seekPosition);
    }

    @Override
    public void onResume() {
        super.onResume();
        // Resume playback if onPause() paused it because background play is off.
        if (mResumePlay) {
            start();
            mResumePlay = false;
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        // Pause when going to the background unless background playback is enabled,
        // and remember to resume on the next onResume().
        if (!isBackgroundPlay() && isPlaying()) {
            pause();
            mResumePlay = true;
        }
    }

    @Override
    public void setOrientation(Orientation orientation) {
        if (getOrientation() != orientation) {
            setScreenOrientation(orientation);
            // Portrait family leaves fullscreen; landscape family enters it.
            if (orientation == Orientation.Portrait
                    || orientation == Orientation.Portrait_Reverse) {
                setFullScreen(false);
            } else if (orientation == Orientation.Landscape
                    || orientation == Orientation.Landscape_Reverse) {
                setFullScreen(true);
            }
            mOrientation = orientation;
            onOrientationChange(orientation);
        }
    }

    @Override
    public Orientation getOrientation() {
        return mOrientation;
    }

    @Override
    public void toggleOrientation() {
        // Flip between the portrait and landscape families.
        Orientation orientation = getOrientation();
        if (orientation == Orientation.Portrait
                || orientation == Orientation.Portrait_Reverse) {
            setOrientation(Orientation.Landscape);
        } else if (orientation == Orientation.Landscape
                || orientation == Orientation.Landscape_Reverse) {
            setOrientation(Orientation.Portrait);
        }
    }

    @Override
    public void setAutoRotation(boolean autoRotation) {
        mAutoRotation = autoRotation;
    }

    @Override
    public boolean isAutoRotation() {
        return mAutoRotation;
    }

    @Override
    public boolean isGestureBrightness() {
        return mGestureBrightness;
    }

    @Override
    public void setGestureBrightness(boolean gestureBrightness) {
        mGestureBrightness = gestureBrightness;
    }

    @Override
    public boolean isGestureVolume() {
        return mGestureVolume;
    }

    @Override
    public void setGestureVolume(boolean gestureVolume) {
        mGestureVolume = gestureVolume;
    }

    @Override
    public boolean isGestureSeekTo() {
        return mGestureSeekTo;
    }

    @Override
    public void setGestureSeekTo(boolean gestureSeekTo) {
        mGestureSeekTo = gestureSeekTo;
    }

    @Override
    public boolean isGestureToggle() {
        return mGestureToggle;
    }

    @Override
    public void setGestureToggle(boolean gestureToggle) {
        mGestureToggle = gestureToggle;
    }

    @Override
    public boolean isBackgroundPlay() {
        return mBackgroundPlay;
    }

    @Override
    public void setBackgroundPlay(boolean backgroundPlay) {
        mBackgroundPlay = backgroundPlay;
    }

    @Override
    protected void onStartScroll(ScrollMode mode) {
        super.onStartScroll(mode);
        // Snapshot the value being adjusted so onScrolling() can apply a relative delta.
        // Left edge = brightness, right edge = volume, horizontal = seek.
        if (mode == ScrollMode.VERTICAL_LEFT) {
            if (isGestureBrightness()) {
                mStartScrollBrightnessPercent = getBrightnessPercent();
                onBrightnessStart();
            }
        } else if (mode == ScrollMode.VERTICAL_RIGHT) {
            if (isGestureVolume()) {
                mStartScrollVolumePercent = getVolumePercent();
                onVolumeStart();
            }
        } else if (mode == ScrollMode.HORIZONTAL) {
            if (isGestureSeekTo()) {
                mStartScrollVideoPosition = getPosition();
                mStartScrollVideoDuration = getDuration();
                onSeekStart();
            }
        }
    }

    @Override
    protected void onScrolling(ScrollMode mode, float percent) {
        super.onScrolling(mode, percent);
        if (mode == ScrollMode.VERTICAL_LEFT) {
            if (isGestureBrightness()) {
                // Clamp brightness to [0, 1].
                float brightnessPercent = mStartScrollBrightnessPercent + percent;
                if (brightnessPercent > 1) {
                    brightnessPercent = 1;
                } else if (brightnessPercent < 0) {
                    brightnessPercent = 0;
                }
                setBrightnessPercent(brightnessPercent);
                onBrightnessUpdate(brightnessPercent);
            }
        } else if (mode == ScrollMode.VERTICAL_RIGHT) {
            if (isGestureVolume()) {
                // Clamp volume to [0, 1].
                float volumePercent = mStartScrollVolumePercent + percent;
                if (volumePercent > 1) {
                    volumePercent = 1;
                } else if (volumePercent < 0) {
                    volumePercent = 0;
                }
                setVolumePercent(volumePercent);
                onVolumeUpdate(volumePercent);
            }
        } else if (mode == ScrollMode.HORIZONTAL) {
            if (isGestureSeekTo()) {
                // A full-width scroll maps to the full video duration; clamp to [0, duration].
                long deltaPosition = (long) (mStartScrollVideoDuration * percent);
                mSeekPosition = mStartScrollVideoPosition + deltaPosition;
                if (mSeekPosition > mStartScrollVideoDuration) {
                    mSeekPosition = mStartScrollVideoDuration;
                } else if (mSeekPosition < 0) {
                    mSeekPosition = 0;
                }
                // Only report progress when the duration is known.
                if (mStartScrollVideoDuration > 0) {
                    onSeekUpdate(mStartScrollVideoDuration, mStartScrollVideoPosition, mSeekPosition);
                }
            }
        }
    }

    @Override
    protected void onEndScroll(ScrollMode mode, float percent) {
        super.onEndScroll(mode, percent);
        if (mode == ScrollMode.VERTICAL_LEFT) {
            if (isGestureBrightness()) {
                onBrightnessEnd();
            }
        } else if (mode == ScrollMode.VERTICAL_RIGHT) {
            if (isGestureVolume()) {
                onVolumeEnd();
            }
        } else if (mode == ScrollMode.HORIZONTAL) {
            if (isGestureSeekTo()) {
                // The actual seek is deferred until the gesture ends.
                seekTo((int) mSeekPosition);
                onSeekEnd(mSeekPosition);
            }
        }
    }

    @Override
    protected void onDoubleTap() {
        super.onDoubleTap();
        // Double tap toggles play/pause when the gesture is enabled.
        if (isGestureToggle()) {
            toggle();
        }
    }

    @Override
    protected void onPhoneOrientationChange(Orientation orientation) {
        super.onPhoneOrientationChange(orientation);
        // Follow the physical device orientation only when auto-rotation is on.
        if (mAutoRotation) {
            setOrientation(orientation);
        }
    }
}
|
package com.iplante.imdb.movies.controller;
import com.iplante.imdb.movies.service.MovieService;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.data.domain.Pageable;
import org.springframework.http.HttpStatus;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link MovieController}.
 *
 * @author <NAME>
 * @version 1
 * @since 8/28/20
 */
@ExtendWith(MockitoExtension.class)
public class MovieControllerTest {

    private final static long MOVIE_ID = 1L;

    @InjectMocks
    private MovieController movieController;

    @Mock
    private MovieService movieService;

    /**
     * Verifies that getMovieCast returns 200 OK with the body produced by the
     * service layer.
     */
    @Test
    void getMovieCastTest() {
        // Renamed from getCastMoviesTest: the method under test is getMovieCast.
        final var pageable = Pageable.unpaged();
        when(movieService.getMovieCast(MOVIE_ID, pageable)).thenReturn("Mocked body");

        final var result = movieController.getMovieCast(MOVIE_ID, pageable);

        assertThat(result).isNotNull();
        assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(result.getBody()).isEqualTo("Mocked body");
    }
}
|
import { getWeatherData } from '../../client/openWeatherMap/openWeatherClient';
const MIN_VALUE_NEEDED_AN_UMBRELLA = 0.13;
const ADD_HOURS_PADDING = 3;
// Returns the current time shifted forward by the given number of hours.
const getDateAddingHoursToCurrentTime = (hours: number): Date =>
    new Date(Date.now() + hours * 60 * 60 * 1000);
/**
 * Decides whether an umbrella is needed at the given coordinates: true when
 * any forecast entry inside the window [now + padding, now + 12h + padding]
 * reports enough rain or snow over its 3-hour slot.
 */
export const shouldUseUmbrella = async (
    lat: number,
    lon: number
): Promise<boolean> => {
    const weatherData = await getWeatherData(lat, lon);
    const windowStart = getDateAddingHoursToCurrentTime(ADD_HOURS_PADDING);
    const windowEnd = getDateAddingHoursToCurrentTime(12 + ADD_HOURS_PADDING);
    return weatherData.list.some((dataPoint) => {
        // Forecast timestamps are in seconds since the epoch.
        const dateOfDataPoint = new Date(dataPoint.dt * 1000);
        if (dateOfDataPoint < windowStart || windowEnd < dateOfDataPoint) {
            return false;
        }
        const { rain, snow } = dataPoint;
        return (
            (rain && rain['3h'] >= MIN_VALUE_NEEDED_AN_UMBRELLA) ||
            (snow && snow['3h'] >= MIN_VALUE_NEEDED_AN_UMBRELLA)
        );
    });
};
|
def remove_duplicates(file_path: str) -> None:
    """Rewrite *file_path* so each line appears once.

    Lines are compared (and written back) stripped of surrounding whitespace
    and lower-cased. The first-occurrence order is preserved: the original
    ``set``-based version produced a nondeterministic line order.
    """
    with open(file_path, 'r') as file:
        lines = file.readlines()
    # dict.fromkeys keeps insertion order, giving a deterministic result.
    unique_lines = list(dict.fromkeys(line.strip().lower() for line in lines))
    with open(file_path, 'w') as file:
        file.write('\n'.join(unique_lines))
def optimized_func(n):
    """Return the sum over i in range(n) of 1^2 + 2^2 + ... + i^2.

    Uses the closed form i*(i+1)*(2*i+1)//6 for each inner sum of squares.
    The original stored the total in a variable named ``sum``, shadowing the
    built-in; the generator expression avoids that.
    """
    return sum(i * (i + 1) * (2 * i + 1) // 6 for i in range(n))
import math  # BUGFIX: math was used below but never imported (NameError at call time)


def solve_quadratic_eq(a, b, c):
    """Return the two real roots of a*x**2 + b*x + c = 0.

    Raises ValueError (from math.sqrt) when the discriminant is negative and
    ZeroDivisionError when a == 0.
    """
    # Compute the discriminant's square root once instead of twice.
    root = math.sqrt(b ** 2 - 4 * a * c)
    solution1 = (-b + root) / (2 * a)
    solution2 = (-b - root) / (2 * a)
    return solution1, solution2


a = 1
b = 5
c = 6
solution1, solution2 = solve_quadratic_eq(a, b, c)
print("Solutions to the Quadratic Equation: ", solution1, solution2)  # Output: Solutions to the Quadratic Equation: -2.0 -3.0
#!/usr/bin/env bash
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2021-06-01 15:51:14 +0100 (Tue, 01 Jun 2021)
#
# https://github.com/HariSekhon/bash-tools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/HariSekhon
#
set -euo pipefail
[ -n "${DEBUG:-}" ] && set -x
srcdir="$(dirname "${BASH_SOURCE[0]}")"

# shellcheck disable=SC1090
. "$srcdir/lib/utils.sh"

# shellcheck disable=SC2034,SC2154
usage_description="
Lists GCP service account members eg. to find GKE Workload Identity integrations
Output:
<service_account> <role> <member>
eg.
jenkins-agent@MyProject.iam.gserviceaccount.com roles/iam.workloadIdentityUser serviceAccount:MyProject.svc.id.goog[jenkins/jenkins-agent]
"

# used by usage() in lib/utils.sh
# shellcheck disable=SC2034
usage_args="[<project_id>]"

help_usage "$@"

# Optional single argument selects the GCP project; no argument uses the
# gcloud default project.
if [ $# -eq 1 ]; then
    export CLOUDSDK_CORE_PROJECT="$1"
elif [ $# -eq 0 ]; then
    :
else
    usage
fi

# Email basenames of all service accounts in the project.
serviceaccounts="$(gcloud iam service-accounts list --format='value(name.basename())')"

# For each service account, flatten its IAM policy bindings into
# "<service_account> <role> <member>" TSV rows via jq.
for serviceaccount in $serviceaccounts; do
    gcloud iam service-accounts get-iam-policy "$serviceaccount" --format json |
    jq -r "
        .bindings[]? |
        { \"role\": .role, \"member\": .members[] } |
        [ \"$serviceaccount\", .role, .member ] |
        @tsv
    "
done
|
import re

# Compiled once at import time instead of on every call.
_LICENSE_PATTERN = re.compile(r'Copyright (\d{4}) <(.*?)>')


def extract_license_info(license_header: str) -> tuple:
    """Extract the copyright year and organization from a license header.

    Looks for a ``Copyright YYYY <organization>`` marker anywhere in the text.

    Returns:
        (year, organization) with ``year`` as an int, or None when the header
        contains no such marker.
    """
    match = _LICENSE_PATTERN.search(license_header)
    if match:
        return (int(match.group(1)), match.group(2))
    return None  # invalid input or no copyright marker found
def separate_charnum(text):
    """Split a string into its letters and its digits.

    Args:
        text: input string; every character must be a letter or a decimal
            digit (anything else raises ValueError from int(), matching the
            original behavior).

    Returns:
        (chars, nums): the letters in order, and the digits in order as ints.
    """
    # The original named both the parameter and the module variable ``str``,
    # shadowing the built-in; renamed to ``text``.
    chars = []
    nums = []
    for char in text:
        if char.isalpha():
            chars.append(char)
        else:
            nums.append(int(char))
    return chars, nums


text = "ab12cd3"
chars, nums = separate_charnum(text)
print(chars)
print(nums)
#!/bin/bash

# autodiscover of the FAI server
# (c) Thomas Lange 2015

. /lib/dracut-lib.sh

# Bail out early unless fai.discover was given on the kernel command line.
# (This script is sourced by dracut, so a plain return is valid here.)
opt=$(getargs fai.discover)
if [ -z "$opt" ] ; then
    return
fi

# fai.port= on the kernel command line overrides the default monitor port.
# Quoting the expansions guards the tests against empty or multi-word values.
port=$(getarg fai.port=)
[ -z "${port}" ] && port=4711 # default value
export FAI_MONITOR_PORT=$port

# this script is called with all network interfaces
nic=$1

# File where discovered FAI settings are persisted for later boot stages.
ivar=/run/initramfs/fai-variables

declare -a faisrv
declare -A version
declare -a para
scan_net() {
    # Scan the local network for FAI servers.
    # $1 = IP range (CIDR), $2 = TCP port to probe.
    # Appends responding hosts to $faiserver and records their version in $version.
    local iprange=$1
    shift
    local port=$1
    local iplist
    local host

    # nmap greppable output lists hosts with the port open; strip comments and
    # keep the unique, sorted IP column.
    iplist=$(nmap -n -e $nic --host-timeout 10 --open -oG - -p $port $iprange 2>/dev/null | grep -v '#' | cut -d' ' -f2 | sort -n | uniq)

    # check all IPs, if a faiserver is listening there: only hosts that answer
    # the FAI_VERSION query are treated as real FAI servers.
    for host in $iplist; do
        getvar FAI_VERSION $host
        if [ -n "$FAI_VERSION" ]; then
            faiserver+=" $host"
            version[$host]="$FAI_VERSION"
        fi
    done
}
getvar() {
    # Query a single variable from the fai-monitor daemon.
    # Usage: getvar VARNAME [server] [port]
    # Falls back to $monserver and $FAI_MONITOR_PORT if no arguments are given.
    # On success assigns the value to the named variable; otherwise clears it.
    local ret
    local srv
    local var
    local port

    var=$1; shift
    srv=$1; shift
    port=$1; shift
    : ${port:=$FAI_MONITOR_PORT}
    : ${srv:=$monserver}

    # The monitor protocol answers "OK <value>" on success.
    ret=$(echo "VARIABLE $var" | nc $srv $port)
    if [[ $ret =~ ^OK\ ]]; then
        val=${ret#OK }
        eval $var=\$val
    else
        eval "$var="
    fi
}
# Derive the local subnet (CIDR) of the first non-loopback interface from
# `ip addr` output.
iprange=$(ip addr show up | grep -w 'inet'|grep -v 127.0.0.1| cut -d t -f 2 | cut -d ' ' -f 2 |head -1)
echo "Scanning $iprange for FAI server (port $FAI_MONITOR_PORT)"

# Scan until exactly one FAI server is chosen: rescan on demand when none is
# found, and present a selection menu when several respond.
while [ 1 ]; do
    dialog --infobox "\n Scanning $iprange for FAI server (port $FAI_MONITOR_PORT)\n" 5 59
    scan_net $iprange $FAI_MONITOR_PORT
    faisrv=($faiserver)
    if [ ${#faisrv[@]} -eq 0 ]; then
        dialog --colors --yesno "\n \Zr\Z1No FAI server found.\Zn\n\n\n Retry scan?\n" 10 28
        if [ $? -eq 1 ]; then
            die "No FAI server found."
            # NOTE(review): the two lines below are unreachable if die exits
            # the script — confirm die's behavior in this dracut environment.
            root=
            return
        fi
    elif [ ${#faisrv[@]} -gt 1 ]; then
        echo "More than one FAI server found."
        # build parameter list for the menu
        for i in "${faisrv[@]}"; do
            # resolve IP to host name (short form, domain stripped)
            hn=$(getent hosts $i)
            hn=${hn#* }
            hn=${hn%%.*}
            str=$(printf "%-20s" $hn)
            para+=("$i" "$str FAI ${version[$i]}")
        done
        exec 3>&1
        selection=$(dialog --colors --no-cancel --menu " More than one server found. \n Please select a server:\n\n" 15 70 10 "${para[@]}" "Rescan" "Rescan the network" 2>&1 >&3)
        if [ "$selection" = "Rescan" ]; then
            # Reset all discovery state before scanning again.
            faiserver=""
            para=()
            unset version
            declare -A version
        else
            # one server was selected
            monserver=${selection}
            break
        fi
    else
        # we found exactly one FAI server
        monserver=${faisrv[0]}
        break
    fi
done
FAI_VERSION=${version[$monserver]}

# Pull the remaining settings from the selected monitor server.
getvar FAI_FLAGS
getvar NFSROOT
getvar FAI_CONFIG_SRC
# Fall back to an NFS config source on the monitor server when the monitor
# did not provide FAI_CONFIG_SRC.
# BUGFIX: was [ -z "FAI_CONFIG_SRC" ] (missing $) — a non-empty literal, so
# the fallback branch could never run.
if [ -z "$FAI_CONFIG_SRC" ]; then
    getvar FAI_CONFIGDIR
    FAI_CONFIG_SRC="nfs://$monserver/$FAI_CONFIGDIR"
fi

dialog --infobox "\n\n FAI server $FAI_VERSION found at $monserver\n\n Using root=$monserver:$NFSROOT\n" 9 50
sleep 4

root=$monserver:$NFSROOT
netroot=nfs:$monserver:$NFSROOT:
echo "root=$root"

# Persist the discovered settings for later boot stages.
echo "FAI_VERSION=$FAI_VERSION" >> $ivar
echo FAI_FLAGS=$FAI_FLAGS >> $ivar
echo "root=$monserver:$NFSROOT" >> $ivar
echo "monserver=$monserver" >> $ivar
echo "FAI_CONFIG_SRC=$FAI_CONFIG_SRC" >> $ivar

# Done, all good!
rootok=1
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2015 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.usage.refinement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.common.log.LogManager;
import org.sosy_lab.cpachecker.cfa.model.CFAEdge;
import org.sosy_lab.cpachecker.core.counterexample.CounterexampleInfo;
import org.sosy_lab.cpachecker.core.interfaces.ConfigurableProgramAnalysis;
import org.sosy_lab.cpachecker.core.interfaces.Precision;
import org.sosy_lab.cpachecker.core.interfaces.Statistics;
import org.sosy_lab.cpachecker.core.interfaces.StatisticsProvider;
import org.sosy_lab.cpachecker.core.interfaces.WrapperCPA;
import org.sosy_lab.cpachecker.core.reachedset.ReachedSet;
import org.sosy_lab.cpachecker.cpa.arg.ARGBasedRefiner;
import org.sosy_lab.cpachecker.cpa.arg.ARGReachedSet;
import org.sosy_lab.cpachecker.cpa.arg.ARGState;
import org.sosy_lab.cpachecker.cpa.predicate.BAMBlockFormulaStrategy;
import org.sosy_lab.cpachecker.cpa.predicate.BAMPredicateAbstractionRefinementStrategy;
import org.sosy_lab.cpachecker.cpa.predicate.BAMPredicateCPA;
import org.sosy_lab.cpachecker.cpa.predicate.BAMPredicateRefiner;
import org.sosy_lab.cpachecker.cpa.predicate.BlockFormulaStrategy;
import org.sosy_lab.cpachecker.cpa.predicate.PredicateAbstractionManager;
import org.sosy_lab.cpachecker.cpa.predicate.PredicateCPARefinerFactory;
import org.sosy_lab.cpachecker.cpa.predicate.PredicatePrecision;
import org.sosy_lab.cpachecker.exceptions.CPAException;
import org.sosy_lab.cpachecker.util.Pair;
import org.sosy_lab.cpachecker.util.Precisions;
import org.sosy_lab.cpachecker.util.predicates.pathformula.PathFormulaManager;
import org.sosy_lab.cpachecker.util.predicates.smt.Solver;
import org.sosy_lab.cpachecker.util.statistics.StatCounter;
import org.sosy_lab.cpachecker.util.statistics.StatisticsWriter;
/**
 * Adapts CPAchecker's predicate refiner to the usage-analysis refinement
 * chain. Caches refinement outcomes per path (as sets of CFA edges) to avoid
 * re-refining identical paths, and detects paths that BAM keeps repeating.
 */
public class PredicateRefinerAdapter extends GenericSinglePathRefiner {
  ARGBasedRefiner refiner;
  LogManager logger;

  private final UsageStatisticsRefinementStrategy strategy;
  // Reached set for the current iteration; supplied via handleUpdateSignal.
  private ARGReachedSet ARGReached;

  // Paths already proven infeasible, with the precision that refuted them.
  private final Map<Set<CFAEdge>, PredicatePrecision> falseCache = new HashMap<>();
  // Infeasible paths found during the current iteration; merged into falseCache on finish.
  private final Map<Set<CFAEdge>, PredicatePrecision> falseCacheForCurrentIteration = new HashMap<>();
  //private final Multimap<SingleIdentifier, Set<CFAEdge>> idCached = LinkedHashMultimap.create();
  // Paths already proven feasible.
  private final Set<Set<CFAEdge>> trueCache = new HashSet<>();
  // Repeated paths that may indicate BAM is looping; confirmed on second sighting.
  private final Set<Set<CFAEdge>> potentialLoopTraces = new HashSet<>();

  //Statistics
  private StatCounter solverFailures = new StatCounter("Solver failures");
  private StatCounter numberOfrepeatedPaths = new StatCounter("Number of repeated paths");
  private StatCounter numberOfrefinedPaths = new StatCounter("Number of refined paths");
  private StatCounter numberOfBAMupdates = new StatCounter("Number of BAM updates");

  /**
   * Creates the adapter; requires a BAMPredicateCPA somewhere in the wrapped
   * CPA hierarchy.
   *
   * @throws InvalidConfigurationException if no BAMPredicateCPA is available
   */
  public PredicateRefinerAdapter(ConfigurableRefinementBlock<Pair<ExtendedARGPath, ExtendedARGPath>> wrapper,
      ConfigurableProgramAnalysis pCpa, LogManager pLogger) throws InvalidConfigurationException {
    super(wrapper);

    if (!(pCpa instanceof WrapperCPA)) {
      throw new InvalidConfigurationException(BAMPredicateRefiner.class.getSimpleName() + " could not find the PredicateCPA");
    }

    @SuppressWarnings("resource")
    BAMPredicateCPA predicateCpa = ((WrapperCPA) pCpa).retrieveWrappedCpa(BAMPredicateCPA.class);
    if (predicateCpa == null) {
      throw new InvalidConfigurationException(BAMPredicateRefiner.class.getSimpleName() + " needs an BAMPredicateCPA");
    }

    logger = pLogger;

    PathFormulaManager pfmgr = predicateCpa.getPathFormulaManager();
    BlockFormulaStrategy blockFormulaStrategy = new BAMBlockFormulaStrategy(pfmgr);

    strategy =
        new UsageStatisticsRefinementStrategy(
            predicateCpa.getConfiguration(),
            logger,
            predicateCpa.getSolver(),
            predicateCpa.getPredicateManager());

    refiner = new PredicateCPARefinerFactory(pCpa)
        .setBlockFormulaStrategy(blockFormulaStrategy)
        .create(strategy);
  }

  /**
   * Refines a single path, consulting the caches first. A path that repeats
   * with an unchanged precision is first handed back to BAM and, if seen a
   * second time, declared looped.
   */
  @Override
  public RefinementResult call(ExtendedARGPath pInput) throws CPAException, InterruptedException {
    RefinementResult result;

    Set<CFAEdge> currentPath = new HashSet<>(pInput.getInnerEdges());
    if (trueCache.contains(currentPath)) {
      //Somewhen we have already refined this path as true
      result = RefinementResult.createTrue();
    } else {
      Set<CFAEdge> edgeSet = new HashSet<>(currentPath);
      if (falseCache.containsKey(edgeSet)) {
        PredicatePrecision previousPreds = falseCache.get(edgeSet);
        Precision currentPrecision = getCurrentPrecision();
        PredicatePrecision currentPreds = Precisions.extractPrecisionByType(currentPrecision, PredicatePrecision.class);

        if (previousPreds.calculateDifferenceTo(currentPreds) == 0) {
          // Precision unchanged since the last refutation of this path.
          if (potentialLoopTraces.contains(edgeSet)) {
            //Second time, we obtain it
            numberOfrepeatedPaths.inc();
            logger.log(Level.WARNING, "Path is repeated, BAM is looped");
            pInput.getUsageInfo().setAsLooped();
            result = RefinementResult.createTrue();
            potentialLoopTraces.remove(edgeSet);
          } else {
            result = performPredicateRefinement(pInput);
            logger.log(Level.WARNING, "Path is repeated, hope BAM can handle it itself");
            //BAM can refine with updated predicate refiner, congratulate him.
            numberOfBAMupdates.inc();
            potentialLoopTraces.add(edgeSet);
          }
        } else {
          //rerefine it to obtain new states
          logger.log(Level.WARNING, "Path is repeated, but predicates are missed");
          result = performPredicateRefinement(pInput);
          //We expect the same result
          //but in case of loop the transformation path -> set is not correct, so, there can be a true result
          //assert result.isFalse() : "Current result is " + result;
        }
        //pInput.failureFlag = true;
      } else {
        if (falseCacheForCurrentIteration.containsKey(edgeSet)) {
          //We refined it for other usage
          //just return the result;
          result = RefinementResult.createFalse();
        } else {
          /*if (!totalARGCleaning) {
            subtreesRemover.addStateForRemoving((ARGState)target.getKeyState());
            for (ARGState state : strategy.lastAffectedStates) {
              subtreesRemover.addStateForRemoving(state);
            }
          }*/
          result = performPredicateRefinement(pInput);
        }
      }
    }
    return result;
  }

  /**
   * Runs the wrapped predicate refiner on the path and records the outcome in
   * the appropriate cache. Solver failures are mapped to an unknown result.
   */
  private RefinementResult performPredicateRefinement(ExtendedARGPath path) throws CPAException, InterruptedException {
    RefinementResult result;
    try {
      numberOfrefinedPaths.inc();
      CounterexampleInfo cex = refiner.performRefinementForPath(ARGReached, path);
      Set<CFAEdge> edgeSet = new HashSet<>(path.getInnerEdges());

      if (!cex.isSpurious()) {
        trueCache.add(edgeSet);
        result = RefinementResult.createTrue();
      } else {
        result = RefinementResult.createFalse();
        result.addInfo(PredicateRefinerAdapter.class, getLastAffectedStates());
        result.addPrecision(getLastPrecision());
        falseCacheForCurrentIteration.put(edgeSet, getLastPrecision());
      }

    } catch (IllegalStateException e) {
      //msat_solver return -1 <=> unknown
      //consider its as true;
      logger.log(Level.WARNING, "Solver exception: " + e.getMessage());
      solverFailures.inc();
      result = RefinementResult.createUnknown();
    }
    return result;
  }

  @Override
  protected void handleUpdateSignal(Class<? extends RefinementInterface> pCallerClass, Object pData) {
    if (pCallerClass.equals(IdentifierIterator.class)) {
      if (pData instanceof ReachedSet) {
        //Updating new reached set
        updateReachedSet((ReachedSet)pData);
      }
    }
  }

  @Override
  protected void handleFinishSignal(Class<? extends RefinementInterface> pCallerClass) {
    if (pCallerClass.equals(IdentifierIterator.class)) {
      //false cache may contain other precision
      //It happens if we clean it for other Id and rerefine it now
      //Just replace old precision
      falseCacheForCurrentIteration.forEach(falseCache::put);
      falseCacheForCurrentIteration.clear();
      ARGReached = null;
      strategy.lastAffectedStates.clear();
      strategy.lastAddedPrecision = null;
    }
  }

  @Override
  protected void printAdditionalStatistics(StatisticsWriter pOut) {
    pOut.beginLevel()
        .put(numberOfrefinedPaths)
        .put(numberOfrepeatedPaths)
        .put(solverFailures)
        .put(numberOfBAMupdates)
        .put("Size of false cache", falseCache.size());
  }

  @Override
  public void collectStatistics(Collection<Statistics> pStats) {
    if (refiner instanceof StatisticsProvider) {
      ((StatisticsProvider)refiner).collectStatistics(pStats);
    }
    super.collectStatistics(pStats);
  }

  private List<ARGState> getLastAffectedStates() {
    return strategy.lastAffectedStates;
  }

  private PredicatePrecision getLastPrecision() {
    return strategy.lastAddedPrecision;
  }

  private Precision getCurrentPrecision() {
    return ARGReached.asReachedSet().getPrecision(ARGReached.asReachedSet().getFirstState());
  }

  private void updateReachedSet(ReachedSet pReached) {
    ARGReached = new ARGReachedSet(pReached);
  }

  /**
   * Refinement strategy that records the states and the precision delta of
   * the last refinement step, and skips the ARG update (not needed for the
   * race analysis).
   */
  protected static class UsageStatisticsRefinementStrategy extends BAMPredicateAbstractionRefinementStrategy {

    private List<ARGState> lastAffectedStates = new ArrayList<>();
    private PredicatePrecision lastAddedPrecision;

    public UsageStatisticsRefinementStrategy(final Configuration config, final LogManager logger,
        final Solver pSolver,
        final PredicateAbstractionManager pPredAbsMgr) throws InvalidConfigurationException {
      super(config, logger, pSolver, pPredAbsMgr);
    }

    @Override
    protected void finishRefinementOfPath(
        ARGState pUnreachableState,
        List<ARGState> pAffectedStates,
        ARGReachedSet pReached,
        List<ARGState> abstractionStatesTrace,
        boolean pRepeatedCounterexample)
        throws CPAException, InterruptedException {

      super.finishRefinementOfPath(pUnreachableState, pAffectedStates, pReached, abstractionStatesTrace, pRepeatedCounterexample);

      lastAffectedStates.clear();
      lastAffectedStates.addAll(pAffectedStates);
    }

    @Override
    protected PredicatePrecision addPredicatesToPrecision(PredicatePrecision basePrecision) {
      PredicatePrecision newPrecision = super.addPredicatesToPrecision(basePrecision);
      // Remember only the newly added predicates, not the whole precision.
      lastAddedPrecision = (PredicatePrecision) newPrecision.subtract(basePrecision);
      return newPrecision;
    }

    @Override
    protected void updateARG(PredicatePrecision pNewPrecision, ARGState pRefinementRoot, ARGReachedSet pReached) throws InterruptedException {
      //Do not update ARG for race analysis
    }
  }
}
|
#!/usr/bin/env bash
# Builds optimized, reproducible CosmWasm contract artifacts by running the
# workspace through the cosmwasm/workspace-optimizer Docker image.
set -e
set -o pipefail

# Absolute path of the project root (one directory above this script).
projectPath=$(cd "$(dirname "${0}")" && cd ../ && pwd)

# Named volumes cache the build target dir and the cargo registry between runs.
docker run --rm \
    --volume "$projectPath":/code \
    --volume "$(basename "$projectPath")-target":/code/target \
    --volume cargo-registry:/usr/local/cargo/registry \
    cosmwasm/workspace-optimizer:0.12.3
|
#!/bin/bash
# Regenerates all Xcode projects with xcodegen and reinstalls CocoaPods for
# the example app, starting from a clean slate.
# NOTE(review): the final rm deletes the workspace that `pod install` just
# created — confirm this is intentional (e.g. forcing regeneration on the
# next run) rather than a leftover.
rm -rf UltraGpuImage/UltraGpuImage.xcodeproj \
    UltraGpuImage-iOS/UltraGpuImage-iOS.xcodeproj \
    iOSExample/iOSExample.xcodeproj \
    iOSExample/iOSExample.xcworkspace && \
cd UltraGpuImage && xcodegen && cd .. && \
cd UltraGpuImage-iOS && xcodegen && cd .. && \
cd iOSExample && xcodegen && pod install && cd .. && \
rm -rf iOSExample/iOSExample.xcworkspace
|
"""
Sort the following set of strings in descending order
"""
strings = ["cat", "dog", "elephant", "bat"]
strings.sort(key=len, reverse=True)
print(strings) # prints ["elephant", "bat", "cat", "dog"] |
<reponame>yakovliam/knowbeforeyougo<filename>client/src/main/java/com/yakovliam/knowbeforeyougo/client/io/scanner/WifiScanner.java
package com.yakovliam.knowbeforeyougo.client.io.scanner;
import com.yakovliam.knowbeforeyougo.client.SpringApplicationContext;
import com.yakovliam.knowbeforeyougo.client.config.ClientYAMLConfig;
import com.yakovliam.knowbeforeyougo.client.io.CommandExecutorService;
import com.yakovliam.knowbeforeyougo.client.io.ExecutorFunction;
import com.yakovliam.knowbeforeyougo.client.model.InterfaceMode;
import com.yakovliam.knowbeforeyougo.client.model.TerminalCommand;
import com.yakovliam.knowbeforeyougo.client.model.WirelessInterface;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
public class WifiScanner {
private static final String SCAN_COMMAND = "sudo timeout 35s airodump-ng -w dump --output-format csv %s";
private static final String SCAN_DUMP_FILE_NAME = "dump-01.csv";
private final WirelessInterface wirelessInterface;
/**
* Wifi Scanner
*
* @param wirelessInterface interface
*/
public WifiScanner(WirelessInterface wirelessInterface) {
this.wirelessInterface = wirelessInterface;
}
/**
* Scans the Wi-Fi network for nearby devices
* Returns a string containing a CSV file with Wi-Fi scan results
*
* @return a string containing a CSV file with Wi-Fi scan results
*/
public String scan() throws RuntimeException, IOException {
if (wirelessInterface.getMode().equals(InterfaceMode.MANAGED)) {
throw new RuntimeException("Wireless Interface mode must be Monitor to complete the action of scanning.");
}
// execute command
String command = String.format(SCAN_COMMAND, wirelessInterface.getHandle());
String userPassword = SpringApplicationContext.getApplicationContext().getBean(ClientYAMLConfig.class)
.getClientProperties()
.getUserPassword();
CommandExecutorService.getInstance().executeCommand(new TerminalCommand().
setCommand(command)
.setSudo(true)
.setUserPassword(<PASSWORD>Password), new ExecutorFunction().withWhenFailed((o) -> {
System.out.println("---- FAILED -----\n" + o);
}
).withWhenSucceeded((o) -> {
System.out.println("---- SUCCEEDED -----\n" + o);
}));
Path csvPath = new File(System.getProperty("user.dir"), SCAN_DUMP_FILE_NAME).toPath();
// get file contents
String contents = Files.readString(csvPath);
// delete file
Files.delete(csvPath);
return contents;
}
}
|
<reponame>ParinModi2/Free-Door
var userDao = require("../DatabaseObjs/UserObj");
var userobj = new userDao();
var ejs = require("ejs");
/**
 * Thin service layer that forwards user operations to the UserObj DAO.
 */
function User() {
}

// Wraps a caller's callback so the DAO result is forwarded as (err, res).
function relay(callback) {
    return function (err, res) {
        callback(err, res);
    };
}

User.prototype.validateUser = function (callback, request) {
    console.log("user function ");
    userobj.validateUser(relay(callback), request.emailId, request.password);
};

User.prototype.viewCustomers = function (callback, request) {
    console.log("view customers function ");
    userobj.viewCustomers(relay(callback));
};

User.prototype.createUser = function (callback, request) {
    console.log("signUp function ");
    userobj.createUser(relay(callback), request.emailId, request.firstName, request.lastName, request.mobile);
};

User.prototype.updateUser = function (callback, request) {
    console.log("Update User function ");
    // NOTE(review): request field names differ from createUser
    // (fname/lname/mobileNum vs firstName/lastName/mobile) — confirm against callers.
    userobj.updateUser(relay(callback), request.emailId, request.fname, request.lname, request.password, request.mobileNum);
};

User.prototype.remove = function (callback, emailId) {
    userobj.removeUser(relay(callback), emailId);
};

User.prototype.getUserById = function (callback, userId) {
    userobj.getUserById(relay(callback), userId);
};

module.exports = User;
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.