text
stringlengths 3
1.05M
|
|---|
const Discord = require('discord.js');
module.exports = {
'name': 'list',
'aliases': [
'list-connections',
'list-desktops',
'lis'
],
'desc': 'List available VNC Connections.',
'run': function(msg) {
const conns = Object.entries(this.vncServersConfig.servers);
const embed = new Discord.MessageEmbed()
.setColor('#0099ff')
.setTitle('Available Remote Desktops')
.setDescription('Select one with `' + this.dscServersConfig.get(msg.guild.id).prefix + 'select [id]`.')
.addFields(
...conns.map(conn => {
return {
'name': '**' + conn[1].name + '** [' + conn[0] + ']',
'value': conn[1].desc || '*no description provided*'
}
})
);
msg.channel.send(embed);
}
};
|
from leaguepedia_parser.leaguepedia_parser import LeaguepediaParser
|
/*
======================================================================
PROGRAM FW File Viewer - A retro TUI file viewing tool
@author : Velorek
@version : 0.1
Last modified : 08/09/2021 - Screen resize limits
======================================================================*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h> /* strlen/strcpy/strcat/strcmp — previously implicitly declared */
#include <time.h>
#include <unistd.h> /* getcwd */
#include "rterm.h"
#include "scbuf.h"
#include "tm.h"
#include "keyb.h"
#include "fileb.h"
#include "opfile.h"
#include "listc.h"
#include "about.h"
#define MAX_TEXT1 150
#define MAX_TEXT2 255
#define LIMITHEIGHT 15
#define LIMITWIDTH 50
#define TITLE_LENGTH 26
#define HORIZONTAL_SHIFT 500 //maximum horizontal scroll
//FUNCTION PROTOTYPES
void main_screen();
void cleanArea(int raw);
void tick2();
void tick1(int *num, int *i);
int refresh_screen();
int special_keys(char ch);
int handleopenFile(FILE ** filePtr, char *fwfileName);
void credits();
void filetoDisplay(FILE *filePtr, int scupdate);
void scroll(FILE *filePtr);
char* check_arguments(int argc, char *argv[]);
long checkScrollValues();
void drop_down(char *kglobal);
char horizontal_menu();
int fileInfoDialog();
void about_info();
char *setfile();
int help_info();
void update_indicators();
/* -----------------------------------*/
//GLOBAL VARIABLES
int scW, scH, old_scW, old_scH; //screen dimensions (current and previous, to detect resizes)
char currentPath[MAX_TEXT1];    //full path of the currently open file
char fwfileName[MAX_TEXT2];     //file name as typed/selected by the user
char msg[TITLE_LENGTH] = "=-[fw]:FILE VIEWER v0.1-="; //title-bar text (25 chars + '\0' fills the array)
char kglobal = 0; // Global variable for menu animation
LISTCHOICE *mylist, data; //menus handler (was declared twice; the redundant duplicate was removed)
SCROLLDATA openFileData;  //openFile dialog state
FILE *filePtr=NULL;       //currently open file (NULL = no file open)
//BOOL-like variables
int update = 0; //Bool global variable to control when screen is written to buffer
int displayLogo = 0; //Display logo control (terminal must be at least 80x24)
int time_since_keypressed = 0; //idle ticks since last key, drives deferred buffer updates
int status = 0; //set to -1 anywhere to leave the main loop
//FILE SCROLL POINTERS
long linesinFile =0;
int hdisplayLength=0; //horizontal scroll
int currentColumn = 0; //horizontal scroll
int displayLength = 0; //vertical scroll: rows available for file content
long scrollLimit = 0; //vertical scroll: last reachable top line
long currentLine = 0; //vertical scroll: current top line
int scrollActive = 0; //vertical scroll: 1 when file is longer than the viewport
//MAIN
/*
 * Entry point. Initialises the terminal and screen buffer, opens the file
 * named on the command line (or shows an open-file dialog), then runs the
 * event loop: three software timers (title animation, clock, screen-refresh
 * control) plus keyboard polling, until status becomes -1
 * (ALT-X, CTRL-C or the menu Exit option).
 */
int main(int argc, char *argv[]){
  NTIMER mytimer1,mytimer2,mytimer3;
  char ch=0;
  int i=0;          //title-animation character index, advanced by tick1()
  int num=0;        //title-animation phase toggle, flipped by tick1()
  int keypressed=0;
  /*--------------------------INIT VALUES------------------------------*/
  /* INIT TIMER */
  //mytimer1.ticks must be set to 0 at start.
  mytimer1.ms = 30; // Timer 1 - Title animation
  mytimer1.ticks = 0;
  mytimer2.ms = 100;
  mytimer2.ticks = 0; // Timer 2 - Time animation
  mytimer3.ms = 100;
  mytimer3.ticks = 0; // Timer 3 - Screen display control
  pushTerm(); //record previous terminal settings
  create_screen();
  get_terminal_dimensions(&scH, &scW);
  //ASCII logo only fits on terminals of at least 80x24
  if (scW >79 && scH > 23) displayLogo = 1;
  else displayLogo = 0;
  displayLength = scH - 5; //rows available for file content
  old_scH = scH;
  old_scW = scW;
  resetch();
  main_screen();
  hidecursor();
  /*-------------------------------------------------------------------*/
  /*------------------------CHECK ARGUMENTS----------------------------*/
  check_arguments(argc, argv);
  /*------------------------MAIN PROGRAM LOOP---------------------------*/
  do{
    //TIMER 1
    if (timerC(&mytimer1) == 1) {
      //Title animation
      tick1(&num, &i);
    }
    //TIMER 2
    if (timerC(&mytimer2) == 1) {
      //Time animation
      tick2();
    }
    //TIMER 3
    if (timerC(&mytimer3) == 1) {
      //Update screen control and screen refresh
      //if(time_since_keypressed > 5) time_since_keypressed = 0;
      if (keypressed == 0) time_since_keypressed++;
      else
        time_since_keypressed = 0;
      //check screen dimensions & update if changed
      //NOTE(review): refresh_screen() has an empty (unspecified) parameter
      //list, so the 0 argument passed here is ignored — confirm intent.
      refresh_screen(0);
    }
    //CHECK KEYS
    keypressed = kbhit();
    if (keypressed == 1){
      keypressed = 0;
      time_since_keypressed = 0;
      ch = readch();
      //ESC-KEY related keys
      if (special_keys(ch) == -1) status = -1;
      //FAIL-SAFE ARROW KEYS
      //Plain w/a/s/d mirror the arrow keys in case the terminal does not
      //send the escape trails handled in special_keys().
      if (filePtr != NULL){
        if (ch =='a') {
          //Left-arrow key
          if(currentColumn > 0) {currentColumn--; cleanArea(1); scroll(filePtr);}
        }
        if (ch =='d') {
          //Right-arrow key
          if(currentColumn < HORIZONTAL_SHIFT) {currentColumn++; cleanArea(1); scroll(filePtr);}
        }
        if (ch =='w') {
          //Up-arrow key
          if(currentLine >0) {currentLine--; if (currentColumn > 1) cleanArea(1);scroll(filePtr);}
        }
        if (ch == 's') {
          //Down-arrow key
          if (scrollActive == 1){
            if (currentLine<scrollLimit) currentLine++;
            if (currentColumn > 1) cleanArea(1);
            scroll(filePtr);
          }
        }
      }
      if (ch == K_CTRL_C) status = -1;
      if (ch == K_CTRL_L) {
        //CTRL-L opens the menu bar, like F2
        filetoDisplay(filePtr, 1);
        if(horizontal_menu() == K_ESCAPE) {
          //Exit horizontal menu with ESC 3x
          kglobal = K_ESCAPE;
          main_screen();
        }
        drop_down(&kglobal);
      }
    } else{
      if (filePtr != NULL && update == 1 && time_since_keypressed>1) {
        //Screen buffer is updated here! Screenshot of what is shown on screen
        filetoDisplay(filePtr, 1);
        update = 0;
        time_since_keypressed = 0;
      }
      ch = 0;
      keypressed = 0;
    }
  } while (status != -1);
  /*--------------------------------------------------------------------*/
  credits();
  return 0;
}
/* --------------------------------------*/
//DISPLAY
/* --------------------------------------*/
/*
 * Paint the static UI chrome into the screen buffer: blue background,
 * menu bar (row 2), path bar (row 3), metrics bar (row scH-1), key-help
 * bar (row scH), centred title, and — when the terminal is large enough —
 * the ASCII logo. Flushes the buffer once at the end.
 */
void main_screen(){
  int i=0;
  screen_color(BH_BLUE);
  //horizontal bars, all columns except the last
  for (i=1;i<scW; i++){
    write_ch(i,1,' ',B_BLUE,F_BLUE);
    write_ch(i,2,' ',B_WHITE,F_WHITE);
    write_ch(i,3,' ',B_BLACK,F_BLACK);
    write_ch(i,scH-1,' ',B_BLACK,F_BLACK);
    write_ch(i,scH,' ',B_WHITE,F_WHITE);
  }
  //last column of each bar (the loop above stops at scW-1)
  write_ch(scW,1,' ',B_BLUE,F_BLUE);
  write_ch(scW,2,' ',B_WHITE,F_WHITE);
  write_ch(scW,3,' ',B_BLACK,F_BLACK);
  write_ch(scW,scH-1,' ',B_BLACK,F_BLACK);
  //menu labels with highlighted hotkeys (F / H)
  write_str(1,2,"File Help",B_WHITE,F_BLACK);
  write_ch(12,2,NVER_LINE, B_WHITE,F_BLACK);
  write_ch(1,2,'F',B_WHITE,F_BLUE);
  write_ch(7,2,'H',B_WHITE,F_BLUE);
  //bottom help line
  write_str(1,scH,"F2: MENUS ALT-O: OPEN ALT-X/CTRL-C: EXIT" , B_WHITE,F_BLACK);
  write_ch(11,scH,NVER_LINE , B_WHITE,F_BLACK);
  write_ch(25,scH,NVER_LINE , B_WHITE,F_BLACK);
  //centred title
  write_str((scW/2) - 10, 1,msg,B_BLUE,F_WHITE);
  update_indicators();
  if (displayLogo == 1){
    //centre the 80-column-wide logo from about.h
    for (i=0; i<ABOUT_LINES; i++)
      write_str((scW/2) - (80/2), ((scH/2) - (ABOUT_LINES/2)) + i, about_msg[i], BH_BLUE, F_WHITE);
  }
  update_screen();
}
/*
 * One frame of the title-bar animation: alternately highlight the current
 * character (phase *num==0, then advance *i) and restore the previous one
 * (phase *num==1). When the sweep reaches the end of msg, repaint the full
 * title and restart.
 *
 * Fix: reset when *i reaches TITLE_LENGTH. The previous test
 * (*i > TITLE_LENGTH) allowed the highlight phase to read
 * msg[TITLE_LENGTH], one byte past the end of the array.
 */
void tick1(int *num, int *i){
  //Title animation
  if (*num==0) {
    //highlight the character at *i
    outputcolor(B_BLUE,F_YELLOW);
    gotoxy(((scW/2)-TITLE_LENGTH/2)+*i+3,1);
    printf("%c\n",msg[*i]);
    *num = 1;
    *i = *i + 1;
  } else {
    //restore the previously highlighted character to the normal colour
    outputcolor(B_BLUE,F_WHITE);
    gotoxy(((scW/2)-TITLE_LENGTH/2)+*i+2,1);
    printf("%c\n",msg[*i-1]);
    *num = 0;
  }
  if (*i>=TITLE_LENGTH) { //was '>', which read msg[TITLE_LENGTH] out of bounds
    //sweep finished: repaint the whole title and restart the animation
    *i = 0;
    *num=0;
    outputcolor(B_BLUE,F_BLUE);
    gotoxy(((scW/2)-10),1);
    printf("%s\n",msg);
  }
}
/*
 * One frame of the clock: print the current system time, right-aligned,
 * on the white status bar (row 2).
 */
void tick2(){
  time_t now = time(NULL);
  char *stamp = ctime(&now);
  //ctime() always ends with '\n'; strip it before measuring/printing
  stamp[strlen(stamp) - 1] = '\0';
  outputcolor(B_WHITE,F_BLACK);
  gotoxy(scW - strlen(stamp), 2);
  printf("%s\n", stamp);
}
/*
 * Re-query the terminal size and rebuild the interface if it changed.
 * Returns 1 when a resize was handled, 0 otherwise — including when the
 * terminal is smaller than the supported minimum (LIMITWIDTH x LIMITHEIGHT),
 * in which case nothing is updated at all.
 * NOTE(review): main() calls refresh_screen(0); the empty parameter list
 * here means that argument is silently ignored — confirm intent.
 */
int refresh_screen() {
  /* Query terminal dimensions again and check if resize
     has been produced */
  get_terminal_dimensions(&scH, &scW);
  if (scW < LIMITWIDTH || scH < LIMITHEIGHT) { return 0;} //resize limit
  if (scW >79 && scH > 23) displayLogo = 1;
  else displayLogo = 0;
  displayLength = scH - 5;  //rows available for file content
  hdisplayLength = scW;
  if (filePtr != NULL && scrollActive == 1) scrollLimit = checkScrollValues(); //Update scroll values
  if(scH != old_scH || scW != old_scW){
    free_buffer(); //delete structure from memory for resize
    create_screen(); //create new structure
    main_screen(); //Refresh screen in case of resize
    if (linesinFile > displayLength) scrollActive = 1;
    else scrollActive = 0;
    update=1; //ask the main loop to re-blit the file contents
    update_indicators();
    old_scH = scH;  //remember the new size for the next comparison
    old_scW = scW;
    return 1;
  }
  return 0;
}
/*
 * Redraw the menu bar (row 2) and the path bar (row 3): clear both rows,
 * then show either "No file open!" or the current file name and path,
 * and repaint the menu labels with their hotkeys. Flushes the buffer.
 */
void update_indicators(){
  int i;
  //clear both indicator rows
  for (i=1;i<scW; i++){
    write_ch(i,2,' ',B_WHITE,F_WHITE);
    write_ch(i,3,' ',B_BLACK,F_BLACK);
  }
  write_ch(scW,2,' ',B_WHITE,F_WHITE);
  write_ch(scW,3,' ',B_BLACK,F_BLACK);
  write_ch(12,2,NVER_LINE, B_WHITE,F_BLACK);
  if (strcmp(currentPath,"\0") == 0) {
    //empty path means no file is open
    write_str(1,3,"No file open!",B_BLACK,F_WHITE);
  } else
  {
    write_str(14,2,fwfileName,B_WHITE,F_BLACK);
    write_str(1,3,currentPath,B_BLACK,F_WHITE);
  }
  //menu labels with highlighted hotkeys
  write_str(1, 2, "File Help", B_WHITE, F_BLACK);
  write_str(1, 2, "F", B_WHITE, F_BLUE);
  write_str(7, 2, "H", B_WHITE, F_BLUE);
  update_screen();
}
/*-----------------------------------------*/
/* Manage keys that send a ESC sequence */
/*-----------------------------------------*/
/*
 * Dispatch keys that arrive as an ESC sequence (function keys, arrows,
 * page/home/end, ALT shortcuts). Returns -1 when the program should exit
 * (ALT-X), 0 otherwise. Non-ESC input is ignored here.
 */
int special_keys(char ch) {
  /* MANAGE SPECIAL KEYS */
  /*
    New implementation: Trail of chars found in keyb.c
    If K_ESCAPE is captured read a trail up to 5 characters from the console.
    This is to control the fact that some keys may change
    according to the terminal and expand the editor's possibilities.
    Eg: F2 can be either 27 79 81 or 27 91 91 82.
    - Note : if (currentColumn > 1) cleanArea(1);
    When horizontal scroll is active all the screen is cleaned when moving.
  */
  char chartrail[5];
  if(ch == K_ESCAPE) {
    read_keytrail(chartrail); //Read trail after ESC key
    //Check key trails for special keys.
    //FUNCTION KEYS : F1 - F4
    if(strcmp(chartrail, K_F2_TRAIL) == 0 ||
       strcmp(chartrail, K_F2_TRAIL2) == 0) {
      //F2: snapshot the file view, then open the menu bar
      filetoDisplay(filePtr, 0);
      if(horizontal_menu() == K_ESCAPE) {
        //Exit horizontal menu with ESC 3x
        kglobal = K_ESCAPE;
        main_screen();
      }
      drop_down(&kglobal);
    } else if(strcmp(chartrail, K_F1_TRAIL) == 0 ||
              strcmp(chartrail, K_F1_TRAIL2) == 0) {
      //F1: help window
      help_info();
    // ARROW KEYS
    } else if((strcmp(chartrail, K_LEFT_TRAIL) == 0) && filePtr != NULL ) {
      //Left-arrow key
      if(currentColumn > 0) {currentColumn--; cleanArea(1); scroll(filePtr);}
    } else if((strcmp(chartrail, K_RIGHT_TRAIL) == 0) && filePtr != NULL ) {
      //Right-arrow key
      if(currentColumn < HORIZONTAL_SHIFT) {currentColumn++; cleanArea(1); scroll(filePtr);}
    } else if((strcmp(chartrail, K_UP_TRAIL) == 0) && filePtr != NULL ) {
      //Up-arrow key
      if(currentLine >0) {currentLine--; if (currentColumn > 1) cleanArea(1);scroll(filePtr);}
    } else if((strcmp(chartrail, K_DOWN_TRAIL) == 0) && filePtr != NULL ) {
      //Down-arrow key
      if (scrollActive == 1){
        if (currentLine<scrollLimit) currentLine++;
        if (currentColumn > 1) cleanArea(1);
        scroll(filePtr);
      }
    } else if((strcmp(chartrail, K_PAGEDOWN_TRAIL) == 0) && filePtr != NULL ) {
      //Page-down: advance one full page, clamped at the scroll limit
      if (currentLine + displayLength < scrollLimit) currentLine = currentLine + displayLength;
      else currentLine = scrollLimit;
      if (currentColumn > 1) cleanArea(1);
      scroll(filePtr);
    } else if((strcmp(chartrail, K_PAGEUP_TRAIL) == 0) && filePtr != NULL ) {
      //Page-up: go back one full page, clamped at the top
      if (currentLine - displayLength > 1) currentLine = currentLine - displayLength;
      else currentLine = 0;
      if (currentColumn > 1) cleanArea(1);
      scroll(filePtr);
    } else if((strcmp(chartrail, K_HOME_TRAIL) == 0 ||
               strcmp(chartrail, K_HOME_TRAIL2) == 0) && filePtr != NULL ) {
      //Home: jump to the first line
      currentLine = 0;
      if (currentColumn > 1) cleanArea(1);
      scroll(filePtr);
    } else if((strcmp(chartrail, K_END_TRAIL) == 0 ||
               strcmp(chartrail, K_END_TRAIL2) == 0) && filePtr != NULL ) {
      //End: jump to the last scrollable line
      currentLine = scrollLimit;
      if (currentColumn > 1) cleanArea(1);
      scroll(filePtr);
    } else if(strcmp(chartrail, K_ALT_F) == 0) {
      //ALT-F: open the File drop-down directly
      data.index=FILE_MENU;
      drop_down(&kglobal); //animation
    } else if(strcmp(chartrail, K_ALT_H) == 0) {
      //ALT-H: open the Help drop-down directly
      data.index=HELP_MENU;
      drop_down(&kglobal); //animation
    } else if(strcmp(chartrail, K_ALT_I) == 0) {
      //ALT-I: file information dialog
      filetoDisplay(filePtr, 0);
      fileInfoDialog();
    } else if(strcmp(chartrail, K_ALT_A) == 0) {
      //ALT-A: about dialog
      filetoDisplay(filePtr, 0);
      about_info();
    } else if(strcmp(chartrail, K_ALT_S) == 0) {
      //ALT-S: set/open a file by typing its name
      filetoDisplay(filePtr, 0);
      setfile();
    } else if(strcmp(chartrail, K_ALT_X) == 0) {
      //ALT-X: signal the caller to exit
      return -1;
    } else if(strcmp(chartrail, K_ALT_O) == 0) {
      //ALT-O: open-file dialog; on success replace the current file
      filetoDisplay(filePtr, 1);
      openFileDialog(&openFileData);
      if (strcmp(openFileData.path, "\0") != 0 && file_exists(openFileData.path)){
        strcpy(currentPath, "\0");
        cleanString(currentPath, MAX_TEXT1);
        cleanString(fwfileName, MAX_TEXT2);
        strcpy(currentPath, openFileData.fullPath);
        strcpy(fwfileName, openFileData.path);
        handleopenFile(&filePtr, fwfileName);
        update_indicators();
      }
    }
  }
  return 0;
}
/* --------------------------------------*/
//FILE OPERATIONS
/* --------------------------------------*/
/*
 * (Re)open fwfileName for reading into *filePtr and reset the scroll state.
 * Enables vertical scrolling when the file has more lines than fit in the
 * viewport and flags the display for a refresh. Always returns 0.
 */
int handleopenFile(FILE ** filePtr, char *fwfileName) {
  //long checkF = 0;
  //int ok = 0;
  currentLine = 0;   //back to the top of the new file
  currentColumn=0;   //no horizontal offset
  scrollLimit = 0;
  openFile(filePtr, fwfileName, "r");
  //Check for binary characters to determine filetype
  //checkF = checkFile(*filePtr);
  linesinFile = countLinesFile(*filePtr);
  if (linesinFile > displayLength) scrollActive = 1;
  else scrollActive = 0;
  update=1; //ask the main loop to re-blit the buffer
  return 0;
}
//this routine copies file content to screen buffer so that windows and dialogs
//can be displayed and the content they cover can be later retrieved
/*
 * Render the visible window of the file (starting at currentLine, shifted
 * horizontally by currentColumn) into the screen buffer, plus the metrics
 * bar (line count, progress %, horizontal offset) and the clock.
 * When scupdate==1 the buffer is flushed to the terminal.
 */
void filetoDisplay(FILE *filePtr, int scupdate){
  long lineCounter = 0, i=1, whereinfile=0;
  double progress=0;
  int k=0;
  int wherex = 0;
  //NOTE(review): ch is a char but receives getc()'s int return; the EOF
  //value is processed once as a character before feof() ends the loop —
  //consider int ch and an explicit EOF check.
  char ch;
  time_t mytime = time(NULL);
  char *time_str = ctime(&mytime);
  //Update screen buffer
  if (filePtr != NULL) {
    cleanArea(0);
    rewind(filePtr); //Make sure we are at the beginning
    whereinfile=gotoLine(filePtr,currentLine); //byte offset of currentLine
    if (whereinfile>1) fseek(filePtr, whereinfile, 0);
    while (!feof(filePtr)) {
      ch = getc(filePtr);
      wherex = labs(i-currentColumn); //column after horizontal shift
      if (ch != END_LINE_CHAR && ch != '\0') {
        if (ch==9){ //for convenience TAB char is shown in green
          //with horizontal scroll
          if (i> currentColumn) write_ch(wherex,lineCounter+4,'>',BH_GREEN,F_WHITE);
          i++;
        } else if (ch==13){
          //windows 0x0d is transform to 0x20
          ch=32;
        }
        else{
          if (i> currentColumn) write_ch(wherex,lineCounter+4,ch,BH_BLUE,FH_WHITE);
          i++;
        }
      }
      if(ch == END_LINE_CHAR) {
        //next line: blank the remainder of the row
        for (k=i; k<scW; k++){
          write_ch(k,lineCounter+4,' ',BH_BLUE,F_BLUE);
        }
        lineCounter++;
        i=1;
      }
      //break when it reaches end of vertical displaying area
      if (lineCounter > scH-6) break;
    }
    //to delete the last 0x0A character on screen
    write_ch(i-1,lineCounter+4,' ',BH_BLUE,F_BLUE);
    //display metrics
    write_str(1,scH-1,"- Lines: | - Progress: % | - H: /500", B_BLACK, F_WHITE);
    //NOTE(review): scrollLimit may be 0 here (short file); the quotient is
    //only displayed when scrollActive==1, but the division still happens.
    progress = ((double) currentLine / (double) scrollLimit) * 100;
    write_num(10,scH-1,linesinFile,10, B_BLACK, F_YELLOW);
    if (scrollActive ==1) write_num(32,scH-1,(int)progress,3, B_BLACK, F_YELLOW);
    else write_num(32,scH-1,100,3, B_BLACK, F_YELLOW);
    write_num(45,scH-1,currentColumn,3, B_BLACK, F_YELLOW);
    //display system time
    time_str[strlen(time_str) - 1] = '\0'; //strip ctime()'s trailing '\n'
    write_str(scW - strlen(time_str),2,time_str,B_WHITE,F_BLACK);
    //clean viewing area
    //write to screen buffer
    if (scupdate==1) update_screen();
  }
}
//this routine does a scroll through file and output directly to screen
/*
 * Like filetoDisplay(), but writes characters directly to the terminal
 * (bypassing the screen buffer) for a smoother scrolling experience.
 * Renders the visible window starting at currentLine/currentColumn plus
 * the metrics bar.
 */
void scroll(FILE *filePtr){
  long lineCounter = 0, i=1, whereinfile=0;
  double progress;
  //NOTE(review): as in filetoDisplay(), ch is a char receiving getc()'s
  //int return; the EOF value is processed once before feof() stops the loop.
  char ch;
  int k;
  int wherex = 0;
  if (filePtr != NULL) {
    //RAW output for smoother scroll
    rewind(filePtr); //Make sure we are at the beginning
    whereinfile=gotoLine(filePtr,currentLine); //byte offset of currentLine
    if (whereinfile >1) fseek(filePtr, whereinfile, 0);
    while (!feof(filePtr)) {
      ch = getc(filePtr);
      outputcolor(FH_WHITE,BH_BLUE);
      wherex = labs(i-currentColumn); //column after horizontal shift
      if (wherex < scW-1) gotoxy(labs(i-currentColumn),lineCounter+4);
      if (ch != END_LINE_CHAR && ch != '\0') {
        if (ch==9){
          //for convenience TAB char is shown in green
          outputcolor(F_WHITE,BH_GREEN);
          if (i> currentColumn) printf(">");
          i++;
        } else if (ch==13){
          //windows 0x0d0a transformed to 0x20
          ch = 32;
        } else{
          //currenColumn is for horizontal scroll
          if (i> currentColumn) {
            printf("%c",ch);}
          i++;
        }
      }
      if(ch == END_LINE_CHAR) {
        //next line: blank the remainder of the row
        printf("%c",32);
        for (k=i; k<=scW; k++){
          outputcolor(F_BLUE,BH_BLUE);
          gotoxy(k,lineCounter+4);
          printf("%c",32);
        }
        lineCounter++;
        i=1;
      }
      //break when it reaches the end of vertical displaying area
      if (lineCounter > scH-6) break;
    }
    //delete last 0x0a
    gotoxy(i-1,lineCounter+4);
    outputcolor(F_BLUE,BH_BLUE);
    printf(" ");
    //metrics
    gotoxy(1,scH-1);
    outputcolor(F_WHITE,B_BLACK);
    printf("- Lines: | - Progress: %c | - H: /500",37); //37 == '%'
    //NOTE(review): scrollLimit may be 0 (short file); the quotient is only
    //printed when scrollActive==1, but the division still happens.
    progress = ((double) currentLine / (double) scrollLimit) * 100;
    gotoxy(10,scH-1);
    outputcolor(F_YELLOW,B_BLACK);
    printf("%ld", linesinFile);
    gotoxy(32,scH-1);
    outputcolor(F_YELLOW,B_BLACK);
    if (scrollActive == 1) printf("%d", (int)progress);
    else printf("100");
    gotoxy(45,scH-1);
    outputcolor(F_YELLOW,B_BLACK);
    printf("%d", currentColumn);
  }
}
//clean viewing area
/*
 * Blank the file-viewing area (rows 4 .. scH-2, all columns).
 * raw == 1: write spaces straight to the terminal (used around scrolling);
 * otherwise clear the same region in the screen buffer.
 * NOTE(review): the color-argument order here (F_BLUE,BH_BLUE) differs from
 * most other write_ch call sites (BH_BLUE,F_BLUE) — preserved as-is; verify
 * against the write_ch signature.
 */
void cleanArea(int raw){
  int row, col;
  if (raw == 1) {
    for (row = 4; row < scH-1; row++){
      for (col = 1; col <= scW; col++){
        outputcolor(F_BLUE,BH_BLUE);
        gotoxy(col,row);
        printf("%c",32);
      }
    }
  }
  else{
    for (row = 4; row < scH-1; row++){
      for (col = 1; col <= scW; col++){
        write_ch(col,row,' ',F_BLUE,BH_BLUE);
      }
    }
  }
}
char *check_arguments(int argc, char *argv[]){
char *ok=NULL;
//check arguments or display open file dialog
if(argc > 1) {
//Does the file exist? Open or create?
if(file_exists(argv[1]) == 1) {
//open file in arguments
clearString(currentPath, MAX_TEXT);
strcpy(fwfileName, argv[1]);
ok=getcwd(currentPath, sizeof(currentPath)); //Get path
strcat(currentPath, "/");
strcat(currentPath, argv[1]);
handleopenFile(&filePtr, fwfileName);
} else {
//display open file dialog if file does not exist
strcpy(currentPath, "\0");
openFileDialog(&openFileData);
if (strcmp(openFileData.path, "\0") != 0 && file_exists(openFileData.path)){
strcpy(currentPath, openFileData.fullPath);
strcpy(fwfileName, openFileData.path);
handleopenFile(&filePtr, fwfileName);
} else
{
//no file selected or file does not exist
strcpy(currentPath, "No file open!");
strcpy(fwfileName, "No file open!");
}
}
} else{
//display open file dialog if no arguments are given
strcpy(currentPath, "\0");
openFileDialog(&openFileData);
if (strcmp(openFileData.path, "\0") != 0 && file_exists(openFileData.path)){
strcpy(currentPath, openFileData.fullPath);
strcpy(fwfileName, openFileData.path);
handleopenFile(&filePtr, fwfileName);
} else
{
//no file selected or file does not exist
strcpy(currentPath, "No file open!");
strcpy(fwfileName, "No file open!");
}
}
if (strcmp(currentPath,"\0") == 0) {
write_str(1,3,"No file open!",B_BLACK,F_WHITE);
} else
{
write_str(14,2,fwfileName,B_WHITE,F_BLACK);
write_str(1,3,currentPath,B_BLACK,F_WHITE);
}
update_screen();
return ok;
}
/*
 * Highest top-line the viewport may scroll down to: total lines in the
 * file minus the rows the viewer can display at once.
 */
long checkScrollValues(){
  long limit = linesinFile;
  limit -= displayLength;
  return limit;
}
/* --------------------------------------*/
//DROP-DOWN MENUS
/* --------------------------------------*/
/*--------------------------*/
/* Display horizontal menu */
/*--------------------------*/
/*
 * Run the horizontal (File/Help) menu bar and return the key that closed
 * it. kglobal is set to -1 first so the drop-down loop starts neutral;
 * the menu labels and hotkeys are repainted before returning.
 */
char horizontal_menu() {
  char temp_char;
  kglobal=-1;
  loadmenus(mylist, HOR_MENU);
  temp_char = start_hmenu(&data); //blocks until the user leaves the bar
  free_list(mylist);
  //repaint menu labels with highlighted hotkeys
  write_str(1, 2, "File Help", B_WHITE, F_BLACK);
  write_str(1, 2, "F", B_WHITE, F_BLUE);
  write_str(7, 2, "H", B_WHITE, F_BLUE);
  update_screen();
  return temp_char;
}
/*-------------------------*/
/* Display File menu */
/*-------------------------*/
/*
 * Show the File drop-down menu and run the option the user picks:
 * OPTION_1 file info, OPTION_2 set file by name, OPTION_3 open-file
 * dialog, OPTION_4 exit (sets status = -1). Stores the closing key in
 * kglobal so drop_down() can switch to the adjacent menu.
 */
void filemenu() {
  int i=0;
  data.index = OPTION_NIL;
  loadmenus(mylist, FILE_MENU);
  write_str(1, 2, "File", MENU_SELECTOR, MENU_FOREGROUND1);
  draw_window(1, 3, 13, 9, MENU_PANEL, MENU_FOREGROUND0,0, 1,0);
  //separator line inside the drop-down panel
  for (i=2; i<13; i++)
    write_ch(i,7,NHOR_LINE,B_WHITE,F_BLACK);
  kglobal = start_vmenu(&data); //blocks until a choice or escape
  close_window();
  update_indicators();
  update_screen();
  free_list(mylist);
  if(data.index == OPTION_1) {
    //File info
    fileInfoDialog();
  }
  if(data.index == OPTION_3) {
    //External Module - Open file dialog.
    openFileDialog(&openFileData);
    if (strcmp(openFileData.path, "\0") != 0 && file_exists(openFileData.path)){
      strcpy(currentPath, "\0");
      cleanString(currentPath, MAX_TEXT1);
      cleanString(fwfileName, MAX_TEXT2);
      strcpy(currentPath, openFileData.fullPath);
      strcpy(fwfileName, openFileData.path);
      handleopenFile(&filePtr, fwfileName);
      update_indicators();
    }
  }
  if(data.index == OPTION_2) {
    //Set file by typing its name
    setfile();
  }
  if(data.index == OPTION_4) {
    //Exit the program (main loop watches status)
    status = -1;
  }
  data.index = OPTION_NIL;
}
/*
 * Prompt for a file name in an input window and open it if it exists.
 * On success returns getcwd()'s result (used to build the absolute path);
 * when the file does not exist, resets all file state back to "no file
 * open" and repaints the main screen. Returns NULL when nothing was typed
 * or the file was missing.
 */
char *setfile(){
  char *ok=NULL;
  int count=0;
  //NOTE(review): MAX_TEXT is not defined in this file — presumably it comes
  //from a shared header (cf. MAX_TEXT1/MAX_TEXT2 above); verify.
  char tempFile[MAX_TEXT];
  count = inputWindow("New File:", tempFile, "Set file name");
  if(count > 0) {
    cleanString(currentPath, MAX_TEXT1);
    cleanString(fwfileName, MAX_TEXT2);
    strcpy(fwfileName, tempFile);
    if (file_exists(fwfileName)){
      handleopenFile(&filePtr, fwfileName);
      ok=getcwd(currentPath, sizeof(currentPath)); //Get path
      strcat(currentPath, "/");
      strcat(currentPath, fwfileName);
    } else
    {
      //missing file: tell the user and reset to the no-file state
      infoWindow(mylist, "File does not exist!", "File Information");
      cleanString(currentPath, MAX_TEXT1);
      cleanString(fwfileName, MAX_TEXT2);
      currentPath[0] ='\0';
      fwfileName[0] ='\0';
      filePtr = NULL;
      scrollActive = 0;
      cleanArea(0);
      displayLogo = 1;
      main_screen();
    }
    update_indicators();
  }
  return ok;
}
/*--------------------------*/
/* Display Help menu */
/*--------------------------*/
/*
 * Show the Help drop-down menu and run the chosen option:
 * OPTION_1 help window, OPTION_2 about dialog. Stores the closing key in
 * kglobal so drop_down() can switch to the adjacent menu.
 */
void helpmenu() {
  data.index = OPTION_NIL;
  loadmenus(mylist, HELP_MENU);
  write_str(7, 2, "Help", MENU_SELECTOR, MENU_FOREGROUND1);
  draw_window(7, 3, 16, 6, MENU_PANEL, MENU_FOREGROUND0, 0,1,0);
  kglobal = start_vmenu(&data); //blocks until a choice or escape
  close_window();
  //repaint menu labels with highlighted hotkeys
  write_str(1, 2, "File Help", B_WHITE, F_BLACK);
  write_str(1, 2, "F", B_WHITE, F_BLUE);
  write_str(7, 2, "H", B_WHITE, F_BLUE);
  update_screen();
  free_list(mylist);
  if(data.index == OPTION_1) {
    //Help info
    help_info();
  }
  if(data.index == OPTION_2) {
    //About info
    about_info();
  }
  data.index = -1;
}
/*----------------------*/
/* Drop_Down Menu Loop */
/*----------------------*/
/*
 * Drop-down menu loop: keep showing the File/Help menus, hopping between
 * them on left/right arrows, until ENTER confirms a choice or ESC aborts.
 */
void drop_down(char *kglobal) {
  /*
    Drop_down loop animation.
    K_LEFTMENU/K_RIGHTMENU -1 is used when right/left arrow keys are used
    so as to break vertical menu and start the adjacent menu
    kglobal is changed by the menu functions.
  */
  do {
    if(*kglobal == K_ESCAPE) {
      //Exit drop-down menu with ESC
      *kglobal = 0;
      break;
    }
    if(data.index == FILE_MENU) {
      filemenu();
      //arrow keys hop to the adjacent (only other) menu
      if(*kglobal == K_LEFTMENU) {
        data.index = HELP_MENU;
      }
      if(*kglobal == K_RIGHTMENU) {
        data.index = HELP_MENU;
      }
    }
    if(data.index == HELP_MENU) {
      helpmenu();
      if(*kglobal == K_LEFTMENU) {
        data.index = FILE_MENU;
      }
      if(*kglobal == K_RIGHTMENU) {
        data.index = FILE_MENU;
      }
    }
  } while(*kglobal != K_ENTER);
}
/*------------------*/
/* File Info Dialog */
/*------------------*/
/*
 * Show a modal dialog with the open file's size in bytes, line count and
 * path (wrapped onto a second line at column 30), or an information window
 * when no file is open. Always returns 0.
 *
 * Fix: the path-copy loop used to copy 59 bytes of currentPath regardless
 * of its length, so bytes past the terminator (garbage) could end up in the
 * dialog when strlen(currentPath) == 30; it now stops at the terminator and
 * NUL-pads the remainder. The no-op strcat(tempMsg, "") was dropped.
 */
int fileInfoDialog() {
  long size = 0, lines = 0;
  int i;
  size_t plen;
  char sizeStr[20];
  char linesStr[20];
  char tempMsg[150];
  char pathtxt[60];
  if(filePtr != NULL) {
    size = getfileSize(filePtr);
    lines = countLinesFile(filePtr);
    if(size <= 0)
      size = 0;
    if(lines <= 0)
      lines = 0;
    sprintf(sizeStr, "%ld", size);
    sprintf(linesStr, "%ld", lines);
    strcpy(tempMsg, "[+] File Data:\n- ");
    strcat(tempMsg, sizeStr);
    strcat(tempMsg, " bytes.\n- ");
    strcat(tempMsg, linesStr);
    strcat(tempMsg, " lines.\n[");
    //Copy at most 59 chars of the path, breaking the line at column 30
    //(that column's character is replaced by '\n', as before).
    plen = strlen(currentPath);
    for (i=0;i<59;i++){
      if (i==30) pathtxt[i] = '\n';
      else pathtxt[i] = ((size_t)i < plen) ? currentPath[i] : '\0';
    }
    pathtxt[59] = CHAR_NIL;
    strcat(tempMsg, pathtxt);
    strcat(tempMsg, "]");
    alertWindow(mylist, tempMsg, "File Information");
  } else {
    infoWindow(mylist, "No file open!", "File Information");
  }
  return 0;
}
/*
 * Show the ABOUT dialog; its text is assembled from the ALINE1..ALINE4
 * fragments defined in about.h.
 */
void about_info(){
  const char *parts[] = { ALINE1, ALINE2, ALINE3, ALINE4 };
  char text[200];
  size_t k;
  text[0] = '\0';
  for (k = 0; k < sizeof(parts) / sizeof(parts[0]); k++)
    strcat(text, parts[k]);
  alertWindow(mylist, text, "ABOUT");
}
/*
 * Show the HELP window; its text is assembled from the HELP1..HELP11
 * fragments located in user_inter.h. Repaints the screen afterwards.
 * Always returns 0.
 */
int help_info() {
  const char *pages[] = { HELP1, HELP2, HELP3, HELP4, HELP5, HELP6,
                          HELP7, HELP8, HELP9, HELP10, HELP11 };
  char text[500];
  size_t k;
  text[0] = '\0';
  for (k = 0; k < sizeof(pages) / sizeof(pages[0]); k++)
    strcat(text, pages[k]);
  helpWindow(mylist, text, "HELP");
  refresh_screen();
  return 0;
}
/* --------------------------------------*/
//CREDITS
/* --------------------------------------*/
/*
 * Shutdown sequence: close the open file, free dialog/screen resources,
 * restore the terminal, and play the closing credits animation on the
 * bottom rows before returning to the shell.
 */
void credits(){
  NTIMER mytimer1;
  int i=0, j=0;
  //NOTE(review): cmsg holds exactly 31 characters in char[31], so it is NOT
  //NUL-terminated (legal in C). Safe here because it is only indexed with
  //i/j < 30, never used as a C string.
  char cmsg[31] = "\nFile vieWer. Coded by v3l0r3k\n";
  if(filePtr != NULL) {
    closeFile(filePtr);
  }
  mytimer1.ms = 10; // Timer 1 - Credits animation
  mytimer1.ticks = 0;
  cleanArea(1);
  if (displayLogo ==1){
    //repaint the logo directly on the terminal one last time
    for (i=0; i<ABOUT_LINES; i++){
      outputcolor(F_WHITE, BH_BLUE);
      gotoxy((scW/2) - (80/2), ((scH/2) - (ABOUT_LINES/2)) + i);
      printf("%s",about_msg[i]);
    }
  }
  //free the open-file dialog's dynamic state if a selection was made
  if (openFileData.itemIndex != 0) {
    free(openFileData.item);
    free(openFileData.path);
  }
  free_buffer();
  resetTerm();
  outputcolor(B_BLUE,F_WHITE);
  gotoxy(((scW/2)-10),1);
  printf("%s\n",msg);
  //blank the three bottom rows
  outputcolor(B_BLACK,F_BLACK);
  gotoxy(1,scH-2);
  for (i=0; i<scW;i++)
    printf(" ");
  gotoxy(1,scH-1);
  for (i=0; i<scW;i++)
    printf(" ");
  gotoxy(1,scH);
  for (i=0; i<scW;i++)
    printf(" ");
  i=0;
  j=0;
  //two-phase animation: type the credits line, then fade parts of it
  do{
    if (timerC(&mytimer1) == 1) {
      if (mytimer1.ticks<23){
        //phase 1: type the message character by character
        outputcolor(F_WHITE,B_BLACK);
        gotoxy(i,scH-2);
        if (i<30) printf("%c\n",cmsg[i]);
        i++;
      } else {
        //phase 2: re-colour selected characters, dim the tail
        gotoxy(j,scH-2);
        //outputcolor(B_BLACK,F_WHITE);
        outputcolor(FH_BLUE,B_BLACK);
        if (j<30) {
          if (j== 1 || j== 9) printf("%c\n",cmsg[j]);
          if (j> 22) {outputcolor(FH_BLACK,B_BLACK); printf("%c\n",cmsg[j]);}
        }
        j++;
      }
    }
  } while (mytimer1.ticks<53);
  printf("\n\r%c",127);
  showcursor();
  resetAnsi(0);
  showcursor();
}
|
/**
* @ag-grid-community/core - Advanced Data Grid / Data Table supporting Javascript / React / AngularJS / Web Components
* @version v25.0.0
* @link http://www.ag-grid.com/
* @license MIT
*/
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var array_1 = require("./array");
// Lifecycle states for AgPromise. Compiled TypeScript enum output: builds a
// two-way map (name -> number and number -> name) and exports it.
var AgPromiseStatus;
(function (AgPromiseStatus) {
    AgPromiseStatus[AgPromiseStatus["IN_PROGRESS"] = 0] = "IN_PROGRESS";
    AgPromiseStatus[AgPromiseStatus["RESOLVED"] = 1] = "RESOLVED";
})(AgPromiseStatus = exports.AgPromiseStatus || (exports.AgPromiseStatus = {}));
// Minimal synchronous promise-like class (compiled TypeScript output; edits
// here would desync the adjacent source map). Unlike native Promises,
// `then` callbacks run synchronously when already resolved, and rejection
// is not implemented (onReject only warns).
var AgPromise = /** @class */ (function () {
    // callback receives (resolve, reject); resolve stores the value and
    // flushes any queued waiters.
    function AgPromise(callback) {
        var _this = this;
        this.status = AgPromiseStatus.IN_PROGRESS;
        this.resolution = null;   // the resolved value once status is RESOLVED
        this.waiters = [];        // callbacks queued by then() before resolution
        callback(function (value) { return _this.onDone(value); }, function (params) { return _this.onReject(params); });
    }
    // Resolve with an array of all values once every input promise resolves.
    // Note: resolves in input order; a zero-length input never resolves
    // (remainingToResolve starts at 0 and no decrement ever runs).
    AgPromise.all = function (promises) {
        return new AgPromise(function (resolve) {
            var remainingToResolve = promises.length;
            var combinedValues = new Array(remainingToResolve);
            array_1.forEach(promises, function (promise, index) {
                promise.then(function (value) {
                    combinedValues[index] = value;
                    remainingToResolve--;
                    if (remainingToResolve === 0) {
                        resolve(combinedValues);
                    }
                });
            });
        });
    };
    // Already-resolved promise carrying `value` (defaults to null).
    AgPromise.resolve = function (value) {
        if (value === void 0) { value = null; }
        return new AgPromise(function (resolve) { return resolve(value); });
    };
    // Chain: runs func with the resolution (immediately if already resolved,
    // otherwise when onDone fires) and resolves the returned promise with
    // func's result.
    AgPromise.prototype.then = function (func) {
        var _this = this;
        return new AgPromise(function (resolve) {
            if (_this.status === AgPromiseStatus.RESOLVED) {
                resolve(func(_this.resolution));
            }
            else {
                _this.waiters.push(function (value) { return resolve(func(value)); });
            }
        });
    };
    // Synchronous accessor: map the resolution through ifResolved when
    // resolved, otherwise return the fallback value.
    AgPromise.prototype.resolveNow = function (ifNotResolvedValue, ifResolved) {
        return this.status === AgPromiseStatus.RESOLVED ? ifResolved(this.resolution) : ifNotResolvedValue;
    };
    // Internal resolve: record the value and flush queued waiters in order.
    AgPromise.prototype.onDone = function (value) {
        this.status = AgPromiseStatus.RESOLVED;
        this.resolution = value;
        array_1.forEach(this.waiters, function (waiter) { return waiter(value); });
    };
    // Rejection is intentionally unimplemented ("to be implemented").
    AgPromise.prototype.onReject = function (params) {
        console.warn('TBI');
    };
    return AgPromise;
}());
exports.AgPromise = AgPromise;
//# sourceMappingURL=promise.js.map
|
#ifndef CalibTracker_SiStripESProducer_DummyCondDBWriter_h
#define CalibTracker_SiStripESProducer_DummyCondDBWriter_h
// user include files
#include "FWCore/Framework/interface/EDAnalyzer.h"
#include "FWCore/Framework/interface/ESWatcher.h"
#include "FWCore/Framework/interface/Run.h"
#include "FWCore/Framework/interface/EventSetup.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
#include "FWCore/ServiceRegistry/interface/Service.h"
#include "CondCore/DBOutputService/interface/PoolDBOutputService.h"
#include "FWCore/Utilities/interface/Exception.h"
#include <string>
/**
 * DummyCondDBWriter: CMSSW analyzer template that, at the end of each run,
 * copies a conditions object (TObject, fetched from the EventSetup record
 * TRecord) into a TObjectO and writes it to the conditions database via
 * PoolDBOutputService. TObjectO must be constructible from TObject.
 */
template <typename TObject, typename TObjectO, typename TRecord>
class DummyCondDBWriter : public edm::EDAnalyzer {
public:
  explicit DummyCondDBWriter(const edm::ParameterSet& iConfig);
  ~DummyCondDBWriter() override;
  // No per-event work: everything happens in endRun().
  void analyze(const edm::Event& e, const edm::EventSetup& es) override{};
  void endRun(const edm::Run& run, const edm::EventSetup& es) override;

private:
  edm::ParameterSet iConfig_;               // kept whole: "record"/"OpenIovAt*" read later in endRun()
  edm::ESWatcher<TRecord> watcher_;         // detects IOV changes so unchanged payloads are skipped
  edm::ESGetToken<TObject, TRecord> token_; // EventSetup access, consumed at the EndRun transition
};
template <typename TObject, typename TObjectO, typename TRecord>
DummyCondDBWriter<TObject, TObjectO, TRecord>::DummyCondDBWriter(const edm::ParameterSet& iConfig)
    : iConfig_(iConfig),
      // The untracked "label" parameter selects which ESProduct instance to read.
      token_(esConsumes<edm::Transition::EndRun>(
          edm::ESInputTag{"", iConfig.getUntrackedParameter<std::string>("label", "")})) {
  edm::LogInfo("DummyCondDBWriter") << "DummyCondDBWriter constructor for typename " << typeid(TObject).name()
                                    << " and record " << typeid(TRecord).name() << std::endl;
}
template <typename TObject, typename TObjectO, typename TRecord>
DummyCondDBWriter<TObject, TObjectO, TRecord>::~DummyCondDBWriter() {
  edm::LogInfo("DummyCondDBWriter") << "DummyCondDBWriter::~DummyCondDBWriter()" << std::endl;
}
// Write the conditions payload once per run, but only when the watched
// record's IOV actually changed since the last write.
template <typename TObject, typename TObjectO, typename TRecord>
void DummyCondDBWriter<TObject, TObjectO, TRecord>::endRun(const edm::Run& run, const edm::EventSetup& es) {
  std::string rcdName = iConfig_.getParameter<std::string>("record");
  if (!watcher_.check(es)) {
    // Record unchanged in this run: nothing to store.
    edm::LogInfo("DummyCondDBWriter") << "not needed to store objects with Record " << rcdName << " at run "
                                      << run.run() << std::endl;
    return;
  }
  // Copy-construct the output object from the EventSetup payload.
  auto obj = std::make_unique<TObjectO>(es.getData(token_));
  cond::Time_t Time_;
  //And now write data in DB
  edm::Service<cond::service::PoolDBOutputService> dbservice;
  if (dbservice.isAvailable()) {
    // IOV start: beginOfTime (default), currentTime, or an explicit value.
    std::string openIovAt = iConfig_.getUntrackedParameter<std::string>("OpenIovAt", "beginOfTime");
    if (openIovAt == "beginOfTime")
      Time_ = dbservice->beginOfTime();
    else if (openIovAt == "currentTime")
      Time_ = dbservice->currentTime();
    else
      Time_ = iConfig_.getUntrackedParameter<uint32_t>("OpenIovAtTime", 1);
    dbservice->writeOne(*obj, Time_, rcdName);
  } else {
    edm::LogError("SiStripFedCablingBuilder") << "Service is unavailable" << std::endl;
  }
}
#endif
|
# coding: utf-8
"""
Intersight REST API
This is Intersight REST API
OpenAPI spec version: 1.0.9-262
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class FirmwareUpgradeStatus(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps each python attribute name to its swagger type string;
    # to_dict() iterates this mapping to serialize the model.
    swagger_types = {
        'account_moid': 'str',
        'ancestors': 'list[MoBaseMoRef]',
        'create_time': 'datetime',
        'mod_time': 'datetime',
        'moid': 'str',
        'object_type': 'str',
        'owners': 'list[str]',
        'parent': 'MoBaseMoRef',
        'tags': 'list[MoTag]',
        'version_context': 'MoVersionContext',
        'download_error': 'str',
        'download_percentage': 'int',
        'download_stage': 'str',
        'download_status': 'str',
        'ep_power_status': 'str',
        'overall_error': 'str',
        'overall_percentage': 'int',
        'overallstatus': 'str',
        'pending_type': 'str',
        'upgrade': 'FirmwareUpgradeRef'
    }
    # Maps each python attribute name to its JSON key in the API payload.
    attribute_map = {
        'account_moid': 'AccountMoid',
        'ancestors': 'Ancestors',
        'create_time': 'CreateTime',
        'mod_time': 'ModTime',
        'moid': 'Moid',
        'object_type': 'ObjectType',
        'owners': 'Owners',
        'parent': 'Parent',
        'tags': 'Tags',
        'version_context': 'VersionContext',
        'download_error': 'DownloadError',
        'download_percentage': 'DownloadPercentage',
        'download_stage': 'DownloadStage',
        'download_status': 'DownloadStatus',
        'ep_power_status': 'EpPowerStatus',
        'overall_error': 'OverallError',
        'overall_percentage': 'OverallPercentage',
        'overallstatus': 'Overallstatus',
        'pending_type': 'PendingType',
        'upgrade': 'Upgrade'
    }

    def __init__(self, account_moid=None, ancestors=None, create_time=None, mod_time=None, moid=None, object_type=None, owners=None, parent=None, tags=None, version_context=None, download_error=None, download_percentage=None, download_stage=None, download_status=None, ep_power_status='none', overall_error=None, overall_percentage=None, overallstatus='none', pending_type='none', upgrade=None):
        """
        FirmwareUpgradeStatus - a model defined in Swagger
        """
        # Backing fields for the properties defined below.
        self._account_moid = None
        self._ancestors = None
        self._create_time = None
        self._mod_time = None
        self._moid = None
        self._object_type = None
        self._owners = None
        self._parent = None
        self._tags = None
        self._version_context = None
        self._download_error = None
        self._download_percentage = None
        self._download_stage = None
        self._download_status = None
        self._ep_power_status = None
        self._overall_error = None
        self._overall_percentage = None
        self._overallstatus = None
        self._pending_type = None
        self._upgrade = None
        # Assign through the property setters so that the enum-validated
        # fields (ep_power_status, overallstatus, pending_type) are checked
        # at construction time; their defaults are 'none', not None, so they
        # are always validated.
        if account_moid is not None:
            self.account_moid = account_moid
        if ancestors is not None:
            self.ancestors = ancestors
        if create_time is not None:
            self.create_time = create_time
        if mod_time is not None:
            self.mod_time = mod_time
        if moid is not None:
            self.moid = moid
        if object_type is not None:
            self.object_type = object_type
        if owners is not None:
            self.owners = owners
        if parent is not None:
            self.parent = parent
        if tags is not None:
            self.tags = tags
        if version_context is not None:
            self.version_context = version_context
        if download_error is not None:
            self.download_error = download_error
        if download_percentage is not None:
            self.download_percentage = download_percentage
        if download_stage is not None:
            self.download_stage = download_stage
        if download_status is not None:
            self.download_status = download_status
        if ep_power_status is not None:
            self.ep_power_status = ep_power_status
        if overall_error is not None:
            self.overall_error = overall_error
        if overall_percentage is not None:
            self.overall_percentage = overall_percentage
        if overallstatus is not None:
            self.overallstatus = overallstatus
        if pending_type is not None:
            self.pending_type = pending_type
        if upgrade is not None:
            self.upgrade = upgrade

    @property
    def account_moid(self):
        """
        Gets the account_moid of this FirmwareUpgradeStatus.
        The Account ID for this managed object.

        :return: The account_moid of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._account_moid

    @account_moid.setter
    def account_moid(self, account_moid):
        """
        Sets the account_moid of this FirmwareUpgradeStatus.
        The Account ID for this managed object.

        :param account_moid: The account_moid of this FirmwareUpgradeStatus.
        :type: str
        """
        self._account_moid = account_moid

    @property
    def ancestors(self):
        """
        Gets the ancestors of this FirmwareUpgradeStatus.
        Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy.

        :return: The ancestors of this FirmwareUpgradeStatus.
        :rtype: list[MoBaseMoRef]
        """
        return self._ancestors

    @ancestors.setter
    def ancestors(self, ancestors):
        """
        Sets the ancestors of this FirmwareUpgradeStatus.
        Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy.

        :param ancestors: The ancestors of this FirmwareUpgradeStatus.
        :type: list[MoBaseMoRef]
        """
        self._ancestors = ancestors

    @property
    def create_time(self):
        """
        Gets the create_time of this FirmwareUpgradeStatus.
        The time when this managed object was created.

        :return: The create_time of this FirmwareUpgradeStatus.
        :rtype: datetime
        """
        return self._create_time

    @create_time.setter
    def create_time(self, create_time):
        """
        Sets the create_time of this FirmwareUpgradeStatus.
        The time when this managed object was created.

        :param create_time: The create_time of this FirmwareUpgradeStatus.
        :type: datetime
        """
        self._create_time = create_time

    @property
    def mod_time(self):
        """
        Gets the mod_time of this FirmwareUpgradeStatus.
        The time when this managed object was last modified.

        :return: The mod_time of this FirmwareUpgradeStatus.
        :rtype: datetime
        """
        return self._mod_time

    @mod_time.setter
    def mod_time(self, mod_time):
        """
        Sets the mod_time of this FirmwareUpgradeStatus.
        The time when this managed object was last modified.

        :param mod_time: The mod_time of this FirmwareUpgradeStatus.
        :type: datetime
        """
        self._mod_time = mod_time

    @property
    def moid(self):
        """
        Gets the moid of this FirmwareUpgradeStatus.
        A unique identifier of this Managed Object instance.

        :return: The moid of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._moid

    @moid.setter
    def moid(self, moid):
        """
        Sets the moid of this FirmwareUpgradeStatus.
        A unique identifier of this Managed Object instance.

        :param moid: The moid of this FirmwareUpgradeStatus.
        :type: str
        """
        self._moid = moid

    @property
    def object_type(self):
        """
        Gets the object_type of this FirmwareUpgradeStatus.
        The fully-qualified type of this managed object, e.g. the class name.

        :return: The object_type of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._object_type

    @object_type.setter
    def object_type(self, object_type):
        """
        Sets the object_type of this FirmwareUpgradeStatus.
        The fully-qualified type of this managed object, e.g. the class name.

        :param object_type: The object_type of this FirmwareUpgradeStatus.
        :type: str
        """
        self._object_type = object_type

    @property
    def owners(self):
        """
        Gets the owners of this FirmwareUpgradeStatus.
        An array of owners which represent effective ownership of this object.

        :return: The owners of this FirmwareUpgradeStatus.
        :rtype: list[str]
        """
        return self._owners

    @owners.setter
    def owners(self, owners):
        """
        Sets the owners of this FirmwareUpgradeStatus.
        An array of owners which represent effective ownership of this object.

        :param owners: The owners of this FirmwareUpgradeStatus.
        :type: list[str]
        """
        self._owners = owners

    @property
    def parent(self):
        """
        Gets the parent of this FirmwareUpgradeStatus.
        The direct ancestor of this managed object in the containment hierarchy.

        :return: The parent of this FirmwareUpgradeStatus.
        :rtype: MoBaseMoRef
        """
        return self._parent

    @parent.setter
    def parent(self, parent):
        """
        Sets the parent of this FirmwareUpgradeStatus.
        The direct ancestor of this managed object in the containment hierarchy.

        :param parent: The parent of this FirmwareUpgradeStatus.
        :type: MoBaseMoRef
        """
        self._parent = parent

    @property
    def tags(self):
        """
        Gets the tags of this FirmwareUpgradeStatus.
        An array of tags, which allow to add key, value meta-data to managed objects.

        :return: The tags of this FirmwareUpgradeStatus.
        :rtype: list[MoTag]
        """
        return self._tags

    @tags.setter
    def tags(self, tags):
        """
        Sets the tags of this FirmwareUpgradeStatus.
        An array of tags, which allow to add key, value meta-data to managed objects.

        :param tags: The tags of this FirmwareUpgradeStatus.
        :type: list[MoTag]
        """
        self._tags = tags

    @property
    def version_context(self):
        """
        Gets the version_context of this FirmwareUpgradeStatus.
        The versioning info for this managed object

        :return: The version_context of this FirmwareUpgradeStatus.
        :rtype: MoVersionContext
        """
        return self._version_context

    @version_context.setter
    def version_context(self, version_context):
        """
        Sets the version_context of this FirmwareUpgradeStatus.
        The versioning info for this managed object

        :param version_context: The version_context of this FirmwareUpgradeStatus.
        :type: MoVersionContext
        """
        self._version_context = version_context

    @property
    def download_error(self):
        """
        Gets the download_error of this FirmwareUpgradeStatus.
        Provides the download failure message

        :return: The download_error of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._download_error

    @download_error.setter
    def download_error(self, download_error):
        """
        Sets the download_error of this FirmwareUpgradeStatus.
        Provides the download failure message

        :param download_error: The download_error of this FirmwareUpgradeStatus.
        :type: str
        """
        self._download_error = download_error

    @property
    def download_percentage(self):
        """
        Gets the download_percentage of this FirmwareUpgradeStatus.
        Provides the image downloaded percentage from image source

        :return: The download_percentage of this FirmwareUpgradeStatus.
        :rtype: int
        """
        return self._download_percentage

    @download_percentage.setter
    def download_percentage(self, download_percentage):
        """
        Sets the download_percentage of this FirmwareUpgradeStatus.
        Provides the image downloaded percentage from image source

        :param download_percentage: The download_percentage of this FirmwareUpgradeStatus.
        :type: int
        """
        self._download_percentage = download_percentage

    @property
    def download_stage(self):
        """
        Gets the download_stage of this FirmwareUpgradeStatus.
        Provides the latest download phase like downloading, flashing, downloaded

        :return: The download_stage of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._download_stage

    @download_stage.setter
    def download_stage(self, download_stage):
        """
        Sets the download_stage of this FirmwareUpgradeStatus.
        Provides the latest download phase like downloading, flashing, downloaded

        :param download_stage: The download_stage of this FirmwareUpgradeStatus.
        :type: str
        """
        self._download_stage = download_stage

    @property
    def download_status(self):
        """
        Gets the download_status of this FirmwareUpgradeStatus.
        Provides the download status like downloading, downloaded or failed

        :return: The download_status of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._download_status

    @download_status.setter
    def download_status(self, download_status):
        """
        Sets the download_status of this FirmwareUpgradeStatus.
        Provides the download status like downloading, downloaded or failed

        :param download_status: The download_status of this FirmwareUpgradeStatus.
        :type: str
        """
        self._download_status = download_status

    @property
    def ep_power_status(self):
        """
        Gets the ep_power_status of this FirmwareUpgradeStatus.
        Provides the server power status at the end of upgrade

        :return: The ep_power_status of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._ep_power_status

    @ep_power_status.setter
    def ep_power_status(self, ep_power_status):
        """
        Sets the ep_power_status of this FirmwareUpgradeStatus.
        Provides the server power status at the end of upgrade

        :param ep_power_status: The ep_power_status of this FirmwareUpgradeStatus.
        :type: str
        """
        # Enum-validated field: reject values outside the API's allowed set.
        allowed_values = ["none", "powered on", "powered off"]
        if ep_power_status not in allowed_values:
            raise ValueError(
                "Invalid value for `ep_power_status` ({0}), must be one of {1}"
                .format(ep_power_status, allowed_values)
            )
        self._ep_power_status = ep_power_status

    @property
    def overall_error(self):
        """
        Gets the overall_error of this FirmwareUpgradeStatus.
        Provides the failure message when download or upgrade fails

        :return: The overall_error of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._overall_error

    @overall_error.setter
    def overall_error(self, overall_error):
        """
        Sets the overall_error of this FirmwareUpgradeStatus.
        Provides the failure message when download or upgrade fails

        :param overall_error: The overall_error of this FirmwareUpgradeStatus.
        :type: str
        """
        self._overall_error = overall_error

    @property
    def overall_percentage(self):
        """
        Gets the overall_percentage of this FirmwareUpgradeStatus.
        Provides the overall percentage of the upgrade inclusive of download

        :return: The overall_percentage of this FirmwareUpgradeStatus.
        :rtype: int
        """
        return self._overall_percentage

    @overall_percentage.setter
    def overall_percentage(self, overall_percentage):
        """
        Sets the overall_percentage of this FirmwareUpgradeStatus.
        Provides the overall percentage of the upgrade inclusive of download

        :param overall_percentage: The overall_percentage of this FirmwareUpgradeStatus.
        :type: int
        """
        self._overall_percentage = overall_percentage

    @property
    def overallstatus(self):
        """
        Gets the overallstatus of this FirmwareUpgradeStatus.
        Provides the overall status, e.g., downloading, upgrading, successful, failure and pending-for-reboot

        :return: The overallstatus of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._overallstatus

    @overallstatus.setter
    def overallstatus(self, overallstatus):
        """
        Sets the overallstatus of this FirmwareUpgradeStatus.
        Provides the overall status, e.g., downloading, upgrading, successful, failure and pending-for-reboot

        :param overallstatus: The overallstatus of this FirmwareUpgradeStatus.
        :type: str
        """
        # Enum-validated field: reject values outside the API's allowed set.
        allowed_values = ["none", "started", "download initiating", "download initiated", "downloading", "downloaded", "upgrade initiating", "upgrade initiated", "upgrading", "upgraded", "success", "failed", "pending"]
        if overallstatus not in allowed_values:
            raise ValueError(
                "Invalid value for `overallstatus` ({0}), must be one of {1}"
                .format(overallstatus, allowed_values)
            )
        self._overallstatus = overallstatus

    @property
    def pending_type(self):
        """
        Gets the pending_type of this FirmwareUpgradeStatus.
        Provides the current pending upgrade status for the on-next boot based upgrades

        :return: The pending_type of this FirmwareUpgradeStatus.
        :rtype: str
        """
        return self._pending_type

    @pending_type.setter
    def pending_type(self, pending_type):
        """
        Sets the pending_type of this FirmwareUpgradeStatus.
        Provides the current pending upgrade status for the on-next boot based upgrades

        :param pending_type: The pending_type of this FirmwareUpgradeStatus.
        :type: str
        """
        # Enum-validated field: reject values outside the API's allowed set.
        allowed_values = ["none", "pending for next reboot"]
        if pending_type not in allowed_values:
            raise ValueError(
                "Invalid value for `pending_type` ({0}), must be one of {1}"
                .format(pending_type, allowed_values)
            )
        self._pending_type = pending_type

    @property
    def upgrade(self):
        """
        Gets the upgrade of this FirmwareUpgradeStatus.

        :return: The upgrade of this FirmwareUpgradeStatus.
        :rtype: FirmwareUpgradeRef
        """
        return self._upgrade

    @upgrade.setter
    def upgrade(self, upgrade):
        """
        Sets the upgrade of this FirmwareUpgradeStatus.

        :param upgrade: The upgrade of this FirmwareUpgradeStatus.
        :type: FirmwareUpgradeRef
        """
        self._upgrade = upgrade

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Walk every declared attribute and recursively serialize nested
        # models (anything exposing to_dict), lists of models, and dicts
        # whose values are models; plain values pass through unchanged.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Equality is whole-state comparison of the backing attribute dicts.
        if not isinstance(other, FirmwareUpgradeStatus):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
import React from 'react';
import createSvg from './utils/createSvg';
// "ErrorOutline" icon component: wraps the Material Design error-outline
// glyph path via the project's createSvg helper (name, then viewBox).
export default createSvg(<path d="M11 15h2v2h-2zm0-8h2v6h-2zm.99-5C6.47 2 2 6.48 2 12s4.47 10 9.99 10C17.52 22 22 17.52 22 12S17.52 2 11.99 2zM12 20c-4.42 0-8-3.58-8-8s3.58-8 8-8 8 3.58 8 8-3.58 8-8 8z" />, 'ErrorOutline', '0 0 24 24');
|
#!/usr/bin/env python
# noinspection PyUnresolvedReferences
import vtkmodules.vtkInteractionStyle
# noinspection PyUnresolvedReferences
import vtkmodules.vtkRenderingOpenGL2
from vtkmodules.vtkCommonColor import vtkNamedColors
from vtkmodules.vtkIOXML import vtkXMLPolyDataReader
from vtkmodules.vtkRenderingCore import (
vtkActor,
vtkPolyDataMapper,
vtkRenderWindow,
vtkRenderWindowInteractor,
vtkRenderer
)
def get_program_parameters():
    """
    Parse the command line and return the polydata file name.

    :return: The positional ``filename`` argument (e.g. ``Torso.vtp``).
    """
    import argparse
    parser = argparse.ArgumentParser(
        description='Read a polydata file.',
        epilog='',
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument('filename', help='Torso.vtp')
    return parser.parse_args().filename
def main():
    """Read a .vtp polydata file and display it in an interactive window."""
    colors = vtkNamedColors()
    file_name = get_program_parameters()

    # Load the polydata from disk.
    reader = vtkXMLPolyDataReader()
    reader.SetFileName(file_name)
    reader.Update()

    # Map the geometry and wrap it in an actor.
    mapper = vtkPolyDataMapper()
    mapper.SetInputConnection(reader.GetOutputPort())
    actor = vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetColor(colors.GetColor3d('NavajoWhite'))

    # Assemble renderer, window, and interactor.
    renderer = vtkRenderer()
    renderer.AddActor(actor)
    renderer.SetBackground(colors.GetColor3d('DarkOliveGreen'))
    window = vtkRenderWindow()
    window.AddRenderer(renderer)
    interactor = vtkRenderWindowInteractor()
    interactor.SetRenderWindow(window)

    # Frame the scene with the Z axis pointing up.
    renderer.GetActiveCamera().Pitch(90)
    renderer.GetActiveCamera().SetViewUp(0, 0, 1)
    renderer.ResetCamera()

    window.SetSize(600, 600)
    window.Render()
    window.SetWindowName('ReadPolyData')
    interactor.Start()
# Standard script entry point.
if __name__ == '__main__':
    main()
|
"""
Sprites are game objects.
To use a sprite you use :meth:`BaseScene.add <ppb.BaseScene.add>` to add it
to a scene. When contained in an active scene, the engine will call the various
:mod:`event <ppb.events>` handlers on the sprite.
When defining your own custom sprites, we suggest you start with
:class:`~ppb.Sprite`. By subclassing :class:`~ppb.Sprite`, you get a number of
features automatically. You then define your event handlers as methods on your
new class to produce behaviors.
All sprites in ppb are built from composition via mixins or subclassing via
traditional Python inheritance.
If you don't need the built in features of :class:`~ppb.Sprite` see
:class:`BaseSprite`.
"""
from inspect import getfile
from pathlib import Path
from typing import Union
from ppb_vector import Vector, VectorLike
import ppb
__all__ = (
"BaseSprite",
"Sprite",
"RotatableMixin",
"SquareShapeMixin",
"RectangleShapeMixin",
"RectangleSprite",
"RenderableMixin",
)
class BaseSprite:
    """
    The base Sprite class. All sprites should inherit from this (directly or
    indirectly).

    A BaseSprite is defined by two pieces of state:

    * a ``position`` vector
    * a ``layer``

    Its :py:meth:`__init__` accepts arbitrary keyword arguments and assigns
    each one as an attribute, which makes rapid prototyping easy.
    """
    #: (:py:class:`ppb.Vector`): Location of the sprite
    position: Vector = Vector(0, 0)
    #: The layer a sprite exists on.
    layer: int = 0

    def __init__(self, **kwargs):
        """
        Accepts no positional arguments; every keyword argument is stored as
        an attribute on the instance, so ``BaseSprite(speed=6)`` produces an
        object whose ``speed`` is ``6``.

        For production code, prefer subclassing and declaring defaults as
        class attributes::

            class Rocket(ppb.sprites.BaseSprite):
                velocity = Vector(0, 1)

                def on_update(self, update_event, signal):
                    self.position += self.velocity * update_event.time_delta
        """
        super().__init__()
        # Normalize the (possibly class-level) position into a fresh Vector.
        self.position = Vector(self.position)
        for key, value in kwargs.items():
            # 'pos' is accepted as shorthand for 'position'.
            if key == 'pos':
                key = 'position'
            # Position-like values are coerced to Vector before assignment.
            if key == 'position':
                value = Vector(value)
            setattr(self, key, value)
class RenderableMixin:
    """
    A class implementing the API expected by ppb.systems.renderer.Renderer.
    The render expects a width and height (see :class:`RectangleMixin`) and will
    skip rendering if a sprite has no shape. You can use
    :class:`RectangleMixin`, :class:`SquareMixin`, or set the values yourself.
    Additionally, if :attr:`~RenderableMixin.image` is ``None``, the sprite will not
    be rendered. If you just want a basic shape to be rendered, see
    :mod:`ppb.assets`.
    """
    #: (:py:class:`ppb.Image`): The image asset. ``...`` (Ellipsis) is a
    #: sentinel meaning "not set yet"; __image__ replaces it with a default.
    image = ...  # TODO: Type hint appropriately
    # Rendered size in game units.
    size = 1
    blend_mode: 'ppb.flags.BlendMode'  # One of four blending modes
    opacity: int  # An opacity value from 0-255
    color: 'ppb.utils.Color'  # A 3-tuple color with values 0-255

    def __image__(self):
        """
        Returns the sprite's image attribute if provided, or sets a default
        one.

        The default is ``<classname>.png`` located next to the module that
        defines the concrete class (or in the current directory when the
        source file cannot be determined).
        """
        if self.image is ...:
            klass = type(self)
            # Turn the dotted module path into a relative directory path.
            prefix = Path(klass.__module__.replace('.', '/'))
            try:
                klassfile = getfile(klass)
            except TypeError:
                # getfile() raises TypeError for built-in/extension classes;
                # fall back to the current directory.
                prefix = Path('.')
            else:
                # For a regular module (not a package __init__.py) the asset
                # lives beside the module file, i.e. one level up from the
                # module-name directory computed above.
                if Path(klassfile).name != '__init__.py':
                    prefix = prefix.parent
            if prefix == Path('.'):
                self.image = ppb.Image(f"{klass.__name__.lower()}.png")
            else:
                self.image = ppb.Image(f"{prefix!s}/{klass.__name__.lower()}.png")
        return self.image
class RotatableMixin:
    """
    A rotation mixin. Can be included with sprites.

    .. warning:: rotation does not affect underlying shape (the corners are
       still in the same place), it only rotates the sprites image and
       provides a facing.
    """
    # Backing storage for the rotation property, in degrees.
    _rotation = 0
    #: The baseline vector, representing the "front" of the sprite
    basis = Vector(0, -1)
    # Considered making basis private, the only reason to do so is to
    # discourage people from relying on it as data.

    @property
    def rotation(self):
        """
        The amount the sprite is rotated, in degrees
        """
        return self._rotation

    @rotation.setter
    def rotation(self, value):
        # Normalize into [0, 360).
        self._rotation = value % 360

    @property
    def facing(self):
        """
        The direction the "front" is facing.

        Can be set to an arbitrary facing by providing a facing vector.
        """
        return Vector(*self.basis).rotate(self.rotation).normalize()

    @facing.setter
    def facing(self, value):
        self.rotation = self.basis.angle(value)

    def rotate(self, degrees):
        """
        Rotate the sprite by a given angle (in degrees).
        """
        self.rotation += degrees
class RectangleShapeMixin:
    """
    A Mixin that provides a rectangular area to sprites.
    Classes derived from RectangleShapeMixin default to the same size and
    shape as all ppb Sprites: A 1 game unit by 1 game unit square. Just set
    the width and height in your constructor (Or as
    :class:`class attributes <BaseSprite>`) to change this default.

    .. note:: The concrete class using :class:`RectangleShapeMixin` must have a
       ``position`` attribute.
    """
    #: The width of the sprite.
    width: int = 1
    #: The height of the sprite.
    height: int = 1
    # Following class properties for type hinting only. Your concrete sprite
    # should already have one.
    position: Vector

    @property
    def left(self) -> float:
        """
        The x-axis position of the left side of the object.
        Can be set to a number.
        """
        return self.position.x - self.width / 2

    @left.setter
    def left(self, value: Union[float, int]):
        self.position = Vector(value + (self.width / 2), self.position.y)

    @property
    def right(self) -> float:
        """
        The x-axis position of the right side of the object.
        Can be set to a number.
        """
        return self.position.x + self.width / 2

    @right.setter
    def right(self, value: Union[float, int]):
        self.position = Vector(value - (self.width / 2), self.position.y)

    @property
    def top(self) -> float:
        """
        The y-axis position of the top of the object.
        Can be set to a number.
        """
        return self.position.y + self.height / 2

    @top.setter
    def top(self, value: Union[int, float]):
        self.position = Vector(self.position.x, value - (self.height / 2))

    @property
    def bottom(self) -> float:
        """
        The y-axis position of the bottom of the object.
        Can be set to a number.
        """
        return self.position.y - self.height / 2

    @bottom.setter
    def bottom(self, value: Union[float, int]):
        self.position = Vector(self.position.x, value + (self.height / 2))

    @property
    def top_left(self) -> Vector:
        """
        The coordinates of the top left corner of the object.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        return Vector(self.left, self.top)

    @top_left.setter
    def top_left(self, vector: Vector):
        vector = Vector(vector)
        x = vector.x + (self.width / 2)
        y = vector.y - (self.height / 2)
        self.position = Vector(x, y)

    @property
    def top_right(self) -> Vector:
        """
        The coordinates of the top right corner of the object.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        return Vector(self.right, self.top)

    @top_right.setter
    def top_right(self, vector: Vector):
        vector = Vector(vector)
        x = vector.x - (self.width / 2)
        y = vector.y - (self.height / 2)
        self.position = Vector(x, y)

    @property
    def bottom_left(self) -> Vector:
        """
        The coordinates of the bottom left corner of the object.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        return Vector(self.left, self.bottom)

    @bottom_left.setter
    def bottom_left(self, vector: Vector):
        vector = Vector(vector)
        x = vector.x + (self.width / 2)
        y = vector.y + (self.height / 2)
        self.position = Vector(x, y)

    @property
    def bottom_right(self) -> Vector:
        """
        The coordinates of the bottom right corner of the object.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        # NOTE: this docstring previously lived on the setter; moved here so
        # help() and the docs show it on the property like its siblings.
        return Vector(self.right, self.bottom)

    @bottom_right.setter
    def bottom_right(self, vector: Vector):
        vector = Vector(vector)
        x = vector.x - (self.width / 2)
        y = vector.y + (self.height / 2)
        self.position = Vector(x, y)

    @property
    def bottom_middle(self) -> Vector:
        """
        The coordinates of the midpoint of the bottom of the object.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        return Vector(self.position.x, self.bottom)

    @bottom_middle.setter
    def bottom_middle(self, value: VectorLike):
        value = Vector(value)
        self.position = Vector(value.x, value.y + self.height / 2)

    @property
    def left_middle(self) -> Vector:
        """
        The coordinates of the midpoint of the left side of the object.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        return Vector(self.left, self.position.y)

    @left_middle.setter
    def left_middle(self, value: VectorLike):
        value = Vector(value)
        self.position = Vector(value.x + self.width / 2, value.y)

    @property
    def right_middle(self) -> Vector:
        """
        The coordinates of the midpoint of the right side of the object.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        return Vector(self.right, self.position.y)

    @right_middle.setter
    def right_middle(self, value: VectorLike):
        value = Vector(value)
        self.position = Vector(value.x - self.width / 2, value.y)

    @property
    def top_middle(self) -> Vector:
        """
        The coordinates of the midpoint of the top of the object.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        return Vector(self.position.x, self.top)

    @top_middle.setter
    def top_middle(self, value: VectorLike):
        value = Vector(value)
        self.position = Vector(value.x, value.y - self.height / 2)

    @property
    def center(self) -> Vector:
        """
        The coordinates of the center point of the object. Equivalent to the
        :attr:`~BaseSprite.position`.
        Can be set to a :class:`ppb_vector.Vector`.
        """
        return self.position

    @center.setter
    def center(self, vector: Vector):
        self.position = Vector(vector)
class SquareShapeMixin(RectangleShapeMixin):
    """
    A Mixin that provides a square area to sprites.

    Extends :class:`RectangleShapeMixin` by deriving both
    :meth:`~SquareShapeMixin.width` and :meth:`~SquareShapeMixin.height`
    from the single :attr:`~SquareShapeMixin.size` attribute. Assigning to
    either width or height writes through to ``size``, so the shape always
    remains square.

    The default size is 1 game unit. See :class:`RectangleShapeMixin` for
    the full rectangle interface.
    """
    #: The width and height of the object. Setting size changes the
    #: :meth:`height` and :meth:`width` of the sprite.
    size = 1

    @property
    def width(self):
        """
        The width of the sprite; an alias for :attr:`size`.

        Assigning to it updates :attr:`size` (and therefore :meth:`height`).
        """
        return self.size

    @width.setter
    def width(self, value: Union[float, int]):
        self.size = value

    @property
    def height(self):
        """
        The height of the sprite; an alias for :attr:`size`.

        Assigning to it updates :attr:`size` (and therefore :meth:`width`).
        """
        return self.size

    @height.setter
    def height(self, value: Union[float, int]):
        self.size = value
class Sprite(SquareShapeMixin, RenderableMixin, RotatableMixin, BaseSprite):
    """
    The default Sprite class.
    Sprite defines no additional methods or attributes, but is made up of
    :class:`BaseSprite` with the mixins :class:`~ppb.sprites.RotatableMixin`,
    :class:`~ppb.sprites.RenderableMixin`, and
    :class:`~ppb.sprites.SquareShapeMixin`.
    For most use cases, this is probably the class you want to subclass to make
    your game objects.
    If you need rectangular sprites instead of squares, see
    :class:`RectangleSprite`.
    """
class RectangleSprite(RectangleShapeMixin, RenderableMixin, RotatableMixin, BaseSprite):
    """
    A rectangle sprite.
    Similarly to :class:`~ppb.Sprite`, :class:`RectangleSprite` does not
    introduce any new methods or attributes. It's made up of :class:`BaseSprite`
    with the mixins :class:`RotatableMixin`, :class:`RenderableMixin`, and
    :class:`RectangleShapeMixin`.
    """
|
// Chord definition for A sus2: a list of playable voicings.
// Each position gives per-string fret and finger digits plus the barre fret.
// NOTE(review): digits past 9 appear encoded as letters ('9bcc' — presumably
// a=10, b=11, c=12); confirm against the chord-rendering consumer.
export default {
  key: 'A',
  suffix: 'sus2',
  positions: [
    {
      frets: '2452',
      fingers: '1341',
      barres: 2,
      capo: true
    },
    {
      frets: '4457',
      fingers: '1124',
      barres: 4,
      capo: true
    },
    {
      frets: '9977',
      fingers: '3411',
      barres: 7,
      capo: true
    },
    {
      frets: '9bcc',
      fingers: '1233',
      barres: 12
    }
  ]
};
|
"use strict";
import I from "immutable";
import assert from "assert";
import React from "react/addons";
let { TestUtils } = React.addons;
import FilterInput from "../../../src/js/components/filter/FilterInput";
import MultiSelect from "../../../src/js/components/filter/MultiSelect";
import Preset from "../../../src/js/components/filter/Preset";
import Filter from "../../../src/js/components/filter/Filter";
// Shallow-render tests for the Filter component: a single render is shared
// by all assertions below.
describe("Filter", () => {
  // No-op handlers — the tests only inspect rendered structure.
  const setFilter = () => {};
  const setStartDate = () => {};
  const setEndDate = () => {};
  const restrictTo = () => {};
  const onSelect = () => {};
  // Minimal immutable filter state with one restriction ("name").
  const filters = I.fromJS({
    filterBy: "hi",
    startDate: "2015-01-01",
    endDate: "2016-01-01",
    restrictions: {
      name: {
        key: "name",
        options: ["jim", "tim"]
      }
    }
  });
  const selections = I.fromJS({
    name: ["jim"]
  });
  const presetConfig = [
    {description: "preset", onSelect: []}
  ];
  // Shallow render once and pick out the direct <div> children, then the
  // two "table-manip-col" columns the component is expected to produce.
  const ShallowRenderer = TestUtils.createRenderer();
  ShallowRenderer.render(
    <Filter filters={filters} selections={selections}
      setFilter={setFilter} setStartDate={setStartDate}
      setEndDate={setEndDate}
      restrictTo={restrictTo}
      presetConfig={presetConfig}
      currentPage={1} totalPages={4}
    />
  );
  const renderedOutput = ShallowRenderer.getRenderOutput();
  const youngers = renderedOutput.props.children.filter(el => el.type === "div");
  const columns = youngers.filter(el => el.props.className.indexOf("table-manip-col") !== -1);
  it("#renders a preset row and 2 columns if passed no children", () => {
    assert.equal(columns.length, 2);
  });
  it("#renders a FilterInput in the first row of the first column", () => {
    const row = columns[0].props.children.filter(el =>
      el.props.className && el.props.className.indexOf("table-manip-row") !== -1
    ).pop();
    assert.equal(row.props.children.filter(el => el.type === FilterInput).length, 1);
  });
  it("#renders a MultiSelect for each element in filters.restrictions", () => {
    const kids = columns[1].props.children;
    assert.equal(
      kids.filter(el => el.type === MultiSelect).length,
      filters.get("restrictions").size
    );
  });
  it("#renders a Preset component for each element in presetConfig, with description and onSelect props", () => {
    const kids = youngers.filter(el => el.props.className.indexOf("table-manip-presets") !== -1);
    const presets = kids[0].props.children.filter(el => el.type === Preset);
    assert.equal(
      presets.length,
      Object.keys(presetConfig).length
    );
    assert.equal(presets[0].props.description, presetConfig[0].description);
    assert.deepEqual(presets[0].props.onSelect, presetConfig[0].onSelect);
  });
});
|
#!/usr/bin/env python3
"""
Use black to clean this badly-formatted, barely-readable script.
"""
from this import s as S
def rot13(text):
    """str: Encrypt text badly (ROT13; non-letters pass through unchanged)."""
    table = {**rotmap(65), **rotmap(97)}
    return "".join(table.get(ch, ch) for ch in text)

def rotmap(start):
    """
    dict[char,char]: Map chars (from start to start+26) to rotated characters.
    """
    originals = (chr(start + i) for i in range(26))
    rotated = (chr(start + (i + 13) % 26) for i in range(26))
    return dict(zip(originals, rotated))
class EnterpriseQualityInstantiateHashFromEmptyHashError(RuntimeError):
    """
    Parody "enterprise" error type, in the style of
    https://github.com/EnterpriseQualityCoding
    """
    def __init__(self, *args, **kwargs):
        # Stash constructor arguments for later "re-initialization".
        self.args = args
        self.kwargs = kwargs
        # NOTE(review): BaseException.__init__ accepts no keyword arguments,
        # so this raises TypeError whenever kwargs is non-empty. Part of the
        # joke — do not "fix" without deciding the class's real contract.
        super().__init__(*self.args, **self.kwargs)
    def _execute_initialization_framework_with_args_and_with_kwargs(self):
        # NOTE(review): calls the instance itself, but no __call__ is defined,
        # so this method raises TypeError if ever invoked.
        self(*self.args, **self.kwargs)
def enterpriseQualityInstantiateHashFromEmptyHash_hash_v2(hash_options_hash=None):
    """Instantiate a hash from an initialized empty hash instance.

    Args:
        hash_options_hash: Source mapping. If non-empty, its first key is
            returned as a string; if empty or omitted, ValueError is raised.

    Returns:
        str: The first key of ``hash_options_hash``.

    Raises:
        ValueError: If ``hash_options_hash`` is empty.
    """
    # BUG FIX: avoid the shared mutable default argument `{}`.
    if hash_options_hash is None:
        hash_options_hash = {}
    enterpriseQuality_hash_empty = {}
    # BUG FIX: the original used `~bool(...)`, whose result (-1 or -2) is
    # always truthy, so the function unconditionally raised ValueError.
    hash_options_hash_is_empty = not hash_options_hash
    if not hash_options_hash_is_empty:
        for hash_options_hash_key in hash_options_hash:
            # The original copied the key through two locals and re-joined
            # its characters; net effect: return the first key as a string.
            return "".join(hash_options_hash_key)
    else:
        raise ValueError("err")
    # NOTE(review): everything below is unreachable (the branches above
    # always return or raise); kept for fidelity with the original parody.
    enterpriseQuality_hash_instantiated = enterpriseQuality_hash_empty.copy()
    # BUG FIX: `tuple(y, x)` raised TypeError (tuple takes a single iterable).
    for instantiated_hash_key, instantiated_hash_value in [
        (y, x) for y, x in enterpriseQuality_hash_instantiated.items()
    ]:
        try:
            enterpriseQuality_hash_instantiated[
                instantiated_hash_key
            ] = instantiated_hash_value
        except Exception as exc:
            raise EnterpriseQualityInstantiateHashFromEmptyHashError(
                "enterpriseQualityInstantiateHashFromEmptyHash_hash_v1 error in entrepresiQualityInstantiateHashFromEmptyHash_hash-v2 instantiation loop"
            ) from exc
    return enterpriseQuality_hash_instantiated.copy()
if __name__ == "__main__":
    # `this.s` holds the Zen of Python ROT13-encoded; applying rot13 again
    # decodes it, so this prints the plaintext Zen.
    print(rot13(S))
|
/**
 * Determine whether `target` occurs in a sorted array.
 *
 * Rewritten iteratively with index bounds: the original recursed on
 * `array.slice(...)`, copying O(n) elements per step and defeating the
 * point of binary search.
 *
 * @param {Array} array - Array sorted in ascending order.
 * @param {*} target - Value to look for (compared with `===` / `<`).
 * @returns {boolean} `true` if `target` is present, else `false`.
 */
function binarySearch(array, target) {
  let lo = 0;
  let hi = array.length - 1;
  while (lo <= hi) {
    const mid = Math.floor((lo + hi) / 2);
    if (array[mid] === target) return true;
    if (target < array[mid]) {
      hi = mid - 1;
    } else {
      lo = mid + 1;
    }
  }
  return false;
}
/**
 * Find the index of `target` in a sorted array.
 *
 * Rewritten iteratively with index bounds; the original recursed on
 * `array.slice(...)` (O(n) copy per step). The midpoint formula
 * `lo + floor(len / 2)` reproduces the original's probe sequence exactly,
 * so the returned index matches the original even with duplicates.
 *
 * @param {Array} array - Array sorted in ascending order.
 * @param {*} target - Value to look for.
 * @returns {number} An index of `target`, or -1 if absent.
 */
function binarySearchIndex(array, target) {
  let lo = 0;
  let hi = array.length - 1;
  while (lo <= hi) {
    const mid = lo + Math.floor((hi - lo + 1) / 2);
    if (array[mid] === target) return mid;
    if (target < array[mid]) {
      hi = mid - 1;
    } else {
      lo = mid + 1;
    }
  }
  return -1;
}
module.exports = {
binarySearch,
binarySearchIndex,
};
|
/**
 * Classic FizzBuzz: "Fizz" for multiples of 3, "Buzz" for multiples of 5,
 * "FizzBuzz" for multiples of both, otherwise the input itself.
 *
 * Fixed to use strict equality (`===`) instead of coercing `==`.
 *
 * @param {number} input - The number to classify.
 * @returns {string|number} "Fizz", "Buzz", "FizzBuzz", or `input` unchanged.
 */
function fizzBuzz(input) {
  const divisibleBy3 = input % 3 === 0;
  const divisibleBy5 = input % 5 === 0;
  if (divisibleBy3 && divisibleBy5) {
    return "FizzBuzz";
  }
  if (divisibleBy3) {
    return "Fizz";
  }
  if (divisibleBy5) {
    return "Buzz";
  }
  return input;
}
|
const path = require( "path" );
const fs = require( "fs-extra" );
class Script {
	// `world` is the shared test-world object; `world.config.app.repo`
	// supplies the repo root and `world.path` supplies path helpers.
	constructor( world ) {
		this.world = world;
	}
	// Copy the pristine fixture `script--<scriptId>.json` into the repo's
	// `scripts/` directory, first materialising any well-known paths listed
	// inside the script. Always returns a Promise: synchronous failures
	// (bad id, unreadable JSON) are converted to rejections by the
	// try/catch below.
	createWellKnownScript( scriptId ) {
		const { repo } = this.world.config.app;
		try {
			const pristinePath = path.resolve( __dirname, `../feature-data-pristine/script--${scriptId}.json` );
			const fileSystemPath = path.resolve( repo, `./scripts/${scriptId}.json` );
			// `require` both reads and parses the JSON fixture.
			const script = require( pristinePath );
			const { paths } = script;
			return this.world.path.createWellKnownPaths( paths ).then( () =>
				new Promise( ( resolve, reject ) =>
					// ensureFile creates the file and parent dirs if missing,
					// then the JSON payload is written over it (indent = 1).
					fs.ensureFile( fileSystemPath, ensureError => ensureError ? reject( ensureError )
						: fs.writeFile( fileSystemPath, JSON.stringify( script, null, 1 ), e => e ? reject( e )
							: resolve()
						)
					)
				)
			);
		} catch( e ) {
			return Promise.reject( e );
		}
	}
}
module.exports = Script;
|
import os
from .excavation import Excavation, DuplicateArtifact
from .artifact import DuplicateName
from .record import Record
# Locate a PyReveng3 checkout for optional disassembly support.
# Resolution order: $AUTOARCHAEOLOGIST_PYREVENG3, then ~/PyReveng3/,
# then ~/Proj/PyReveng3/; None when no candidate directory exists.
PYREVENG3 = os.environ.get("AUTOARCHAEOLOGIST_PYREVENG3")
if not PYREVENG3 or not os.path.isdir(PYREVENG3):
    # NOTE(review): str(None) yields the literal "None/PyReveng3/" when HOME
    # is unset; the isdir() check on the next line rejects it, so harmless.
    PYREVENG3 = str(os.environ.get("HOME")) + "/PyReveng3/"
if not PYREVENG3 or not os.path.isdir(PYREVENG3):
    PYREVENG3 = str(os.environ.get("HOME")) + "/Proj/PyReveng3/"
if not PYREVENG3 or not os.path.isdir(PYREVENG3):
    PYREVENG3 = None
|
import pytest
@pytest.fixture(scope="function", autouse=True)
def set_up():
    """Reset qcdb option state before every test (autouse, per-function)."""
    # Imported lazily so collection does not require qcdb to be importable.
    import qcdb
    qcdb.driver.pe.clean_nu_options()
    # psi4.set_output_file("pytest_output.dat", True)
|
///
/// Copyright (c) 2016 Dropbox, Inc. All rights reserved.
///
/// Auto-generated by Stone, do not modify.
///
#import <Foundation/Foundation.h>
#import "DBSerializableProtocol.h"
@class DBTEAMLOGSharedFolderTransferOwnershipType;
NS_ASSUME_NONNULL_BEGIN
#pragma mark - API Object
///
/// The `SharedFolderTransferOwnershipType` struct.
///
/// This class implements the `DBSerializable` protocol (serialize and
/// deserialize instance methods), which is required for all Obj-C SDK API route
/// objects.
///
@interface DBTEAMLOGSharedFolderTransferOwnershipType : NSObject <DBSerializable, NSCopying>
#pragma mark - Instance fields
/// (no description).
/// NOTE(review): the trailing underscore presumably avoids clashing with
/// NSObject's `-description`; confirm against Stone's name-mangling rules.
@property (nonatomic, readonly, copy) NSString *description_;
#pragma mark - Constructors
///
/// Full constructor for the struct (exposes all instance variables).
///
/// @param description_ (no description).
///
/// @return An initialized instance.
///
- (instancetype)initWithDescription_:(NSString *)description_;
/// Designated initializer is the one above; plain -init is unavailable.
- (instancetype)init NS_UNAVAILABLE;
@end
#pragma mark - Serializer Object
///
/// The serialization class for the `SharedFolderTransferOwnershipType` struct.
///
@interface DBTEAMLOGSharedFolderTransferOwnershipTypeSerializer : NSObject
///
/// Serializes `DBTEAMLOGSharedFolderTransferOwnershipType` instances.
///
/// @param instance An instance of the
/// `DBTEAMLOGSharedFolderTransferOwnershipType` API object.
///
/// @return A json-compatible dictionary representation of the
/// `DBTEAMLOGSharedFolderTransferOwnershipType` API object.
/// NOTE: the return is nullable — callers must handle a nil result.
///
+ (nullable NSDictionary<NSString *, id> *)serialize:(DBTEAMLOGSharedFolderTransferOwnershipType *)instance;
///
/// Deserializes `DBTEAMLOGSharedFolderTransferOwnershipType` instances.
///
/// @param dict A json-compatible dictionary representation of the
/// `DBTEAMLOGSharedFolderTransferOwnershipType` API object.
///
/// @return An instantiation of the `DBTEAMLOGSharedFolderTransferOwnershipType`
/// object.
///
+ (DBTEAMLOGSharedFolderTransferOwnershipType *)deserialize:(NSDictionary<NSString *, id> *)dict;
@end
NS_ASSUME_NONNULL_END
|
// Player character for the platformer: a SkyEngine StateSet with three
// sprite states (idle / walk / jump), gravity, tile collision handling and
// arrow-key / space controls.
Platformer.Player = CLASS({
	preset : () => {
		return SkyEngine.StateSet;
	},
	params : () => {
		return {
			// Constant downward acceleration (gravity).
			accelY : Platformer.Global.gravity,
			stateNodes : {
				idle : SkyEngine.Sprite({
					y : -92,
					srcs : [
						Platformer.R('image/alienGreen_stand.png')
					]
				}),
				walk : SkyEngine.Sprite({
					y : -92,
					srcs : [
						Platformer.R('image/alienGreen_walk1.png'),
						Platformer.R('image/alienGreen_walk2.png')
					],
					fps : 10,
					isHiding : true
				}),
				jump : SkyEngine.Sprite({
					y : -92,
					srcs : [
						Platformer.R('image/alienGreen_jump.png')
					],
					isHiding : true
				})
			},
			baseState : 'idle'
		};
	},
	init : (inner, self, params) => {
		//REQUIRED: params
		//REQUIRED: params.lands
		let lands = params.lands;
		let jumpSound = SOUND({
			mp3 : Platformer.R('sound/jump.mp3'),
			ogg : Platformer.R('sound/jump.ogg')
		});
		// Camera tracks the player horizontally.
		SkyEngine.Screen.cameraFollowX({
			target : self
		});
		self.addCollider(SkyEngine.Rect({
			y : -92,
			width : 80,
			height : 184
		}));
		// Cached collider half-width and full height, used in all the
		// collision tests below.
		let shw = self.getCollider().getWidth() / 2;
		let sh = self.getCollider().getHeight();
		// When colliding with a collision tile.
		self.onMeet(SkyEngine.CollisionTile, (tile) => {
			let hw = tile.getCollider().getWidth() / 2;
			let hh = tile.getCollider().getHeight() / 2;
			// Hit while moving down (landed on top of the tile).
			if (
			self.getBeforeY() <= tile.getY() - hh &&
			self.getX() - shw < tile.getX() + hw &&
			tile.getX() - hw < self.getX() + shw) {
				self.setY(tile.getY() - hh);
				self.stopDown();
				if (self.getState() === 'jump') {
					// Still moving with no acceleration (acceleration here
					// means the player is braking to a stop).
					if (self.getSpeedX() !== 0 && self.getAccelX() === 0) {
						self.setState('walk');
					} else {
						self.setState('idle');
					}
				}
			}
			// Hit while moving up (head bumped the tile's underside).
			else if (
			self.getBeforeY() - sh >= tile.getY() + hh &&
			self.getX() - shw < tile.getX() + hw &&
			tile.getX() - hw < self.getX() + shw) {
				self.setY(tile.getY() + hh + sh);
				self.stopUp();
			}
			// Hit sideways.
			else {
				// Hit while moving left.
				if (
				self.getBeforeX() - shw >= tile.getX() + hw &&
				self.getY() - sh < tile.getY() + hh &&
				tile.getY() - hh < self.getY()) {
					self.setX(tile.getX() + hw + shw);
					self.stuckLeft();
				}
				// Hit while moving right.
				if (
				self.getBeforeX() + shw <= tile.getX() - hw &&
				self.getY() - sh < tile.getY() + hh &&
				tile.getY() - hh < self.getY()) {
					self.setX(tile.getX() - hw - shw);
					self.stuckRight();
				}
			}
		});
		// When separating from a collision tile.
		self.onPart(SkyEngine.CollisionTile, (tile) => {
			let hw = tile.getCollider().getWidth() / 2;
			let hh = tile.getCollider().getHeight() / 2;
			// Separated from a tile on the left.
			if (tile.getX() + hw <= self.getX() - shw) {
				self.unstuckLeft();
				// Walked off the edge: start falling.
				if (tile.getY() - hh <= self.getY()) {
					self.setAccelY(3000);
				}
			}
			// Separated from a tile on the right.
			else if (self.getX() + shw <= tile.getX() - hw) {
				self.unstuckRight();
				// Walked off the edge: start falling.
				if (tile.getY() - hh <= self.getY()) {
					self.setAccelY(3000);
				}
			}
			// Neither left nor right: the player jumped off the tile.
			else {
				self.setAccelY(3000);
			}
		});
		// Death when leaving the screen.
		self.on('offscreen', () => {
			if (self.getY() > 1000) {
				// NOTE(review): death handling appears unimplemented — the
				// handler body is empty. Confirm whether this is intentional.
			}
		});
		// Key pressed.
		let keydownEvent = EVENT('keydown', (e) => {
			if (e.getKey() === 'ArrowLeft') {
				self.moveLeft(500);
				// Negative scale mirrors the sprite to face left.
				self.setScaleX(-1);
				if (self.getState() !== 'jump') {
					self.setState('walk');
				}
			}
			if (e.getKey() === 'ArrowRight') {
				self.moveRight(500);
				self.setScaleX(1);
				if (self.getState() !== 'jump') {
					self.setState('walk');
				}
			}
			// Space jumps, but only from the ground (not already jumping,
			// no vertical speed).
			if (e.getKey() === ' ' && self.getState() !== 'jump' && self.getSpeedY() === 0) {
				self.setSpeedY(-1200);
				self.setAccelY(Platformer.Global.gravity);
				self.setState('jump');
				jumpSound.play();
			}
		});
		// Key released: brake and return to idle, but only if the release
		// matches the direction the sprite currently faces.
		let keyupEvent = EVENT('keyup', (e) => {
			if (self.getScaleX() === -1 && e.getKey() === 'ArrowLeft') {
				if (self.getSpeedX() < 0) {
					self.stopLeft(2500);
				}
				if (self.getState() !== 'jump') {
					self.setState('idle');
				}
			}
			if (self.getScaleX() === 1 && e.getKey() === 'ArrowRight') {
				if (self.getSpeedX() > 0) {
					self.stopRight(2500);
				}
				if (self.getState() !== 'jump') {
					self.setState('idle');
				}
			}
		});
		// Clean up the sound and global key handlers when removed.
		self.on('remove', () => {
			jumpSound.stop();
			jumpSound = undefined;
			keydownEvent.remove();
			keyupEvent.remove();
		});
	}
});
|
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import { Add16, Subtract16 } from '@carbon/icons-react';
import cx from 'classnames';
import PropTypes from 'prop-types';
import React, { useRef, useState } from 'react';
import { useFeatureFlag } from '../../FeatureFlags';
import { useMergedRefs } from '../../../internal/useMergedRefs';
import { useNormalizedInputProps as normalize } from '../../../internal/useNormalizedInputProps';
import { usePrefix } from '../../../internal/usePrefix';
import deprecate from '../../../prop-types/deprecate';
// Public translation message ids, exposed so consumers can supply their own
// strings via the `translateWithId` prop.
export const translationIds = {
  'increment.number': 'increment.number',
  'decrement.number': 'decrement.number',
};
// English fallbacks used when no `translateWithId` prop is provided.
const defaultTranslations = {
  [translationIds['increment.number']]: 'Increment number',
  [translationIds['decrement.number']]: 'Decrement number',
};
// Numeric input with optional increment/decrement steppers, validation and
// warning states. Works controlled (via `value`) or uncontrolled (via
// `defaultValue`); the ref is forwarded to the underlying <input>.
const NumberInput = React.forwardRef(function NumberInput(props, forwardRef) {
  // Feature flag toggles v11 default behavior (class placement, defaults).
  const enabled = useFeatureFlag('enable-v11-release');
  const {
    allowEmpty = false,
    className: customClassName,
    disabled = false,
    defaultValue,
    helperText = '',
    hideLabel = false,
    hideSteppers,
    iconDescription = enabled ? undefined : 'choose a number',
    id,
    label,
    invalid = false,
    invalidText = enabled ? undefined : 'Provide invalidText',
    isMobile,
    light = false,
    max,
    min,
    onChange,
    onClick,
    readOnly,
    size = 'md',
    step = 1,
    translateWithId: t = (id) => defaultTranslations[id],
    warn = false,
    warnText = '',
    value: controlledValue,
    ...rest
  } = props;
  const prefix = usePrefix();
  // Internal value state: seeded from the controlled value, then the
  // default value, then 0.
  const [value, setValue] = useState(() => {
    if (controlledValue !== undefined) {
      return controlledValue;
    }
    if (defaultValue !== undefined) {
      return defaultValue;
    }
    return 0;
  });
  // Last seen controlled value, tracked so prop changes can be detected
  // during render (see the sync block below).
  const [prevControlledValue, setPrevControlledValue] = useState(
    controlledValue
  );
  const inputRef = useRef(null);
  const ref = useMergedRefs([forwardRef, inputRef]);
  const numberInputClasses = cx({
    [`${prefix}--number`]: true,
    [`${prefix}--number--helpertext`]: true,
    [`${prefix}--number--readonly`]: readOnly,
    [`${prefix}--number--light`]: light,
    [`${prefix}--number--nolabel`]: hideLabel,
    [`${prefix}--number--nosteppers`]: hideSteppers,
    [`${prefix}--number--mobile`]: isMobile,
    [`${prefix}--number--${size}`]: size,
    // Pre-v11, the custom class goes on this inner node; v11 moves it to
    // the outer form-item wrapper (see the root <div> below).
    [customClassName]: !enabled,
  });
  const isInputValid = getInputValidity({
    allowEmpty,
    invalid,
    value,
    max,
    min,
  });
  const normalizedProps = normalize({
    id,
    readOnly,
    disabled,
    invalid: !isInputValid,
    invalidText,
    warn,
    warnText,
  });
  const [incrementNumLabel, decrementNumLabel] = [
    t('increment.number'),
    t('decrement.number'),
  ];
  const wrapperClasses = cx(`${prefix}--number__input-wrapper`, {
    [`${prefix}--number__input-wrapper--warning`]: normalizedProps.warn,
  });
  const iconClasses = cx({
    [`${prefix}--number__invalid`]:
      normalizedProps.invalid || normalizedProps.warn,
    [`${prefix}--number__invalid--warning`]: normalizedProps.warn,
    [`${prefix}--number__readonly-icon`]: readOnly,
  });
  // Sync internal state when the controlled `value` prop changes. This is a
  // render-phase state update (React's documented replacement for
  // getDerivedStateFromProps in function components).
  if (controlledValue !== prevControlledValue) {
    setValue(controlledValue);
    setPrevControlledValue(controlledValue);
  }
  // Warning takes precedence over invalid for aria-describedby.
  let ariaDescribedBy = null;
  if (normalizedProps.invalid) {
    ariaDescribedBy = normalizedProps.invalidId;
  }
  if (normalizedProps.warn) {
    ariaDescribedBy = normalizedProps.warnId;
  }
  function handleOnChange(event) {
    if (disabled) {
      return;
    }
    // NOTE(review): event.target.value is a string, so `value <
    // event.target.value` may compare a number with a string; the reported
    // `direction` can be unreliable — verify intended semantics.
    const state = {
      value: event.target.value,
      direction: value < event.target.value ? 'up' : 'down',
    };
    setValue(state.value);
    if (onChange) {
      onChange(event, state);
    }
  }
  return (
    <div className={cx(`${prefix}--form-item`, { [customClassName]: enabled })}>
      <div
        className={numberInputClasses}
        data-invalid={normalizedProps.invalid ? true : undefined}>
        <Label
          disabled={normalizedProps.disabled}
          hideLabel={hideLabel}
          id={id}
          label={label}
        />
        <div className={wrapperClasses}>
          <input
            {...rest}
            data-invalid={normalizedProps.invalid ? true : undefined}
            aria-invalid={normalizedProps.invalid}
            aria-describedby={ariaDescribedBy}
            disabled={normalizedProps.disabled}
            ref={ref}
            id={id}
            max={max}
            min={min}
            onClick={onClick}
            onChange={handleOnChange}
            pattern="[0-9]*"
            readOnly={readOnly}
            step={step}
            type="number"
            value={value}
          />
          {normalizedProps.icon ? (
            <normalizedProps.icon className={iconClasses} />
          ) : null}
          {!hideSteppers && (
            <div className={`${prefix}--number__controls`}>
              <button
                aria-label={decrementNumLabel || iconDescription}
                className={`${prefix}--number__control-btn down-icon`}
                disabled={disabled}
                onClick={(event) => {
                  // Step down, clamped to [min, max].
                  const state = {
                    value: clamp(max, min, value - step),
                    direction: 'down',
                  };
                  setValue(state.value);
                  if (onChange) {
                    onChange(event, state);
                  }
                  if (onClick) {
                    onClick(event, state);
                  }
                }}
                tabIndex="-1"
                title={decrementNumLabel || iconDescription}
                type="button">
                <Subtract16 className="down-icon" />
              </button>
              <div className={`${prefix}--number__rule-divider`} />
              <button
                aria-label={incrementNumLabel || iconDescription}
                className={`${prefix}--number__control-btn up-icon`}
                disabled={disabled}
                onClick={(event) => {
                  // Step up, clamped to [min, max].
                  const state = {
                    value: clamp(max, min, value + step),
                    direction: 'up',
                  };
                  setValue(state.value);
                  if (onChange) {
                    onChange(event, state);
                  }
                  if (onClick) {
                    onClick(event, state);
                  }
                }}
                tabIndex="-1"
                title={incrementNumLabel || iconDescription}
                type="button">
                <Add16 className="up-icon" />
              </button>
              <div className={`${prefix}--number__rule-divider`} />
            </div>
          )}
        </div>
        {normalizedProps.validation ? (
          normalizedProps.validation
        ) : (
          <HelperText disabled={disabled} description={helperText} />
        )}
      </div>
    </div>
  );
});
NumberInput.propTypes = {
  /**
   * `true` to allow empty string.
   */
  allowEmpty: PropTypes.bool,
  /**
   * Specify an optional className to be applied to the wrapper node
   */
  className: PropTypes.string,
  /**
   * Optional starting value for uncontrolled state
   */
  defaultValue: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
  /**
   * Specify if the control should be disabled, or not
   */
  disabled: PropTypes.bool,
  /**
   * Provide text that is used alongside the control label for additional help
   */
  helperText: PropTypes.node,
  /**
   * Specify whether you want the underlying label to be visually hidden
   */
  hideLabel: PropTypes.bool,
  /**
   * Specify whether you want the steppers to be hidden
   */
  hideSteppers: PropTypes.bool,
  /**
   * Provide a description for up/down icons that can be read by screen readers
   */
  iconDescription: PropTypes.string,
  /**
   * Specify a custom `id` for the input
   */
  id: PropTypes.string.isRequired,
  /**
   * Specify if the current value is invalid.
   */
  invalid: PropTypes.bool,
  /**
   * Message which is displayed if the value is invalid.
   */
  invalidText: PropTypes.node,
  /**
   * `true` to use the mobile variant.
   */
  isMobile: deprecate(
    PropTypes.bool,
    `The \`isMobile\` prop no longer needed as the default NumberInput styles are now identical to the mobile variant styles. This prop will be removed in the next major version of \`carbon-components-react\``
  ),
  /**
   * Generic `label` that will be used as the textual representation of what
   * this field is for
   */
  label: PropTypes.node,
  /**
   * `true` to use the light version.
   */
  light: PropTypes.bool,
  /**
   * The maximum value.
   */
  max: PropTypes.number,
  /**
   * The minimum value.
   */
  min: PropTypes.number,
  /**
   * Called on changes; receives `(event, { value, direction })`.
   * The new value is available in 'imaginaryTarget.value'
   * i.e. to get the value: evt.imaginaryTarget.value
   *
   */
  onChange: PropTypes.func,
  /**
   * Provide an optional function to be called when the up/down button is clicked
   */
  onClick: PropTypes.func,
  /**
   * Specify if the component should be read-only
   */
  readOnly: PropTypes.bool,
  /**
   * Specify the size of the Number Input.
   */
  size: PropTypes.oneOf(['sm', 'md', 'lg']),
  /**
   * Specify how much the values should increase/decrease upon clicking on up/down button
   */
  step: PropTypes.number,
  /**
   * Provide custom text for the component for each translation id
   */
  translateWithId: PropTypes.func,
  /**
   * Specify the value of the input
   */
  value: PropTypes.oneOfType([PropTypes.number, PropTypes.string]),
  /**
   * Specify whether the control is currently in warning state
   */
  warn: PropTypes.bool,
  /**
   * Provide the text that is displayed when the control is in warning state
   */
  warnText: PropTypes.node,
};
function Label({ disabled, id, hideLabel, label }) {
const prefix = usePrefix();
const className = cx({
[`${prefix}--label`]: true,
[`${prefix}--label--disabled`]: disabled,
[`${prefix}--visually-hidden`]: hideLabel,
});
if (label) {
return (
<label htmlFor={id} className={className}>
{label}
</label>
);
}
return null;
}
Label.propTypes = {
disabled: PropTypes.bool,
hideLabel: PropTypes.bool,
id: PropTypes.string,
label: PropTypes.node,
};
function HelperText({ disabled, description }) {
const prefix = usePrefix();
const className = cx(`${prefix}--form__helper-text`, {
[`${prefix}--form__helper-text--disabled`]: disabled,
});
if (description) {
return <div className={className}>{description}</div>;
}
return null;
}
HelperText.propTypes = {
description: PropTypes.node,
disabled: PropTypes.bool,
};
/**
 * Decide whether the current input value is valid.
 *
 * An explicit `invalid` flag always wins. An empty string is valid only when
 * `allowEmpty` is set. Otherwise the value must fall within [min, max].
 *
 * @param {object} config
 * @param {boolean} config.allowEmpty - Whether '' counts as valid.
 * @param {boolean} config.invalid - Externally-forced invalid state.
 * @param {number|string} config.value - Current input value.
 * @param {number} config.max - Upper bound (may be undefined).
 * @param {number} config.min - Lower bound (may be undefined).
 * @returns {boolean} `true` when the value is considered valid.
 */
function getInputValidity({ allowEmpty, invalid, value, max, min }) {
  if (invalid) {
    return false;
  }
  if (value === '') {
    return allowEmpty;
  }
  const outOfRange = value > max || value < min;
  return !outOfRange;
}
/**
 * Clamp the given value between the upper bound `max` and the lower bound
 * `min`.
 *
 * BUG FIX: the original returned NaN whenever either bound was undefined
 * (`Math.min(undefined, …)` is NaN), yet the component's `max`/`min` props
 * are optional. A missing (null/undefined) bound is now treated as
 * unbounded.
 *
 * @param {number} [max] - Upper bound; unbounded when nullish.
 * @param {number} [min] - Lower bound; unbounded when nullish.
 * @param {number} value - Value to clamp.
 * @returns {number} `value` constrained to [min, max].
 */
function clamp(max, min, value) {
  return Math.min(max ?? Infinity, Math.max(min ?? -Infinity, value));
}
export { NumberInput };
|
import React, { Component } from 'react'
import { Link } from 'gatsby'
import Menu from '../Menu'
import logo from '../../images/posthog-logo-150x29.svg'
import { getMenuState } from '../../store/selectors'
import { connect } from 'react-redux'
class Header extends Component {
render() {
const { sidebarDocked } = this.props
return (
<div
style={{
display: 'flex',
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'space-between',
backgroundColor: 'white',
}}
>
<Link
id="logo"
to="/"
style={{
//color: '#FFF',
textDecoration: 'none',
}}
>
<img alt="logo" src={logo} id="logo-image" />
</Link>
<Menu sidebarDocked={sidebarDocked} />
</div>
)
}
}
// Expose menu state from the Redux store.
// NOTE(review): neither `menuOpen` nor `nMenuItem` is read in this
// component's render — presumably mapped so menu changes trigger a
// re-render; verify before removing.
const mapStateToProps = state => {
  return {
    menuOpen: getMenuState(state).open,
    nMenuItem: getMenuState(state).nItem,
  }
}
export default connect(mapStateToProps)(Header)
|
import os
from glob import glob
import sys
import shutil
import pytest
from ..helpers import skip_if_windows, skip_if_not_windows
from jedi.evaluate import sys_path
from jedi.api.environment import create_environment
def test_paths_from_assignment(Script):
    """_paths_from_assignment extracts only literal strings assigned to sys.path."""
    def paths(src):
        # Build a throw-away module at a known path and hand its first
        # statement (the sys.path assignment) to the helper under test.
        script = Script(src, path='/foo/bar.py')
        expr_stmt = script._module_node.children[0]
        return set(sys_path._paths_from_assignment(script._get_module(), expr_stmt))
    # Normalize paths for Windows.
    path_a = os.path.abspath('/foo/a')
    path_b = os.path.abspath('/foo/b')
    path_c = os.path.abspath('/foo/c')
    assert paths('sys.path[0:0] = ["a"]') == {path_a}
    # Non-string elements are skipped; only the literals survive.
    assert paths('sys.path = ["b", 1, x + 3, y, "c"]') == {path_b, path_c}
    assert paths('sys.path = a = ["a"]') == {path_a}
    # Fail for complicated examples.
    assert paths('sys.path, other = ["a"], 2') == set()
def test_venv_and_pths(venv_path):
    """A virtualenv's .pth files (and executable lines) end up on sys path."""
    pjoin = os.path.join
    CUR_DIR = os.path.dirname(__file__)
    # Replace the venv's site-packages with the pth fixture directory.
    site_pkg_path = pjoin(venv_path, 'lib')
    if os.name == 'nt':
        site_pkg_path = pjoin(site_pkg_path, 'site-packages')
    else:
        site_pkg_path = glob(pjoin(site_pkg_path, 'python*', 'site-packages'))[0]
    shutil.rmtree(site_pkg_path)
    shutil.copytree(pjoin(CUR_DIR, 'sample_venvs', 'pth_directory'), site_pkg_path)
    virtualenv = create_environment(venv_path)
    venv_paths = virtualenv.get_sys_path()
    ETALON = [
        # For now disable egg-links. I have no idea how they work... ~ dave
        #pjoin('/path', 'from', 'egg-link'),
        #pjoin(site_pkg_path, '.', 'relative', 'egg-link', 'path'),
        site_pkg_path,
        pjoin(site_pkg_path, 'dir-from-foo-pth'),
        '/foo/smth.py:module',
        # Not sure why it's added twice. It has to do with site.py which is not
        # something we can change. However this obviously also doesn't matter.
        '/foo/smth.py:from_func',
        '/foo/smth.py:from_func',
    ]
    # Ensure that pth and egg-link paths were added.
    assert venv_paths[-len(ETALON):] == ETALON
    # Ensure that none of venv dirs leaked to the interpreter.
    assert not set(sys.path).intersection(ETALON)
# Shared sys.path candidates for the parametrized cases below.
_s = ['/a', '/b', '/c/d/']
@pytest.mark.parametrize(
    'sys_path_, module_path, expected, is_package', [
        (_s, '/a/b', ('b',), False),
        (_s, '/a/b/c', ('b', 'c'), False),
        (_s, '/a/b.py', ('b',), False),
        (_s, '/a/b/c.py', ('b', 'c'), False),
        # Module outside every sys.path entry: no dotted name.
        (_s, '/x/b.py', None, False),
        (_s, '/c/d/x.py', ('x',), False),
        (_s, '/c/d/x.py', ('x',), False),
        (_s, '/c/d/x/y.py', ('x', 'y'), False),
        # If dots are in there they also resolve. These are obviously illegal
        # in Python, but Jedi can handle them. Give the user a bit more freedom
        # that he will have to correct eventually.
        (_s, '/a/b.c.py', ('b.c',), False),
        (_s, '/a/b.d/foo.bar.py', ('b.d', 'foo.bar'), False),
        # A bare ".py" has no module name at all.
        (_s, '/a/.py', None, False),
        (_s, '/a/c/.py', None, False),
        (['/foo'], '/foo/bar/__init__.py', ('bar',), True),
        (['/foo'], '/foo/bar/baz/__init__.py', ('bar', 'baz'), True),
        # Extension modules are platform specific: .so off Windows, .pyd on.
        skip_if_windows(['/foo'], '/foo/bar.so', ('bar',), False),
        skip_if_windows(['/foo'], '/foo/bar/__init__.so', ('bar',), True),
        skip_if_not_windows(['/foo'], '/foo/bar.pyd', ('bar',), False),
        skip_if_not_windows(['/foo'], '/foo/bar/__init__.pyd', ('bar',), True),
        (['/foo'], '/x/bar.py', None, False),
        (['/foo'], '/foo/bar.xyz', ('bar.xyz',), False),
        # The longest matching sys.path prefix wins, regardless of order.
        (['/foo', '/foo/bar'], '/foo/bar/baz', ('baz',), False),
        (['/foo/bar', '/foo'], '/foo/bar/baz', ('baz',), False),
        (['/'], '/bar/baz.py', ('bar', 'baz',), False),
    ])
def test_transform_path_to_dotted(sys_path_, module_path, expected, is_package):
    # transform_path_to_dotted expects normalized absolute paths.
    sys_path_ = [os.path.abspath(path) for path in sys_path_]
    module_path = os.path.abspath(module_path)
    assert sys_path.transform_path_to_dotted(sys_path_, module_path) \
        == (expected, is_package)
|
import Vue from 'vue';
import App from './App.vue';
import router from './router';
import store from './store';
// Silence the "you're running Vue in development mode" console banner.
Vue.config.productionTip = false;
// Custom `v-visible` directive: toggles CSS `visibility`, so the element
// keeps its layout box when hidden (unlike `v-show`, which uses `display`).
Vue.directive('visible', (el, binding) => {
  // eslint-disable-next-line no-param-reassign
  el.style.visibility = binding.value ? 'visible' : 'hidden';
});
// Root instance: wires up the router and store and renders <App> into #app.
new Vue({
  router,
  store,
  render: (h) => h(App),
}).$mount('#app');
|
import embed from 'particle'
import _ from 'underscore'
import moment from 'moment'
import Article from '../../models/article'
import { crop } from '../../components/resizer'
import {
SAILTHRU_KEY,
SAILTHRU_SECRET,
SAILTHRU_MASTER_LIST
} from '../../config.coffee'
const sailThruClient = require('sailthru-client')
.createSailthruClient(SAILTHRU_KEY, SAILTHRU_SECRET)
/**
 * Render an editorial article page. Requires a signed-in user; anonymous
 * visitors are bounced to the login page with a return path.
 */
export const article = async (req, res, next) => {
  // BUG FIX: the return path must be URL-encoded, otherwise any query
  // string in req.url corrupts the redirect_uri parameter.
  if (!req.user) return res.redirect('/log_in?redirect_uri=' + encodeURIComponent(req.url))
  const article = new Article({ id: req.params.id })
  await article.fetchWithRelated({
    accessToken: req.user.get('accessToken')
  })
  // Primer digest pages get a custom subheading.
  if (req.url.match('primer-digest')) res.locals.h2 = 'Collecting Digest'
  res.locals.sd.ARTICLE = article.toJSON()
  res.render('article', { embed, crop, article, lushSignup: true })
}
/**
 * Render the personalize page. When a user is signed in, their freshly
 * fetched data is merged into the sharify CURRENT_USER payload first
 * (existing sharify keys win over fetched ones).
 */
export const personalize = async (req, res) => {
  const { user } = req
  if (user != null) {
    const freshUser = await user.fetch()
    res.locals.sd.CURRENT_USER = _.extend(freshUser, res.locals.sd.CURRENT_USER)
  }
  res.render('personalize')
}
/**
 * Subscribe the signed-in user to the Sailthru master list and stamp their
 * primer sign-up time. Responds 200 on success, 500 with an error message
 * otherwise.
 */
export const setSailthruData = async (req, res) => {
  sailThruClient.apiPost('user', {
    id: req.user.get('email'),
    lists: {
      [SAILTHRU_MASTER_LIST]: 1
    },
    vars: {
      artsy_primer: true,
      artsy_primer_sign_up_time: moment().format('X')
    }
  }, (err, response) => {
    if (!err && response && response.ok) {
      res.status(200).send({ set: true })
    } else {
      // BUG FIX: on a transport error `response` may be undefined, so
      // reading `response.errormsg` unguarded crashed the handler.
      const message =
        (response && response.errormsg) ||
        (err && err.message) ||
        'Sailthru request failed'
      res.status(500).send(message)
    }
  })
}
|
#!/usr/bin/env python3
from sys import argv
from random import randint
import os
N = 10
INT_LIMIT = 1000
MAX_LENGTH = 1000
def main():
    """Run the sort test selected by argv[1] N times, printing progress.

    Test numbers: '0' general sort, '1' quicksort trace, '2' heapsort trace.

    Raises:
        SystemExit: If argv[1] is not a known test number.
    """
    test_map = {
        '0': run_general_test,
        '1': run_quicksort_test,
        '2': run_heapsort_test
    }
    test_num = argv[1]
    # BUG FIX: an unknown test number used to dispatch to a no-op lambda yet
    # still print "OK" for every pass; fail loudly instead. The lookup is
    # also hoisted out of the loop (it never changes between passes).
    try:
        test_func = test_map[test_num]
    except KeyError:
        raise SystemExit('unknown test number: {!r}'.format(test_num))
    for i in range(1, N + 1):
        test_func()
        print(f'test {test_num} pass {i} OK')
def run_general_test():
    """Feed a random int array to ./a.out and check its output is sorted."""
    test_array = [
        randint(-INT_LIMIT, INT_LIMIT)
        for _ in range(randint(1, MAX_LENGTH))]
    # NOTE(review): '\\n' is a literal backslash-n in the echoed string; this
    # relies on the shell's `echo` interpreting escapes (sh/dash does, bash's
    # builtin does not without -e) — verify on the target platform.
    test_input = '{n}\\n{array}'.format(
        n=len(test_array),
        array=' '.join(str(x) for x in test_array))
    cmd = 'echo "{test_input}" | ./a.out'.format(test_input=test_input)
    user_out = os.popen(cmd).read().strip()
    user_array = [int(x) for x in user_out.split()]
    expected_array = sorted(test_array)
    assert user_array == expected_array, 'expected {}, got {}'.format(
        expected_array, user_array)
def run_quicksort_test():
    """Compare ./a.out's quicksort.log against a reference quicksort trace."""
    def partition(arr, low, high):
        # Lomuto partition: last element is the pivot.
        i = low - 1
        pivot = arr[high]
        for j in range(low, high):
            if arr[j] <= pivot:
                i += 1
                arr[i], arr[j] = arr[j], arr[i]
        arr[i + 1], arr[high] = arr[high], arr[i + 1]
        return i + 1
    def quicksort(arr, low, high):
        # Snapshot the whole array at every call (including base cases);
        # `expected_iterations` and `test_array` are closed over from below.
        expected_iterations.append(test_array.copy())
        if low < high:
            pi = partition(arr, low, high)
            quicksort(arr, low, pi-1)
            quicksort(arr, pi + 1, high)
    test_array = [
        randint(-INT_LIMIT, INT_LIMIT)
        for _ in range(randint(1, MAX_LENGTH))]
    # NOTE(review): '\\n' is a literal backslash-n; relies on the shell's
    # `echo` interpreting escapes — see run_general_test.
    test_input = '{n}\\n{array}'.format(
        n=len(test_array),
        array=' '.join(str(x) for x in test_array))
    cmd = 'echo "{test_input}" | ./a.out'.format(test_input=test_input)
    # Run the program for its side effect: it writes quicksort.log.
    os.popen(cmd).read()
    expected_iterations = list()
    quicksort(test_array, 0, len(test_array)-1)
    with open('quicksort.log') as f:
        user_iterations = [[int(x) for x in line.split()] for line in f.readlines()]
    assert user_iterations == expected_iterations, 'quicksort mismatch'
def run_heapsort_test():
    """Compare ./a.out's heapsort.log against a reference heapsort trace."""
    def heapify(arr, n, i):
        # Sift arr[i] down within the first n elements (max-heap).
        largest = i
        l = 2 * i + 1
        r = 2 * i + 2
        if l < n and arr[i] < arr[l]:
            largest = l
        if r < n and arr[largest] < arr[r]:
            largest = r
        if largest != i:
            arr[i],arr[largest] = arr[largest],arr[i]
            # Snapshot after every swap; `expected_iterations` and
            # `test_array` are closed over from the enclosing scope.
            expected_iterations.append(test_array.copy())
            heapify(arr, n, largest)
    def heapsort(arr):
        expected_iterations.append(test_array.copy())
        n = len(arr)
        # Build the max-heap, then repeatedly extract the maximum.
        for i in range(n, -1, -1):
            heapify(arr, n, i)
        for i in range(n-1, 0, -1):
            arr[i], arr[0] = arr[0], arr[i]
            expected_iterations.append(test_array.copy())
            heapify(arr, i, 0)
    test_array = [
        randint(-INT_LIMIT, INT_LIMIT)
        for _ in range(randint(1, MAX_LENGTH))]
    # NOTE(review): '\\n' is a literal backslash-n; relies on the shell's
    # `echo` interpreting escapes — see run_general_test.
    test_input = '{n}\\n{array}'.format(
        n=len(test_array),
        array=' '.join(str(x) for x in test_array))
    cmd = 'echo "{test_input}" | ./a.out'.format(test_input=test_input)
    # Run the program for its side effect: it writes heapsort.log.
    os.popen(cmd).read()
    expected_iterations = list()
    heapsort(test_array)
    with open('heapsort.log') as f:
        user_iterations = [[int(x) for x in line.split()] for line in f.readlines()]
    assert user_iterations == expected_iterations, 'heapsort mismatch'
if __name__ == "__main__":
    # Entry point: the test number is taken from argv[1] inside main().
    main()
|
import React from 'react';
import './Board.css';
import Square from './Square';
import PropTypes from 'prop-types';
const generateSquareComponents = (squares, onClickCallback) => {
// Complete this for Wave 1
// squares is a 2D Array, but
// you need to return a 1D array
// of square components
// itterate over 2D array and return 1D
const squareComponents = [];
for (let innerSquares of squares) {
for (let square of innerSquares) {
squareComponents.push(
<Square
key={square.id}
value={square.value}
id={square.id}
onClickCallback={onClickCallback}
/>
);
}
}
return squareComponents;
};
const Board = ({ squares, onClickCallback }) => {
const squareList = generateSquareComponents(squares, onClickCallback);
console.log(squareList);
return <div className="grid">{squareList}</div>;
};
Board.propTypes = {
  // 2D grid of cell descriptors, one object per square.
  squares: PropTypes.arrayOf(
    PropTypes.arrayOf(
      PropTypes.shape({
        id: PropTypes.number.isRequired,
        value: PropTypes.string.isRequired,
      })
    )
  ),
  // Invoked by each Square when clicked.
  onClickCallback: PropTypes.func.isRequired,
};
export default Board;
|
import pytest
from urlparse import urlparse
from rest_framework import exceptions
from api.base.settings.defaults import API_BASE
from website.util import permissions
from osf.models import Registration, NodeLog
from framework.auth import Auth
from api.registrations.serializers import RegistrationSerializer, RegistrationDetailSerializer
from osf_tests.factories import (
ProjectFactory,
RegistrationFactory,
RegistrationApprovalFactory,
AuthUserFactory,
WithdrawnRegistrationFactory,
)
from tests.utils import assert_latest_log
@pytest.fixture()
def user():
    """A fresh authenticated user."""
    return AuthUserFactory()
@pytest.mark.django_db
class TestRegistrationDetail:
    @pytest.fixture()
    def public_project(self, user):
        """A public project owned by `user`."""
        return ProjectFactory(
            title='Public Project',
            is_public=True,
            creator=user)
    @pytest.fixture()
    def private_project(self, user):
        """A private project owned by `user`."""
        return ProjectFactory(title='Private Project', creator=user)
    @pytest.fixture()
    def public_registration(self, user, public_project):
        """A public registration of the public project."""
        return RegistrationFactory(
            project=public_project,
            creator=user,
            is_public=True)
    @pytest.fixture()
    def private_registration(self, user, private_project):
        """A private registration of the private project."""
        return RegistrationFactory(project=private_project, creator=user)
    @pytest.fixture()
    def public_url(self, public_registration):
        """API detail URL for the public registration."""
        return '/{}registrations/{}/'.format(API_BASE, public_registration._id)
    @pytest.fixture()
    def private_url(self, private_registration):
        """API detail URL for the private registration."""
        return '/{}registrations/{}/'.format(
            API_BASE, private_registration._id)
def test_registration_detail(
self, app, user, public_project, private_project,
public_registration, private_registration,
public_url, private_url):
non_contributor = AuthUserFactory()
# test_return_public_registration_details_logged_out
res = app.get(public_url)
assert res.status_code == 200
data = res.json['data']
registered_from = urlparse(
data['relationships']['registered_from']['links']['related']['href']
).path
assert data['attributes']['registration'] is True
assert registered_from == '/{}nodes/{}/'.format(
API_BASE, public_project._id)
# test_return_public_registration_details_logged_in
res = app.get(public_url, auth=user.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
data = res.json['data']
registered_from = urlparse(
data['relationships']['registered_from']['links']['related']['href']).path
assert data['attributes']['registration'] is True
assert registered_from == '/{}nodes/{}/'.format(
API_BASE, public_project._id)
# test_return_private_registration_details_logged_out
res = app.get(private_url, expect_errors=True)
assert res.status_code == 401
assert 'detail' in res.json['errors'][0]
# test_return_private_project_registrations_logged_in_contributor
res = app.get(private_url, auth=user.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
data = res.json['data']
registered_from = urlparse(
data['relationships']['registered_from']['links']['related']['href']).path
assert data['attributes']['registration'] is True
assert registered_from == '/{}nodes/{}/'.format(
API_BASE, private_project._id)
# test_return_private_registration_details_logged_in_non_contributor
res = app.get(
private_url,
auth=non_contributor.auth,
expect_errors=True)
assert res.status_code == 403
assert 'detail' in res.json['errors'][0]
# test_do_not_return_node_detail
url = '/{}registrations/{}/'.format(API_BASE, public_project._id)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 404
assert res.json['errors'][0]['detail'] == exceptions.NotFound.default_detail
# test_do_not_return_node_detail_in_sub_view
url = '/{}registrations/{}/contributors/'.format(
API_BASE, public_project._id)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 404
assert res.json['errors'][0]['detail'] == exceptions.NotFound.default_detail
# test_do_not_return_registration_in_node_detail
url = '/{}nodes/{}/'.format(API_BASE, public_registration._id)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 404
assert res.json['errors'][0]['detail'] == exceptions.NotFound.default_detail
# test_registration_shows_specific_related_counts
url = '/{}registrations/{}/?related_counts=children'.format(
API_BASE, private_registration._id)
res = app.get(url, auth=user.auth)
assert res.status_code == 200
assert res.json['data']['relationships']['children']['links']['related']['meta']['count'] == 0
assert res.json['data']['relationships']['contributors']['links']['related']['meta'] == {}
# test_hide_if_registration
# Registrations are a HideIfRegistration field
node_url = '/{}nodes/{}/'.format(API_BASE, private_project._id)
res = app.get(node_url, auth=user.auth)
assert res.status_code == 200
assert 'registrations' in res.json['data']['relationships']
res = app.get(private_url, auth=user.auth)
assert res.status_code == 200
assert 'registrations' not in res.json['data']['relationships']
@pytest.mark.django_db
class TestRegistrationUpdate:
    """Tests for PUT /registrations/<id>/.

    Only the 'public' attribute is writeable, only admins may flip it, and
    registrations may only go private -> public (never back).
    """
    @pytest.fixture()
    def read_only_contributor(self):
        return AuthUserFactory()
    @pytest.fixture()
    def read_write_contributor(self):
        return AuthUserFactory()
    @pytest.fixture()
    def registration_approval(self, user):
        return RegistrationApprovalFactory(
            state='unapproved', approve=False, user=user)
    @pytest.fixture()
    def unapproved_registration(self, registration_approval):
        return Registration.objects.get(
            registration_approval=registration_approval)
    @pytest.fixture()
    def unapproved_url(self, unapproved_registration):
        return '/{}registrations/{}/'.format(
            API_BASE, unapproved_registration._id)
    @pytest.fixture()
    def public_project(self, user):
        return ProjectFactory(
            title='Public Project',
            is_public=True,
            creator=user)
    @pytest.fixture()
    def private_project(self, user):
        return ProjectFactory(title='Private Project', creator=user)
    @pytest.fixture()
    def public_registration(self, user, public_project):
        return RegistrationFactory(
            project=public_project,
            creator=user,
            is_public=True)
    @pytest.fixture()
    def private_registration(
            self, user, private_project, read_only_contributor,
            read_write_contributor):
        # Private registration with one READ and one WRITE contributor in
        # addition to the admin creator, to exercise each permission level.
        private_registration = RegistrationFactory(
            project=private_project, creator=user)
        private_registration.add_contributor(
            read_only_contributor, permissions=[
                permissions.READ])
        private_registration.add_contributor(
            read_write_contributor, permissions=[
                permissions.WRITE])
        private_registration.save()
        return private_registration
    @pytest.fixture()
    def public_url(self, public_registration):
        return '/{}registrations/{}/'.format(API_BASE, public_registration._id)
    @pytest.fixture()
    def private_url(self, private_registration):
        return '/{}registrations/{}/'.format(
            API_BASE, private_registration._id)
    @pytest.fixture()
    def attributes(self):
        return {'public': True}
    @pytest.fixture()
    def make_payload(self, private_registration, attributes):
        # Factory for JSON-API payloads; defaults target the private
        # registration with {'public': True}, each piece overridable.
        def payload(
                id=private_registration._id,
                type='registrations',
                attributes=attributes
        ):
            return {
                'data': {
                    'id': id,
                    'type': type,
                    'attributes': attributes
                }
            }
        return payload
    def test_update_registration(
            self, app, user, read_only_contributor,
            read_write_contributor, public_registration,
            public_url, private_url, make_payload):
        """Only admins can make a registration public; never public -> private."""
        private_registration_payload = make_payload()
        non_contributor = AuthUserFactory()
        # test_update_private_registration_logged_out
        res = app.put_json_api(
            private_url,
            private_registration_payload,
            expect_errors=True)
        assert res.status_code == 401
        # test_update_private_registration_logged_in_admin
        res = app.put_json_api(
            private_url,
            private_registration_payload,
            auth=user.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['public'] is True
        # test_update_private_registration_logged_in_read_only_contributor
        res = app.put_json_api(
            private_url,
            private_registration_payload,
            auth=read_only_contributor.auth,
            expect_errors=True)
        assert res.status_code == 403
        # test_update_private_registration_logged_in_read_write_contributor
        res = app.put_json_api(
            private_url,
            private_registration_payload,
            auth=read_write_contributor.auth,
            expect_errors=True)
        assert res.status_code == 403
        # test_update_public_registration_to_private
        public_to_private_payload = make_payload(
            id=public_registration._id, attributes={'public': False})
        res = app.put_json_api(
            public_url,
            public_to_private_payload,
            auth=user.auth,
            expect_errors=True)
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == 'Registrations can only be turned from private to public.'
        res = app.put_json_api(
            public_url,
            public_to_private_payload,
            auth=non_contributor.auth,
            expect_errors=True)
        assert res.status_code == 403
        assert res.json['errors'][0]['detail'] == 'You do not have permission to perform this action.'
    def test_fields(
            self, app, user, public_registration,
            private_registration, public_url,
            private_url, make_payload):
        """Field validation: bad booleans rejected, non-public fields ignored,
        type/id mismatches produce 409."""
        # test_public_field_has_invalid_value
        invalid_public_payload = make_payload(
            id=public_registration._id,
            attributes={'public': 'Dr.Strange'})
        res = app.put_json_api(
            public_url,
            invalid_public_payload,
            auth=user.auth,
            expect_errors=True)
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == '"Dr.Strange" is not a valid boolean.'
        # test_fields_other_than_public_are_ignored
        attribute_list = {
            'public': True,
            'category': 'instrumentation',
            'title': 'New title',
            'description': 'New description'
        }
        verbose_private_payload = make_payload(attributes=attribute_list)
        res = app.put_json_api(
            private_url,
            verbose_private_payload,
            auth=user.auth)
        assert res.status_code == 200
        assert res.json['data']['attributes']['public'] is True
        # category/title/description must remain unchanged despite the payload
        assert res.json['data']['attributes']['category'] == 'project'
        assert res.json['data']['attributes']['description'] == private_registration.description
        assert res.json['data']['attributes']['title'] == private_registration.title
        # test_type_field_must_match
        node_type_payload = make_payload(type='node')
        res = app.put_json_api(
            private_url,
            node_type_payload,
            auth=user.auth,
            expect_errors=True)
        assert res.status_code == 409
        # test_id_field_must_match
        mismatch_id_payload = make_payload(id='12345')
        res = app.put_json_api(
            private_url,
            mismatch_id_payload,
            auth=user.auth,
            expect_errors=True)
        assert res.status_code == 409
    def test_turning_private_registrations_public(
            self, app, user, make_payload):
        private_project = ProjectFactory(creator=user, is_public=False)
        private_registration = RegistrationFactory(
            project=private_project, creator=user, is_public=False)
        private_to_public_payload = make_payload(id=private_registration._id)
        url = '/{}registrations/{}/'.format(API_BASE, private_registration._id)
        res = app.put_json_api(url, private_to_public_payload, auth=user.auth)
        assert res.json['data']['attributes']['public'] is True
        private_registration.reload()
        assert private_registration.is_public
    def test_registration_fields_are_read_only(self):
        # Every serializer field outside this whitelist must be read-only.
        writeable_fields = [
            'type',
            'public',
            'draft_registration',
            'registration_choice',
            'lift_embargo',
            'tags']
        for field in RegistrationSerializer._declared_fields:
            reg_field = RegistrationSerializer._declared_fields[field]
            if field not in writeable_fields:
                assert getattr(reg_field, 'read_only', False) is True
    def test_registration_detail_fields_are_read_only(self):
        writeable_fields = [
            'type',
            'public',
            'draft_registration',
            'registration_choice',
            'lift_embargo',
            'tags']
        for field in RegistrationDetailSerializer._declared_fields:
            # NOTE(review): iterates RegistrationDetailSerializer fields but
            # looks them up on RegistrationSerializer — possible copy-paste
            # from the test above; confirm intent.
            reg_field = RegistrationSerializer._declared_fields[field]
            if field not in writeable_fields:
                assert getattr(reg_field, 'read_only', False) is True
    def test_user_cannot_delete_registration(self, app, user, private_url):
        res = app.delete_json_api(
            private_url,
            expect_errors=True,
            auth=user.auth)
        assert res.status_code == 405
    def test_make_public_unapproved_registration_raises_error(
            self, app, user, unapproved_registration, unapproved_url, make_payload):
        attribute_list = {
            'public': True,
            'withdrawn': True
        }
        unapproved_registration_payload = make_payload(
            id=unapproved_registration._id, attributes=attribute_list)
        res = app.put_json_api(
            unapproved_url,
            unapproved_registration_payload,
            auth=user.auth,
            expect_errors=True)
        assert res.status_code == 400
        assert res.json['errors'][0]['detail'] == 'An unapproved registration cannot be made public.'
@pytest.mark.django_db
class TestRegistrationTags:
    """Tag add/remove behavior on registrations, including withdrawn ones."""
    @pytest.fixture()
    def user_admin(self):
        return AuthUserFactory()
    @pytest.fixture()
    def user_read_contrib(self):
        return AuthUserFactory()
    @pytest.fixture()
    def user_non_contrib(self):
        return AuthUserFactory()
    @pytest.fixture()
    def project_public(self, user_admin, user_read_contrib):
        project_public = ProjectFactory(
            title='Project One',
            is_public=True,
            creator=user_admin)
        project_public.add_contributor(
            user_admin,
            permissions=permissions.CREATOR_PERMISSIONS,
            save=True)
        project_public.add_contributor(
            user_read_contrib,
            permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS,
            save=True)
        return project_public
    @pytest.fixture()
    def registration_public(self, project_public, user_admin):
        return RegistrationFactory(
            project=project_public,
            creator=user_admin,
            is_public=True)
    @pytest.fixture()
    def registration_private(self, project_public, user_admin):
        return RegistrationFactory(
            project=project_public,
            creator=user_admin,
            is_public=False)
    @pytest.fixture()
    def registration_withdrawn(self, project_public, user_admin):
        return RegistrationFactory(
            project=project_public,
            creator=user_admin,
            is_public=True)
    @pytest.fixture()
    def withdrawn_registration(self, registration_withdrawn, user_admin):
        # Tag it *before* withdrawal so removal-after-withdrawal can be tested.
        registration_withdrawn.add_tag(
            'existing-tag', auth=Auth(user=user_admin))
        registration_withdrawn.save()
        withdrawn_registration = WithdrawnRegistrationFactory(
            registration=registration_withdrawn, user=user_admin)
        withdrawn_registration.justification = 'We made a major error.'
        withdrawn_registration.save()
        return withdrawn_registration
    @pytest.fixture()
    def url_registration_public(self, registration_public):
        return '/{}registrations/{}/'.format(
            API_BASE, registration_public._id)
    @pytest.fixture()
    def url_registration_private(self, registration_private):
        return '/{}registrations/{}/'.format(
            API_BASE, registration_private._id)
    @pytest.fixture()
    def url_registration_withdrawn(
            self, registration_withdrawn, withdrawn_registration):
        return '/{}registrations/{}/'.format(
            API_BASE, registration_withdrawn._id)
    @pytest.fixture()
    def new_tag_payload_public(self, registration_public):
        return {
            'data': {
                'id': registration_public._id,
                'type': 'registrations',
                'attributes': {
                    'tags': ['new-tag'],
                }
            }
        }
    @pytest.fixture()
    def new_tag_payload_private(self, registration_private):
        return {
            'data': {
                'id': registration_private._id,
                'type': 'registrations',
                'attributes': {
                    'tags': ['new-tag'],
                }
            }
        }
    @pytest.fixture()
    def new_tag_payload_withdrawn(self, registration_withdrawn):
        return {
            'data': {
                'id': registration_withdrawn._id,
                'type': 'registrations',
                'attributes': {
                    'tags': ['new-tag', 'existing-tag'],
                }
            }
        }
    def test_registration_tags(
            self, app, registration_public, registration_private,
            url_registration_public, url_registration_private,
            new_tag_payload_public, new_tag_payload_private,
            user_admin, user_non_contrib):
        """Tag visibility and contributor-only tag editing."""
        # test_registration_starts_with_no_tags
        res = app.get(url_registration_public)
        assert res.status_code == 200
        assert len(res.json['data']['attributes']['tags']) == 0
        # test_registration_does_not_expose_system_tags
        registration_public.add_system_tag('systag', save=True)
        res = app.get(url_registration_public)
        assert res.status_code == 200
        assert len(res.json['data']['attributes']['tags']) == 0
        # test_contributor_can_add_tag_to_public_registration
        with assert_latest_log(NodeLog.TAG_ADDED, registration_public):
            res = app.patch_json_api(
                url_registration_public,
                new_tag_payload_public,
                auth=user_admin.auth)
            assert res.status_code == 200
            # Ensure data is correct from the PATCH response
            assert len(res.json['data']['attributes']['tags']) == 1
            assert res.json['data']['attributes']['tags'][0] == 'new-tag'
            # Ensure data is correct in the database
            registration_public.reload()
            assert registration_public.tags.count() == 1
            assert registration_public.tags.first()._id == 'new-tag'
            # Ensure data is correct when GETting the resource again
            reload_res = app.get(url_registration_public)
            assert len(reload_res.json['data']['attributes']['tags']) == 1
            assert reload_res.json['data']['attributes']['tags'][0] == 'new-tag'
        # test_contributor_can_add_tag_to_private_registration
        with assert_latest_log(NodeLog.TAG_ADDED, registration_private):
            res = app.patch_json_api(
                url_registration_private,
                new_tag_payload_private,
                auth=user_admin.auth)
            assert res.status_code == 200
            # Ensure data is correct from the PATCH response
            assert len(res.json['data']['attributes']['tags']) == 1
            assert res.json['data']['attributes']['tags'][0] == 'new-tag'
            # Ensure data is correct in the database
            registration_private.reload()
            assert registration_private.tags.count() == 1
            assert registration_private.tags.first()._id == 'new-tag'
            # Ensure data is correct when GETting the resource again
            reload_res = app.get(
                url_registration_private,
                auth=user_admin.auth)
            assert len(reload_res.json['data']['attributes']['tags']) == 1
            assert reload_res.json['data']['attributes']['tags'][0] == 'new-tag'
        # test_non_contributor_cannot_add_tag_to_registration
        res = app.patch_json_api(
            url_registration_public,
            new_tag_payload_public,
            expect_errors=True,
            auth=user_non_contrib.auth)
        assert res.status_code == 403
        # test_partial_update_registration_does_not_clear_tags
        new_payload = {
            'data': {
                'id': registration_private._id,
                'type': 'registrations',
                'attributes': {
                    'public': True
                }
            }
        }
        res = app.patch_json_api(
            url_registration_private,
            new_payload,
            auth=user_admin.auth)
        assert res.status_code == 200
        assert len(res.json['data']['attributes']['tags']) == 1
    def test_tags_add_and_remove_properly(
            self, app, user_admin, registration_public,
            new_tag_payload_public, url_registration_public):
        """Replacing the tags list adds and removes, logging each change."""
        with assert_latest_log(NodeLog.TAG_ADDED, registration_public):
            res = app.patch_json_api(
                url_registration_public,
                new_tag_payload_public,
                auth=user_admin.auth)
            assert res.status_code == 200
            # Ensure adding tag data is correct from the PATCH response
            assert len(res.json['data']['attributes']['tags']) == 1
            assert res.json['data']['attributes']['tags'][0] == 'new-tag'
        with assert_latest_log(NodeLog.TAG_REMOVED, registration_public), assert_latest_log(NodeLog.TAG_ADDED, registration_public, 1):
            # Ensure removing and adding tag data is correct from the PATCH
            # response
            res = app.patch_json_api(
                url_registration_public,
                {
                    'data': {
                        'id': registration_public._id,
                        'type': 'registrations',
                        'attributes': {'tags': ['newer-tag']}
                    }
                }, auth=user_admin.auth)
            assert res.status_code == 200
            assert len(res.json['data']['attributes']['tags']) == 1
            assert res.json['data']['attributes']['tags'][0] == 'newer-tag'
        with assert_latest_log(NodeLog.TAG_REMOVED, registration_public):
            # Ensure removing tag data is correct from the PATCH response
            res = app.patch_json_api(
                url_registration_public,
                {
                    'data': {
                        'id': registration_public._id,
                        'type': 'registrations',
                        'attributes': {'tags': []}
                    }
                }, auth=user_admin.auth)
            assert res.status_code == 200
            assert len(res.json['data']['attributes']['tags']) == 0
    def test_tags_for_withdrawn_registration(
            self, app, registration_withdrawn, user_admin,
            url_registration_withdrawn, new_tag_payload_withdrawn):
        """Withdrawn registrations reject both tag addition and removal with 409."""
        res = app.patch_json_api(
            url_registration_withdrawn,
            new_tag_payload_withdrawn,
            auth=user_admin.auth,
            expect_errors=True)
        assert res.status_code == 409
        assert res.json['errors'][0]['detail'] == 'Cannot add tags to withdrawn registrations.'
        res = app.patch_json_api(
            url_registration_withdrawn,
            {
                'data': {
                    'id': registration_withdrawn._id,
                    'type': 'registrations',
                    'attributes': {'tags': []}
                }
            },
            auth=user_admin.auth,
            expect_errors=True)
        assert res.status_code == 409
        assert res.json['errors'][0]['detail'] == 'Cannot remove tags of withdrawn registrations.'
|
# Generated by Django 2.0.6 on 2018-08-06 11:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: restate docket_day choices as all seven weekdays."""
    dependencies = [
        ('daily_dockets', '0008_auto_20180713_0913'),
    ]
    operations = [
        migrations.AlterField(
            model_name='docket',
            name='docket_day',
            field=models.CharField(choices=[('Monday', 'Monday'), ('Tuesday', 'Tuesday'), ('Wednesday', 'Wednesday'), ('Thursday', 'Thursday'), ('Friday', 'Friday'), ('Saturday', 'Saturday'), ('Sunday', 'Sunday')], default='Monday', max_length=20),
        ),
    ]
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
import komand
import json
class Component:
    # Human-readable action summary used by the Komand plugin metadata.
    DESCRIPTION = "Attaches an existing disk resource to an instance"
class Input:
    # String keys of the action's input parameters (match the JSON schema below).
    INSTANCE = "instance"
    SOURCE = "source"
    ZONE = "zone"
class Output:
    # String keys of the GCE operation-resource fields returned by the action.
    CLIENTOPERATIONID = "clientOperationId"
    DESCRIPTION = "description"
    ENDTIME = "endTime"
    ERROR = "error"
    HTTPERRORMESSAGE = "httpErrorMessage"
    HTTPERRORSTATUSCODE = "httpErrorStatusCode"
    ID = "id"
    INSERTTIME = "insertTime"
    KIND = "kind"
    NAME = "name"
    OPERATIONTYPE = "operationType"
    PROGRESS = "progress"
    REGION = "region"
    SELFLINK = "selfLink"
    STARTTIME = "startTime"
    STATUS = "status"
    STATUSMESSAGE = "statusMessage"
    TARGETID = "targetId"
    TARGETLINK = "targetLink"
    USER = "user"
    WARNINGS = "warnings"
    ZONE = "zone"
class DiskAttachInput(komand.Input):
    """JSON-schema-validated input for the disk-attach action."""
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "instance": {
      "type": "string",
      "title": "Name Instance",
      "description": "Name of the instance resource to stop",
      "order": 2
    },
    "source": {
      "type": "string",
      "title": "Source",
      "description": "Valid partial or full URL to an existing persistent disk resource (e.g. projects/my-project-171212/zones/us-central1-c/disks/new-disk)",
      "order": 3
    },
    "zone": {
      "type": "string",
      "title": "Zone",
      "description": "The name of the zone for this request",
      "order": 1
    }
  },
  "required": [
    "instance",
    "source",
    "zone"
  ]
}
    """)

    def __init__(self):
        # Fix: super(self.__class__, ...) recurses infinitely if this class is
        # ever subclassed; name the class explicitly (Python 2 style).
        super(DiskAttachInput, self).__init__(self.schema)
class DiskAttachOutput(komand.Output):
    """JSON-schema-validated output (a GCE zone operation resource)."""
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "clientOperationId": {
      "type": "string",
      "title": "Client OperationId",
      "description": "Reserved for future use",
      "order": 14
    },
    "description": {
      "type": "string",
      "title": "Description",
      "description": "A textual description of the operation, which is set when the operation is created",
      "order": 4
    },
    "endTime": {
      "type": "string",
      "title": "End Time",
      "description": "The time that this operation was completed",
      "order": 22
    },
    "error": {
      "$ref": "#/definitions/error",
      "title": "Error",
      "description": "If errors are generated during processing of the operation, this field will be populated",
      "order": 21
    },
    "httpErrorMessage": {
      "type": "string",
      "title": "HTTP Error Message",
      "description": "If the operation fails, this field contains the HTTP error message that was returned",
      "order": 15
    },
    "httpErrorStatusCode": {
      "type": "integer",
      "title": "HTTP Error Status Code",
      "description": "If the operation fails, this field contains the HTTP error status code that was returned",
      "order": 16
    },
    "id": {
      "type": "string",
      "title": "ID",
      "description": "The unique identifier for the resource. This identifier is defined by the server",
      "order": 1
    },
    "insertTime": {
      "type": "string",
      "title": "Insert Time",
      "description": "The time that this operation was requested",
      "order": 5
    },
    "kind": {
      "type": "string",
      "title": "Kind",
      "description": "Type of the resource. Always compute#attachedDisk for operation resources",
      "order": 2
    },
    "name": {
      "type": "string",
      "title": "Name",
      "description": "Name of the resource",
      "order": 3
    },
    "operationType": {
      "type": "string",
      "title": "Operation Type",
      "description": "The type of operation, such as insert, update, or delete, and so on",
      "order": 17
    },
    "progress": {
      "type": "integer",
      "title": "Progress",
      "description": "An optional progress indicator that ranges from 0 to 100",
      "order": 6
    },
    "region": {
      "type": "string",
      "title": "Region",
      "description": "The URL of the region where the operation resides",
      "order": 20
    },
    "selfLink": {
      "type": "string",
      "title": "Self Link",
      "description": "Server-defined URL for the resource",
      "order": 9
    },
    "startTime": {
      "type": "string",
      "title": "Start Time",
      "description": "The time that this operation was started by the server",
      "order": 19
    },
    "status": {
      "type": "string",
      "title": "Status",
      "description": "The status of the operation, which can be one of the following: pending, running, or done",
      "order": 12
    },
    "statusMessage": {
      "type": "string",
      "title": "Status Message",
      "description": "An optional textual description of the current status of the operation",
      "order": 13
    },
    "targetId": {
      "type": "string",
      "title": "TargetID",
      "description": "The unique targetID, which identifies a specific incarnation of the target resource",
      "order": 8
    },
    "targetLink": {
      "type": "string",
      "title": "Target Link",
      "description": "The URL of the resource that the operation modifies",
      "order": 7
    },
    "user": {
      "type": "string",
      "title": "User",
      "description": "User who requested the operation",
      "order": 10
    },
    "warnings": {
      "type": "array",
      "title": "Warnings",
      "description": "Warning messages",
      "items": {
        "$ref": "#/definitions/warnings"
      },
      "order": 18
    },
    "zone": {
      "type": "string",
      "title": "Zone",
      "description": "The URL of the zone where the operation resides. Only available when performing per-zone operations",
      "order": 11
    }
  },
  "definitions": {
    "data": {
      "type": "object",
      "title": "data",
      "properties": {
        "key": {
          "type": "string",
          "title": "Key",
          "order": 1
        },
        "value": {
          "type": "string",
          "title": "Value",
          "order": 2
        }
      }
    },
    "error": {
      "type": "object",
      "title": "error",
      "properties": {
        "errors": {
          "type": "array",
          "title": "Errors",
          "items": {
            "$ref": "#/definitions/errors"
          },
          "order": 1
        }
      },
      "definitions": {
        "errors": {
          "type": "object",
          "title": "errors",
          "properties": {
            "code": {
              "type": "string",
              "title": "Code",
              "order": 1
            },
            "location": {
              "type": "string",
              "title": "Location",
              "order": 2
            },
            "message": {
              "type": "string",
              "title": "Message",
              "order": 3
            }
          }
        }
      }
    },
    "errors": {
      "type": "object",
      "title": "errors",
      "properties": {
        "code": {
          "type": "string",
          "title": "Code",
          "order": 1
        },
        "location": {
          "type": "string",
          "title": "Location",
          "order": 2
        },
        "message": {
          "type": "string",
          "title": "Message",
          "order": 3
        }
      }
    },
    "warnings": {
      "type": "object",
      "title": "warnings",
      "properties": {
        "code": {
          "type": "string",
          "title": "Code",
          "order": 1
        },
        "data": {
          "type": "array",
          "title": "Data",
          "items": {
            "$ref": "#/definitions/data"
          },
          "order": 2
        },
        "message": {
          "type": "string",
          "title": "Message",
          "order": 3
        }
      },
      "definitions": {
        "data": {
          "type": "object",
          "title": "data",
          "properties": {
            "key": {
              "type": "string",
              "title": "Key",
              "order": 1
            },
            "value": {
              "type": "string",
              "title": "Value",
              "order": 2
            }
          }
        }
      }
    }
  }
}
    """)

    def __init__(self):
        # Fix: super(self.__class__, ...) recurses infinitely if this class is
        # ever subclassed; name the class explicitly (Python 2 style).
        super(DiskAttachOutput, self).__init__(self.schema)
|
// Jest configuration for Puppeteer-based tests: setup.js launches the shared
// browser, teardown.js closes it, and the custom environment connects each
// test worker to that browser instance.
module.exports = {
  globalSetup: './setup.js',
  globalTeardown: './teardown.js',
  testEnvironment: './puppeteer_environment.js'
}
|
from setuptools import setup
from setuptools import find_packages

# The PyPI long description is taken verbatim from the README.
with open("README.md", "r") as fh:
    long_description = fh.read()

# Runtime dependencies live in requirements.txt; skip blank lines so they
# don't end up as empty entries in install_requires.
with open("requirements.txt", "r") as fh:
    requirements = [line.strip() for line in fh if line.strip()]

setup(
    name="file-ops",
    version='0.0.0',
    license='MIT',
    author='M. H. Bani-Hashemian',
    author_email='hossein.banihashemian@alumni.ethz.ch',
    description='Efficient information extraction from large files.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/seyedb/file-ops',
    package_dir={'': 'src'},
    packages=find_packages('src'),
    test_suite='tests',
    classifiers=[
        # Fix: '1 - Alpha' is not a valid trove classifier; the alpha-stage
        # value recognized by PyPI is '3 - Alpha'.
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
    ],
    keywords=[],
    python_requires='>=3.6',
    install_requires=requirements
)
|
r"""
Emperor 3D PCoA viewer (:mod:`emperor.core`)
============================================
This module provides an Object to interact and visualize an Emperor plot
from the IPython notebook.
.. currentmodule:: emperor.core
Classes
-------
.. autosummary::
:toctree: generated/
Emperor
"""
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, emperor development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE.md, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import division
from copy import deepcopy
from os.path import join, basename
from distutils.dir_util import copy_tree
import warnings
import numpy as np
import pandas as pd
from jinja2 import FileSystemLoader
from jinja2.environment import Environment
from skbio import OrdinationResults
from emperor import __version__ as emperor_version
from emperor.util import (get_emperor_support_files_dir,
preprocess_coords_file, resolve_stable_url,
validate_and_process_custom_axes, EmperorWarning)
# we are going to use this remote location to load external resources
REMOTE_URL = ('https://cdn.rawgit.com/biocore/emperor/%s/emperor'
'/support_files')
LOCAL_URL = "/nbextensions/emperor/support_files"
STYLE_PATH = join(get_emperor_support_files_dir(), 'templates',
'style-template.html')
LOGIC_PATH = join(get_emperor_support_files_dir(), 'templates',
'logic-template.html')
STANDALONE_PATH = join(get_emperor_support_files_dir(), 'templates',
'standalone-template.html')
JUPYTER_PATH = join(get_emperor_support_files_dir(), 'templates',
'jupyter-template.html')
class Emperor(object):
"""Display principal coordinates analysis plots
Use this object to interactively display a PCoA plot using the Emperor
GUI. IPython provides a rich display system that will let you display a
    plot inline, without the need of creating a temporary file or having to
write to disk.
Parameters
----------
ordination: skbio.OrdinationResults
Object containing the computed values for an ordination method in
scikit-bio. Currently supports skbio.stats.ordination.PCoA and
skbio.stats.ordination.RDA results.
mapping_file: pd.DataFrame
DataFrame object with the metadata associated to the samples in the
``ordination`` object, should have an index set and it should match the
identifiers in the ``ordination`` object.
feature_mapping_file: pd.DataFrame, optional
DataFrame object with the metadata associated to the features in the
``ordination`` object, should have an index set and it should match the
identifiers in the ``ordination.features`` object.
dimensions: int, optional
Number of dimensions to keep from the ordination data, defaults to 5.
Be aware that this value will determine the number of dimensions for
all computations.
remote: bool or str, optional
This parameter can have one of the following three behaviors according
to the value: (1) ``str`` - load the resources from a user-specified
remote location, (2) ``False`` - load the resources from the
nbextensions folder in the Jupyter installation or (3) ``True`` - load
the resources from the GitHub repository. This parameter defaults to
``True``. See the Notes section for more information.
jackknifed: list of OrdinationResults, optional
A list of the OrdinationResults objects with the same sample
identifiers as the identifiers in ``ordination``.
procrustes: list of OrdinationResults, optional
A list of the OrdinationResults objects with the same sample
identifiers as the identifiers in ``ordination``.
ignore_missing_samples: bool, optional
If set to `True` samples without metadata are included by setting all
metadata values to: ``This sample has not metadata``. By default an
exception will be raised if missing samples are encountered. Note, this
flag only takes effect if there's at least one overlapping sample.
Attributes
----------
jackknifed: list
List of OrdinationResults objects in the same sample-order as
``self.ordination``.
procrustes: list
List of OrdinationResults objects in the same sample-order as
``self.ordination``.
procrustes_names: list
A list of names that will be used to distinguish samples from each
ordination in a procrustes plot. The GUI will display a category
labeled ``__Procrustes_Names__``.
width: str
Width of the plot when displayed in the Jupyter notebook (in CSS
units).
height: str
Height of the plot when displayed in the Jupyter notebook (in CSS
units).
settings: dict
A dictionary of settings that is loaded when a plot is displayed.
Settings generated from the graphical user interface are stored as JSON
files that can be loaded, and directly set to this attribute.
Alternatively, each aspect of the plot can be changed with dedicated
methods, for example see ``color_by``, ``set_background_color``, etc.
This attribute can also be serialized as a JSON string and loaded from
the GUI.
feature_mf: pd.DataFrame
DataFrame object with the metadata associated to the features in the
``ordination`` object, should have an index set and it should match the
identifiers in the ``ordination.features`` property.
custom_axes : list of str, optional
Custom axes to embed in the ordination.
jackknifing_method : {'IQR', 'sdev'}, optional
Used only when plotting ellipsoids for jackknifed beta diversity
(i.e. using a directory of coord files instead of a single coord
file). Valid values are ``"IQR"`` (for inter-quartile ranges) and
``"sdev"`` (for standard deviation). This argument is ignored if
``self.jackknifed`` is ``None`` or an empty list.
Examples
--------
Create an Emperor object and display it from the Jupyter notebook:
>>> import pandas as pd, numpy as np
>>> from emperor import Emperor
>>> from skbio import OrdinationResults
Ordination plots are almost invariantly associated with a set of data, that
relates each sample to its scientific context, we refer to this as the
*sample metadata*, and represent it using Pandas DataFrames. For this
example we will need some metadata, we start by creating our metadata
object:
>>> data = [['PC.354', 'Control', '20061218', 'Control_mouse_I.D._354'],
... ['PC.355', 'Control', '20061218', 'Control_mouse_I.D._355'],
... ['PC.356', 'Control', '20061126', 'Control_mouse_I.D._356'],
... ['PC.481', 'Control', '20070314', 'Control_mouse_I.D._481'],
... ['PC.593', 'Control', '20071210', 'Control_mouse_I.D._593'],
... ['PC.607', 'Fast', '20071112', 'Fasting_mouse_I.D._607'],
... ['PC.634', 'Fast', '20080116', 'Fasting_mouse_I.D._634'],
... ['PC.635', 'Fast', '20080116', 'Fasting_mouse_I.D._635'],
... ['PC.636', 'Fast', '20080116', 'Fasting_mouse_I.D._636']]
>>> columns = ['SampleID', 'Treatment', 'DOB', 'Description']
>>> mf = pd.DataFrame(columns=columns, data=data)
Before we can use this mapping file in Emperor, we should set the index
to be `SampleID`.
>>> mf.set_index('SampleID', inplace=True)
Then let's create some artificial ordination data:
>>> ids = ('PC.636', 'PC.635', 'PC.356', 'PC.481', 'PC.354', 'PC.593',
... 'PC.355', 'PC.607', 'PC.634')
>>> eigvals = np.array([0.47941212, 0.29201496, 0.24744925,
... 0.20149607, 0.18007613, 0.14780677,
... 0.13579593, 0.1122597, 0.])
>>> eigvals = pd.Series(data=eigvals, index=ids)
>>> n = eigvals.shape[0]
>>> samples = np.random.randn(n, n)
    >>> samples = pd.DataFrame(data=samples, index=ids)
>>> p_explained = np.array([0.26688705, 0.1625637, 0.13775413, 0.11217216,
... 0.10024775, 0.08228351, 0.07559712, 0.06249458,
... 0.])
>>> p_explained = pd.Series(data=p_explained, index=ids)
And encapsulate it inside an ``OrdinationResults`` object:
>>> ores = OrdinationResults(eigvals, samples=samples,
... proportion_explained=p_explained)
Finally import the Emperor object and display it using Jupyter, note that
this call will have no effect under a regular Python session:
>>> Emperor(ores, mf)
Notes
-----
This object currently does not support the full range of actions that the
GUI does support and should be considered experimental at the moment.
The ``remote`` parameter is intended for different use-cases, you should
use the first option "(1) - URL" when you want to load the data from a
location different than the GitHub repository or your Jupyter notebook
resources i.e. a custom URL. The second option "(2) - ``False``" loads
resources from your local Jupyter installation, note that you **need** to
execute ``nbinstall`` at least once or the application will error, this
option is ideal for developers modifying the JavaScript source code, and in
environments of limited internet connection. Finally, the third option "(3)
- ``True``" should be used if you intend to embed an Emperor plot in a
notebook and then publish it using http://nbviewer.jupyter.org.
Raises
------
ValueError
If the remote argument is not of ``bool`` or ``str`` type.
If none of the samples in the ordination matrix are in the metadata.
If the data is one-dimensional.
KeyError
If there's samples in the ordination matrix but not in the metadata.
References
----------
.. [1] EMPeror: a tool for visualizing high-throughput microbial community
data Vazquez-Baeza Y, Pirrung M, Gonzalez A, Knight R. Gigascience.
2013 Nov 26;2(1):16.
"""
    def __init__(self, ordination, mapping_file, feature_mapping_file=None,
                 dimensions=5, remote=True, jackknifed=None, procrustes=None,
                 ignore_missing_samples=False):
        # at least two dimensions are needed to draw anything meaningful
        if ordination.samples.shape[1] < 2:
            raise ValueError('Ordinations with less than two dimensions are'
                             ' not supported.')
        self.ordination = ordination
        # normalize the optional ordination lists to empty lists so the rest
        # of the code can rely on truthiness / iteration
        self.jackknifed = jackknifed if jackknifed is not None else []
        self.procrustes = procrustes if procrustes is not None else []
        # copy so we never mutate the caller's DataFrame; note self.mf MUST be
        # assigned before _validate_metadata is called, since that method may
        # reference self.mf when padding missing samples
        self.mf = mapping_file.copy()
        self.mf = self._validate_metadata(self.mf, self.ordination.samples,
                                          ignore_missing_samples)
        # if biplots are to be visualized
        if self.ordination.features is not None:
            self.feature_mf = \
                self._validate_metadata(feature_mapping_file,
                                        self.ordination.features,
                                        ignore_missing_samples=False)
        self._validate_ordinations()
        self._html = None
        # clamp the requested number of dimensions to what the ordination
        # actually provides
        if self.ordination.proportion_explained.shape[0] < dimensions:
            self.dimensions = self.ordination.proportion_explained.shape[0]
        else:
            self.dimensions = dimensions
        # remote may be: True (fetch resources from the GitHub CDN), False
        # (use the local Jupyter installation) or a str (a custom URL)
        if isinstance(remote, bool):
            if remote:
                self.base_url = resolve_stable_url(emperor_version,
                                                   REMOTE_URL)
            else:
                self.base_url = LOCAL_URL
        elif isinstance(remote, str):
            self.base_url = remote
        else:
            raise ValueError("Unsupported type for `remote` argument, should "
                             "be a bool or str")
        # dimensions for the div containing the plot in the context of the
        # Jupyter notebook, can be a "percent" or "number of pixels".
        self.width = '100%'
        self.height = '500px'
        # settings dictionary consumed by the JavaScript application
        self._settings = {}
        self.custom_axes = []
        # label each ordination by index
        self.procrustes_names = []
        self.jackknifing_method = 'IQR'
        if self.procrustes:
            # one name per ordination: the master plus each procrustes
            self.procrustes_names = ['Ordination %d' % i
                                     for i in range(len(self.procrustes) + 1)]
    def __str__(self):
        # the string form of an Emperor object is the rendered HTML plot
        return self.make_emperor()
def _repr_html_(self):
"""Used to display a plot in the Jupyter notebook"""
# we import here as IPython shouldn't be a dependency of Emperor
# however if this method is called it will be from an IPython notebook
# otherwise the developer is responsible for calling this method
from IPython.display import display, HTML
return display(HTML(str(self)))
def _validate_metadata(self, metadata, matrix, ignore_missing_samples):
# metadata is optional for biplots, so we just create an empty table
if metadata is None:
metadata = pd.DataFrame(index=pd.Index(matrix.index, name='id'))
metadata['all'] = 'All objects'
return metadata
ordination_samples = set(matrix.index)
difference = ordination_samples - set(metadata.index)
if difference == ordination_samples:
raise ValueError('None of the sample identifiers match between the'
' metadata and the coordinates. Verify that you '
'are using metadata and coordinates corresponding'
' to the same dataset.')
if difference and not ignore_missing_samples:
raise KeyError("There are samples not included in the mapping "
"file. Override this error by using the "
"`ignore_missing_samples` argument. Offending "
"samples: %s"
% ', '.join(sorted([str(i) for i in difference])))
elif difference and ignore_missing_samples:
warnings.warn("%d out of %d samples have no metadata and are being"
" included with a placeholder value." %
(len(difference), len(ordination_samples)),
EmperorWarning)
# pad the missing samples
data = np.full((len(difference), metadata.shape[1]),
'This sample has no metadata', dtype='<U27')
pad = pd.DataFrame(index=difference, columns=self.mf.columns,
data=data)
metadata = pd.concat([metadata, pad])
# filter all metadata that we may have for which we don't have any
# coordinates this also ensures that the coordinates are in the
# same order as the metadata
metadata = metadata.loc[matrix.index]
return metadata
def _validate_ordinations(self):
# bail if the value is non or an empty list
if self.jackknifed == [] and self.procrustes == []:
return
# error if the user tries to create a jackknifed procrustes plot
if len(self.jackknifed) > 0 and len(self.procrustes) > 0:
raise ValueError('Cannot plot a procrustes and a jackknifed plot')
ordinations = self.jackknifed if self.jackknifed else self.procrustes
ok = all([isinstance(j, OrdinationResults) for j in ordinations])
if not ok:
raise TypeError('All elements in the jackknifed array should be '
'OrdinationResults instances.')
master_ids = self.ordination.samples.index
master = set(self.ordination.samples.index)
aligned = []
for i, ord_res in enumerate(ordinations):
other = set(ord_res.samples.index)
# samples must be represented identically
if master != other:
raise ValueError('The ordination at index (%d) does not '
'represent the exact same samples. Mismatches'
' are: %s.' % (i, ', '.join(master - other)))
# we need to ensure the copy we have is aligned one-to-one with the
# *master* ordination, making copies might be inefficient for large
# datasets
ord_res.samples = ord_res.samples.loc[master_ids].copy()
aligned.append(ord_res)
# need to test this carefully i.e. that when one is set the other one
# doesn't have anything or is none
if self.jackknifed:
self.jackknifed = aligned
elif self.procrustes:
self.procrustes = aligned
def copy_support_files(self, target=None):
"""Copies the support files to a target directory
Parameters
----------
target : str
The path where resources should be copied to. By default it copies
the files to ``self.base_url``.
"""
if target is None:
target = self.base_url
# copy the required resources
copy_tree(get_emperor_support_files_dir(), target)
def make_emperor(self, standalone=False):
"""Build an emperor plot
Parameters
----------
standalone : bool
Whether or not the produced plot should be a standalone HTML file.
Returns
-------
str
Formatted emperor plot.
Raises
------
KeyError
If one or more of the ``custom_axes`` names are not present in the
sample information.
ValueError
If any of the ``custom_axes`` have non-numeric values.
Notes
-----
The ``standalone`` argument is intended for the different use-cases
that Emperor can have, either as an embedded widget that lives inside,
for example, the Jupyter notebook, or alternatively as an HTML file
that refers to resources locally. In this case you will need to copy
the support files by calling the ``copy_support_files`` method.
See Also
--------
emperor.core.Emperor.copy_support_files
"""
main_template = self._get_template(standalone)
# _process_data does a lot of munging to the coordinates data and
# _to_dict puts the data into a dictionary-like object for consumption
data = self._to_dict(self._process_data(self.custom_axes,
self.jackknifing_method))
# yes, we could have used UUID, but we couldn't find an easier way to
# test that deterministically and with this approach we can seed the
# random number generator and test accordingly
plot_id = 'emperor-notebook-' + str(hex(np.random.randint(2**32)))
# need to do something about low and high
plot = main_template.render(data=data, plot_id=plot_id,
logic_template_path=basename(LOGIC_PATH),
style_template_path=basename(STYLE_PATH),
base_url=self.base_url,
width=self.width,
height=self.height)
return plot
def _to_dict(self, data):
"""Convert processed data into a dictionary of decompositions
Parameters
----------
data : tuple
The output of _process_data. Should contain information about the
scatter plot and the biplot.
Returns
-------
dict
A dictionary describing the plots contained in the ordination
object and the sample + feature metadata.
"""
# data is a tuple as returned by _process_data
(coord_ids, coords, pct_var, ci,
headers, metadata, names,
edges,
bi_coords, bi_ids,
bi_headers, bi_metadata) = data
data = {
'plot': {
'decomposition': {
'sample_ids': coord_ids,
'coordinates': coords,
'axes_names': names,
'percents_explained': pct_var,
'ci': ci,
'edges': edges
},
'type': 'scatter',
'metadata_headers': headers,
'metadata': metadata,
'settings': self.settings,
}
}
# we can rely on the fact that the dictionary above will exist
if self.ordination.features is not None:
data['biplot'] = deepcopy(data['plot'])
data['biplot']['decomposition']['ci'] = []
data['biplot']['decomposition']['edges'] = []
data['biplot']['type'] = 'arrow'
data['biplot']['settings'] = None
data['biplot']['metadata'] = bi_metadata
data['biplot']['metadata_headers'] = bi_headers
data['biplot']['decomposition']['sample_ids'] = bi_ids
data['biplot']['decomposition']['coordinates'] = bi_coords
return data
def _get_template(self, standalone=False):
"""Get the jinja template object
Parameters
----------
standalone: bool, optional
Whether or not the generated plot will load resources locally
(``True``), or from a specified URL (``False``).
Returns
-------
jinja2.Template
Template where the plot is created.
"""
# based on: http://stackoverflow.com/a/6196098
loader = FileSystemLoader(join(get_emperor_support_files_dir(),
'templates'))
if standalone:
main_path = basename(STANDALONE_PATH)
else:
main_path = basename(JUPYTER_PATH)
env = Environment(loader=loader)
return env.get_template(main_path)
    def _process_data(self, custom_axes, jackknifing_method):
        """Handle the coordinates data

        Parameters
        ----------
        custom_axes : list of str, optional
            Custom axes to embed in the ordination.
        jackknifing_method : {'IQR', 'sdev'}, optional
            Used only when plotting ellipsoids for jackknifed beta diversity
            (i.e. using a directory of coord files instead of a single coord
            file). Valid values are ``"IQR"`` (for inter-quartile ranges) and
            ``"sdev"`` (for standard deviation). This argument is ignored if
            ``self.jackknifed`` is ``None`` or an empty list.

        Returns
        -------
        list of str
            Sample identifiers in the ordination.
        list of lists of floats
            Matrix of coordinates in the ordination data with custom_axes if
            provided.
        list of float
            either the eigenvalues of the input coordinates or the average
            eigenvalues of the multiple coords that were passed in
        list of lists floats
            coordinates representing the span of each ellipse on every axis;
            None if no jackknifing is applied
        list of str
            Name of the metadata columns and the index name.
        list of lists of str
            Data in ``mf``.
        list of str
            Names of the dimensions in the resulting ordination.
        list of list of str
            An edge list for procrustes plots
        list of list of float
            Arrow locations for the biplots.
        list of str
            Arrow identifiers for biplots.
        list of str
            Header names for biplot metadata.
        list of list of str
            Metadata for the biplots.

        Notes
        -----
        This method is exercised by testing the ``make_emperor`` method, and is
        not intended to be used by end-users.
        """
        # jackknifed/procrustes plots can only accommodate one custom axis
        if (self.jackknifed or self.procrustes) and len(custom_axes) > 1:
            raise ValueError("Jackknifed and Procrustes plots are limited to "
                             "one custom axis.")
        # turn modern data into legacy data
        dims = self.dimensions
        ci = None
        bi_coords, bi_ids, bi_headers, bi_metadata = None, None, None, None
        c_headers, c_data, c_eigenvals, c_pct, edges = [], [], [], [], []
        ordinations = []
        if self.jackknifed or self.procrustes:
            # multi-ordination case: the master ordination goes first, then
            # each companion; per-ordination values are collected into lists
            ordinations = [self.ordination] + self.procrustes + self.jackknifed
            for data in ordinations:
                c_headers.append(data.samples.index.tolist())
                coords = data.samples.values[:, :dims]
                # normalize coordinates to the [-1, 1] range
                c_data.append(coords / np.max(np.abs(coords)))
                c_eigenvals.append(data.eigvals.values[:dims])
                c_pct.append(data.proportion_explained[:dims] * 100)
        else:
            # single-ordination case: the same values, but not wrapped in
            # per-ordination lists
            data = self.ordination
            c_headers = data.samples.index.tolist()
            coords = self.ordination.samples.values[:, :dims]
            c_data = (coords / np.max(np.abs(coords)))
            c_eigenvals = data.eigvals.values[:dims]
            c_pct = data.proportion_explained[:dims] * 100
        # repeats is only dependant on procrustes
        headers, metadata = self._to_legacy_map(self.mf, custom_axes,
                                                len(self.procrustes))
        # make an edge list for the procrustes plot
        if self.procrustes:
            # connect every sample in the master ordination ('_0' suffix) to
            # its counterpart in each procrustes ordination ('_1', '_2', ...)
            for i in range(len(self.procrustes)):
                for sample in self.mf.index:
                    edges.append([sample + '_0', sample + '_%d' % (i + 1)])
        # legacy helper: merges the ordinations, computes confidence
        # intervals (low/high) for jackknifed plots and embeds custom axes
        c_headers, c_data, _, c_pct, low, high, _ = \
            preprocess_coords_file(c_headers, c_data, c_eigenvals, c_pct,
                                   headers, metadata, custom_axes,
                                   jackknifing_method,
                                   is_comparison=bool(self.procrustes))
        names = self.ordination.samples.columns[:dims].values.tolist()
        c_pct = c_pct.tolist()
        if custom_axes:
            # custom axes are prepended and flagged with a -1 "percent
            # explained" sentinel value
            names = custom_axes + names
            c_pct = ([-1] * len(custom_axes)) + c_pct
        if low is not None or high is not None:
            # ellipse spans for jackknifed plots
            ci = np.abs(high - low).tolist()
        if self.ordination.features is not None:
            # biplot arrows: normalized feature coordinates plus metadata
            bi_coords = self.ordination.features.values[:, :dims]
            bi_coords = bi_coords / np.max(np.abs(bi_coords))
            bi_coords = bi_coords.tolist()
            bi_ids = self.ordination.features.index.values.tolist()
            bi_headers, bi_metadata = self._to_legacy_map(self.feature_mf)
        return (c_headers, c_data.tolist(),
                c_pct, ci, headers, metadata, names,
                edges,
                bi_coords, bi_ids,
                bi_headers, bi_metadata)
def _to_legacy_map(self, mf, custom_axes=None, repeats=0):
"""Helper method to convert Pandas dataframe to legacy QIIME structure
Parameters
----------
mf : pd.DataFrame
DataFrame with the metadata, this can be feature or sample
metadata. If the index name is ``None``, then it will be set as
``'SampleID'``, otherwise it will be left untouched.
custom_axes : list of str, optional
Custom axes to embed in the ordination.
repeats : int
Number of times that the sample ids should be repeated. This is
used exclusively for procrustes plots. If the procrustes_names
property is available a column will be added with each procrustes
name.
Returns
-------
list of str
Name of the metadata columns and the index name.
list of list of str
Data in ``mf``.
"""
# there's a bug in old versions of Pandas that won't allow us to rename
# a DataFrame's index, newer versions i.e 0.18 work just fine but 0.14
# would overwrite the name and simply set it as None
if mf.index.name is None:
index_name = 'SampleID'
else:
index_name = mf.index.name
if custom_axes:
mf = validate_and_process_custom_axes(mf, custom_axes)
if repeats:
mfs = []
# repeats and the original
for i in range(repeats + 1):
mfs.append(mf.copy())
mfs[i].index = pd.Index(mfs[i].index + '_%d' % i,
name=mfs[i].index.name)
# add to be able to differentiate between ordinations
if self.procrustes_names:
mfs[i]['__Procrustes_Names__'] = self.procrustes_names[i]
mf = pd.concat(mfs)
# create a list of lists representation for the entire dataframe
headers = [index_name] + mf.columns.astype(str).tolist()
metadata = mf.reset_index().astype(str).values.tolist()
return headers, metadata
def _base_data_checks(self, category, data, d_type):
"""Perform common checks in the methods that modify the plot
Parameters
----------
category: str
The metadata category used for this attribute.
data: dict or pd.Series
Mapping of metadata value to attribute.
d_type: object
The required type in the ``data`` mappings.
Returns
-------
dict
Validated and consumable dictionary of attribute mappings.
"""
if not isinstance(category, str):
raise TypeError('Metadata category must be a string')
if category not in self.mf.columns:
raise KeyError('The category %s is not present in your metadata' %
category)
if isinstance(data, pd.Series):
data = data.to_dict()
# if no data is provide just return an empty dictionary
if data is None or not data:
return {}
present = set(self.mf[category].value_counts().index)
given = set(data.keys())
if present != given:
if present.issubset(given):
raise ValueError('More categories present in the provided '
'data, the following categories were '
'not found in the metadata: %s.' %
', '.join(given - present))
elif given.issubset(present):
raise ValueError('The following categories are not present'
' in the provided data: %s' %
', '.join(present - given))
# isinstance won't recognize numpy dtypes that are still valid
if not all(np.issubdtype(type(v), d_type) for v in data.values()):
raise TypeError('Values in the provided data must be '
'of %s' % d_type)
return data
def color_by(self, category, colors=None, colormap=None, continuous=False):
"""Set the coloring settings for the plot elements
Parameters
----------
category: str
Name of the metadata column.
colors: dict or pd.Series, optional
Mapping of categories to a CSS color attribute. Defaults to the
colors described by ``colormap``.
colormap: str, optional
Name of the colormap to use. Supports continuous and discrete
colormaps, see the notes section. Defaults to QIIME's discrete
colorscheme.
continuous: bool, optional
Whether or not the ``category`` should be interpreted as numeric.
Returns
-------
emperor.Emperor
Emperor object with updated settings.
Raises
------
KeyError
If ``category`` is not part of the metadata.
TypeError
If ``category`` is not a string.
ValueError
If ``colors`` describes fewer or more categories than the ones
present in the ``category`` column.
If ``colors`` has colors in a non-string format.
Notes
-----
Valid colormaps are listed below (under the `Code` column), for
examples see [1]_ or [2]_.
+----------+---------------------+------------+
| Code | Name | Type |
+==========+=====================+============+
| Paired | Paired | Discrete |
+----------+---------------------+------------+
| Accent | Accent | Discrete |
+----------+---------------------+------------+
| Dark2 | Dark | Discrete |
+----------+---------------------+------------+
| Set1 | Set1 | Discrete |
+----------+---------------------+------------+
| Set2 | Set2 | Discrete |
+----------+---------------------+------------+
| Set3 | Set3 | Discrete |
+----------+---------------------+------------+
| Pastel1 | Pastel1 | Discrete |
+----------+---------------------+------------+
| Pastel2 | Pastel2 | Discrete |
+----------+---------------------+------------+
| Viridis | Viridis | Sequential |
+----------+---------------------+------------+
| Reds | Reds | Sequential |
+----------+---------------------+------------+
| RdPu | Red-Purple | Sequential |
+----------+---------------------+------------+
| Oranges | Oranges | Sequential |
+----------+---------------------+------------+
| OrRd | Orange-Red | Sequential |
+----------+---------------------+------------+
| YlOrBr | Yellow-Orange-Brown | Sequential |
+----------+---------------------+------------+
| YlOrRd | Yellow-Orange-Red | Sequential |
+----------+---------------------+------------+
| YlGn | Yellow-Green | Sequential |
+----------+---------------------+------------+
| YlGnBu | Yellow-Green-Blue | Sequential |
+----------+---------------------+------------+
| Greens | Greens | Sequential |
+----------+---------------------+------------+
| GnBu | Green-Blue | Sequential |
+----------+---------------------+------------+
| Blues | Blues | Sequential |
+----------+---------------------+------------+
| BuGn | Blue-Green | Sequential |
+----------+---------------------+------------+
| BuPu | Blue-Purple | Sequential |
+----------+---------------------+------------+
| Purples | Purples | Sequential |
+----------+---------------------+------------+
| PuRd | Purple-Red | Sequential |
+----------+---------------------+------------+
| PuBuGn | Purple-Blue-Green | Sequential |
+----------+---------------------+------------+
| Greys | Greys | Sequential |
+----------+---------------------+------------+
| Spectral | Spectral | Diverging |
+----------+---------------------+------------+
| RdBu | Red-Blue | Diverging |
+----------+---------------------+------------+
| RdYlGn | Red-Yellow-Green | Diverging |
+----------+---------------------+------------+
| RdYlB | Red-Yellow-Blue | Diverging |
+----------+---------------------+------------+
| RdGy | Red-Grey | Diverging |
+----------+---------------------+------------+
| PiYG | Pink-Yellow-Green | Diverging |
+----------+---------------------+------------+
| BrBG | Brown-Blue-Green | Diverging |
+----------+---------------------+------------+
| PuOr | Purple-Orange | Diverging |
+----------+---------------------+------------+
| PRGn | Purple-Green | Diverging |
+----------+---------------------+------------+
See Also
--------
emperor.core.Emperor.visibility_by
emperor.core.Emperor.scale_by
emperor.core.Emperor.opacity_by
emperor.core.Emperor.shape_by
emperor.core.Emperor.set_background_color
emperor.core.Emperor.set_axes
References
----------
.. [1] https://matplotlib.org/examples/color/colormaps_reference.html
.. [2] http://colorbrewer2.org/
"""
colors = self._base_data_checks(category, colors, str)
if colormap is None:
colormap = 'discrete-coloring-qiime'
elif not isinstance(colormap, str):
raise TypeError('The colormap argument must be a string')
self._settings.update({"color": {
"category": category,
"colormap": colormap,
"continuous": continuous,
"data": colors
}})
return self
def visibility_by(self, category, visibilities=None, negate=False):
"""Set the visibility settings for the plot elements
Parameters
----------
category: str
Name of the metadata column.
visibilities: dict, list or pd.Series, optional
When this argument is a ``dict`` or ``pd.Series``, it is a mapping
of categories to a boolean values determining whether or not that
category should be visible. When this argument is a ``list``, only
categories present will be visible in the plot.
negate: bool
Whether or not to negate the values in ``visibilities``.
Returns
-------
emperor.Emperor
Emperor object with updated settings.
Raises
------
KeyError
If ``category`` is not part of the metadata.
TypeError
If ``category`` is not a string.
ValueError
If ``visibilities`` describes fewer or more categories than the
ones present in the ``category`` column.
If ``visibilities`` has visibilities in a non-string format.
See Also
--------
emperor.core.Emperor.color_by
emperor.core.Emperor.scale_by
emperor.core.Emperor.opacity_by
emperor.core.Emperor.shape_by
emperor.core.Emperor.set_background_color
emperor.core.Emperor.set_axes
"""
if isinstance(visibilities, list) and category in self.mf:
cats = self.mf[category].unique()
visibilities = {c: c in visibilities for c in cats}
visibilities = self._base_data_checks(category, visibilities, bool)
# negate visibilities using XOR
visibilities = {k: v ^ negate for k, v in visibilities.items()}
self._settings.update({"visibility": {
"category": category,
"data": visibilities
}})
return self
def scale_by(self, category, scales=None, global_scale=1.0, scaled=False):
"""Set the scaling settings for the plot elements
Parameters
----------
category: str
Name of the metadata column.
scales: dict or pd.Series, optional
Mapping of categories to numbers determining the size of the
elements in each category.
global_scale: int or float, optional
The size of all the elements.
scaled: bool
Whether or not the values in ``scales`` should be assumed to be
numeric and scaled in size according to their value.
Returns
-------
emperor.Emperor
Emperor object with updated settings.
Raises
------
KeyError
If ``category`` is not part of the metadata.
TypeError
If ``category`` is not a string.
If ``global_scale`` is not a number.
If ``scaled`` is not a boolean value.
ValueError
If ``scales`` describes fewer or more categories than the ones
present in the ``category`` column.
If ``scales`` has sizes in a non-numeric format.
See Also
--------
emperor.core.Emperor.visibility_by
emperor.core.Emperor.color_by
emperor.core.Emperor.shape_by
emperor.core.Emperor.set_background_color
emperor.core.Emperor.set_axes
"""
scales = self._base_data_checks(category, scales, float)
if (not isinstance(global_scale, (float, int)) or
isinstance(global_scale, bool)):
raise TypeError('The global scale argument must be a float or int')
if not isinstance(scaled, bool):
raise TypeError('The scaled argument must be a bool')
self._settings.update({"scale": {
"category": category,
"globalScale": str(global_scale),
"scaleVal": scaled,
"data": scales
}})
return self
def opacity_by(self, category, opacities=None, global_scale=1.0,
scaled=False):
"""Set the scaling settings for the plot elements
Parameters
----------
category: str
Name of the metadata column.
opacities: dict or pd.Series, optional
Mapping of categories to numbers determining the opacity of the
elements in each category.
global_scale: int or float, optional
The size of all the elements.
scaled: bool
Whether or not the values in ``opacities`` should be assumed to be
numeric and scaled in size according to their value.
Returns
-------
emperor.Emperor
Emperor object with updated settings.
Raises
------
KeyError
If ``category`` is not part of the metadata.
TypeError
If ``category`` is not a string.
If ``global_scale`` is not a number.
If ``scaled`` is not a boolean value.
ValueError
If ``opacities`` describes fewer or more categories than the ones
present in the ``category`` column.
If ``opacities`` has sizes in a non-numeric format.
See Also
--------
emperor.core.Emperor.visibility_by
emperor.core.Emperor.color_by
emperor.core.Emperor.shape_by
emperor.core.Emperor.scale_by
emperor.core.Emperor.set_background_color
emperor.core.Emperor.set_axes
"""
opacities = self._base_data_checks(category, opacities, float)
if (not isinstance(global_scale, (float, int)) or
isinstance(global_scale, bool)):
raise TypeError('The global scale argument must be a float or int')
if not isinstance(scaled, bool):
raise TypeError('The scaled argument must be a bool')
self._settings.update({"opacity": {
"category": category,
"globalScale": str(global_scale),
"scaleVal": scaled,
"data": opacities
}})
return self
def shape_by(self, category, shapes=None):
"""Set the shape settings for the plot elements
Parameters
----------
category: str
Name of the metadata column.
shapes: dict or pd.Series, optional
Mapping of categories to string values determining the shape of
the objects. See the notes for the valid options.
Returns
-------
emperor.Emperor
Emperor object with updated settings.
Raises
------
KeyError
If ``category`` is not part of the metadata.
TypeError
If ``category`` is not a string.
ValueError
If ``shapes`` describes fewer or more categories than the
ones present in the ``category`` column.
If ``shapes`` has shapes in a non-string format.
Notes
-----
The valid shape names are ``"Sphere"``, ``"Cube"``, ``"Cone"``,
``"Icosahedron"`` and ``"Cylinder"``.
See Also
--------
emperor.core.Emperor.color_by
emperor.core.Emperor.scale_by
emperor.core.Emperor.opacity_by
emperor.core.Emperor.visibility_by
emperor.core.Emperor.set_background_color
emperor.core.Emperor.set_axes
"""
shapes = self._base_data_checks(category, shapes, str)
self._settings.update({"shape": {
"category": category,
"data": shapes
}})
return self
def animations_by(self, gradient, trajectory, colors, speed=1, radius=1):
"""Set the shape settings for the plot elements
Parameters
----------
gradient: str
Name of the metadata column that orders samples.
trajectory: str
Name of the metadata column that groups samples.
colors: dict or pd.Series
Mapping of trajectory categories to a CSS color attribute.
speed: float
How fast the animation should go.
radius: float
The radius of the animated traces.
Returns
-------
emperor.Emperor
Emperor object with updated settings.
Raises
------
KeyError
If ``gradient`` or ``trajectory`` are not part of the metadata.
TypeError
If ``speed`` or ``radius`` are not numbers.
See Also
--------
emperor.core.Emperor.color_by
emperor.core.Emperor.scale_by
emperor.core.Emperor.shape_by
emperor.core.Emperor.opacity_by
emperor.core.Emperor.visibility_by
emperor.core.Emperor.set_background_color
emperor.core.Emperor.set_axes
"""
if gradient not in self.mf.columns:
raise KeyError('The gradient category is not present in your '
'metadata')
if trajectory not in self.mf.columns:
raise KeyError('The trajectory category is not present in your '
'metadata')
if not isinstance(speed, (float, int)):
raise TypeError('Speed is not a number')
if not isinstance(radius, (float, int)):
raise TypeError('Radius is not a number')
colors = self._base_data_checks(trajectory, colors, str)
self._settings.update({"animations": {
"gradientCategory": gradient,
"trajectoryCategory": trajectory,
"speed": speed,
"radius": radius,
"colors": colors
}})
return self
def set_axes(self, visible=None, invert=None, color='white'):
"""Change visual aspects about visible dimensions in a plot
Parameters
----------
visible: list of thee ints, optional
List of three indices of the dimensions that will be visible.
invert: list of bools, optional
List of three bools that determine whether each axis is inverted or
not.
color: str
Color of the axes lines in the plot, should be a name or value in
CSS format.
Returns
-------
emperor.Emperor
Emperor object with updated settings.
Raises
------
ValueError
If the ``visible`` or ``invert`` arrays don't have exactly three
elements.
If the ``visible`` elements are out of range i.e. if an index is
not contained in the space defined by the dimensions property.
TypeError
If the indices in ``visible`` are not all integers.
If the values of ``invert`` are not all boolean.
If ``color`` is not a string.
Notes
-----
This method is internally coupled to the ``set_background_color``
method.
See Also
--------
emperor.core.Emperor.color_by
emperor.core.Emperor.scale_by
emperor.core.Emperor.opacity_by
emperor.core.Emperor.shape_by
emperor.core.Emperor.set_background_color
"""
if visible is None:
visible = [0, 1, 2]
if invert is None:
invert = [False, False, False]
if len(visible) != 3:
raise ValueError('Exactly three elements must be contained in the'
' visible array')
if len(invert) != 3:
raise ValueError('Exactly three elements must be contained in the'
' invert array')
if any([v >= self.dimensions or v < 0 for v in visible]):
raise ValueError('One or more of your visible dimensions are out '
'of range.')
# prevent obscure JavaScript errors by validating the data
if any([not isinstance(v, int) for v in visible]):
raise TypeError('All axes indices should be integers')
if any([not isinstance(i, bool) for i in invert]):
raise TypeError('The elements in the invert argument should all '
'be boolean')
if not isinstance(color, str):
raise TypeError('Colors should be a CSS color as a string')
# the background color and axes information are intertwined, so before
# updating the data, we need to retrieve the color if it exists
# see the code in set_background_color
bc = self.settings.get('axes', {}).get('backgroundColor', 'black')
self._settings.update({'axes': {
'visibleDimensions': visible,
'flippedAxes': invert,
'axesColor': color,
'backgroundColor': bc
}})
return self
def set_background_color(self, color='black'):
"""Changes the background color of the plot
Parameters
----------
color: str, optional
The background color. Color name or value in the CSS format.
Defaults to black.
Returns
-------
emperor.Emperor
Emperor object with updated settings.
Notes
-----
This method is tightly coupled to ``set_axes``.
Raises
------
TypeError
If the color is not a string.
See Also
--------
emperor.core.Emperor.color_by
emperor.core.Emperor.scale_by
emperor.core.Emperor.scale_by
emperor.core.Emperor.shape_by
emperor.core.Emperor.set_axes
"""
if not isinstance(color, str):
raise TypeError('The background color has to be a string')
# the background color and axes information are intertwined, so before
# updating the data, we need to make sure we have other values present
# see the code in set_axes
if 'axes' not in self.settings:
self.set_axes()
self._settings["axes"]["backgroundColor"] = color
return self
@property
def settings(self):
    """dict: Settings applied by default when the plot is displayed.

    Backed by the private ``_settings`` attribute; use the setter to
    validate and apply a settings dictionary, or ``del`` to reset it.
    """
    return self._settings
@settings.setter
def settings(self, setts):
    # Assigning None clears the stored settings (delegates to the deleter).
    if setts is None:
        del self.settings
        return

    # Replay each entry through the matching public method so the same
    # validation runs as when callers invoke those methods directly.
    for key, val in setts.items():
        if key == 'shape':
            self.shape_by(val['category'], val['data'])
        elif key == 'visibility':
            self.visibility_by(val['category'], val['data'])
        elif key == 'scale':
            self.scale_by(val['category'], val['data'],
                          float(val['globalScale']),
                          val['scaleVal'])
        elif key == 'axes':
            # set_axes must run before set_background_color: the
            # background color is stored inside the 'axes' entry.
            self.set_axes(val['visibleDimensions'], val['flippedAxes'],
                          val['axesColor'])
            self.set_background_color(val['backgroundColor'])
        elif key == 'color':
            self.color_by(val['category'], val['data'], val['colormap'],
                          val['continuous'])
        elif key == 'opacity':
            self.opacity_by(val['category'], val['data'],
                            float(val['globalScale']), val['scaleVal'])
        elif key == 'animations':
            self.animations_by(val['gradientCategory'],
                               val['trajectoryCategory'], val['colors'],
                               val['speed'], val['radius'])
        else:
            # Fail loudly on unknown keys instead of silently ignoring them.
            raise KeyError('Unrecognized settings key: %s' % key)
@settings.deleter
def settings(self):
    # Reset the stored settings to an empty dict.
    self._settings = {}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_import_schema_config tests the nagios config formatter
copyright: 2015, (c) sproutsocial.com
author: Nicholas Flink <nicholas@sproutsocial.com>
"""
# This script requires the following packages to be installed:
# mock==1.0.1
# PyYAML==3.11
import import_schema_config
import logging
import os
import unittest
logging.basicConfig(level=logging.CRITICAL)
logger = logging.getLogger(__name__)
class TestImportSchemaConfig(unittest.TestCase):
    """Unit tests for import_schema_config.ImportSchemaConfig."""

    # Class-level fixtures; yamlDict is replaced per-test in setUp().
    yamlDict = None
    ldapUrl = "ldaps://ldap.example.com"
    productManagersList = ["tom", "dick", "harry"]

    def setUp(self):
        """use a custom yaml file
        NOTE: the overrideYamlDictForTests should only be called from tests
        """
        self.yamlDict = {
            'mysql_users': {
                'amysql.ip.example.com':
                ["'adeveloper'@'%'"]},
            'mysql_schemas': {
                'amysql.ip.example.com':
                ["bdb", "adb"],
                'bmysql.ip.example.com':
                ["cdb"]},
        }
        # The yaml file lives one directory above this test module.
        yamlFile = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "import_schema.yaml")
        self.importSchemaConfig = import_schema_config.ImportSchemaConfig(yamlFile)
        self.importSchemaConfig.overrideYamlDictForTests(self.yamlDict)
        self.importEmptySchemaConfig = import_schema_config.ImportSchemaConfig(yamlFile)
        # NOTE(review): this passes the *string* "{}" rather than an empty
        # dict -- looks unintentional; confirm what overrideYamlDictForTests
        # expects before changing.
        self.importEmptySchemaConfig.overrideYamlDictForTests("{}")

    def test_getMysqlUsers(self):
        """tests the getMysqlUsers function
        returns a valid dict of servers => users
        """
        # NOTE(review): assertItemsEqual exists only on Python 2; the
        # Python 3 equivalent is assertCountEqual.
        users = self.importSchemaConfig.getMysqlUsers()
        self.assertItemsEqual(users, ["amysql.ip.example.com"])
        self.assertItemsEqual(self.yamlDict['mysql_users'].keys(),
                              ["amysql.ip.example.com"])
        self.assertItemsEqual(self.yamlDict['mysql_users']['amysql.ip.example.com'],
                              ["'adeveloper'@'%'"])

    def test_getMysqlSchemas(self):
        """tests the getMysqlSchemas function
        returns a valid dict of servers => dbs
        """
        schemas = self.importSchemaConfig.getMysqlSchemas()
        self.assertItemsEqual(schemas, ["amysql.ip.example.com", "bmysql.ip.example.com"])
        self.assertItemsEqual(self.yamlDict['mysql_schemas'].keys(),
                              ["amysql.ip.example.com", "bmysql.ip.example.com"])
        self.assertItemsEqual(self.yamlDict['mysql_schemas']['amysql.ip.example.com'],
                              ["bdb", "adb"])
        self.assertItemsEqual(self.yamlDict['mysql_schemas']['bmysql.ip.example.com'],
                              ["cdb"])
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
/* eslint valid-jsdoc: "off" */
'use strict';
const path = require('path');
const isLocal = process.env.EGG_SERVER_ENV === 'local';
if (isLocal) {
require('dotenv').config({
path: path.join(__dirname, '..', '.env.local'),
});
}
/**
* @param {Egg.EggAppInfo} appInfo app info
*/
module.exports = appInfo => {
/**
* built-in config
* @type {Egg.EggAppConfig}
**/
const config = exports = {};
// use for cookie sign key, should change to your own and keep security
config.keys = appInfo.name + '_1580783791359_8688';
// add your middleware config here
config.middleware = [];
// add your user config here
const userConfig = {
// myAppName: 'egg',
deployUrl: 'http://127.0.0.1:7001/',
authRedirectUrl: 'http://localhost:9528/#/login',
sequelize: {
sync: true, // whether sync when app init
dialect: 'postgres',
host: process.env.DB_HOST,
port: process.env.DB_PORT,
database: process.env.DB_NAME,
username: process.env.DB_USER,
password: process.env.DB_PASSWORD,
},
redis: {
client: {
port: process.env.REDIS_PORT,
host: process.env.REDIS_HOST,
password: process.env.REDIS_PASSWORD,
db: 0,
},
},
security: {
csrf: {
enable: false,
},
},
cors: {
origin: '*',
allowMethods: 'GET,HEAD,PUT,POST,DELETE,PATCH',
},
authing: {
appId: process.env.AUTHING_APPID,
appSecret: process.env.AUTHING_APPSECRET,
},
jwt: {
secret: '123456',
},
};
return {
...config,
...userConfig,
};
};
|
import string
import os
from os import walk
from os.path import expanduser, isdir, dirname, join, sep
from kivy.utils import platform
def convert_bytes(num):
    """Convert a byte count to a human readable string (bytes/KB/.../PB).

    Parameters
    ----------
    num : int or float
        Size in bytes.

    Returns
    -------
    str
        The size formatted with one decimal place and a unit suffix,
        e.g. ``"1.5 MB"``.
    """
    # Walk up the units. The original fell off the end of the loop and
    # returned None for sizes >= 1024 TB; PB is now a final fallback.
    for unit in ["bytes", "KB", "MB", "GB", "TB"]:
        if num < 1024.0:
            return "%3.1f %s" % (num, unit)
        num /= 1024.0
    return "%3.1f %s" % (num, "PB")
def file_size(file_path):
    """Return the human readable size of ``file_path``.

    Returns ``None`` when the path is not a regular file.
    """
    if not os.path.isfile(file_path):
        return None
    return convert_bytes(os.stat(file_path).st_size)
def get_access_string(path):
    """Return an ``rwx``-style string describing access to ``path``.

    Each position holds the permission letter when the current process
    has that access, or ``-`` otherwise (e.g. ``"r-x"``).
    """
    modes = (("r", os.R_OK), ("w", os.W_OK), ("x", os.X_OK))
    return "".join(
        letter if os.access(path, mode) else "-" for letter, mode in modes
    )
def get_icon_for_treeview(path, ext, isdir):
    """Pick the treeview icon name for a filesystem entry.

    Directories get a folder icon (locked when unreadable); Python
    files get the language icon; everything else is a generic file.
    """
    if isdir:
        readable = "r" in get_access_string(path)
        return "folder" if readable else "folder-lock"
    return "language-python" if ext == ".py" else "file"
def get_home_directory():
    """Return the user's home directory.

    On Windows, fall back to the parent directory when the expanded
    home has no ``Desktop`` folder.
    """
    home = expanduser("~")
    if platform == "win" and not isdir(join(home, "Desktop")):
        home = dirname(home)
    return home
def get_drives():
    """Return a list of ``(path, label)`` tuples for available drives.

    Windows enumerates lettered volumes via the Win32 API, Linux lists
    root and home plus mount points under /mnt and /media, and macOS
    lists home plus mounted volumes.
    """
    drives = []
    if platform == "win":
        from ctypes import windll, create_unicode_buffer

        bitmask = windll.kernel32.GetLogicalDrives()
        GetVolumeInformationW = windll.kernel32.GetVolumeInformationW
        for letter in string.ascii_uppercase:
            if bitmask & 1:
                name = create_unicode_buffer(64)
                # get name of the drive
                drive = letter + ":"
                res = GetVolumeInformationW(
                    drive + sep, name, 64, None, None, None, None, 0
                )
                if isdir(drive):
                    drives.append((drive, name.value))
            bitmask >>= 1
    elif platform == "linux":
        drives.append((sep, sep))
        drives.append((expanduser("~"), "~/"))
        places = (sep + "mnt", sep + "media")
        for place in places:
            if isdir(place):
                for directory in next(walk(place))[1]:
                    drives.append((place + sep + directory, directory))
    elif platform == "macosx":
        drives.append((expanduser("~"), "~/"))
        # Bug fix: mounted volumes live under "/Volumes" (plural); the
        # previous "/Volume" path never exists, so no volumes were listed.
        vol = sep + "Volumes"
        if isdir(vol):
            for drive in next(walk(vol))[1]:
                drives.append((vol + sep + drive, drive))
    return drives
|
// Jasmine spec for me.ObservableVector3d (melonJS).
// NOTE: the "it" blocks share the vectors a/b/c/d and the _new*/_old*
// capture variables, so they are order-dependent and must not be
// reordered.
describe("me.ObservableVector3d", function () {
    var x = 1, y = 2, z = 3;

    var a, b, c, d;

    // Values captured by the most recent onUpdate callback invocation.
    var _newX, _newY, _newZ, _oldX, _oldY, _oldZ;

    var callback = function (newX, newY, newZ, oldX, oldY, oldZ) {
        // this will also validate the argument list
        _newX = newX;
        _newY = newY;
        _newZ = newZ;
        _oldX = oldX;
        _oldY = oldY;
        _oldZ = oldZ;
    };

    // Callback whose return value overrides the values being set.
    var callback_with_ret = function (newX, newY, oldX, oldY) {
        return {
            x : 10,
            y : 10,
            z : 10
        };
    };

    it("should be initialized to a (0, 0, 0) 3d vector", function () {
        a = new me.ObservableVector3d(0, 0, 0, {
            onUpdate : callback.bind(this)
        });
        b = new me.ObservableVector3d(x, 0, 0, {
            onUpdate : callback.bind(this)
        });
        c = new me.ObservableVector3d(x, y, 0, {
            onUpdate : callback.bind(this)
        });
        d = new me.ObservableVector3d(x, y, z, {
            onUpdate : callback.bind(this)
        });
        expect(a.toString()).toEqual("x:0,y:0,z:0");
    });

    it("setting the vector triggers the callback", function () {
        a.set(10, 100, 20);
        expect(a.x + a.y + a.z).toEqual(_newX + _newY + _newZ);
    });

    it("callback returns a vector value", function () {
        // A returned object from onUpdate replaces the assigned values.
        var d = new me.ObservableVector3d(0, 0, 0, {
            onUpdate : callback_with_ret.bind(this)
        });
        d.set(100, 100, 100);
        expect(d.x + d.y + d.z).toEqual(30); // 10 + 10 + 10
    });

    it("add a vector triggers the callback", function () {
        a.add(new me.Vector3d(10, 10, 10));
        expect(a.y).toEqual(_oldY + 10);
    });

    it("sub a vector triggers the callback", function () {
        a.sub(new me.Vector3d(10, 10, 10));
        expect(a.x).toEqual(_oldX - 10);
    });

    it("scale a vector triggers the callback", function () {
        a.scaleV(new me.Vector3d(10, 10, 10));
        expect(a.x).toEqual(_oldX * 10);
        expect(a.y).toEqual(_oldY * 10);
        expect(a.z).toEqual(_oldZ * 10);
    });

    it("negate (1, 2, 3)", function () {
        a.set(x, y, z);
        expect(a.negateSelf().toString()).toEqual("x:"+-x+",y:"+-y+",z:"+-z);
    });

    it("dotProduct (1, 2, 3) and (-1, -2, -3)", function () {
        a.set(x, y, z);
        b.set(-x, -y, -z);
        // calculate the dot product
        expect(a.dotProduct(b)).toEqual(-x*x-y*y-z*z);
    });

    it("length/lengthSqrt functions", function () {
        a.set( x, 0, 0 );
        b.set( 0, -y, 0 );
        c.set( 0, 0, z );
        d.set(0, 0, 0);

        expect( a.length() ).toEqual(x);
        expect( a.length2() ).toEqual(x*x);

        expect( b.length() ).toEqual(y);
        expect( b.length2() ).toEqual(y*y);

        expect( c.length() ).toEqual(z);
        expect( c.length2() ).toEqual(z*z);

        expect( d.length() ).toEqual(0);
        expect( d.length2() ).toEqual(0);

        a.set( x, y, z );
        expect( a.length() ).toEqual(Math.sqrt( x*x + y*y + z*z ));
        expect( a.length2() ).toEqual(( x*x + y*y + z*z ));
    });

    it("lerp functions", function () {
        a.set(x, 0, z);
        b.set(0, -y, 0);

        // lerp towards itself is a no-op regardless of alpha
        expect(a.clone().lerp(a, 0).equals(a.lerp(a, 0.5))).toEqual(true);
        expect(a.clone().lerp(a, 0).equals(a.lerp(a, 1))).toEqual(true);

        expect(a.clone().lerp(b, 0).equals(a)).toEqual(true);

        expect(a.clone().lerp(b, 0.5).x).toEqual(x * 0.5);
        expect(a.clone().lerp(b, 0.5).y).toEqual(-y * 0.5);
        expect(a.clone().lerp(b, 0.5).z).toEqual(z * 0.5);

        expect(a.clone().lerp(b, 1).equals(b)).toEqual(true);
    });

    it("normalize function", function () {
        a.set( x, 0, 0 );
        b.set( 0, -y, 0 );
        c.set( 0, 0, z );

        a.normalize();
        expect( a.length()).toEqual(1);
        expect( a.x ).toEqual(1);

        b.normalize();
        expect( b.length() ).toEqual(1);
        expect( b.y ).toEqual(-1);

        c.normalize();
        expect( c.length() ).toEqual(1);
        expect( c.z ).toEqual(1);
    });

    it("distance function", function () {
        a.set( x, 0, 0 );
        b.set( 0, -y, 0 );
        c.set( 0, 0, z );
        d.set(0, 0, 0);

        expect( a.distance( d ) ).toEqual(x);
        expect( b.distance( d ) ).toEqual(y);
        expect( c.distance( d ) ).toEqual(z);
    });

    it( "min/max/clamp", function() {
        a.set( x, y, z );
        b.set( -x, -y, -z );
        c.set( 0, 0, 0 );

        c.copy( a ).minV( b );
        expect( c.x ).toEqual(-x);
        expect( c.y ).toEqual(-y);
        expect( c.z ).toEqual(-z);

        c.copy( a ).maxV( b );
        expect( c.x ).toEqual(x);
        expect( c.y ).toEqual(y);
        expect( c.z ).toEqual(z);

        c.set( -2*x, 2*x, 2*z );
        c.clampSelf( -x, x );
        expect( c.x ).toEqual(-x);
        expect( c.y ).toEqual(x);
        expect( c.z ).toEqual(x);
    });

    it( "ceil/floor", function() {
        expect( a.set( -0.1, 0.1, 0.3 ).floorSelf().equals(new me.Vector3d( -1, 0, 0 ))).toEqual(true);
        expect( a.set( -0.5, 0.5, 0.6 ).floorSelf().equals(new me.Vector3d( -1, 0, 0 ))).toEqual(true);
        expect( a.set( -0.9, 0.9, 0.8 ).floorSelf().equals(new me.Vector3d( -1, 0, 0 ))).toEqual(true);

        expect( a.set( -0.1, 0.1, 0.3 ).ceilSelf().equals(new me.Vector3d( 0, 1, 1 ))).toEqual(true);
        expect( a.set( -0.5, 0.5, 0.6 ).ceilSelf().equals(new me.Vector3d( 0, 1, 1 ))).toEqual(true);
        expect( a.set( -0.9, 0.9, 0.9 ).ceilSelf().equals(new me.Vector3d( 0, 1, 1 ))).toEqual(true);
    });

    it("project a on b", function () {
        a.set(x, y, z);
        b.set(-x, -y, -z);

        // the following only works with (-)1, (-)2, (-)3 style of values
        expect(a.project(b).equals(b)).toEqual(true);
    });

    it("angle between a and b", function () {
        a.set( 0, -0.18851655680720186, 0.9820700116639124 );
        b.set( 0, 0.18851655680720186, -0.9820700116639124 );

        expect( a.angle( a ) ).toEqual(0);
        expect( a.angle( b ) ).toEqual(Math.PI);

        a.set(x, y, 0);
        b.set(-x, -y, 0);

        // why is this not perfectly 180 degrees ?
        expect(Math.round(me.Math.radToDeg(a.angle(b)))).toEqual(180);

        b.set(4*x, -y, 0);
        expect(a.angle(b) ).toEqual(Math.PI / 2);
    });

    it("perp and rotate function", function () {
        a.set(x, y, z);
        b.copy(a).perp();
        // perp rotate the vector by 90 degree clockwise on the z axis
        c.copy(a).rotate(Math.PI/2);

        expect(a.angle(b)).toEqual(a.angle(c));
    });
});
|
import os
import unittest
from config import basedir
from app import app, db
from app.models import User
# No test cases are defined in this module yet; running it directly
# simply invokes unittest's runner on an empty module.
if __name__ == '__main__':
    unittest.main()
|
# customize string representations of objects
class myColor():
def __init__(self):
self.red = 50
self.green = 75
self.blue = 100
# use getattr to dynamically return a value
def __getattr__(self, attr):
if attr == "rgbcolor":
return (self.red, self.green, self.blue)
elif attr == "hexcolor":
return "#{0:02x}{1:02x}{2:02x}".format(self.red, self.green, self.blue)
else:
raise AttributeError
# use setattr to dynamically return a value
def __setattr__(self, attr, val):
if attr == "rgbcolor":
self.red = val[0]
self.green = val[1]
self.blue = val[2]
else:
super().__setattr__(attr, val)
# use dir to list the available properties
def __dir__(self):
return ("rgbolor", "hexcolor")
def main():
    """Demonstrate the computed attributes on a myColor instance."""
    color = myColor()

    # read the computed attributes
    print(color.rgbcolor)
    print(color.hexcolor)

    # assign through the computed rgb triple and read back
    color.rgbcolor = (125, 200, 86)
    print(color.rgbcolor)
    print(color.hexcolor)

    # plain attributes still behave normally
    print(color.red)

    # show which attributes __dir__ advertises
    print(dir(color))


if __name__ == "__main__":
    main()
|
// Tests for post2get: an express helper that pairs a pending GET
// request with the body of a later POST to the same path.
const post2get = require('../src/post2get.js')
const chai = require('chai')
const chaiHttp = require('chai-http')
chai.use(chaiHttp)
const expect = chai.expect
const assert = chai.assert

// Basic-auth credentials used by every request in this suite.
const username = "bobbobberson"
const password = "bobsfordays"

let app, express

beforeAll(() => {
    // USERS is set before registration -- presumably post2get reads it
    // to configure authentication; confirm against src/post2get.js.
    process.env['USERS'] = `${username}:${password}`
    express = require('express')
    app = express()
    post2get.register(app)
})
test('409 for used id', async () => {
    //2 calls to the same endpoint
    //with the first one open results
    //in the 2nd one getting a 409
    //status response
    let requester = chai.request(app).keepOpen()
    let request1, request2
    // request1 times out (25ms) and resolves to undefined via the catch
    request1 = requester.get('/test1').auth(username, password).timeout(25).catch(reason => {})
    request2 = requester.get('/test1').auth(username, password).catch(err => {})
    const [result1, result2] = await Promise.all([request1, request2])
    expect(result1).to.equal(undefined)
    expect(result2.status).to.equal(409)
    requester.close()
})

test('id freed after use', async () => {
    //2 calls to the same endpoint sequentially
    //previous call gets cleaned up
    let requester = chai.request(app).keepOpen()
    let request1, request2
    request1 = requester.get('/test1').auth(username, password).timeout(25).catch(reason => {})
    const [result1] = await Promise.all([request1])
    // the second GET is only issued after the first has timed out, so
    // it should not be rejected with 409 -- it times out like the first
    request2 = requester.get('/test1').auth(username, password).timeout(25).catch(err => {})
    const [result2] = await Promise.all([request2])
    expect(result1).to.equal(undefined)
    expect(result2).to.equal(undefined)
    requester.close()
})

test('post request body passed to get response body', async () => {
    const requester = chai.request(app).keepOpen()
    let request1, request2
    const postBody = "this is the post request body"
    // the pending GET is completed by the POST's body
    request1 = requester.get('/test1').auth(username, password).timeout(200).catch(reason => {})
    request2 = requester.post('/test1').send(postBody).auth(username, password)
    const [result1,result2] = await Promise.all([request1,request2])
    expect(result1.status).to.equal(200)
    expect(result2.status).to.equal(200)
    expect(result1.text).to.equal(postBody)
    requester.close()
})
test('id freed after successful completion', async () => {
    // A GET matched by a POST completes successfully; afterwards the id
    // must be free again, so a follow-up GET simply times out (resolving
    // to undefined) instead of being rejected with 409.
    const requester = chai.request(app).keepOpen()
    // Bug fix: request3 was assigned without being declared, creating an
    // implicit global (a ReferenceError under strict mode / ESM).
    let request1, request2, request3
    const postBody = "this is the post request body"
    request1 = requester.get('/test1').auth(username, password).timeout(200).catch(reason => {})
    request2 = requester.post('/test1').send(postBody).auth(username, password)
    const [result1, result2] = await Promise.all([request1, request2])
    request3 = requester.get('/test1').auth(username, password).timeout(25).catch(reason => {})
    const [result3] = await Promise.all([request3])
    expect(result1.status).to.equal(200)
    expect(result1.text).to.equal(postBody)
    expect(result2.status).to.equal(200)
    expect(result3).to.equal(undefined)
    requester.close()
})
test('1st get request is closed after 6th request is received', async () => {
    // Opening six concurrent GETs on distinct ids: the oldest pending
    // request is evicted with 429 once the sixth arrives; the rest stay
    // open and time out (resolving to undefined via their catch).
    const requester = chai.request(app).keepOpen()
    let request1, request2, request3, request4, request5, request6
    request1 = requester.get('/test1').auth(username, password).timeout(200).catch(reason => {})
    request2 = requester.get('/test2').auth(username, password).timeout(200).catch(reason => {})
    request3 = requester.get('/test3').auth(username, password).timeout(200).catch(reason => {})
    request4 = requester.get('/test4').auth(username, password).timeout(200).catch(reason => {})
    request5 = requester.get('/test5').auth(username, password).timeout(200).catch(reason => {})
    request6 = requester.get('/test6').auth(username, password).timeout(200).catch(reason => {})
    const [result1, result2, result3, result4, result5, result6] = await Promise.all([request1, request2, request3, request4, request5, request6])
    expect(result1.status).to.equal(429)
    expect(result2).to.equal(undefined)
    expect(result3).to.equal(undefined)
    expect(result4).to.equal(undefined)
    expect(result5).to.equal(undefined)
    expect(result6).to.equal(undefined)
    requester.close()
})
|
import unittest
import openmesh
from math import pi, fabs
class Others(unittest.TestCase):
    """Miscellaneous openmesh TriMesh tests (feature edges, dihedral angle)."""

    def setUp(self):
        self.mesh = openmesh.TriMesh()
        self.vhandle = []

    def test_is_estimated_feature_edge(self):
        """Feature-edge classification on a tetrahedron."""
        # Add some vertices
        self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(0, 0, 0)))
        self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(0, 1, 0)))
        self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(1, 1, 0)))
        self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(0, 0, 1)))

        # Add four faces
        face_vhandles = []
        face_vhandles.append(self.vhandle[0])
        face_vhandles.append(self.vhandle[1])
        face_vhandles.append(self.vhandle[2])
        self.mesh.add_face(face_vhandles)

        face_vhandles = []
        face_vhandles.append(self.vhandle[0])
        face_vhandles.append(self.vhandle[2])
        face_vhandles.append(self.vhandle[3])
        self.mesh.add_face(face_vhandles)

        face_vhandles = []
        face_vhandles.append(self.vhandle[2])
        face_vhandles.append(self.vhandle[1])
        face_vhandles.append(self.vhandle[3])
        self.mesh.add_face(face_vhandles)

        face_vhandles = []
        face_vhandles.append(self.vhandle[3])
        face_vhandles.append(self.vhandle[1])
        face_vhandles.append(self.vhandle[0])
        self.mesh.add_face(face_vhandles)

        # ===============================================
        # Setup complete
        # ===============================================

        # Request vertex, halfedge and face normals (face normals are
        # required to compute the other two).
        self.mesh.request_vertex_normals()
        self.mesh.request_halfedge_normals()
        self.mesh.request_face_normals()

        # Automatically compute all normals
        self.mesh.update_normals()

        he = self.mesh.halfedges().__next__()

        # The edge qualifies as a feature edge up to a 0.5*pi threshold.
        self.assertTrue(self.mesh.is_estimated_feature_edge(he, 0.0))
        self.assertTrue(self.mesh.is_estimated_feature_edge(he, 0.125 * pi))
        self.assertTrue(self.mesh.is_estimated_feature_edge(he, 0.250 * pi))
        self.assertTrue(self.mesh.is_estimated_feature_edge(he, 0.375 * pi))
        self.assertTrue(self.mesh.is_estimated_feature_edge(he, 0.500 * pi))
        self.assertFalse(self.mesh.is_estimated_feature_edge(he, 0.625 * pi))
        self.assertFalse(self.mesh.is_estimated_feature_edge(he, 0.750 * pi))
        self.assertFalse(self.mesh.is_estimated_feature_edge(he, 0.875 * pi))
        self.assertFalse(self.mesh.is_estimated_feature_edge(he, 1.000 * pi))

    def test_calc_dihedral_angle(self):
        """Dihedral angle across the shared edge of two triangles.

        Bug fix: this method was previously also named
        ``test_is_estimated_feature_edge``, which shadowed the test above
        so it never ran under unittest discovery.
        """
        # Test setup:
        #  1 -- 2
        #  |  / |
        #  | /  |
        #  0 -- 3

        # Add some vertices
        self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(0, 0, 0)))
        self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(0, 1, 0)))
        self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(1, 1, 0)))
        self.vhandle.append(self.mesh.add_vertex(openmesh.Vec3d(1, 0, 0)))

        # Add two faces
        face_vhandles = []
        face_vhandles.append(self.vhandle[0])
        face_vhandles.append(self.vhandle[1])
        face_vhandles.append(self.vhandle[2])
        self.mesh.add_face(face_vhandles)

        face_vhandles = []
        face_vhandles.append(self.vhandle[0])
        face_vhandles.append(self.vhandle[2])
        face_vhandles.append(self.vhandle[3])
        self.mesh.add_face(face_vhandles)

        # ===============================================
        # Setup complete
        # ===============================================

        # Halfedge 4 runs along the diagonal edge 2 -> 0.
        he = self.mesh.halfedge_handle(4)
        self.assertEqual(self.mesh.to_vertex_handle(he).idx(), 0)
        self.assertEqual(self.mesh.from_vertex_handle(he).idx(), 2)
        self.assertEqual(self.mesh.edge_handle(he).idx(), 2)

        eh = self.mesh.edge_handle(he)
        # Both faces are coplanar, so the dihedral angle is zero.
        self.assertEqual(self.mesh.calc_dihedral_angle(eh), 0.0)

        # Modify point: fold vertex 2 below the plane and re-check.
        tmp = (openmesh.Vec3d(0.0, 0.0, -1.0) + openmesh.Vec3d(1.0, 1.0, -1.0)) * 0.5
        self.mesh.set_point(self.vhandle[2], tmp)

        difference = fabs(1.36944 - self.mesh.calc_dihedral_angle(eh))
        self.assertTrue(difference < 0.00001)
# Build and run the suite explicitly (verbosity=2 prints each test name).
if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(Others)
    unittest.TextTestRunner(verbosity=2).run(suite)
|
# coding: utf-8
# Modified Work: Copyright (c) 2018, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
# Original Work: Copyright (c) 2018 Character Encoding Detector contributors. https://github.com/chardet
"""
All of the Enums that are used throughout the chardet package.
:author: Dan Blanchard (dan.blanchard@gmail.com)
"""
class InputState(object):
    """
    This enum represents the different states a universal detector can be in.
    """
    PURE_ASCII = 0   # only 7-bit ASCII bytes seen so far
    ESC_ASCII = 1    # escape sequences seen (possible ISO-2022 encoding)
    HIGH_BYTE = 2    # bytes >= 0x80 seen
class LanguageFilter(object):
    """
    This enum represents the different language filters we can apply to a
    ``UniversalDetector``.

    Values are bit flags and may be OR-ed together.
    """
    CHINESE_SIMPLIFIED = 0x01
    CHINESE_TRADITIONAL = 0x02
    JAPANESE = 0x04
    KOREAN = 0x08
    NON_CJK = 0x10
    ALL = 0x1F
    # Convenience combinations of the flags above.
    CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
    CJK = CHINESE | JAPANESE | KOREAN
class ProbingState(object):
    """
    This enum represents the different states a prober can be in.
    """
    DETECTING = 0  # still gathering evidence
    FOUND_IT = 1   # confident match found
    NOT_ME = 2     # input ruled out for this charset
class MachineState(object):
    """
    This enum represents the different states a state machine can be in.
    """
    START = 0
    ERROR = 1
    ITS_ME = 2
class SequenceLikelihood(object):
    """
    This enum represents the likelihood of a character following the previous one.
    """
    NEGATIVE = 0
    UNLIKELY = 1
    LIKELY = 2
    POSITIVE = 3

    @classmethod
    def get_num_categories(cls):
        """:returns: The number of likelihood categories in the enum."""
        return 4
class CharacterCategory(object):
    """
    This enum represents the different categories language models for
    ``SingleByteCharsetProber`` put characters into.

    Anything less than CONTROL is considered a letter.
    """
    # High values are reserved for non-letter categories; per the note
    # above, any value below CONTROL denotes a letter.
    UNDEFINED = 255
    LINE_BREAK = 254
    SYMBOL = 253
    DIGIT = 252
    CONTROL = 251
|
#**********************************************************************
# Copyright 2020 Advanced Micro Devices, Inc
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#********************************************************************
from pathlib import Path
import tempfile
import os
import shutil
import platform
import numpy as np
import math
import bpy
import hdusd
# saving current process id (used to build per-process temp dirs below)
PID = os.getpid()

# Host platform flags derived once at import time.
OS = platform.system()
IS_WIN = OS == 'Windows'
IS_MAC = OS == 'Darwin'
IS_LINUX = OS == 'Linux'

# "major.minor" Blender version string, e.g. "2.93".
BLENDER_VERSION = f'{bpy.app.version[0]}.{bpy.app.version[1]}'

PLUGIN_ROOT_DIR = Path(hdusd.__file__).parent
BLENDER_DATA_DIR = Path(bpy.utils.resource_path('LOCAL')) / 'datafiles'

# Toggled via the HDUSD_BLENDER_DEBUG environment variable.
DEBUG_MODE = bool(int(os.environ.get('HDUSD_BLENDER_DEBUG', 0)))
# In debug mode libs are looked up two levels above the plugin dir --
# presumably a development checkout layout; confirm against the repo.
LIBS_DIR = PLUGIN_ROOT_DIR.parent.parent / 'libs' if DEBUG_MODE else \
           PLUGIN_ROOT_DIR / 'libs'

from . import logging
log = logging.Log('utils')
def temp_dir():
    """Return the $TEMP/hdusd temp dir, creating it if needed.

    (Docstring previously referenced "rprblender"; the directory used
    here is "hdusd".)
    """
    d = Path(tempfile.gettempdir()) / "hdusd"
    if not d.is_dir():
        log("Creating temp dir", d)
        # exist_ok avoids a crash if another process creates the
        # directory between the is_dir() check and mkdir().
        d.mkdir(exist_ok=True)
    return d
def temp_pid_dir():
    """Return the per-process temp dir ($TEMP/hdusd/<PID>), creating it if needed."""
    pid_dir = temp_dir() / str(PID)
    if not pid_dir.is_dir():
        log("Creating image temp pid dir", pid_dir)
        pid_dir.mkdir()
    return pid_dir
def get_temp_file(suffix, name=None):
    # Return a path inside the per-process temp dir; when no name is
    # given, a unique one is generated.
    # NOTE(review): tempfile.mktemp only reserves a name, not the file
    # itself, so it is racy by design; consider tempfile.mkstemp if the
    # file could be created concurrently -- confirm before changing.
    if not name:
        return Path(tempfile.mktemp(suffix, "tmp", temp_pid_dir()))

    if suffix:
        name += suffix

    return temp_pid_dir() / name
def clear_temp_dir():
    """Remove everything inside the $TEMP/hdusd temp dir."""
    root = temp_dir()
    entries = tuple(root.iterdir())
    if not entries:
        return

    log("Clearing temp dir", root)
    for entry in entries:
        if entry.is_dir():
            shutil.rmtree(entry, ignore_errors=True)
        else:
            os.remove(entry)
def get_data_from_collection(collection, attribute, size, dtype=np.float32):
    """Read ``attribute`` from a bpy collection into an ndarray of shape ``size``.

    Uses ``foreach_get`` for a fast bulk copy into a flat buffer, then
    reshapes to the requested shape.
    """
    # Renamed from ``len`` -- the original shadowed the builtin.
    count = np.prod(size)
    data = np.zeros(count, dtype=dtype)
    collection.foreach_get(attribute, data)
    return data.reshape(size)
def get_prop_array_data(arr, dtype=np.float32):
    """Copy a property array into a numpy array.

    Uses the fast ``foreach_get`` bulk copy when available, otherwise
    falls back to element-wise iteration.
    """
    if hasattr(arr, 'foreach_get'):
        buf = np.empty(len(arr), dtype=dtype)
        arr.foreach_get(buf)
        return buf
    return np.fromiter(arr, dtype=dtype)
def time_str(val):
    """Format a perf-counter delta as ``MM:SS.hh`` (hundredths of a second)."""
    minutes = math.floor(val / 60)
    seconds = math.floor(val % 60)
    hundredths = math.floor((val % 1) * 100)
    return f"{minutes:02}:{seconds:02}.{hundredths:02}"
def title_str(str):
    """Turn an identifier into a title: underscores become spaces and the
    first character is upper-cased."""
    text = str.replace('_', ' ')
    return text[:1].upper() + text[1:]
def code_str(str):
    """Make a string identifier-safe: spaces and dots become underscores."""
    without_spaces = str.replace(' ', '_')
    return without_spaces.replace('.', '_')
def pass_node_reroute(link):
    """Follow chained Reroute nodes back to the real source link.

    Returns the first link whose source is not a Reroute node, or
    ``None`` when the chain is broken or the final link is invalid.
    """
    while isinstance(link.from_node, bpy.types.NodeReroute):
        upstream = link.from_node.inputs[0].links
        if not upstream:
            return None
        link = upstream[0]

    return link if link.is_valid else None
|
// Credit: https://codeburst.io/easy-i18n-in-10-lines-of-javascript-poc-eb9e5444d71e
// Credit: https://codeburst.io/easy-i18n-in-10-lines-of-javascript-poc-eb9e5444d71e
/**
 * Render the translation template `template` for the current locale.
 * The db entry holds literal text chunks in `t` and, in `v`, the
 * argument index to interpolate before each subsequent chunk.
 */
function i18n(template, ...args) {
    const info = i18n.db[i18n.locale][template];
    const parts = [info.t[0]];
    for (let i = 1; i < info.t.length; i++) {
        parts.push(args[info.v[i - 1]] + info.t[i]);
    }
    return parts.join('');
}
/**
 * Apply translations to the DOM for the given locale. With no argument,
 * the locale is derived from the browser language (de/bg, default en).
 * Elements opt in via data-i18n-key (innerHTML), data-i18n-placeholder-key
 * and data-i18n-value-key attributes.
 */
function i18n_load(locale) {
    if (locale == null) {
        const lang = navigator.language;
        if (lang.includes('de')) {
            i18n.locale = 'de';
        } else if (lang.includes('bg')) {
            i18n.locale = 'bg';
        } else {
            i18n.locale = 'en';
        }
    } else {
        i18n.locale = locale;
    }

    Array.from(document.querySelectorAll('[data-i18n-key]'), (el) => {
        el.innerHTML = i18n(el.getAttribute('data-i18n-key'));
    });
    Array.from(document.querySelectorAll('[data-i18n-placeholder-key]'), (el) => {
        el.setAttribute('placeholder', i18n(el.getAttribute('data-i18n-placeholder-key')));
    });
    Array.from(document.querySelectorAll('[data-i18n-value-key]'), (el) => {
        el.setAttribute('value', i18n(el.getAttribute('data-i18n-value-key')));
    });
}

// Translate the page once on script load, using the browser language.
i18n_load();
|
// p5.js sketch that runs ml5 BodyPix person segmentation on the webcam
// and draws the background mask each time a result arrives.
let bodypix;
let video;
let segmentation;
let img;

const options = {
    outputStride: 8, // 8, 16, or 32, default is 16
    segmentationThreshold: 0.3 // 0 - 1, defaults to 0.5
}

function preload(){
    // Load the BodyPix model before setup() runs.
    bodypix = ml5.bodyPix(options);
}

function setup() {
    createCanvas(320, 240);
    // load up your video
    video = createCapture(VIDEO);
    video.size(width, height);
    // video.hide(); // Hide the video element, and just show the canvas
    // Kick off the first segmentation; gotResults re-queues itself.
    bodypix.segment(video, gotResults)
}

function gotResults(err, result) {
    if (err) {
        console.log(err)
        return
    }
    segmentation = result;

    background(0);
    image(segmentation.backgroundMask, 0, 0, width, height)

    // Request the next frame's segmentation (continuous loop).
    bodypix.segment(video, gotResults)
}
|
/*
* SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
/*
* Note: Currently, the backtraces must still be checked manually. Therefore,
* these test cases should always pass.
* Todo: Automate the checking of backtrace addresses.
*/
#include <stdlib.h>
#include "unity.h"
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "freertos/xtensa_api.h"
#include "esp_intr_alloc.h"
#include "esp_rom_sys.h"
#include "esp_rom_uart.h"
#define SW_ISR_LEVEL_1 7    // SW interrupt number allocated at level 1 (see intr alloc in the tests)
#define SW_ISR_LEVEL_3 29   // SW interrupt number allocated at level 3
#define RECUR_DEPTH 3       // recursion depth used to lengthen the call stack
#define ACTION_ABORT -1
#define ACTION_INT_WDT -2

// Set to (-1) for abort(), (-2) for interrupt watchdog
static int backtrace_trigger_source;
/*
 * Recursive functions to generate a longer call stack. When the max specified
 * recursion depth is reached, the following actions can be taken:
 * a non-negative action triggers that SW interrupt number; ACTION_ABORT
 * calls abort(); ACTION_INT_WDT spins with interrupts disabled until the
 * interrupt watchdog fires.
 */
static void __attribute__((__noinline__)) recursive_func(int recur_depth, int action)
{
    if (recur_depth > 1) {
        recursive_func(recur_depth - 1, action);
    } else if (action >= 0) {
        // Trigger the software interrupt whose number is 'action'
        xt_set_intset(1 << action);
    } else if (action == ACTION_ABORT) {
        abort();
        // Todo: abort() causes problems in GDB Stub backtrace due to being 'non returning'.
    } else if (action == ACTION_INT_WDT) {
        portDISABLE_INTERRUPTS();
        while (1) {
            ;
        }
    }
}
// Level-3 (nested) SW interrupt handler: clears its interrupt, then
// recurses to deepen the call stack before performing the configured
// action (abort or interrupt-watchdog spin).
static void level_three_isr (void *arg)
{
    xt_set_intclear(1 << SW_ISR_LEVEL_3); //Clear interrupt
    recursive_func(RECUR_DEPTH, backtrace_trigger_source); //Abort at the max recursive depth
}
// Level-1 SW interrupt handler: clears its interrupt, then recurses and
// raises the level-3 SW interrupt so the crash happens inside a nested
// interrupt context.
static void level_one_isr(void *arg)
{
    xt_set_intclear(1 << SW_ISR_LEVEL_1); //Clear interrupt
    recursive_func(RECUR_DEPTH, SW_ISR_LEVEL_3); //Trigger nested interrupt max recursive depth
}
// Intentionally crashes via abort() inside a nested interrupt; the
// printed backtrace must be checked manually (see the note at the top
// of this file).
TEST_CASE("Test backtrace from abort", "[reset_reason][reset=abort,SW_CPU_RESET]")
{
    //Allocate level one and three SW interrupts
    esp_intr_alloc(ETS_INTERNAL_SW0_INTR_SOURCE, 0, level_one_isr, NULL, NULL); //Level 1 SW intr
    esp_intr_alloc(ETS_INTERNAL_SW1_INTR_SOURCE, 0, level_three_isr, NULL, NULL); //Level 3 SW intr
    backtrace_trigger_source = ACTION_ABORT;
    recursive_func(RECUR_DEPTH, SW_ISR_LEVEL_1); //Trigger lvl 1 SW interrupt at max recursive depth
}
// Intentionally hangs with interrupts disabled inside a nested interrupt
// so the interrupt watchdog fires; backtrace is checked manually.
TEST_CASE("Test backtrace from interrupt watchdog timeout", "[reset_reason][reset=Interrupt wdt timeout on CPU0,SW_CPU_RESET]")
{
    //Allocate level one and three SW interrupts
    esp_intr_alloc(ETS_INTERNAL_SW0_INTR_SOURCE, 0, level_one_isr, NULL, NULL); //Level 1 SW intr
    esp_intr_alloc(ETS_INTERNAL_SW1_INTR_SOURCE, 0, level_three_isr, NULL, NULL); //Level 3 SW intr
    backtrace_trigger_source = ACTION_INT_WDT;
    recursive_func(RECUR_DEPTH, SW_ISR_LEVEL_1); //Trigger lvl 1 SW interrupt at max recursive depth
}
// Custom putc hook: prints the character via the ROM UART routine, then
// writes to an invalid address to force a StoreProhibited crash from a
// ROM-called context.
static void write_char_crash(char c)
{
    esp_rom_uart_putc(c);
    *(char*) 0x00000001 = 0;
}
// Installs the crashing putc hook, so printing traverses a ROM function
// before faulting; verifies the backtrace can cross ROM frames (checked
// manually).
TEST_CASE("Test backtrace with a ROM function", "[reset_reason][reset=StoreProhibited,SW_CPU_RESET]")
{
    ets_install_putc1(&write_char_crash);
    esp_rom_printf("foo");
}
|
import sys
sys.path.append('..')
import numpy as np
import math
from geneticalgorithm import geneticalgorithm as ga
def f(X):
    """Rastrigin function: sum over dimensions of x^2 - 10*cos(2*pi*x) + 10.

    Global minimum is 0 at the origin.
    """
    total = 0
    for x in X:
        total += (x ** 2) - 10 * math.cos(2 * math.pi * x) + 10
    return total
def test_rastrigin():
    """Run the GA on the 2-D Rastrigin function and require near-optimality."""
    parameters = {'max_num_iteration': 1000,
                  'population_size': 200,
                  'mutation_probability': 0.1,
                  'elit_ratio': 0.02,
                  'crossover_probability': 0.5,
                  'parents_portion': 0.3,
                  'crossover_type': 'two_point',
                  'max_iteration_without_improv': None,
                  'multiprocessing_ncpus': 4,
                  'multiprocessing_engine': None,
                  }
    # Search box [-5.12, 5.12] per dimension (standard Rastrigin domain).
    varbound = np.array([[-5.12, 5.12]]*2)
    model = ga(function=f, dimension=2, variable_type='real',
               variable_boundaries=varbound, algorithm_parameters=parameters)
    model.run()
    # The known global minimum is 0 at the origin.
    assert model.best_function < 1e-6


if __name__ == '__main__':
    test_rastrigin()
|
import torch
from scripts.study_case.ID_13.torch_geometric.utils import is_undirected, to_undirected
def test_is_undirected():
    """is_undirected: symmetric edge sets pass, asymmetric ones fail."""
    symmetric = torch.tensor([[0, 1, 0], [1, 0, 0]])
    assert is_undirected(symmetric)

    asymmetric = torch.tensor([[0, 1, 1], [1, 0, 2]])
    assert not is_undirected(asymmetric)
def test_to_undirected():
    """to_undirected adds missing reverse edges and returns a sorted edge_index."""
    directed = torch.tensor([[0, 1, 1], [1, 0, 2]])
    edge_index = to_undirected(directed)
    # The missing reverse edge (2,1) is added; result is sorted.
    assert edge_index.tolist() == [[0, 1, 1, 2], [1, 0, 2, 1]]
|
/**
* Manage the search functions for N2, which uses the Awesomplete widget from
* https://leaverou.github.io/awesomplete/
* @typedef N2Search
*/
class N2Search {
    /**
     * Initialize N2Search object properties and Awesomplete.
     * @param {N2TreeNode} zoomedElement The selected node in the model tree.
     * @param {N2TreeNode} root The base element of the model tree.
     */
    constructor(zoomedElement, root) {
        // Used for autocomplete suggestions:
        this.filteredWord = {
            'value': "",
            'containsDot': false,
            'baseName': ""
        }
        this.filterSet = {};
        this.updateRecomputesAutoComplete = true;
        this.wordIndex = 0;
        this.searchVals = [];
        this.inDataFunction = true;
        this.searchCollapsedUndo = []; // Non-matching nodes to be minimized/hidden.
        this.numMatches = 0;
        this.searchInputDiv = d3.select("#awesompleteId").node();
        this.searchCountDiv = d3.select("#searchCountId");
        this.searchInputDiv.value = '';
        this._setupAwesomplete();
        this._addEventListeners();
        this.update(zoomedElement, root);
    }

    /** Initialize the Awesomplete widget. */
    _setupAwesomplete() {
        const self = this;
        this.searchAwesomplete = new Awesomplete(self.searchInputDiv, {
            "minChars": 1,
            "maxItems": 15,
            "list": [],
            // Reject duplicates and non-matching suggestions for the word
            // currently being typed.
            "filter": function (text, input) {
                if (self.inDataFunction) {
                    self.inDataFunction = false;
                    self.filterSet = {};
                }
                if (self.filteredWord.value.length == 0) return false;
                if (self.filterSet.hasOwnProperty(text)) return false;
                self.filterSet[text] = true;
                // Dotted words are path prefixes; bare words match anywhere.
                if (self.filteredWord.containsDot)
                    return Awesomplete.FILTER_STARTSWITH(text,
                        self.filteredWord.value);
                return Awesomplete.FILTER_CONTAINS(text,
                    self.filteredWord.value);
            },
            "item": function (text, input) {
                return Awesomplete.ITEM(text, self.filteredWord.value);
            },
            // Replace only the word being completed, keeping the other
            // space-separated search terms and restoring the cursor.
            "replace": function (text) {
                let newVal = "";
                let cursorPos = 0;
                for (let i = 0; i < self.searchVals.length; ++i) {
                    newVal += ((i == self.wordIndex) ? text : self.searchVals[i]) + " ";
                    if (i == self.wordIndex) cursorPos = newVal.length - 1;
                }
                this.input.value = newVal;
                self.searchInputDiv.setSelectionRange(cursorPos, cursorPos);
            },
            // For dotted words, trim the suggestion to start at the typed base name.
            "data": function (item /*, input*/) {
                self.inDataFunction = true;
                if (self.filteredWord.containsDot) {
                    let baseIndex = item.toLowerCase().indexOf("." +
                        self.filteredWord.baseName.toLowerCase() + ".");
                    if (baseIndex > 0) return item.slice(baseIndex + 1);
                }
                return item;
            }
        });
    }

    /**
     * Add a couple of event listeners that are easier to do from here
     * than in N2UserInterface.
     */
    _addEventListeners() {
        const self = this;
        d3.select('#awesompleteId').on('awesomplete-selectcomplete', function () {
            self.searchInputEventListener();
            self.searchAwesomplete.evaluate();
        });
        d3.select('#awesompleteId').on('input', this.searchInputEventListener.bind(this));
        d3.select('#awesompleteId').on('focus', this.searchInputEventListener.bind(this));
    }

    /**
     * Recurse through the tree and find nodes with pathnames that match
     * the computed regular expression. Minimize/hide nodes that don't match.
     * @param {N2TreeNode} node The current node to operate on.
     * @param {RegExp} regexMatch A regular expression assembled from the search values.
     * @param {Array} undoList List of nodes that have been hidden/minimized.
     * @returns {Boolean} True if a match was found, false otherwise.
     */
    _doSearch(node, regexMatch, undoList) {
        let didMatch = false;
        if (node.hasChildren() && !node.isMinimized) {
            // depth first, dont go into minimized children
            for (let child of node.children) {
                if (this._doSearch(child, regexMatch, undoList)) didMatch = true;
            }
        }
        if (node === this.zoomedElement) return didMatch;
        if (!didMatch && !node.hasChildren() && node.isInputOrOutput()) {
            didMatch = regexMatch.test(node.absPathName);
            if (didMatch) {
                // only inputs and outputs can count as matches
                ++this.numMatches;
            }
            else if (undoList) {
                // did not match and undo list is not null
                node.varIsHidden = true;
                undoList.push(node);
            }
        }
        if (!didMatch && node.hasChildren() && !node.isMinimized && undoList) {
            // minimizeable and undoList not null
            node.isMinimized = true;
            undoList.push(node);
        }
        return didMatch;
    }

    /**
     * Reset the number of matches to zero and execute the search with a null value
     * for undoList, so it's not changed.
     */
    _countMatches() {
        this.numMatches = 0;
        if (this.searchVals.length != 0)
            this._doSearch(this.zoomedElement, this._getSearchRegExp(this.searchVals), null);
    }

    /** Undo results of the previous search, and perform a new one. */
    performSearch() {
        for (let node of this.searchCollapsedUndo) {
            //auto undo on successive searches
            if (!node.hasChildren() && node.isInputOrOutput()) node.varIsHidden = false;
            else node.isMinimized = false;
        }
        this.numMatches = 0;
        this.searchCollapsedUndo = [];
        if (this.searchVals.length != 0)
            this._doSearch(this.zoomedElement, this._getSearchRegExp(this.searchVals),
                this.searchCollapsedUndo);
    }

    /**
     * Do some escaping and replacing of globbing with regular expressions.
     * Each search term is anchored (^term$) and glob chars are translated:
     * '?' -> any char, '*' -> lazy any-sequence, '.' is escaped.
     * @param {Array} searchValsArray Space-separated search terms.
     * @returns {RegExp} Case-insensitive regular expression.
     */
    _getSearchRegExp(searchValsArray) {
        // FIX: was `new String(...)` — a String wrapper object; use a plain
        // string primitive instead (behavior of .replace is identical).
        const regexStr = ("(^" + searchValsArray.join("$|^") + "$)")
            .replace(/[\.\?\*\^]/g, function (c) {
                return {
                    '.': "\\.",
                    '?': ".",
                    '*': ".*?",
                    '^': "^.*?"
                }[c];
            });
        return new RegExp(regexStr, "i"); // case insensitive
    }

    /** Keep only non-empty search terms. */
    _isValid(value) {
        return value.length > 0;
    }

    /**
     * React to each value entered into the search input box: sanitize the
     * input, determine the word under the cursor, pick the matching
     * autocomplete list, and update the match count display.
     * (Reads the triggering event via d3.event; takes no parameters.)
     */
    searchInputEventListener() {
        testThis(this, 'N2Search', 'searchInputEventListener');
        const target = d3.event.target;
        //valid characters AlphaNumeric : _ ? * space .
        const newVal = target.value.replace(/([^a-zA-Z0-9:_\?\*\s\.])/g, "");
        if (newVal != target.value) {
            target.value = newVal; // won't trigger new event
        }
        this.searchVals = target.value.split(" ");
        const filtered = this.searchVals.filter(this._isValid);
        this.searchVals = filtered;
        // Locate the whitespace-delimited word containing the cursor.
        const lastLetterTypedIndex = target.selectionStart - 1;
        let endIndex = target.value.indexOf(" ", lastLetterTypedIndex);
        if (endIndex == -1) endIndex = target.value.length;
        let startIndex = target.value.lastIndexOf(" ", lastLetterTypedIndex);
        if (startIndex == -1) startIndex = 0;
        const sub = target.value.substring(startIndex, endIndex).trim();
        // valid openmdao character types: AlphaNumeric : _ .
        this.filteredWord.value = sub.replace(/([^a-zA-Z0-9:_\.])/g, "");
        let i = 0;
        for (let val of this.searchVals) {
            if (val.replace(/([^a-zA-Z0-9:_\.])/g, "") == this.filteredWord.value) {
                this.wordIndex = i;
                break;
            }
            ++i;
        }
        // Dotted words complete against full paths; bare words against names.
        this.filteredWord.containsDot = (this.filteredWord.value.indexOf(".") != -1);
        this.searchAwesomplete.list = this.filteredWord.containsDot ?
            this.autoComplete.paths.list : this.autoComplete.names.list;
        this.filteredWord.baseName = this.filteredWord.containsDot ?
            this.filteredWord.value.split(".")[0].trim() : "";
        this._countMatches();
        this.searchCountDiv.html("" + this.numMatches + " matches");
    }

    /**
     * Find the earliest minimized parent of the specified node.
     * @param {N2TreeNode} node The node to search from.
     * @returns {N2TreeNode} The earliest mimimized parent node.
     */
    findRootOfChangeForSearch(node) {
        let earliestObj = node;
        for (let obj = node; obj != null; obj = obj.parent) {
            if (obj.isMinimized) earliestObj = obj;
        }
        return earliestObj;
    }

    /**
     * Recurse through the children of the node and add their names to the
     * autocomplete list of names, if they're not already in it.
     * @param {N2TreeNode} node The node to search from.
     */
    _populateAutoCompleteList(node) {
        if (node.hasChildren() && !node.isMinimized) {
            // Depth first, dont go into minimized children
            for (let child of node.children) {
                this._populateAutoCompleteList(child);
            }
        }
        if (node === this.zoomedElement) return;
        let nodeName = node.name;
        if (!node.isInputOrOutput()) nodeName += ".";
        let namesToAdd = [nodeName];
        for (let name of namesToAdd) {
            if (!this.autoComplete.names.set.hasOwnProperty(name)) {
                this.autoComplete.names.set[name] = true;
                this.autoComplete.names.list.push(name);
            }
        }
        // Paths are stored relative to the zoomed element (full path at root).
        let localPathName = (this.zoomedElement === this.modelRoot) ?
            node.absPathName : node.absPathName.slice(this.zoomedElement.absPathName.length + 1);
        if (!this.autoComplete.paths.set.hasOwnProperty(localPathName)) {
            this.autoComplete.paths.set[localPathName] = true;
            this.autoComplete.paths.list.push(localPathName);
        }
    }

    /**
     * If the zoomed element has changed, update the auto complete lists.
     * @param {N2TreeNode} zoomedElement The selected node in the model tree.
     * @param {N2TreeNode} root The base element of the model tree.
     */
    update(zoomedElement, root) {
        this.zoomedElement = zoomedElement;
        this.modelRoot = root;
        if (!this.updateRecomputesAutoComplete) {
            this.updateRecomputesAutoComplete = true;
            return;
        }
        this.autoComplete = {
            'names': {
                'list': [],
                'set': {}
            },
            'paths': {
                'list': [],
                'set': {}
            }
        }
        this._populateAutoCompleteList(this.zoomedElement);
    }
}
|
var _conf = global.conf;
var _path = require('path');
var _db = require('../db');
var _helper = require('../helper');
// Provider initialization hook; intentionally empty — nothing to set up yet.
function init()
{
}
/**
 * List browseable items for the "Web Videos" section.
 * @param {Array|null} path - Browse path; falsy means the root level.
 * @param {Function} cb - Node-style callback (err, items).
 */
function getItems(path, cb)
{
    // Root level: expose the single "Web Videos" section entry.
    if (!path) {
        return cb(null, [{ type: 'Section', title: 'Web Videos', id: 'webvids' }]);
    }

    // Any other branch has nothing to offer.
    if (path[0] !== 'webvids') {
        return cb(null, null);
    }

    // Inside our section: all ready web videos, ordered by source name.
    _db.Media.findAll({ where: { state: 'ready', type: 'WebVideo' }, order: 'source_name' }).complete(cb);
}
// Run module initialization at load time (currently a no-op).
init();

// Public provider interface consumed by the browse layer.
module.exports = {
    getItems : getItems
}
|
const Employee = require("./Employee");
/** Employee subclass representing a manager with an office number. */
class Manager extends Employee {
    /**
     * @param {string} name Employee name.
     * @param {number|string} id Employee id.
     * @param {string} email Employee email address.
     * @param {number|string} officeNumber The manager's office number.
     */
    constructor(name, id, email, officeNumber) {
        super(name, id, email);
        this.officeNumber = officeNumber;
    }

    /** @returns {number|string} The manager's office number. */
    getOfficeNumber() {
        return this.officeNumber;
    }

    /** @returns {string} The role label used on the generated card. */
    getRole() {
        return 'Manager';
    }

    /** @returns {string} A Bootstrap card HTML fragment for this manager. */
    htmlcard() {
        return `<div class="card border-primary mb-3 col-m-2">
<div class="card-header">
Manager
</div>
<div class="card-body">
<h5 class="card-title">${this.name}</h5>
<p class="card-text">Id: ${this.id}</p>
<p class="card-text">Email: ${this.email}</p>
<p class="card-text">Office Number: ${this.officeNumber}</p>
</div>
</div>`;
    }
}

module.exports = Manager;
|
"use strict";

var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");

var _mongoose = _interopRequireDefault(require("mongoose"));

var Schema = _mongoose["default"].Schema;

// Category documents carry a single required display name.
// versionKey is disabled so documents don't include mongoose's __v field.
var categorySchema = new Schema({
  categoryName: {
    type: String,
    required: true
  }
}, {
  versionKey: false
});

// Registered under the "Category" model name (collection derived by mongoose).
var categoryModel = _mongoose["default"].model("Category", categorySchema);

module.exports = categoryModel;
|
# Discord Card Wars Bot
import sys
import os
import discord
from discord.ext import commands
import requests
import csv
import datetime
import json
import aiohttp
import urllib.parse
import funcs
import asyncio
from PIL import Image
import random
from discord_components import DiscordComponents
from paginate import paginate
import get
from discord.ext.tasks import loop
#from server import keep_alive
#from pretty_help import DefaultMenu, PrettyHelp
# Number of times the hourly updater has run since startup.
execk = 0

@loop(hours=1)
async def git_pull():
    """Hourly task: pull the latest code; on every run after the first,
    re-exec the interpreter so the new code takes effect."""
    # FIX: without this declaration, `execk += 1` below makes execk a local
    # variable, so the earlier read `if execk != 0` raises UnboundLocalError.
    global execk
    os.system("git pull")
    if execk != 0:
        os.execv(sys.executable, ['python'] + sys.argv)
    execk += 1

# Discord user ids allowed to run developer-only commands (e.g. jk!servers).
devs = []
def log_write(text):
    """Echo `text` to stdout and append it, timestamped, to log.log."""
    entry = "[{}] : \t{}\n".format(str(datetime.datetime.now()), text)
    print(text)
    with open("log.log", "a") as log:
        log.write(entry)
# Record startup before connecting to Discord.
log_write("Starting BOT!!!")
#
# Sharded bot instance; every command uses the 'jk!' prefix.
bot = commands.AutoShardedBot(command_prefix='jk!')
#bot.remove_command('help')
#menu = DefaultMenu(page_left="⬅️", page_right="➡️", remove="❌", active_time=10)
#bot.help_command=PrettyHelp(menu=menu)
@bot.event
async def on_ready():
    # Attach discord_components support once the gateway connection is up.
    DiscordComponents(bot)
    """
    pfp_path = "pfp/download_1.jpeg"
    fp = open(pfp_path, 'rb')
    pfp = fp.read()
    await bot.user.edit(avatar=pfp)
    """
    # Advertise the game status and log the successful login.
    await bot.change_presence(activity=discord.Game(name='Card Wars'))
    log_write('We have logged in as {0.user}'.format(bot))
@bot.command()
async def servers(ctx):
    # Developer-only: DM the caller an embed listing every guild the bot is in.
    # Silently does nothing for non-developers.
    if ctx.author.id in devs:
        embed = discord.Embed(title="servers",description="servers of the bot")
        for guild in bot.guilds:
            embed.add_field(name=f"{guild.name}",value=f"{guild.id}")
        await ctx.author.send(embed=embed)
@bot.command()
async def invite(ctx):
    # Reply with the bot's OAuth2 invite link in an embed.
    embed = discord.Embed(title="Invite",description="[Invite](https://discord.com/oauth2/authorize?&client_id=858548922771701770&scope=applications.commands+bot&permissions=2088234230)",color=ctx.author.color)
    await ctx.send(embed=embed)
@bot.command(aliases=["mview"])
async def mythicview(ctx, *, arg):
    # Look up a card by (partial) name in cards.csv and show a detail embed.
    # Column layout (from usage below): 0=name, 1=description, 2=type,
    # 3=landscape, 4=cost, 5=ATK, 6=DEF, 8=deck/quantity.
    with open('./cards.csv') as cfile:
        csv_file = csv.reader(cfile, delimiter=',',quotechar='"')
        # Find card and return value
        log_write("{1} \t$c {0}".format(arg,ctx.message.author))
        search=[]   # case-insensitive substring matches
        s2=[]       # exact-name matches (used to disambiguate)
        for row in csv_file:
            if arg.lower() in row[0].lower():
                search.append(row[0])
                returned_card=row
            if arg == row[0]:
                s2.append(row[0])
                returned_card=row
        # NOTE(review): when the exact-match fallback below selects s2,
        # returned_card may still point at a LATER partial match from the
        # loop above — confirm and track the exact row separately if so.
        if len(search) != 1:
            if len(s2)==1:
                search = s2
        if len(search) > 1:
            # Ambiguous: list the candidates (embed field limit handled via except).
            embed = discord.Embed(color=0xfff100)
            embed.set_author(name="Did you mean:")
            x=1
            for ting in search:
                embed.add_field(name=str(x)+".", value=ting, inline=False)
                x+=1
            try:
                embed.add_field(name="Disclaimer", value="Try typing it with proper capitalization.", inline=False)
                await ctx.send(embed=embed)
                log_write("".join(search))
                log_write("")
            except:
                # Too many fields for one embed — ask the user to narrow down.
                await ctx.send("That search exceeds the limit ({} cards were returned). Please be more specific.".format(str(len(search))))
                log_write("Call for {} cards.".format(str(len(search))))
        if len(search) == 1:
            print(returned_card)
            embed = discord.Embed(color=0xfff100)
            # Build a whitespace-free name for the GitHub image URL.
            cardname = returned_card[0]
            insert = ""
            cardurlname = cardname.split()
            for item in cardurlname:
                insert += item
            urlz=f"https://github.com/641i130/card-wars-discord-bot/raw/master/images/{insert}.jpg"
            print(urlz)
            embed.set_image(url=f"attachment://{returned_card[0]}.jpg")
            embed.set_author(name=returned_card[0], icon_url="https://cdn.discordapp.com/avatars/705581980628025415/0a89eae2186c741e269d72b10c407b47.webp")
            embed.add_field(name="Deck / Quantity", value=returned_card[8].rstrip(), inline=False)
            embed.set_thumbnail(url="http://35.184.199.95/images/{}.jpg".format(urllib.parse.quote(returned_card[0])))
            # Field layout depends on the card's type column.
            if (returned_card[2].rstrip() == "Creature"):
                embed.add_field(name="Landscape", value=returned_card[3].rstrip(), inline=True)
                embed.add_field(name="Type", value=returned_card[2].rstrip(), inline=True)
                embed.add_field(name="Cost", value=returned_card[4].rstrip(), inline=True)
                embed.add_field(name="ATK", value=returned_card[5].rstrip(), inline=True)
                embed.add_field(name="DEF", value=returned_card[6].rstrip(), inline=True)
                embed.add_field(name="Description", value=returned_card[1].rstrip(), inline=True)
            if (returned_card[2].rstrip() == "Spell" or returned_card[2].rstrip() == "Building" or returned_card[2].rstrip() == "Teamwork"):
                embed.add_field(name="Landscape", value=returned_card[3].rstrip(), inline=True)
                embed.add_field(name="Type", value=returned_card[2].rstrip(), inline=True)
                embed.add_field(name="Cost", value=returned_card[4].rstrip(), inline=True)
                embed.add_field(name="Description", value=returned_card[1].rstrip(), inline=True)
            if (returned_card[2].rstrip() == "Hero"):
                embed.add_field(name="Type", value=returned_card[2].rstrip(), inline=True)
                embed.add_field(name="Description", value=returned_card[1].rstrip(), inline=True)
            #embed.add_field(name="Card Set", value=returned_card[9].rstrip(), inline=True)
            await ctx.send(embed=embed)
            log_write("".join(search))
            log_write("")
@bot.command(aliases=["mimage"])
async def mythicimage(ctx, *, arg):
    # Same card lookup as mythicview, but replies with the local image file.
    # Hard block for one specific user id.
    if ctx.author.id == 746904488396324864:
        return await ctx.send("Ee")
    with open('./cards.csv') as cfile:
        csv_file = csv.reader(cfile, delimiter=',',quotechar='"')
        # Find card and return value
        log_write("{1} \t$c {0}".format(arg,ctx.message.author))
        search=[]   # substring matches
        s2=[]       # exact matches
        for row in csv_file:
            if arg.lower() in row[0].lower():
                search.append(row[0])
                returned_card=row
            if arg == row[0]:
                s2.append(row[0])
                returned_card=row
        # NOTE(review): as in mythicview, returned_card may not correspond to
        # the exact match picked via s2 — verify.
        if len(search) != 1:
            if len(s2)==1:
                search = s2
        if len(search) > 1:
            embed = discord.Embed(color=0xfff100)
            embed.set_author(name="Did you mean:")
            x=1
            for ting in search:
                embed.add_field(name=str(x)+".", value=ting, inline=False)
                x+=1
            try:
                embed.add_field(name="Disclaimer", value="Try typing it with proper capitalization.", inline=False)
                await ctx.send(embed=embed)
                log_write("".join(search))
                log_write("")
            except:
                await ctx.send("That search exceeds the limit ({} cards were returned). Please be more specific.".format(str(len(search))))
                log_write("Call for {} cards.".format(str(len(search))))
        if len(search) == 1:
            # Send the card art straight from the local images/ directory.
            await ctx.send(file=discord.File("images/{}.jpg".format(returned_card[0])))
            log_write("".join(search))
            log_write("")
@bot.command(aliases=["aview"])
async def animeview(ctx,*,query=None):
    # Search animecharactersdatabase.com by character name and page through
    # the results with derived cost/class/attack/health stats.
    try:
        if query == None:
            return await ctx.send("please enter a card name.")
        loadring = await ctx.send("loading...")
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
            'From': 'cool5tarxv@gmail.com' # This is another valid field
        }
        query = query.replace(" ","+")
        print(query)
        response = requests.request("POST",f'https://www.animecharactersdatabase.com/api_series_characters.php?character_q={query}',headers=headers)
        print(response.text)
        anime = response.json()
        pages = []
        embed = discord.Embed(title="Welcome to the Anime Pack",description="For a targetted user please use their id or enjoy all results for that name",color=ctx.author.color)
        pages.append(embed)
        for item in anime["search_results"]:
            name = item["name"]
            e = name.replace(" ","-")
            amtx = 1
            image = "."
            e = e.lower()
            print(e)
            ex = "."
            # Probe the CDN for alternate card art editions ({name}-1.jpg, ...);
            # a non-image response starts with an XML error document.
            while ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False:
                if ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False:
                    image = requests.request("GET",f"https://d2l56h9h5tj8ue.cloudfront.net/images/cards/{e}-{amtx}.jpg")
                    ex = image.text
                if ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False:
                    amtx += 1
            if ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == True and amtx == 1:
                # No CDN art at all: fall back to the API-provided portrait.
                image = item["character_image"]
            else:
                image = f"https://d2l56h9h5tj8ue.cloudfront.net/images/cards/{e}-2.jpg"
            print(image)
            id = item["id"]
            gender = item["gender"]
            desc = item["desc"]
            collection = item["anime_name"]
            card = item["id"]
            # Cost is derived purely from the magnitude of the character id.
            cost = "not found."
            if int(card) < 10:
                cost = int(card)*1000
            elif int(card) < 100:
                cost = int(card)*1000
            elif int(card) < 1000:
                cost = int(card)*100
            elif int(card) < 10000:
                cost = int(card)*10
            elif int(card) < 100000:
                cost = int(card)
            else:
                cost = int(card)
            # Battle stats come from the local `get` helper module.
            classer = get.getclass(int(id))
            att = get.getattack(int(id))
            hp = get.gethp(int(id))
            embed = discord.Embed(title=f"{name}",description=f"**Id:** `{id}`\n**Gender:** {gender}\n**Editions:** {amtx}\n**collection:** {collection}\n**description:** *{desc}*\n**Cost:** `{cost}`\n**Class:** {classer}\n**Attack:** `{att}`\n**Health:** `{hp}`",color=ctx.author.color)
            embed.set_image(url=image)
            pages.append(embed)
        await paginate(bot,ctx,pages,loadring)
    except IndexError:
        return await loadring.edit(content="Not Found.")
@bot.command(aliases=["acview"])
async def animecollectionview(ctx,*,query=None):
    # Search collections (anime series) by name and page through each series
    # with its member characters and their derived costs.
    with open("data/collections.json","r") as k:
        cel = json.load(k)  # NOTE(review): loaded but never used below — confirm intent
    try:
        if query == None:
            return await ctx.send("please enter a collection name.")
        loadring = await ctx.send("loading...")
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
            'From': 'cool5tarxv@gmail.com' # This is another valid field
        }
        query = query.replace(" ","+")
        print(query)
        response = requests.request("POST",f'https://www.animecharactersdatabase.com/api_series_characters.php?anime_q={query}',headers=headers)
        print(response.text)
        anime = response.json()
        pages = []
        embed = discord.Embed(title="Welcome to the Anime Pack",description="For a targetted collection please use their id or enjoy all results for that name",color=ctx.author.color)
        pages.append(embed)
        for item in anime["search_results"]:
            # Fetch this series' character roster from its API URL.
            char = requests.request("POST",f"""{item["characters_url"]}""",headers=headers)
            char = char.json()
            image = item["anime_image"]
            name = item["anime_name"]
            id = item["anime_id"]
            embed = discord.Embed(title=f"{name}'s card",description=f"**Id:** `{id}`\n**name:** {name}",color=ctx.author.color)
            embed.set_image(url=image)
            for namez in char["characters"]:
                card = namez["id"]
                # Cost derived from the character id's magnitude (same scheme as animeview).
                cost = "not found."
                if int(card) < 10:
                    cost = int(card)*1000
                elif int(card) < 100:
                    cost = int(card)*1000
                elif int(card) < 1000:
                    cost = int(card)*100
                elif int(card) < 10000:
                    cost = int(card)*10
                elif int(card) < 100000:
                    cost = int(card)
                else:
                    cost = int(card)
                embed.add_field(name=f"""{namez["name"]}""",value=f"""id: `{namez["id"]}`\ncost: `{cost}`""")
            pages.append(embed)
        await paginate(bot,ctx,pages,loadring)
    except IndexError:
        return await loadring.edit(content="Not Found.")
#@bot.command()
@bot.command(aliases=["aimage"])
async def animeimage(ctx,*,query=None):
    # Reply with the portrait URL of the first character matching the query.
    if query == None:
        return await ctx.send("please enter a card name.")
    loadring = await ctx.send("loading...")
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
        'From': 'cool5tarxv@gmail.com' # This is another valid field
    }
    query = query.replace(" ","+")
    print(query)
    response = requests.request("POST",f'https://www.animecharactersdatabase.com/api_series_characters.php?character_q={query}',headers=headers)
    print(response.text)
    anime = response.json()
    # NOTE(review): no guard for an empty search_results list — an IndexError
    # here is uncaught (unlike animeview) and leaves "loading..." on screen.
    image = anime["search_results"][0]["character_image"]
    await loadring.edit(content=str(image))
@bot.command(aliases=["ainv"])
async def animeinventory(ctx,user:discord.Member=None):
    # Build a paginated view of every collection and character card a user
    # owns (stored in data/bank.json as {user_id: {"cards": {collection:
    # [collection_name, "Char Name id", ...]}}} — presumably; confirm schema).
    # One API request per collection and per character: slow for big inventories.
    ids = []    # anime/character ids already rendered (dedupe)
    amt = 0     # running count shown in the progress message
    loadring = await ctx.send("loading...")
    pages = []
    if user == None:
        user = ctx.author
    with open("data/bank.json","r") as f:
        bank = json.load(f)
    for val in bank[str(user.id)]["cards"]:
        for item in bank[str(user.id)]["cards"][val]:
            #print(bank[str(user.id)]["cards"][item][0])
            print(item)
            print( bank[str(user.id)]["cards"][val][0])
            name = item
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
                'From': 'cool5tarxv@gmail.com' # This is another valid field
            }
            # The first entry of each collection list is the collection name itself.
            if item == bank[str(user.id)]["cards"][val][0]:
                namez = name.replace(" ","+")
                anime = requests.request("POST",f'https://www.animecharactersdatabase.com/api_series_characters.php?anime_q={namez}',headers=headers)
                anime = anime.json()
                anime_id = anime["search_results"][0]["anime_id"]
                embed = discord.Embed(title=f"{name} collection",description=f"{name}",color=ctx.author.color)
                characters = discord.Embed(title=f"{name} characters",description=f"The characters for {name}:",color=ctx.author.color)
                for valuex in bank[str(user.id)]["cards"][val]:
                    #print(valuex)
                    if valuex != bank[str(user.id)]["cards"][val][0]:
                        # Entries look like "Char Name 1234": last token is the id.
                        char_name = ""
                        lzt = valuex.split()
                        end = len(lzt)
                        #print(end-1)
                        for valuem in lzt:
                            if valuem != lzt[end-1]:
                                char_name += " " + valuem
                        #print(lzt)
                        characters.add_field(name=f"{char_name}",value=f"id: `{lzt[end-1]}`")
                embed.set_image(url=anime["search_results"][0]["anime_image"])
                if anime_id not in ids:
                    amt += 1
                    # NOTE(review): discord.py Message.edit takes keyword
                    # arguments (edit(content=...)); this positional call likely
                    # raises TypeError — see the correct usage in animeview.
                    await loadring.edit(f"loading... **{amt}**")
                    ids.append(anime_id)
                    pages.append(embed)
                    pages.append(characters)
            for value in bank[str(user.id)]["cards"][val]:
                # NOTE(review): this inner loop repeats identical work per
                # `value` while only using `item` — dedupe via `ids` hides it.
                if item != bank[str(user.id)]["cards"][val][0]:
                    lzt = item.split()
                    end = len(lzt)
                    id = lzt[end-1]
                    itemz = requests.request("POST",f'https://www.animecharactersdatabase.com/api_series_characters.php?character_id={id}',headers=headers)
                    print(itemz.text)
                    itemz = itemz.json()
                    name = itemz["name"]
                    e = name.replace(" ","-")
                    amtx = 1
                    image = "."
                    e = e.lower()
                    print(e)
                    ex = "."
                    # CDN probe for alternate card art; XML body means "no image".
                    while ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False:
                        if ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False:
                            image = requests.request("GET",f"https://d2l56h9h5tj8ue.cloudfront.net/images/cards/{e}-{amtx}.jpg")
                            ex = image.text
                        # NOTE(review): condition uses `amt` where the sibling
                        # commands use `amtx` — possible typo / infinite loop risk.
                        if ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False and amt == 1:
                            amtx += 1
                    if amtx != 1:
                        image = f"https://d2l56h9h5tj8ue.cloudfront.net/images/cards/{e}-2.jpg"
                    else:
                        image = itemz["character_image"]
                    print(image)
                    id = itemz["id"]
                    gender = itemz["gender"]
                    desc = itemz["desc"]
                    collection = itemz["origin"]
                    card = itemz["id"]
                    # Cost derived from id magnitude (same scheme as animeview).
                    cost = "not found."
                    if int(card) < 10:
                        cost = int(card)*1000
                    elif int(card) < 100:
                        cost = int(card)*1000
                    elif int(card) < 1000:
                        cost = int(card)*100
                    elif int(card) < 10000:
                        cost = int(card)*10
                    elif int(card) < 100000:
                        cost = int(card)
                    else:
                        cost = int(card)
                    classer = get.getclass(int(id))
                    att = get.getattack(int(id))
                    hp = get.gethp(int(id))
                    embed = discord.Embed(title=f"{name}",description=f"**Id:** `{id}`\n**Gender:** {gender}\n**Editions:** {amtx}\n**collection:** {collection}\n**description:** *{desc}*\n**Cost:** `{cost}`\n**Class:** {classer}\n**Attack:** `{att}`\n**Health:** `{hp}`",color=ctx.author.color)
                    embed.set_image(url=image)
                    if id not in ids:
                        amt += 1
                        await loadring.edit(f"loading... **{amt}**")
                        pages.append(embed)
                        ids.append(id)
    #print(pages)
    await loadring.edit(f"{ctx.author.mention} loaded!")
    await paginate(bot,ctx,pages,loadring)
@bot.command()
async def buy(ctx,card):
    # Buy a character card by numeric id; price is derived from the id's
    # magnitude. Deducts from the user's bank balance and records the card.
    if card == None:
        return await ctx.send("please enter a card name/id.")
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
        'From': 'cool5tarxv@gmail.com' # This is another valid field
    }
    card = card.replace(" ","+")
    response = requests.request("POST",f'https://www.animecharactersdatabase.com/api_series_characters.php?character_id={card}',headers=headers)
    print(response.text)
    anime = response.json()
    # Price brackets keyed on the id's magnitude.
    if int(card) < 10:
        cost = int(card)*1000
    elif int(card) < 100:
        cost = int(card)*1000
    elif int(card) < 1000:
        cost = int(card)*100
    elif int(card) < 10000:
        cost = int(card)*10
    elif int(card) < 100000:
        cost = int(card)
    else:
        cost = int(card)
    with open("data/bank.json","r") as f:
        bank = json.load(f)
    if cost > bank[str(ctx.author.id)]["balance"]:
        balance = bank[str(ctx.author.id)]["balance"]
        return await ctx.send(f"please up your current balance `{balance}` to `{cost}`")
    collection = anime["origin"]
    # Ensure the collection bucket exists; its first element is the collection name.
    if collection not in bank[str(ctx.author.id)]["cards"]:
        bank[str(ctx.author.id)]["cards"][str(collection)] = []
    if collection not in bank[str(ctx.author.id)]["cards"][str(collection)]:
        bank[str(ctx.author.id)]["cards"][str(collection)].append(collection)
    if anime["id"] not in bank[str(ctx.author.id)]["cards"][str(collection)]:
        toappend = f"""{anime["name"]} {anime["id"]}"""
        bank[str(ctx.author.id)]["cards"][str(collection)].append(str(toappend))
    else:
        return await ctx.send(f"""You already have {anime["name"]}""")
    bank[str(ctx.author.id)]["balance"] -= int(cost)
    with open("data/bank.json","w") as z:
        json.dump(bank,z)
    # Probe the CDN for alternate card art ({name}-1.jpg, -2.jpg, ...);
    # a non-image response starts with an XML error document.
    name = anime["name"]
    e = name.replace(" ","-")
    amtx = 1
    image = "."
    e = e.lower()
    print(e)
    ex = "."
    while ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False:
        if ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False:
            image = requests.request("GET",f"https://d2l56h9h5tj8ue.cloudfront.net/images/cards/{e}-{amtx}.jpg")
            ex = image.text
        # NOTE(review): amtx only advances while amtx == 1, so the loop can
        # re-fetch {e}-2.jpg forever if the CDN keeps returning images — confirm.
        if ex.startswith("""<?xml version="1.0" encoding="UTF-8"?>""") == False and amtx == 1:
            amtx += 1
    if amtx != 1:
        image = f"https://d2l56h9h5tj8ue.cloudfront.net/images/cards/{e}-2.jpg"
    else:
        image = anime["character_image"]
    embed = discord.Embed(title=f"successfully bought {name}",description=f"You bought {name} from {collection}",color=discord.Color.green())
    embed.set_image(url=image)
    await ctx.send(embed=embed)
@bot.command(aliases=["bal"])
async def balance(ctx,user:discord.Member=None):
    """Show a member's wallet balance; defaults to the invoking user."""
    target = ctx.author if user is None else user
    with open("data/bank.json","r") as bank_file:
        bank = json.load(bank_file)
    funds = bank[str(target.id)]["balance"]
    embed = discord.Embed(
        title=f"{target.name}'s balance",
        description=f"**balance:** `{funds}`💳",
        color=ctx.author.color,
    )
    await ctx.send(embed=embed)
@bot.command()
async def trivia(ctx):
    """Ask one anime trivia question (opentdb.com) and pay out a random
    amount of currency on a correct answer within 30 seconds."""
    import html  # stdlib: decode HTML entities in API-supplied text

    with open("data/bank.json","r") as f:
        bank = json.load(f)
    response = requests.request("GET","https://opentdb.com/api.php?amount=1&category=31")
    anime = response.json()
    type = anime["results"][0]["type"]
    diff = anime["results"][0]["difficulty"]
    qn = anime["results"][0]["question"]
    # FIX: the question arrives HTML-encoded (&quot;, &#039;, ...). The two
    # previous replace() calls had the decoded entities pasted into the source,
    # producing invalid syntax — decode every entity properly instead.
    qn = html.unescape(qn)
    correct = anime["results"][0]["correct_answer"]
    inc = anime["results"][0]["incorrect_answers"]
    answers = []
    answers.append(correct)
    for item in inc:
        answers.append(item)
    # Shuffle the four answers into slots A-D by successive random draws.
    # NOTE(review): boolean questions return only 2 answers, which would make
    # the draws for c/d raise IndexError — confirm the category always yields 4.
    a = random.choice(answers)
    answers.remove(a)
    b = random.choice(answers)
    answers.remove(b)
    c = random.choice(answers)
    answers.remove(c)
    d = random.choice(answers)
    answers.remove(d)
    embed = discord.Embed(title="Anime Trivia",description=f"Pick an answer from A-D\nQuestion:{qn}\n**A)** {a}\n**B)** {b}\n**C)** {c}\n**D)** {d}",color=ctx.author.color)
    await ctx.send(embed=embed)
    try:
        # Wait up to 30s for the asker's next message as the answer.
        choice = await bot.wait_for("message", check = lambda msg: msg.author == ctx.author, timeout = 30)
        if choice.content == "a" or choice.content == "A":
            answergiven = a
        elif choice.content == "b" or choice.content == "B":
            answergiven = b
        elif choice.content == "c" or choice.content == "C":
            answergiven = c
        elif choice.content == "d" or choice.content == "D":
            answergiven = d
        else:
            answergiven = "null"
    except asyncio.TimeoutError:
        return await ctx.send(f"You didn't answer, the answer was `{correct}`.")
    print(answergiven)
    if answergiven == correct:
        # Correct: credit a random reward and persist the bank.
        injected = random.randint(1,5000)
        await ctx.send(f"you won `{injected}` 💳")
        bank[str(ctx.author.id)]["balance"] += injected
        with open("data/bank.json","w") as z:
            json.dump(bank,z)
    else:
        return await ctx.send(f"You got it wrong, the answer was `{correct}`.")
@bot.command(aliases=["abox"])
async def box(ctx,action=None,type=None):
    # Loot-box shop: with no arguments show prices; with "buy <tier>" roll a
    # random character id in the tier's range and add it to the inventory.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
        'From': 'cool5tarxv@gmail.com' # This is another valid field
    }
    if action == None:
        # NOTE(review): the shop text lists the gold box at 50000 but the code
        # below charges 500000 — confirm which price is intended.
        embed = discord.Embed(title="Box Shop",description="Bronze box:\nfrom ids 1-1000\ncost: `10000`💳\nSilver box:\nfrom ids 1-10000\ncost: `100000`💳\nGold box:\nfrom ids 1-100000\ncost: `50000`💳",color=ctx.author.color)
        return await ctx.send(embed=embed)
    if action == "buy":
        if type == None:
            return await ctx.send("please enter a type such as `bronze`")
        id = 0
        cost = 0
        if type == "bronze":
            id = random.randint(1,1000)
            cost = 10000
        elif type == "silver":
            id = random.randint(1,10000)
            cost = 100000
        elif type == "gold":
            id = random.randint(1,100000)
            cost = 500000
        else:
            return await ctx.send("please enter a valid type")
        card = id
        response = requests.request("POST",f'https://www.animecharactersdatabase.com/api_series_characters.php?character_id={card}',headers=headers)
        print(response.text)
        anime = response.json()
        with open("data/bank.json","r") as f:
            bank = json.load(f)
        # NOTE(review): unlike `buy`, there is no balance check here, so the
        # balance can go negative — confirm intent.
        collection = anime["origin"]
        # Ensure the collection bucket exists; first element is the collection name.
        if collection not in bank[str(ctx.author.id)]["cards"]:
            bank[str(ctx.author.id)]["cards"][str(collection)] = []
        if collection not in bank[str(ctx.author.id)]["cards"][str(collection)]:
            bank[str(ctx.author.id)]["cards"][str(collection)].append(collection)
        if anime["id"] not in bank[str(ctx.author.id)]["cards"][str(collection)]:
            toappend = f"""{anime["name"]} {anime["id"]}"""
            bank[str(ctx.author.id)]["cards"][str(collection)].append(str(toappend))
        else:
            return await ctx.send(f"""You already have {anime["name"]} Bad Luck!""")
        bank[str(ctx.author.id)]["balance"] -= int(cost)
        with open("data/bank.json","w") as z:
            json.dump(bank,z)
        name = anime["name"]
        embed = discord.Embed(title=f"successfully bought {name}",description=f"You bought {name} from {collection}",color=discord.Color.green())
        embed.set_image(url=anime["character_image"])
        await ctx.send(embed=embed)
@bot.command()
async def deck(ctx, id=None):
    """Show a user's battle deck, or add/remove a card from the caller's deck.

    If ``id`` parses as an integer it is treated as a card id: the card is
    placed into (or cleared from) the deck slot matching its class.
    Otherwise ``id`` is taken to be the member whose deck should be shown
    (the caller when ``None``).
    """
    # animecharactersdatabase.com rejects requests without a browser UA and
    # asks for a From header identifying the caller.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
        'From': 'cool5tarxv@gmail.com'  # This is another valid field
    }
    pages = []
    with open("data/bank.json", "r") as f:
        bank = json.load(f)

    # Decide whether `id` looks like a card id (an integer) or a member.
    try:
        int(id)
        inte = True
    except (TypeError, ValueError):
        inte = False

    if inte:
        # --- toggle the card in the deck slot for its class ---
        # (The original re-ran int(id) here with an error message, but that
        # branch was unreachable: we only get here when int(id) succeeded.)
        classer = get.getclass(int(id))
        if bank[str(ctx.author.id)]["deck"][classer] == str(id):
            # The same card is already in the slot: clear it.
            bank[str(ctx.author.id)]["deck"][classer] = "None set yet use `jk!deck id` to add/remove a card"
            comment = "removed"
        else:
            bank[str(ctx.author.id)]["deck"][classer] = str(id)
            comment = "added"
        with open("data/bank.json", "w") as z:
            json.dump(bank, z)
        return await ctx.send(f"Successfully {comment} `{id}` from deck")

    # --- render the deck as a paginated set of embeds ---
    loadring = await ctx.send("loading...")
    user = id if id is not None else ctx.author
    embed = discord.Embed(title=f"{user.name}'s deck", description=f"Your deck is used in battle, the order of combat from 1st to last:\n1. Beserker\n2. Rider\n3. Lancer\n4. Saber\n5. Archer\n6. Assassin\n7. Caster", color=ctx.author.color)
    pages.append(embed)
    for item in bank[str(user.id)]["deck"]:
        print(item)
        ids = bank[str(user.id)]["deck"][item]
        if ids == "None set yet use `jk!deck id` to add/remove a card":
            embed = discord.Embed(title=f"{item}", description="None set yet use `jk!deck id` to add/remove a card", color=ctx.author.color)
            pages.append(embed)
        else:
            itemz = requests.request("POST", f'https://www.animecharactersdatabase.com/api_series_characters.php?character_id={ids}', headers=headers)
            print(itemz.text)
            itemz = itemz.json()
            image = itemz["character_image"]
            name = itemz["name"]
            idx = itemz["id"]
            gender = itemz["gender"]
            desc = itemz["desc"]
            collection = itemz["origin"]
            card = int(itemz["id"])
            # Cost scales the raw id into a roughly constant price range.
            # (The original <10 and <100 tiers were identical: *1000.)
            if card < 100:
                cost = card * 1000
            elif card < 1000:
                cost = card * 100
            elif card < 10000:
                cost = card * 10
            else:
                # 5+ digit ids are worth their face value.
                cost = card
            classer = get.getclass(int(idx))
            att = get.getattack(int(idx))
            hp = get.gethp(int(idx))
            # BUGFIX: display the card's id (idx), not the command argument
            # `id` (which is the member being viewed in this branch).
            embed = discord.Embed(title=f"{item}", description=f"**Id:** `{idx}`\n**Gender:** {gender}\n**collection:** {collection}\n**description:** *{desc}*\n**Cost:** `{cost}`\n**Class:** {classer}\n**Attack:** `{att}`\n**Health:** `{hp}`", color=ctx.author.color)
            embed.set_image(url=image)
            pages.append(embed)
            # Be gentle with the third-party API between lookups.
            await asyncio.sleep(1)
    # BUGFIX: Message.edit() is keyword-only in discord.py; a positional
    # argument raises TypeError.
    await loadring.edit(content=f"{ctx.author.mention} loaded!")
    await paginate(bot, ctx, pages, loadring)
@bot.command()
async def fight(ctx, user: discord.Member = None):
    """Challenge another member to a card fight; they must reply `accept`."""
    # BUGFIX: the parameter defaults to None, but the original immediately
    # read user.name and crashed with AttributeError when no target was given.
    if user is None:
        return await ctx.send("Please mention someone to fight!")
    embed = discord.Embed(title="Fight time!", description=f"{ctx.author.name} has challenged {user.name} to a fight.\n Type `accept` to start the fight.", color=ctx.author.color)
    await ctx.send(embed=embed)
    try:
        # Only the challenged member may answer, within 30 seconds.
        choice = await bot.wait_for("message", check=lambda msg: msg.author == user, timeout=30)
        if choice.content.startswith("accept"):
            embed = discord.Embed(title="Fight time!", description=f"{ctx.author.name} has challenged {user.name} to a fight!", color=ctx.author.color)
            embed.set_image(url="https://steamuserimages-a.akamaihd.net/ugc/1743429419938967655/FE544A0351697618062E05713EC1CFA482C595E1/?imw=268&imh=268&ima=fit&impolicy=Letterbox&imcolor=%23000000&letterbox=true")
            embed.add_field(name="loading", value="Fetching cards...")
            await ctx.send(embed=embed)
    except asyncio.TimeoutError:
        return await ctx.send("Well maybe next time.")
@bot.command()
async def public(ctx, action, action1=None, *, args=None):
    """Public profile commands. `public view [user_id]` renders a profile embed."""
    pub = funcs.getpub()
    # Lazily create a public profile for the caller.
    if str(ctx.author.id) not in pub["users"]:
        funcs.openpub(ctx.author)
    if action == "view":
        if action1 is None:
            action1 = ctx.author.id
        # BUGFIX: the original referenced the undefined name `pubs`
        # (NameError), compared against non-stringified keys, and then fell
        # through after the error message and crashed on the lookups below.
        if str(action1) not in pub["users"]:
            return await ctx.send("User not found! Please view by user id or use the search function.")
        # loading data to embed!
        with open("data/bank.json", "r") as f:
            bank = json.load(f)
        avatar = pub["users"][str(action1)]["avatar"]
        desc = pub["users"][str(action1)]["description"]
        name = pub["users"][str(action1)]["name"]
        bal = bank[str(action1)]["balance"]
        # Number of collections the user owns at least one card from.
        amt = len(bank[str(action1)]["cards"])
        embed = discord.Embed(title=f"{name}'s Profile!", description="", color=ctx.author.color)
        embed.set_image(url=avatar)
        embed.add_field(name="Description:", value=f"Status: {desc}\nBalance: {bal}\nCollections: {amt}", inline=False)
        await ctx.send(embed=embed)
    #if action == "":
@bot.listen("on_message")
async def open_account(message):
    """Ensure every message author has a public profile and a bank entry."""
    pub = funcs.getpub()
    # NOTE(review): the `public` command checks pub["users"]; the original
    # checked the top-level dict here, which never contains raw user ids,
    # so openpub was called on every message. Both now use "users".
    if str(message.author.id) not in pub["users"]:
        funcs.openpub(message.author)
    with open("data/bank.json", "r") as f:
        bank = json.load(f)
    uid = str(message.author.id)
    if uid not in bank:
        bank[uid] = {"balance": 0, "cards": {}, "deck": {}}
        # One deck slot per card class, all initially empty.
        for slot in ("beserker", "rider", "lancer", "saber", "archer", "assassin", "caster"):
            bank[uid]["deck"][slot] = "None set yet use `jk!deck id` to add/remove a card"
        # Persist only when a new account was actually created.
        with open("data/bank.json", "w") as z:
            json.dump(bank, z)
# Load the bot token from the local config file and start the client.
with open("config.json","r") as x:
    cfg = json.load(x)
# keep_alive() was presumably a repl.it-style web server ping; left disabled.
#keep_alive()
bot.run(cfg["token"])  # blocks until the bot shuts down
|
import unittest
from app.models import Pitch,User
from app import db
class PitchModelTest(unittest.TestCase):
    """Unit tests for the Pitch model: construction, saving, and lookup."""

    def setUp(self):
        # A user and a pitch owned by that user, rebuilt before every test.
        self.user_manow = User(username='manow', password='1234')
        self.new_pitch = Pitch(name='zee', title='Money', description='moneyreview',
                               user=self.user_manow, category='Finance')

    # def tearDown(self):
    #     Pitch.query.delete()
    #     User.query.delete()

    def test_check_instance_variable(self):
        # assertEquals is a deprecated alias; assertEqual is the real API.
        self.assertEqual(self.new_pitch.name, 'zee')
        self.assertEqual(self.new_pitch.title, 'Money')
        self.assertEqual(self.new_pitch.description, 'moneyreview')
        self.assertEqual(self.new_pitch.category, 'Finance')
        # self.assertEqual(self.new_pitch.user, self.user_manow)

    def test_save_pitch(self):
        # Saving must make the pitch visible via the query interface.
        self.new_pitch.save_pitch()
        self.assertTrue(len(Pitch.query.all()) > 0)

    def test_get_pitch_by_id(self):
        self.new_pitch.save_pitch()
        # NOTE(review): 12345 is assumed to match the saved pitch's lookup
        # key — confirm against Pitch.get_pitches' semantics.
        got_pitch = Pitch.get_pitches(12345)
        self.assertTrue(len(got_pitch) > 0)
|
# requests must be imported before any HTTP call is made.
import requests

# Fire one request per HTTP verb (GET, POST, PUT, DELETE, HEAD) at the
# postman-echo test service and print the resulting Response objects.
_calls = [
    ('get', 'https://postman-echo.com/get'),
    ('post', 'https://postman-echo.com/post'),
    ('put', 'https://postman-echo.com/put'),
    ('delete', 'https://postman-echo.com/delete'),
    ('head', 'https://postman-echo.com/get'),
]
_responses = [getattr(requests, verb)(url) for verb, url in _calls]
print(*_responses)
|
const fs = require('fs')
const del = require('del')
const path = require('path')
const chalk = require('chalk')
const Listr = require('listr')
const vfs = require('vinyl-fs')
const inquirer = require('inquirer')
const { promisify } = require('util')
const template = require('gulp-template')
const { installDependencies } = require('../lib/npm')
/**
 * Scaffold a new Shower presentation project.
 *
 * Flow: confirm/clear an existing target dir, gather template parameters
 * (theme + ratio, either prompted or defaulted), render the bundled
 * template into the dir with gulp-template, then install npm dependencies.
 *
 * @param {Object} argv - parsed CLI arguments
 * @param {string} argv.cwd - base for resolving a relative directory
 * @param {string} [argv.directory='slides'] - target folder
 * @param {boolean} [argv.yes] - skip prompts and use default theme/ratio
 */
async function handler ({ cwd, directory: folderName = 'slides', yes: isDefault }) {
  // Let's check if such folder exists
  const directory = path.isAbsolute(folderName) ? folderName : path.join(cwd, folderName)

  if (fs.existsSync(directory)) {
    const { isForce } = await inquirer.prompt({
      name: 'isForce',
      type: 'confirm',
      default: false,
      message: `The ${chalk.yellow(folderName)} dir already exists. Do you want to overwrite it?`
    })

    if (isForce) {
      await del([directory])
    } else {
      process.stdout.write(chalk.red(`\n Creating aborted\n`))
      return
    }
  }

  // Values handed to gulp-template when rendering the project files.
  const options = {
    template: 'presentation',
    year: (new Date()).getFullYear()
  }

  // Used instead of prompting when --yes was passed.
  const defaultParams = {
    theme: 'ribbon',
    ratio: '16:9'
  }

  const params = [{
    name: 'theme',
    type: 'list',
    message: 'Select theme',
    choices: ['ribbon', 'material']
  }, {
    name: 'ratio',
    type: 'list',
    message: 'Select presentation ratio',
    choices: ['16:9', '4:3']
  }]

  if (isDefault) {
    Object.assign(options, defaultParams)
  } else {
    Object.assign(options, await inquirer.prompt(params))
  }

  // CSS aspect-ratio syntax wants "16 / 9" rather than "16:9".
  options.ratio = options.ratio.replace(/:/, ' / ')

  process.stdout.write('\n')

  const tasks = new Listr([
    // 1. Create project structure
    {
      title: `Creating project structure in "${folderName}" dir`,
      async task () {
        await promisify(fs.mkdir)(directory)
        await new Promise((resolve, reject) => {
          // '**/.*' is needed so dotfiles (e.g. .gitignore) are copied too.
          const files = ['**', '**/.*']
          vfs.src(files, {
            cwd: path.join(__dirname, '..', '..', 'templates', options.template)
          })
            .pipe(template(options))
            .pipe(vfs.dest(directory))
            .on('end', resolve)
            .on('error', reject)
        })
      }
    },
    // 2. Install dependencies
    {
      title: 'Installing dependencies',
      task: () => Promise.all([
        installDependencies(directory, ['@shower/cli'], 'save-dev'),
        installDependencies(directory, ['@shower/core', `@shower/${options.theme}`])
      ])
    }
  ])

  await tasks.run()
}
/**
 * Attach the command's CLI flags and positionals to a yargs instance.
 *
 * @param {Object} yargs - yargs builder object
 * @returns {Object} the same yargs instance, for chaining
 */
function builder (yargs) {
  const flags = {
    yes: {
      alias: ['y'],
      default: false,
      type: 'boolean'
    }
  }
  const directorySpec = {
    default: 'slides',
    type: 'string'
  }
  return yargs.options(flags).positional('directory', directorySpec)
}
/**
 * Human-readable status messages for this command.
 *
 * @param {Object} argv
 * @param {string} [argv.directory='slides'] - folder the project went into
 * @returns {{end: string}} message shown when the command finishes
 */
function messages ({ directory: folderName = 'slides' }) {
  const highlighted = chalk.bold(folderName)
  return { end: `Project created in ${highlighted} dir` }
}
module.exports = { handler, builder, messages }
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* License); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright (c) 2021, OPEN AI LAB
* Author: qtang@openailab.com
*/
#include "region_param.h"
#include "graph/tensor.h"
#include "graph/node.h"
#include "graph/graph.h"
#include "utility/sys_port.h"
#include "utility/float.h"
#include "utility/log.h"
#include "device/cpu/cpu_node.h"
#include "device/cpu/cpu_graph.h"
#include "device/cpu/cpu_module.h"
#include <math.h>
#include <string.h>
/* Map (batch, box-location, entry channel) to a flat offset in the NCHW
 * region-layer tensor.  Each anchor box occupies (coords + classes + 1)
 * channels of hw elements; `location` packs box_index * hw + cell. */
static int entry_index(int batch, int location, int entry, int hw, int chw, int classes)
{
    const int coords = 4; /* x, y, w, h */
    const int box = location / hw;
    const int cell = location % hw;
    const int channels_per_box = coords + classes + 1;
    return batch * chw + (box * channels_per_box + entry) * hw + cell;
}
static inline float logistic_activate(float x)
{
return 1. / (1. + exp(-x));
}
/* Apply the logistic sigmoid to the first n elements of x, in place. */
static void logit_activate_array(float* x, const int n)
{
    for (int i = 0; i < n; ++i)
    {
        x[i] = logistic_activate(x[i]);
    }
}
static void softmax(const float* input, int n, int stride, float* output)
{
int i;
float sum = 0;
float largest = input[0];
for (i = 0; i < n; ++i)
{
if (input[i * stride] > largest)
largest = input[i * stride];
}
for (i = 0; i < n; ++i)
{
float e = exp(input[i * stride] - largest);
sum += e;
output[i * stride] = e;
}
for (i = 0; i < n; ++i)
{
output[i * stride] /= sum;
}
}
/* Run softmax over `groups` interleaved vectors per batch; the vector for
 * batch b, group g starts at offset b * batch_offset + g. */
static void softmax_cpu(const float* input, int n, int batch, int batch_offset, int groups, int stride, float* output)
{
    for (int b = 0; b < batch; ++b)
    {
        const int base = b * batch_offset;
        for (int g = 0; g < groups; ++g)
        {
            softmax(input + base + g, n, stride, output + base + g);
        }
    }
}
/* Reference fp32 implementation of the YOLO "region" layer.
 *
 * Copies the input to the output, applies the logistic activation to the
 * box x/y channels and the objectness channel of every anchor box, then
 * soft-maxes the class scores.  num_thread is currently unused.
 * Returns 0 on success. */
static int ref_region_fp32(struct tensor* input_tensor, struct tensor* output_tensor, struct region_param* param,
                           int num_thread)
{
    int n = input_tensor->dims[0]; /* batch */
    int c = input_tensor->dims[1]; /* channels */
    int h = input_tensor->dims[2];
    int w = input_tensor->dims[3];
    int batch = n;
    int hw = h * w;
    int chw = c * hw;
    int nchw = n * chw;

    int num_box = param->num_box;
    int num_class = param->num_classes;
    int coords = param->coords;

    float* in_data = input_tensor->data;
    float* out_data = output_tensor->data;

    /* Start from a verbatim copy; activations below are applied in place. */
    memcpy(out_data, in_data, nchw * sizeof(float));

    for (int b = 0; b < batch; b++)
    {
        for (int ni = 0; ni < num_box; ni++)
        {
            /* Logistic on the first two channels (box x, y) of this box. */
            int index = entry_index(b, ni * hw, 0, hw, chw, num_class);
            logit_activate_array(out_data + index, 2 * hw);
            /* Logistic on the objectness/confidence channel. */
            index = entry_index(b, ni * hw, coords, hw, chw, num_class);
            logit_activate_array(out_data + index, hw);
            /* NOTE(review): this final index is computed but never used —
             * looks like dead code (the softmax below recomputes it). */
            index = entry_index(b, ni * hw, coords + 1, hw, chw, num_class);
        }
    }

    /* Softmax over the class channels for every (batch, box) pair.
     * NOTE(review): reads from in_data (the untouched pre-activation copy);
     * presumably intentional since class channels were not activated above
     * — confirm against the darknet reference. */
    int index = entry_index(0, 0, coords + 1, hw, chw, num_class);
    softmax_cpu(in_data + index, num_class, batch * num_box, chw / num_box, hw, hw, out_data + index);

    return 0;
}
/* Node-ops lifecycle hooks: this reference op keeps no per-node state,
 * so init/release/prerun are no-ops that just report success (0). */
static int init_node(struct node_ops* node_ops, struct exec_node* exec_node, struct exec_graph* exec_graph)
{
    return 0;
}

static int release_node(struct node_ops* node_ops, struct exec_node* exec_node, struct exec_graph* exec_graph)
{
    return 0;
}

static int prerun(struct node_ops* node_ops, struct exec_node* exec_node, struct exec_graph* exec_graph)
{
    return 0;
}
/* Execute the region op: fetch the node's single input/output tensor pair
 * from the IR graph and run the fp32 reference kernel with the node's
 * region parameters. */
static int run(struct node_ops* node_ops, struct exec_node* exec_node, struct exec_graph* exec_graph)
{
    struct node* ir_node = exec_node->ir_node;
    struct graph* ir_graph = ir_node->graph;
    struct tensor* input_tensor;
    struct tensor* output_tensor;

    input_tensor = get_ir_graph_tensor(ir_graph, ir_node->input_tensors[0]);
    output_tensor = get_ir_graph_tensor(ir_graph, ir_node->output_tensors[0]);
    struct region_param* region_param = ( struct region_param* )ir_node->op.param_mem;

    /* NOTE(review): the kernel's return value is ignored here (it always
     * returns 0 today). */
    ref_region_fp32(input_tensor, output_tensor, region_param, exec_graph->num_thread);

    return 0;
}
/* Scheduling priority: this reference implementation claims the best
 * score, so it is selected unless a specialised backend overrides it. */
static int score(struct node_ops* node_ops, struct exec_graph* exec_graph, struct node* exec_node)
{
    return OPS_SCORE_BEST;
}

/* Callback table for the region reference op; reshape/postrun not needed. */
static struct node_ops hcl_node_ops = {.prerun = prerun,
                                       .run = run,
                                       .reshape = NULL,
                                       .postrun = NULL,
                                       .init_node = init_node,
                                       .release_node = release_node,
                                       .score = score};

/* Register the reference implementation for the OP_REGION op type. */
int register_region_ref_op()
{
    return register_builtin_node_ops(OP_REGION, &hcl_node_ops);
}

int unregister_region_ref_op()
{
    return unregister_builtin_node_ops(OP_REGION, &hcl_node_ops);
}
|
# Generated by Django 3.1.2 on 2020-11-15 11:11
from django.db import migrations, models
# Initial migration for the Testi model: a testimonial with author name
# (nama), institution (institusi), free-text testimonial, and an
# auto-updated date — plus two lookup indexes.
class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Testi',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nama', models.CharField(max_length=100)),
                ('institusi', models.CharField(max_length=100)),
                ('testimoni', models.TextField(max_length=1000)),
                # auto_now=True: refreshed on every save, not just creation.
                ('tanggal_testi', models.DateField(auto_now=True)),
            ],
        ),
        # Composite index for filtering by institution then name.
        migrations.AddIndex(
            model_name='testi',
            index=models.Index(fields=['institusi', 'nama'], name='testi_testi_institu_db0e59_idx'),
        ),
        migrations.AddIndex(
            model_name='testi',
            index=models.Index(fields=['nama'], name='nama_idx'),
        ),
    ]
|
import {Zondy} from '../../service/common/Base';
import {assign} from 'ol/obj.js';
import Feature from 'ol/Feature.js';
import * as ol_extent from 'ol/extent.js';
import BaseObject from 'ol/Object.js';
import Polygon from 'ol/geom/Polygon.js';
import GeometryLayout from 'ol/geom/GeometryLayout.js';
import MultiLineString from 'ol/geom/MultiLineString.js';
import MultiPoint from 'ol/geom/MultiPoint.js';
/**
 * Format reader for MapGIS polygon/line/point feature sets.
 * @param {Object} [opt_options] - properties copied straight onto the reader
 * @constructor
 */
var PolygonJSON = function (opt_options) {
    var settings = {};
    if (opt_options !== undefined) {
        settings = opt_options;
    }
    assign(this, settings);
};
/**
 * Deserialize a MapGIS feature set (JSON string or plain object) into an
 * array of ol.Feature.
 *
 * Parameters:
 * json - {String} | {Object}, a Zondy.Object.FeatureSet-format object.
 * options - unused, kept for API compatibility.
 */
PolygonJSON.prototype.read = function (json, options) {
    if (json === undefined) {
        return null;
    }
    // Accept either a raw JSON string or an already-parsed object.
    var payload = typeof json === 'string' ? JSON.parse(json) : json;
    if (payload !== null) {
        return this.parseVectors(payload);
    }
};
/**
 * Convert a MapGIS SFeatureElementSet into an array of ol.Feature.
 *
 * Parameters:
 * zfeatureset - {Object}, set with SFEleArray and AttStruct members.
 * Returns null when the set is missing, malformed, or empty.
 */
PolygonJSON.prototype.parseVectors = function (zfeatureset) {
    // The original tested SFEleArray three separate times (=== undefined,
    // truthiness, length); one combined guard covers all cases.
    if (zfeatureset === undefined || !zfeatureset.SFEleArray || zfeatureset.SFEleArray.length == 0) {
        return null;
    }
    var results = new Array();
    for (var i = 0, len = zfeatureset.SFEleArray.length; i < len; i++) {
        var zfeature = zfeatureset.SFEleArray[i];
        // Attributes: set-level schema + this feature's parallel values.
        var attribute = this.parseAttribute(zfeatureset.AttStruct, zfeature.AttValue);
        var geometry = this.parseGeometry(zfeature.fGeom, zfeature.ftype);
        var feature = new Feature();
        feature.setGeometry(geometry);
        feature.setId(zfeature.FID.toString());
        feature.setProperties(attribute);
        results[i] = feature;
    }
    return results;
};
/**
 * Convert a Zondy bound ({xmin, ymin, xmax, ymax}) into an ol extent.
 * Returns null when the bound is missing.
 */
PolygonJSON.prototype.parseBound = function (zBound) {
    if (zBound === undefined) {
        return null;
    }
    return ol_extent.createOrUpdate(zBound.xmin, zBound.ymin, zBound.xmax, zBound.ymax);
};
/**
 * Build an ol BaseObject mapping attribute field names to values.
 *
 * Parameters:
 * attstruct - {Zondy.Object.CAttStruct} schema with a FldName array.
 * attvalue - {Array} values, parallel to FldName.
 * Returns null when either argument is missing or the lengths differ.
 */
PolygonJSON.prototype.parseAttribute = function (attstruct, attvalue) {
    if (attstruct === undefined || attvalue === undefined) {
        return null;
    }
    var names = attstruct.FldName;
    if (names.length != attvalue.length) {
        return null;
    }
    var attributes = new BaseObject();
    for (var i = 0, len = names.length; i < len; i++) {
        attributes.set(names[i], attvalue[i]);
    }
    return attributes;
};
/**
 * Convert a Zondy FeatureGeometry into an ol geometry.
 * @param {Object} fGeom - holds PntGeom/LinGeom/RegGeom arrays.
 * @param {number|string} type - 1: point, 2: line, 3: region; the string
 *     "Unknow" means "probe the arrays to decide".
 * @return {ol.geom.Geometry|null}
 * @api stable
 */
PolygonJSON.prototype.parseGeometry = function (fGeom, type) {
    // Resolve the "Unknow" marker by checking which geometry array is filled.
    if (type == "Unknow") {
        if (fGeom.PntGeom.length > 0) {
            type = 1;
        } else if (fGeom.LinGeom.length > 0) {
            type = 2;
        } else {
            type = 3;
        }
    }
    // Strict comparisons mirror the original switch's === matching.
    var result = null;
    if (type === 1) {
        result = this.parseGPoint(fGeom.PntGeom);
    } else if (type === 2) {
        result = this.parseGLine(fGeom.LinGeom);
    } else if (type === 3) {
        result = this.parseGRegion(fGeom.RegGeom);
    }
    return result;
};
/**
 * Convert an array of Zondy GRegion objects into a single ol Polygon.
 * Every ring of every region becomes one linear ring (first arc only).
 * @param {Array} gRegions
 * @return {ol.geom.Polygon|null} null on missing input or malformed region.
 * @api stable
 */
PolygonJSON.prototype.parseGRegion = function (gRegions) {
    if (gRegions === undefined || gRegions.length === undefined || gRegions.length == 0) {
        return null;
    }
    var rings = new Array();
    var ringCount = 0;
    for (var i = 0; i < gRegions.length; i++) {
        var region = gRegions[i];
        if (region === undefined || region.Rings === undefined) {
            // A single malformed region aborts the whole conversion.
            return null;
        }
        for (var j = 0, len = region.Rings.length; j < len; j++) {
            var arcDots = region.Rings[j].Arcs[0].Dots;
            var coords = new Array();
            for (var k = 0, zLen = arcDots.length; k < zLen; k++) {
                coords[k] = [arcDots[k].x, arcDots[k].y];
            }
            rings[ringCount++] = coords;
        }
    }
    return new Polygon(rings, GeometryLayout.XY);
};
/**
 * Convert an array of Zondy GLine objects into an ol MultiLineString.
 * Only the first arc of each line is read (matching the source format).
 * @param {Array} glines
 * @return {ol.geom.MultiLineString|null} null for a missing/empty array.
 * @api stable
 */
PolygonJSON.prototype.parseGLine = function (glines) {
    // The original repeated this guard three times (undefined check,
    // !glines, length === 0 again); after the first guard the later two
    // were unreachable, so they are removed.
    if (glines === undefined || glines.length === undefined || glines.length == 0) {
        return null;
    }
    var results = []; // one coordinate array per line
    for (var i = 0, len = glines.length; i < len; i++) {
        var points = new Array();
        var zondyDots = glines[i].Line.Arcs[0].Dots;
        for (var j = 0, dLen = zondyDots.length; j < dLen; j++) {
            points[j] = [zondyDots[j].x, zondyDots[j].y];
        }
        results[i] = points;
    }
    return new MultiLineString(results);
};
/**
 * Convert an array of Zondy GPoint objects into an ol MultiPoint.
 * @param {Array} gpoint
 * @return {ol.geom.MultiPoint|null} null for a missing/empty array.
 * @api stable
 */
PolygonJSON.prototype.parseGPoint = function (gpoint) {
    if (gpoint === undefined || gpoint.length === undefined || gpoint.length == 0) {
        return null;
    }
    var coords = [];
    for (var i = 0, len = gpoint.length; i < len; i++) {
        var dot = gpoint[i].Dot;
        coords[i] = [dot.x, dot.y];
    }
    return new MultiPoint(coords, GeometryLayout.XY);
};
export { PolygonJSON };
Zondy.Format.PolygonJSON = PolygonJSON;
|
/*! For license information please see chunk.0fc9b800556417becf02.js.LICENSE.txt */
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[4764],{28417:(e,t,r)=>{"use strict";r(50808);var i=r(33367),n=r(93592),o=r(87156);const s={getTabbableNodes:function(e){const t=[];return this._collectTabbableNodes(e,t)?n.H._sortByTabIndex(t):t},_collectTabbableNodes:function(e,t){if(e.nodeType!==Node.ELEMENT_NODE||!n.H._isVisible(e))return!1;const r=e,i=n.H._normalizedTabIndex(r);let s,a=i>0;i>=0&&t.push(r),s="content"===r.localName||"slot"===r.localName?(0,o.vz)(r).getDistributedNodes():(0,o.vz)(r.shadowRoot||r.root||r).children;for(let e=0;e<s.length;e++)a=this._collectTabbableNodes(s[e],t)||a;return a}},a=customElements.get("paper-dialog"),l={get _focusableNodes(){return s.getTabbableNodes(this)}};class c extends((0,i.P)([l],a)){}customElements.define("ha-paper-dialog",c)},45890:(e,t,r)=>{"use strict";r.d(t,{A:()=>i});const i=r(50424).iv`
ha-switch {
padding: 16px 6px;
}
.side-by-side {
display: flex;
}
.side-by-side > * {
flex: 1;
padding-right: 8px;
}
.side-by-side > *:last-child {
flex: 1;
padding-right: 0;
}
.suffix {
margin: 0 8px;
}
`},74764:(e,t,r)=>{"use strict";r.r(t),r.d(t,{HuiDialogEditLovelace:()=>C});r(53918),r(22626);var i=r(50424),n=r(55358),o=r(47181),s=(r(28417),r(31206),r(11654)),a=(r(30879),r(45890));function l(){l=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!f(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var s=t[e.placement];s.splice(s.indexOf(e.key),1);var a=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(a)||a);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var c=l.extras;if(c){for(var d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var 
r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var s=0;s<e.length-1;s++)for(var a=s+1;a<e.length;a++)if(e[s].key===e[a].key&&e[s].placement===e[a].placement)throw new TypeError("Duplicated element ("+e[s].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return m(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?m(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=h(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor 
with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:p(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=p(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function c(e){var 
t,r=h(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function d(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function f(e){return e.decorators&&e.decorators.length}function u(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function p(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function h(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function m(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}!function(e,t,r,i){var n=l();if(i)for(var o=0;o<i.length;o++)n=i[o](n);var s=t((function(e){n.initializeInstanceElements(e,a.elements)}),r),a=n.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(u(o.descriptor)||u(n.descriptor)){if(f(o)||f(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(f(o)){if(f(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}d(o,n)}else t.push(o)}return 
t}(s.d.map(c)),e);n.initializeClassElements(s.F,a.elements),n.runClassFinishers(s.F,a.finishers)}([(0,n.Mo)("hui-lovelace-editor")],(function(e,t){return{F:class extends t{constructor(...t){super(...t),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"config",value:void 0},{kind:"get",key:"_title",value:function(){return this.config&&this.config.title||""}},{kind:"method",key:"render",value:function(){return i.dy`
<div class="card-config">
<paper-input
.label=${this.hass.localize("ui.panel.lovelace.editor.edit_lovelace.title")}
.value="${this._title}"
.configValue="${"title"}"
@value-changed="${this._valueChanged}"
></paper-input>
</div>
`}},{kind:"method",key:"_valueChanged",value:function(e){if(!this.config)return;const t=e.currentTarget;if(this[`_${t.configValue}`]===t.value)return;let r;t.configValue&&(r={...this.config,[t.configValue]:t.value}),(0,o.B)(this,"lovelace-config-changed",{config:r})}},{kind:"get",static:!0,key:"styles",value:function(){return a.A}}]}}),i.oi);function v(){v=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(i){t.forEach((function(t){var n=t.placement;if(t.kind===i&&("static"===n||"prototype"===n)){var o="static"===n?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var i=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===i?void 0:i.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],i=[],n={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,n)}),this),e.forEach((function(e){if(!b(e))return r.push(e);var t=this.decorateElement(e,n);r.push(t.element),r.push.apply(r,t.extras),i.push.apply(i,t.finishers)}),this),!t)return{elements:r,finishers:i};var o=this.decorateConstructor(r,t);return i.push.apply(i,o.finishers),o.finishers=i,o},addElementPlacement:function(e,t,r){var i=t[e.placement];if(!r&&-1!==i.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");i.push(e.key)},decorateElement:function(e,t){for(var r=[],i=[],n=e.decorators,o=n.length-1;o>=0;o--){var s=t[e.placement];s.splice(s.indexOf(e.key),1);var a=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,n[o])(a)||a);e=l.element,this.addElementPlacement(e,t),l.finisher&&i.push(l.finisher);var 
c=l.extras;if(c){for(var d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:i,extras:r}},decorateConstructor:function(e,t){for(var r=[],i=t.length-1;i>=0;i--){var n=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[i])(n)||n);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var s=0;s<e.length-1;s++)for(var a=s+1;a<e.length;a++)if(e[s].key===e[a].key&&e[s].placement===e[a].placement)throw new TypeError("Duplicated element ("+e[s].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return _(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?_(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var 
r=E(e.key),i=String(e.placement);if("static"!==i&&"prototype"!==i&&"own"!==i)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+i+'"');var n=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:i,descriptor:Object.assign({},n)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(n,"get","The property descriptor of a field descriptor"),this.disallowProperty(n,"set","The property descriptor of a field descriptor"),this.disallowProperty(n,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:w(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=w(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var i=(0,t[r])(e);if(void 0!==i){if("function"!=typeof i)throw new TypeError("Finishers must return a constructor.");e=i}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function 
y(e){var t,r=E(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var i={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(i.decorators=e.decorators),"field"===e.kind&&(i.initializer=e.value),i}function g(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function b(e){return e.decorators&&e.decorators.length}function k(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function w(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function E(e){var t=function(e,t){if("object"!=typeof e||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var i=r.call(e,t||"default");if("object"!=typeof i)return i;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"==typeof t?t:String(t)}function _(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,i=new Array(t);r<t;r++)i[r]=e[r];return i}let C=function(e,t,r,i){var n=v();if(i)for(var o=0;o<i.length;o++)n=i[o](n);var s=t((function(e){n.initializeInstanceElements(e,a.elements)}),r),a=n.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},i=0;i<e.length;i++){var n,o=e[i];if("method"===o.kind&&(n=t.find(r)))if(k(o.descriptor)||k(n.descriptor)){if(b(o)||b(n))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");n.descriptor=o.descriptor}else{if(b(o)){if(b(n))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");n.decorators=o.decorators}g(o,n)}else 
t.push(o)}return t}(s.d.map(y)),e);return n.initializeClassElements(s.F,a.elements),n.runClassFinishers(s.F,a.finishers)}([(0,n.Mo)("hui-dialog-edit-lovelace")],(function(e,t){return{F:class extends t{constructor(){super(),e(this),this._saving=!1}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.SB)()],key:"_lovelace",value:void 0},{kind:"field",key:"_config",value:void 0},{kind:"field",key:"_saving",value:void 0},{kind:"method",key:"showDialog",value:async function(e){this._lovelace=e,null==this._dialog&&await this.updateComplete;const{views:t,...r}=this._lovelace.config;this._config=r,this._dialog.open()}},{kind:"method",key:"closeDialog",value:function(){this._config=void 0,this._dialog.close(),(0,o.B)(this,"dialog-closed",{dialog:this.localName})}},{kind:"get",key:"_dialog",value:function(){return this.shadowRoot.querySelector("ha-paper-dialog")}},{kind:"method",key:"render",value:function(){return i.dy`
<ha-paper-dialog with-backdrop modal>
<h2>
${this.hass.localize("ui.panel.lovelace.editor.edit_lovelace.header")}
</h2>
<paper-dialog-scrollable>
${this.hass.localize("ui.panel.lovelace.editor.edit_lovelace.explanation")}
<hui-lovelace-editor
.hass=${this.hass}
.config="${this._config}"
@lovelace-config-changed="${this._ConfigChanged}"
></hui-lovelace-editor
></paper-dialog-scrollable>
<div class="paper-dialog-buttons">
<mwc-button @click=${this.closeDialog}
>${this.hass.localize("ui.common.cancel")}</mwc-button
>
<mwc-button
?disabled="${!this._config||this._saving}"
@click="${this._save}"
>
${this._saving?i.dy`<ha-circular-progress
active
size="small"
title="Saving"
></ha-circular-progress>`:""}
${this.hass.localize("ui.common.save")}</mwc-button
>
</div>
</ha-paper-dialog>
`}},{kind:"method",key:"_save",value:async function(){if(!this._config)return;if(!this._isConfigChanged())return void this.closeDialog();this._saving=!0;const e=this._lovelace,t={...e.config,...this._config};try{await e.saveConfig(t),this.closeDialog()}catch(e){alert(`Saving failed: ${e.message}`)}finally{this._saving=!1}}},{kind:"method",key:"_ConfigChanged",value:function(e){e.detail&&e.detail.config&&(this._config=e.detail.config)}},{kind:"method",key:"_isConfigChanged",value:function(){const{views:e,...t}=this._lovelace.config;return JSON.stringify(this._config)!==JSON.stringify(t)}},{kind:"get",static:!0,key:"styles",value:function(){return[s.yu,i.iv`
@media all and (max-width: 450px), all and (max-height: 500px) {
/* overrule the ha-style-dialog max-height on small screens */
ha-paper-dialog {
max-height: 100%;
height: 100%;
}
}
@media all and (min-width: 660px) {
ha-paper-dialog {
width: 650px;
}
}
ha-paper-dialog {
max-width: 650px;
}
`]}}]}}),i.oi)}}]);
//# sourceMappingURL=chunk.0fc9b800556417becf02.js.map
|
from werkzeug.security import safe_str_cmp
from models.user import UserModel
def authenticate(username, password):
    """
    Look up `username` and verify `password` with a constant-time comparison.

    Returns the UserModel instance on success; returns None implicitly when
    the user is unknown or the password does not match.

    Fix: `werkzeug.security.safe_str_cmp` was removed in Werkzeug 2.1; the
    stdlib `hmac.compare_digest` provides the same timing-safe comparison.
    NOTE(review): this compares the stored password in plaintext — confirm
    whether passwords should instead be hashed (e.g. werkzeug's
    generate_password_hash / check_password_hash).
    """
    import hmac  # local import keeps the module's top-level imports unchanged
    user = UserModel.find_by_username(username)
    if user and hmac.compare_digest(user.password.encode("utf-8"), password.encode("utf-8")):
        return user
def identity(payload):
    """Resolve a decoded JWT payload to its user via the 'identity' claim."""
    return UserModel.find_by_id(payload['identity'])
|
#ifndef TESTCHAIN_BLOCK_H
#define TESTCHAIN_BLOCK_H
#include <cstdint>
#include <iostream>
#include <sstream>
#include <fstream>
#include "writeCSV.h"
using namespace std;
// Declaration of a single block in the test blockchain.
// Definitions live outside this header (presumably Block.cpp — confirm).
class Block {
uint32_t _nIndex;   // block position/index supplied at construction
uint32_t _nNonce;   // nonce; presumably varied by MineBlock() — confirm in the .cpp
string _sData;
time_t _tTime;      // timestamp field
string sDataFrom;   // transaction payload: sender data
string sDataTo;     // transaction payload: recipient data
string sFrom;
string sTo;
// CSV logger writing to "hashdetails.csv" (helper declared in writeCSV.h).
CSVWriter my_writer = CSVWriter("hashdetails.csv");
// Hash over this block's contents; implementation not visible here.
string _CalculateHash() const;
public:
string sHash;       // this block's hash
string sPrevHash;   // hash of the predecessor block in the chain
//ofstream myfile;
Block(uint32_t nIndexIn, const string &sDataFrom, const string &sDataTo);
// Proof-of-work mining entry point; nDifficulty controls the target.
void MineBlock(uint32_t nDifficulty);
//void updateHash(int id, string hash, string from, string to);
};
#endif //TESTCHAIN_BLOCK_H
|
import os
import telebot
from flask import Flask, request
# SECURITY: a bot token must never be committed to source control. Prefer the
# BOT_TOKEN environment variable; the hard-coded fallback is kept only for
# backward compatibility and should be rotated and removed.
TOKEN = os.environ.get('BOT_TOKEN', '1790892466:AAEo2tiTgaDjpXiA9omYZGKmZHFdLLXaPQY')
bot = telebot.TeleBot(TOKEN)
server = Flask(__name__)
@bot.message_handler(commands=['start'])
def send_info(message):
    """Reply to /start with a short HTML-formatted welcome message."""
    greeting = (
        "<b>Welcome to the Ыbot!</b>\n"
        "Say Hello to the bot to get a reply from it!"
    )
    bot.send_message(message.chat.id, greeting, parse_mode='HTML')
# Echo handler: the bot repeats any incoming text message back to the sender.
@bot.message_handler(func=lambda m: True)
def echo_all(message):
    """Reply with the exact text that was received."""
    bot.reply_to(message, message.text)
@server.route('/' + TOKEN, methods=['POST'])
def getMessage():
    """Webhook endpoint: decode the raw Telegram update and hand it to the bot."""
    raw_update = request.stream.read().decode("utf-8")
    bot.process_new_updates([telebot.types.Update.de_json(raw_update)])
    return "!", 200
@server.route("/")
def webhook():
bot.remove_webhook()
bot.set_webhook(url='https://mytelegramproject.herokuapp.com/' + TOKEN) #
return "!", 200
if __name__ == "__main__":
server.run(host="0.0.0.0", port=int(os.environ.get('PORT', 5000)))
|
#/usr/bin/env python
#
# Copyright 2020-2021 John T. Foster
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
class Grid:
    """
    Simple class to generate a computational grid and apply boundary conditions.

    The solution array ``u`` has shape ``(ny, nx)``: the row index runs along
    y and the column index along x.
    """
    def __init__(self, nx=10, ny=10, xmin=0.0, xmax=1.0, ymin=0.0, ymax=1.0):
        self.xmin, self.xmax, self.ymin, self.ymax = xmin, xmax, ymin, ymax
        self.nx, self.ny = nx, ny
        # Grid spacings for nx/ny points spanning the domain inclusively.
        self.dx = (xmax - xmin) / (nx - 1)
        self.dy = (ymax - ymin) / (ny - 1)
        self.u = np.zeros((ny, nx), dtype=np.double)
    def set_boundary_condition(self, side, boundary_condition_function = lambda x,y: 0.0):
        """
        Apply ``boundary_condition_function(x, y)`` along one edge of the grid.

        ``side`` is one of 'bottom', 'top', 'left', 'right'.

        Fixes two defects of the original implementation:
        * ``np.arange(min, max, step)`` excludes the endpoint and can produce
          a vector whose length differs from the edge length; ``np.linspace``
          with exactly ``nx``/``ny`` points is used instead.
        * bottom/top edges vary along x at fixed y = ymin/ymax, and left/right
          edges vary along y at fixed x = xmin/xmax; the arguments previously
          passed to the boundary function were swapped.
        """
        x = np.linspace(self.xmin, self.xmax, self.nx)
        y = np.linspace(self.ymin, self.ymax, self.ny)
        if side == 'bottom':
            self.u[0 ,:] = boundary_condition_function(x, self.ymin)
        elif side == 'top':
            self.u[-1 ,:] = boundary_condition_function(x, self.ymax)
        elif side == 'left':
            self.u[:, 0] = boundary_condition_function(self.xmin, y)
        elif side == 'right':
            self.u[:, -1] = boundary_condition_function(self.xmax, y)
class LaplaceSolver(Grid):
    """
    Class that solves the Laplace equation in 2D

    Uses in-place (Gauss-Seidel-style) finite-difference sweeps: updated
    values are reused within the same sweep, so statement order matters.
    """
    def iterate(self):
        """
        A Python (slow) implementation of a finite difference iteration

        Performs one sweep over the interior points and returns the L2 norm
        of the change, which solve() treats as the error.
        """
        u = self.u
        # NOTE(review): u has shape (ny, nx), so these names are swapped with
        # respect to the grid axes; the loop bounds stay consistent because
        # each bound comes from the matching axis of u.shape.
        nx, ny = u.shape
        dx2, dy2 = self.dx ** 2, self.dy ** 2
        err = 0.0
        for i in range(1, nx - 1):
            for j in range(1, ny - 1):
                tmp = u[i,j]
                # 5-point stencil average weighted by the squared spacings.
                # NOTE(review): axis-0 neighbours are weighted by dy2 and
                # axis-1 neighbours by dx2 — equivalent either way when
                # dx == dy; verify the pairing for anisotropic grids.
                u[i,j] = ((u[i-1, j] + u[i+1, j]) * dy2 +
                      (u[i, j-1] + u[i, j+1]) * dx2) / (dx2 + dy2) / 2
                diff = u[i,j] - tmp
                err += diff * diff
        return np.sqrt(err)
    def solve(self, max_iterations=10000, tolerance=1.0e-16, quiet=False):
        """
        Calls iterate() sequentially until the error is reduced below a tolerance.

        Stops after max_iterations sweeps even if unconverged (no error raised).
        """
        for i in range(max_iterations):
            error = self.iterate()
            if error < tolerance:
                if not quiet:
                    print("Solution converged in " + str(i) + " iterations.")
                break
    def get_solution(self):
        # Returns the live (ny, nx) solution array, not a copy.
        return self.u
|
import typing
from functools import cmp_to_key
import collections.abc
import inspect
from .logging import warn
from .util import get_class_name, get_type_index, lift_key, reflect, make_comparator
T = typing.TypeVar('T')
_primitive_types = [ type(None), bool, int, float, str ]
def satisfies_type(value, ty):
    """
    Return True when `value` structurally conforms to the type `ty`.

    Supports plain classes, None, typing.Any, Union/Optional, dict, set,
    list, tuple (including Ellipsis repetition) and Callable (callability
    only — parameter types are not checked).
    """
    if ty is None:
        return value is None
    if ty is typing.Any:
        return True
    origin = typing.get_origin(ty)
    if origin is None:
        # A plain (non-generic) class: an ordinary isinstance check suffices.
        return isinstance(value, ty)
    args = typing.get_args(ty)
    if origin is typing.Union:
        return any(satisfies_type(value, member) for member in args)
    if origin is dict:
        if not isinstance(value, dict):
            return False
        kt, vt = args[0], args[1]
        return all(
            satisfies_type(k, kt) and satisfies_type(v, vt)
            for k, v in value.items()
        )
    if origin is set:
        return isinstance(value, set) and all(
            satisfies_type(member, args[0]) for member in value
        )
    if origin is list:
        return isinstance(value, list) and all(
            satisfies_type(member, args[0]) for member in value
        )
    if origin is collections.abc.Callable:
        # TODO check whether the parameter types are satisfied
        return callable(value)
    if origin is tuple:
        if not isinstance(value, tuple) or len(value) != len(args):
            return False
        pos = 0
        for member in value:
            expected = args[pos]
            if expected == Ellipsis:
                # Ellipsis repeats the preceding element type.
                expected = args[pos - 1]
            else:
                pos += 1
            if not satisfies_type(member, expected):
                return False
        return True
    warn(f'no type-checking logic defined for {origin}')
    return isinstance(value, origin)
def _get_all_union_elements(value):
    """Yield the leaf (non-Union) members of a possibly nested typing.Union."""
    if typing.get_origin(value) is typing.Union:
        for member in typing.get_args(value):
            yield from _get_all_union_elements(member)
    else:
        yield value
def _lt_helper_dict(a, b):
    # Lexicographic 'less-than' over dicts: walk both key lists in sorted
    # order, comparing keys first and the associated values on key ties.
    # If every compared pair is equal, the dict with fewer keys is smaller.
    keys_a = list(a.keys())
    keys_a.sort()
    keys_b = list(b.keys())
    keys_b.sort()
    i1 = 0
    i2 = 0
    is_equal = True
    while True:
        if i1 == len(keys_a) or i2 == len(keys_b):
            break
        k1 = keys_a[i1]
        k2 = keys_b[i2]
        if _lt_helper(k1, k2):
            # k1 sorts strictly before k2: no longer equal; advance only the
            # left cursor to try to re-align the key sequences.
            is_equal = False
            i1 += 1
        elif is_equal and _lt_helper(k2, k1):
            # First difference has the bigger key on the left: a is not less.
            return False
        else:
            # Keys tie (or a is already known smaller): compare the values.
            v1 = a[k1]
            v2 = b[k2]
            if _lt_helper(v1, v2):
                is_equal = False
            elif is_equal and _lt_helper(v2, v1):
                return False
            i1 += 1
            i2 += 1
    if is_equal:
        # All overlapping pairs equal: the shorter dict compares smaller.
        return len(keys_a) < len(keys_b)
    return True
def _lt_helper_sequence(a, b):
    """Lexicographic 'less-than' over two sequences using _lt_helper per pair."""
    all_equal = True
    for x, y in zip(a, b):
        if _lt_helper(x, y):
            all_equal = False
        elif all_equal and _lt_helper(y, x):
            # First differing pair is larger on the left side.
            return False
    # Every compared pair was equal: the shorter sequence is the smaller one.
    return len(a) < len(b) if all_equal else True
def _lt_helper(a, b):
    """Total ordering across heterogeneous values: rank by type index first."""
    rank_a = get_type_index(a)
    rank_b = get_type_index(b)
    if rank_a != rank_b:
        return rank_a < rank_b
    if a is None:
        # Same type rank, so b must be None as well; None is not < None.
        return False
    if isinstance(a, dict):
        return _lt_helper_dict(a, b)
    if isinstance(a, (list, tuple)):
        return _lt_helper_sequence(a, b)
    return a < b
def has_annotation(cls, expected):
    """
    Return True when `expected` appears in the __annotations__ of `cls` or
    any class on its MRO. A class without its own __annotations__ exposes its
    parent's dict, so consecutive identical dicts are skipped to inspect each
    only once.
    """
    seen = None
    for klass in cls.__mro__:
        if hasattr(klass, '__annotations__') and klass.__annotations__ != seen:
            if expected in klass.__annotations__:
                return True
            seen = klass.__annotations__
    return False
def get_all_subclasses(cls):
    """Yield `cls` itself followed by every transitive subclass (pre-order)."""
    yield cls
    for child in cls.__subclasses__():
        yield from get_all_subclasses(child)
def find_subclass_named(name, cls):
    """Return the first subclass of `cls` (or `cls` itself) named `name`."""
    for candidate in get_all_subclasses(cls):
        if candidate.__name__ == name:
            return candidate
    raise NameError(f"class named '{name}' not found")
def get_defaults(cls):
    """
    Collect class-level attribute defaults along the MRO; attributes defined
    earlier in the MRO take precedence. Dunder names are skipped (single
    leading underscores are kept).
    """
    defaults = dict()
    for klass in inspect.getmro(cls):
        for name, value in klass.__dict__.items():
            if name not in defaults and not name.startswith('__'):
                defaults[name] = value
    return defaults
# Registry of (cls, proc) pairs consulted by coerce() for plain classes.
_class_coercions = list()
def add_coercion(cls, proc):
    """
    Register `proc` as the coercion procedure for `cls`.

    `proc(value, ty)` should return the coerced value or raise CoercionError.

    Bug fix: the original guard `assert(cls not in _class_coercions)` always
    passed because the registry stores (cls, proc) tuples, not bare classes;
    the duplicate-registration check now inspects the stored class of each
    entry.
    """
    assert all(registered is not cls for registered, _ in _class_coercions), \
        f'a coercion for {cls} is already registered'
    _class_coercions.append((cls, proc))
class CoercionError(RuntimeError):
    """Raised when a value cannot be coerced into the requested type."""
    pass
def get_all_superclasses(cls):
    """Yield `cls` followed by all of its bases, depth-first via __bases__."""
    yield cls
    for base in cls.__bases__:
        yield from get_all_superclasses(base)
def get_common_superclass(classes):
    """
    Return the first ancestor of classes[0] shared by every class in the
    list, or None when no such ancestor is found.
    """
    for ancestor in get_all_superclasses(classes[0]):
        if all(issubclass(c, ancestor) for c in classes):
            return ancestor
def coerce(value, ty):
    """
    Convert `value` into something satisfying the type `ty`.

    Handles NoneType, plain classes (via the registered _class_coercions),
    typing.Union (by joining members to a common superclass), list and tuple.
    Raises CoercionError when no conversion applies, RuntimeError for
    unsupported typing constructs.
    """
    if ty is type(None):
        if value is not None:
            raise CoercionError(f'could not coerce {value} to NoneType because the only allowed value is None')
        return None
    origin = typing.get_origin(ty)
    if origin is None:
        # Plain (non-generic) class: try the registered coercions, most
        # specific class first.
        if isinstance(value, ty):
            return value
        attempts = []
        for cls, proc in _class_coercions:
            if ty is cls or issubclass(ty, cls):
                attempts.append((cls, proc))
        # Sort so that subclasses are attempted before their superclasses.
        attempts.sort(key=lift_key(cmp_to_key(make_comparator(issubclass)), 0))
        for cls, proc in attempts:
            try:
                return proc(value, ty)
            except CoercionError:
                # This candidate declined; fall through to the next one.
                pass
        raise CoercionError(f'could not coerce {value} to {ty} because no known coercions exist for {ty}')
    if origin is typing.Union:
        # Flatten the union, peel off None, and require that the remaining
        # members are plain classes sharing a common superclass.
        classes = []
        has_none = False
        has_non_cls = False
        for arg in _get_all_union_elements(ty):
            if arg is type(None):
                has_none = True
                continue
            origin = typing.get_origin(arg)
            if origin is not None:
                has_non_cls = True
            classes.append(arg)
        if value is None:
            if not has_none:
                raise CoercionError(f'could not coerce None to {ty} because None is not allowed')
            return None
        if has_non_cls:
            # NOTE(review): `arg` here is whatever the loop saw last, not
            # necessarily the offending member — confirm the message intent.
            raise CoercionError(f'could not coerce {value} to {ty} because {arg} cannot be joined with the other typing.Union elements')
        cls = get_common_superclass(classes)
        if cls is None:
            raise CoercionError(f'could not coerce {value} to {ty} because {ty} can be multiple unrelated types')
        return coerce(value, cls)
    args = typing.get_args(ty)
    if origin is list:
        # None becomes the empty list; other values are coerced element-wise.
        if value is None:
            return []
        element_type = args[0]
        return list(coerce(element, element_type) for element in value)
    if origin is tuple:
        # None yields a tuple of per-element defaults; otherwise coerce
        # pairwise (extra elements in `value` are silently dropped by zip).
        if value is None:
            return tuple(coerce(None, element_type) for element_type in args)
        return tuple(coerce(element, element_type) for element, element_type in zip(value, args))
    raise RuntimeError(f'cannot coerce {value} into {ty} because {origin} is an unsupported typing')
class RecordFields:
    """
    Dict-like live view over the typed fields of a record object.

    Field membership is derived from the type hints of the record's class;
    reads and writes delegate to getattr/setattr on the underlying record.
    """
    def __init__(self, record) -> None:
        self.record = record
    def _hints(self):
        # Resolved on every access so late changes to annotations are seen.
        return typing.get_type_hints(type(self.record))
    def __contains__(self, key: str):
        return key in self._hints()
    def __getitem__(self, key: str) -> typing.Any:
        if key not in self._hints():
            raise KeyError(f"key '{key}' is not found in the fields of {self.record}")
        return getattr(self.record, key)
    def __setitem__(self, key: str, new_value: typing.Any):
        if key not in self._hints():
            raise KeyError(f"key '{key}' is not found in the fields of {self.record}")
        setattr(self.record, key, new_value)
    def keys(self):
        return self._hints().keys()
    def values(self):
        for key in self._hints():
            yield getattr(self.record, key)
    def items(self):
        for key in self._hints():
            yield key, getattr(self.record, key)
def pretty_enum(elements: typing.List[str]) -> str:
    """Render a word list as natural English: 'a, b and c'; [] -> 'nothing'."""
    if not elements:
        return 'nothing'
    if len(elements) == 1:
        return elements[0]
    return ', '.join(elements[:-1]) + f' and {elements[-1]}'
def transform(value: T, proc) -> T:
    """
    Recursively rewrite `value` with `proc`, sharing untouched sub-structures.

    `proc` is applied to the value first; when it returns something different
    (compared with !=) that result is used as-is. Otherwise containers
    (tuple, list, set, Record, dict) are rebuilt only if at least one child
    changed, so unchanged subtrees keep their original identity.
    """
    replaced = proc(value)
    if replaced != value:
        return replaced
    for primitive in _primitive_types:
        if isinstance(value, primitive):
            return value
    if isinstance(value, tuple):
        children = [transform(child, proc) for child in value]
        if any(new != old for new, old in zip(children, value)):
            return tuple(children)
        return value
    if isinstance(value, list):
        children = [transform(child, proc) for child in value]
        if any(new != old for new, old in zip(children, value)):
            return children
        return value
    if isinstance(value, set):
        changed = False
        rebuilt = set()
        for child in value:
            new_child = transform(child, proc)
            if new_child != child:
                changed = True
            rebuilt.add(new_child)
        return rebuilt if changed else value
    if isinstance(value, Record):
        changed = False
        kwargs = dict()
        for name, field_value in value.fields.items():
            new_field = transform(field_value, proc)
            if new_field != field_value:
                changed = True
            kwargs[name] = new_field
        return value.__class__(**kwargs) if changed else value
    if isinstance(value, dict):
        changed = False
        rebuilt = dict()
        for k, v in value.items():
            new_v = transform(v, proc)
            if new_v != v:
                changed = True
            rebuilt[k] = new_v
        return rebuilt if changed else value
    raise RuntimeError(f'unexpected {value}')
def clone(value: T, deep=False) -> T:
    """
    Copy a value: primitives are returned as-is, objects with a .clone()
    method delegate to it, and dict/list/tuple/set are rebuilt, cloning
    their elements recursively only when `deep` is True.
    """
    for cls in _primitive_types:
        if isinstance(value, cls):
            return value
    # Prefer an object's own clone() implementation when available.
    # NOTE(review): `deep` is not forwarded to value.clone() — confirm
    # whether deep clones of Record-like objects should propagate it.
    if isinstance(value, object) and hasattr(value, 'clone'):
        return value.clone()
    if isinstance(value, dict):
        return dict((k, clone(v, True) if deep else v) for k, v in value.items())
    if isinstance(value, list):
        return list(clone(el, True) if deep else el for el in value)
    if isinstance(value, tuple):
        return tuple(clone(el, True) if deep else el for el in value)
    if isinstance(value, set):
        return set(clone(el, True) if deep else el for el in value)
    # Typo fixed in the message: 'becaue' -> 'because'.
    raise RuntimeError(f'could not clone {value} because it did not have a .clone() and was not recognised as a primitive type')
def _record_clone_helper(value, deep: bool):
    """Copy containers structurally; leaves defer to clone() only when deep."""
    if isinstance(value, dict):
        return {k: _record_clone_helper(v, deep) for k, v in value.items()}
    if isinstance(value, list):
        return [_record_clone_helper(el, deep) for el in value]
    if isinstance(value, tuple):
        return tuple(_record_clone_helper(el, deep) for el in value)
    return clone(value, True) if deep else value
@reflect
class Record:
    """
    Base class for typed record objects.

    Field names and types come from the class's type hints; class attributes
    provide defaults. The constructor accepts positional and keyword
    arguments, coerces values for required fields that do not already
    satisfy their annotated type, and rejects excess arguments.
    """
    def __init__(self, *args, **kwargs):
        type_hints = typing.get_type_hints(self.__class__)
        defaults = get_defaults(self.__class__)
        i = 0
        # First pass: required fields (no class-level default). Missing ones
        # are given coerce(None, ty) when possible, else raise TypeError.
        for name, ty in type_hints.items():
            if name in defaults:
                continue
            if name in kwargs:
                value = kwargs[name]
                del kwargs[name]
            elif i < len(args):
                value = args[i]
                i += 1
            else:
                try:
                    value = coerce(None, ty)
                except CoercionError:
                    raise TypeError(f"argument '{name}' is required but did not receive a value")
            if not satisfies_type(value, ty):
                value = coerce(value, ty)
            self.__dict__[name] = value
        # Second pass: optional fields; values must satisfy the annotation
        # exactly (no coercion is attempted here).
        for name, ty in type_hints.items():
            if name not in defaults:
                continue
            ty = type_hints[name]
            if name in kwargs:
                value = kwargs[name]
                del kwargs[name]
            elif i < len(args):
                value = args[i]
                i += 1
            else:
                value = defaults[name]
            if not satisfies_type(value, ty):
                raise TypeError(f"{value} did not satisfy type {ty}")
            self.__dict__[name] = value
        if i < len(args) or len(kwargs) > 0:
            parts = []
            if i < len(args):
                # Bug fix: the excess-positional count was computed as
                # i - len(args), which is always negative here.
                parts.append(f'{len(args)-i} positional')
            for k in kwargs:
                parts.append(f"'{k}'")
            raise TypeError(f'excess arguments received to {get_class_name(self)}: {pretty_enum(parts)}')
    def get_field_names(self):
        # NOTE(review): passes the instance (not the class) to
        # typing.get_type_hints — works because instances expose the class
        # __annotations__, but verify across Python versions.
        return typing.get_type_hints(self).keys()
    @property
    def fields(self):
        """Dict-like live view over this record's typed fields."""
        return RecordFields(self)
    def __lt__(self, other):
        if not isinstance(other, Record):
            return _lt_helper(self, other)
        return _lt_helper(self.fields, other.fields)
    def __getitem__(self, name):
        return self.fields[name]
    def __setitem__(self, key, new_value):
        self.fields[key] = new_value
    def __setattr__(self, name, new_value):
        # Type-check assignments to annotated fields; all other attributes
        # pass through unchecked.
        hints = typing.get_type_hints(type(self))
        if name in hints:
            ty = hints[name]
            if not satisfies_type(new_value, ty):
                raise RuntimeError(f"cannot set field '{name}' to {new_value} on {get_class_name(self)} because the type {ty} is not satisfied")
        super().__setattr__(name, new_value)
    def clone(self, deep=False):
        """Return a copy of this record; `deep` clones nested leaves too."""
        new_fields = dict()
        for k, v in self.fields.items():
            new_fields[k] = _record_clone_helper(v, deep)
        return self.__class__(**new_fields)
    def encode(self, encoder):
        """Encode as a plain dict tagged with a '$type' discriminator."""
        fields = { '$type': self.__class__.__name__ }
        for k, v in self.fields.items():
            fields[k] = v
        return encoder.encode(fields)
def _coerce_to_record(value, ty):
    """
    Default coercion from a single plain value to a Record subclass `ty`.

    Only records with exactly one required field can absorb a value; records
    whose fields are all optional accept only None (yielding ty()).
    """
    hints = typing.get_type_hints(ty)
    defaults = get_defaults(ty)
    required = sum(1 for key in hints.keys() if key not in defaults)
    if required == 0:
        if value is not None:
            raise CoercionError(f'could not coerce {value} to {ty} because all fields are optional')
        return ty()
    if required == 1:
        return ty(value)
    raise CoercionError(f'could not coerce {value} to {ty} because it requires more than one field')
add_coercion(Record, _coerce_to_record)
|
const { getLoad } = require('../lib/getLoad')
const { getUptime } = require('../lib/getUptime')
const metricsMiddleware = async (req, res, next) => {
const uptime = getUptime()
const loadData = getLoad()
res.set('Content-Type', 'text/plain')
let metrics = ''
if (loadData) {
metrics += `# TYPE node_load gauge
node_load{duration="1m"} ${loadData[0]}
node_load{duration="5m"} ${loadData[1]}
node_load{duration="15m"} ${loadData[2]}
`
}
if (uptime) {
metrics += `# TYPE node_uptime gauge
node_uptime ${uptime}`
}
res.send(metrics)
}
module.exports = {
metricsMiddleware
}
|
export { default as Icon } from './ui/Icon.svelte'
export { default as SentryAuth } from './ui/SentryAuth.svelte'
export { sentry, isAuthChanging } from './sentry'
export { sessionFromCookies, cookiesFromSession } from './helper'
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 5 15:52:22 2019
@author: dan
"""
import numpy as np
def chisq_from_stim_table(stim_table,
                          columns,
                          mean_sweep_events,
                          num_shuffles=1000,
                          verbose=False):
    """
    Shuffle-based chi-square responsiveness test.

    stim_table: pandas DataFrame with one row per sweep.
    columns: column names whose unique value combinations define the
        stimulus categories (e.g. ['Ori', 'Contrast']).
    mean_sweep_events: numpy array of shape (num_sweeps, num_cells).

    Returns an array of p-values, one per cell.
    """
    categories = stim_table_to_categories(stim_table, columns, verbose=verbose)
    return compute_chi_shuffle(mean_sweep_events, categories,
                               num_shuffles=num_shuffles)
def compute_chi_shuffle(mean_sweep_events,
                        sweep_categories,
                        num_shuffles=1000):
    """
    Permutation chi-square test.

    mean_sweep_events: numpy array of shape (num_sweeps, num_cells).
    sweep_categories: array of shape (num_sweeps,) with each sweep's label.

    Returns per-cell p-values: the fraction of bootstrap shuffles whose chi
    statistic exceeds the actual one.
    """
    (num_sweeps, num_cells) = np.shape(mean_sweep_events)
    assert len(sweep_categories) == num_sweeps

    category_dummy = make_category_dummy(sweep_categories)
    expected = compute_expected(mean_sweep_events, category_dummy)
    observed = compute_observed(mean_sweep_events, category_dummy)
    chi_actual = compute_chi(observed, expected)

    chi_shuffle = np.zeros((num_cells, num_shuffles))
    for shuffle_idx in range(num_shuffles):
        # Resample sweeps with replacement to build the null distribution.
        resampled = np.random.choice(num_sweeps, size=(num_sweeps,))
        resampled_events = mean_sweep_events[resampled]
        exp_s = compute_expected(resampled_events, category_dummy)
        obs_s = compute_observed(resampled_events, category_dummy)
        chi_shuffle[:, shuffle_idx] = compute_chi(obs_s, exp_s)

    return np.mean(chi_actual.reshape(num_cells, 1) < chi_shuffle, axis=1)
def compute_chi_from_blanks(mean_sweep_events,
                            sweep_categories,
                            num_shuffles=1000):
    """
    Rectified permutation chi-square test against blank-sweep expectations.

    mean_sweep_events: numpy array of shape (num_sweeps, num_cells).
    sweep_categories: array of shape (num_sweeps,) with each sweep's label.

    Returns (p_vals_pos, p_vals_neg): per-cell p-values for responses above
    and below the blank-derived expectation, respectively.
    """
    (num_sweeps, num_cells) = np.shape(mean_sweep_events)
    assert len(sweep_categories) == num_sweeps

    category_dummy = make_category_dummy(sweep_categories)
    expected = compute_expected_for_blank(mean_sweep_events, category_dummy)
    observed = compute_observed(mean_sweep_events, category_dummy)
    chi_pos_actual, chi_neg_actual = compute_chi_rectified(observed, expected)

    chi_pos_shuffle = np.zeros((num_cells, num_shuffles))
    chi_neg_shuffle = np.zeros((num_cells, num_shuffles))
    for shuffle_idx in range(num_shuffles):
        # Resample sweeps with replacement to build the null distribution.
        resampled = np.random.choice(num_sweeps, size=(num_sweeps,))
        resampled_events = mean_sweep_events[resampled]
        exp_s = compute_expected_for_blank(resampled_events, category_dummy)
        obs_s = compute_observed(resampled_events, category_dummy)
        chi_pos, chi_neg = compute_chi_rectified(obs_s, exp_s)
        chi_pos_shuffle[:, shuffle_idx] = chi_pos
        chi_neg_shuffle[:, shuffle_idx] = chi_neg

    p_vals_pos = np.mean(chi_pos_actual.reshape(num_cells, 1) < chi_pos_shuffle, axis=1)
    p_vals_neg = np.mean(chi_neg_actual.reshape(num_cells, 1) < chi_neg_shuffle, axis=1)
    return p_vals_pos, p_vals_neg
def stim_table_to_categories(stim_table,
                             columns,
                             verbose=False):
    """
    Label each sweep with an integer category, one category per unique
    combination of values in `columns`; sweeps matching no tried combination
    keep the label -1 (treated as blank sweeps by callers).

    Fix: the `np.int` / `np.bool` aliases were removed in NumPy 1.24; the
    builtin `int` / `bool` dtypes are used instead.
    """
    num_sweeps = len(stim_table)
    num_params = len(columns)
    # Gather the unique values per column; their product enumerates all
    # candidate parameter combinations.
    unique_params = []
    options_per_column = []
    max_combination = 1
    for column in columns:
        column_params = np.unique(np.array(stim_table[column].values))
        #column_params = column_params[np.isfinite(column_params)]
        unique_params.append(column_params)
        options_per_column.append(len(column_params))
        max_combination *= len(column_params)
    category = 0
    sweep_categories = -1*np.ones((num_sweeps,))
    curr_combination = np.zeros((num_params,), dtype=int)
    options_per_column = np.array(options_per_column).astype(int)
    all_tried = False
    while not all_tried:
        # Mark the sweeps whose row matches every column of the current
        # parameter combination.
        matches_combination = np.ones((num_sweeps,), dtype=bool)
        for i_col, column in enumerate(columns):
            param = unique_params[i_col][curr_combination[i_col]]
            matches_param = np.array(stim_table[column].values) == param
            matches_combination *= matches_param
        if np.any(matches_combination):
            # Only combinations that actually occur consume a category id.
            sweep_categories[matches_combination] = category
            if verbose:
                print('Category ' + str(category))
                for i_col, column in enumerate(columns):
                    param = unique_params[i_col][curr_combination[i_col]]
                    print(column + ': ' + str(param))
            category += 1
        #advance the combination
        curr_combination = advance_combination(curr_combination, options_per_column)
        # advance_combination leaves digit 0 equal to its radix when every
        # combination has been visited.
        all_tried = curr_combination[0] == options_per_column[0]
    if verbose:
        blank_sweeps = sweep_categories == -1
        print('num blank: ' + str(blank_sweeps.sum()))
    return sweep_categories
def advance_combination(curr_combination,
                        options_per_column):
    """Advance a mixed-radix counter by one step, in place.

    Increments the rightmost digit; a digit that reaches its column's option
    count wraps to 0 and carries into the digit to its left. Column 0 is
    allowed to overflow (the caller uses that to detect exhaustion).

    Parameters:
        curr_combination: mutable sequence of per-column indices (mutated).
        options_per_column: number of valid options for each column.

    Returns:
        The same (mutated) ``curr_combination`` sequence.
    """
    for col in range(len(curr_combination) - 1, -1, -1):
        curr_combination[col] += 1
        # Stop unless this digit overflowed (column 0 never wraps).
        if col == 0 or curr_combination[col] < options_per_column[col]:
            break
        curr_combination[col] = 0
    return curr_combination
def make_category_dummy(sweep_categories):
    """One-hot encode a list of sweep category labels.

    Parameters:
        sweep_categories: array-like of per-sweep labels, shape (num_sweeps,).

    Returns:
        Boolean array of shape (num_sweeps, num_categories) where column order
        follows the sorted unique labels (so -1 blanks, if present, are first).
    """
    num_sweeps = len(sweep_categories)
    categories = np.sort(np.unique(sweep_categories))
    num_categories = len(categories)
    # Use builtin bool: the np.bool alias was removed in NumPy 1.24.
    sweep_category_mat = np.zeros((num_sweeps, num_categories), dtype=bool)
    for i_cat, category in enumerate(categories):
        category_idx = np.argwhere(sweep_categories == category)[:, 0]
        sweep_category_mat[category_idx, i_cat] = True
    return sweep_category_mat
def compute_observed(mean_sweep_events, sweep_conditions):
    """Sum each cell's events within every condition.

    Parameters:
        mean_sweep_events: array of shape (num_sweeps, num_cells).
        sweep_conditions: dummy-encoded conditions, shape
            (num_sweeps, num_conditions).

    Returns:
        Array of shape (num_cells, num_conditions) of per-condition totals.
    """
    num_sweeps, num_conditions = np.shape(sweep_conditions)
    num_cells = np.shape(mean_sweep_events)[1]
    # Broadcast to (cells, sweeps, conditions), then collapse the sweep axis.
    per_sweep = mean_sweep_events.T[:, :, np.newaxis] * sweep_conditions[np.newaxis, :, :]
    return per_sweep.sum(axis=1)
def compute_expected(mean_sweep_events, sweep_conditions):
    """Expected per-condition event totals under the null of no tuning.

    The expectation for a cell in a condition is its grand-mean event rate
    times the number of sweeps in that condition.

    Parameters:
        mean_sweep_events: array of shape (num_sweeps, num_cells).
        sweep_conditions: dummy-encoded conditions, shape
            (num_sweeps, num_conditions).

    Returns:
        Array of shape (num_cells, num_conditions).
    """
    num_conditions = np.shape(sweep_conditions)[1]
    num_cells = np.shape(mean_sweep_events)[1]
    counts = sweep_conditions.sum(axis=0)
    rates = mean_sweep_events.mean(axis=0)
    # Outer product: rate per cell x sweep count per condition.
    return counts.reshape(1, num_conditions) * rates.reshape(num_cells, 1)
def compute_expected_for_blank(mean_sweep_events, sweep_conditions):
    """Expected per-condition totals based on the blank-sweep rate.

    Like compute_expected, but the baseline rate is taken from blank sweeps
    only. Assumes the blank sweeps are assigned to the FIRST category
    (column 0 of the dummy encoding must be a boolean mask).

    Parameters:
        mean_sweep_events: array of shape (num_sweeps, num_cells).
        sweep_conditions: dummy-encoded conditions (bool), shape
            (num_sweeps, num_conditions).

    Returns:
        Array of shape (num_cells, num_conditions).
    """
    blank_mask = sweep_conditions[:, 0]
    num_conditions = np.shape(sweep_conditions)[1]
    num_cells = np.shape(mean_sweep_events)[1]
    counts = sweep_conditions.sum(axis=0)
    # Baseline rate per cell, estimated from blank sweeps only.
    blank_rates = mean_sweep_events[blank_mask].mean(axis=0)
    return counts.reshape(1, num_conditions) * blank_rates.reshape(num_cells, 1)
def compute_chi(observed, expected):
    """Chi-square statistic per cell, summed over conditions.

    Conditions with non-positive expectation contribute zero (their raw
    ratio may be NaN/inf, which np.where discards).

    Parameters:
        observed, expected: arrays of shape (num_cells, num_conditions).

    Returns:
        Array of shape (num_cells,).
    """
    raw = np.square(observed - expected) / expected
    safe = np.where(expected > 0, raw, 0.0)
    return safe.sum(axis=1)
def compute_chi_rectified(observed, expected):
    """Chi-square statistic split into enhancement and suppression parts.

    Parameters:
        observed, expected: arrays of shape (num_cells, num_conditions).

    Returns:
        (chi_pos, chi_neg): each shape (num_cells,), summing contributions
        from conditions where observed exceeds / falls below expected.
        Conditions with non-positive expectation contribute zero.
    """
    deviation = observed - expected
    contrib = np.where(expected > 0, deviation ** 2 / expected, 0.0)
    chi_pos = np.where(deviation > 0, contrib, 0.0).sum(axis=1)
    chi_neg = np.where(deviation < 0, contrib, 0.0).sum(axis=1)
    return chi_pos, chi_neg
|
#!/usr/bin/env python
"""
Plots an extracted sky spectrum with an archived one
Probably most useful for exploring sky spectra in the blue
"""
def parse_args(options=None, return_parser=False):
    """Build the command-line parser for the sky-comparison script.

    Parameters:
        options: argument list to parse; None means use sys.argv.
        return_parser: if True, return the ArgumentParser itself instead of
            parsing.

    Returns:
        The parsed argparse.Namespace, or the parser when requested.
    """
    import argparse

    p = argparse.ArgumentParser(description='Compare the extracted sky spectrum against an '
                                            'archived sky model maintained by PypeIt.',
                                formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('file', type=str, help='Spectral file')
    p.add_argument('skyfile', type=str,
                   help='Archived PypeIt sky file (e.g. paranal_sky.fits)')
    p.add_argument('--exten', type=int, help='FITS extension')
    p.add_argument('--optimal', default=False, action='store_true',
                   help='Show Optimal? Default is boxcar')
    p.add_argument('--scale_user', default=1., type=float,
                   help='Scale user spectrum by a factor')

    if return_parser:
        return p
    if options is None:
        return p.parse_args()
    return p.parse_args(options)
# Script to run XSpec from the command line or ipython
def main(args):
    """Plot the user's extracted sky spectrum over an archived sky model.

    Parameters:
        args: argparse.Namespace from parse_args() (file, skyfile, exten,
            optimal, scale_user).

    Side effects:
        Opens a matplotlib window (plt.show()); returns nothing.
    """
    import os
    from pkg_resources import resource_filename
    from matplotlib import pyplot as plt
    from linetools.spectra.io import readspec

    # Path to archived sky spectra bundled with the pypeit package
    sky_path = os.path.join(resource_filename('pypeit', 'data'), 'sky_spec')
    # Extension: parse_args always defines `exten` (default None), so a plain
    # hasattr() check would pass None through; fall back to 0 explicitly.
    exten = getattr(args, 'exten', None)
    if exten is None:
        exten = 0
    # Read spec keywords: choose optimal or boxcar wavelength/sky columns.
    ikwargs = {}
    if args.optimal:
        ikwargs['wave_tag'] = 'opt_wave'
        ikwargs['flux_tag'] = 'opt_sky'
    else:
        ikwargs['wave_tag'] = 'box_wave'
        ikwargs['flux_tag'] = 'box_sky'
    # Load user file
    user_sky = readspec(args.file, exten=exten, **ikwargs)
    # Load sky spec; os.path.join guarantees the path separator that bare
    # string concatenation (sky_path + args.skyfile) silently dropped.
    arx_sky = readspec(os.path.join(sky_path, args.skyfile))
    # Plot
    plt.clf()
    plt.plot(user_sky.wavelength, user_sky.flux*args.scale_user, 'k-', label='user')
    plt.plot(arx_sky.wavelength, arx_sky.flux, 'b-', label='archive')
    legend = plt.legend(loc='upper left', scatterpoints=1, borderpad=0.3,
                        handletextpad=0.3, fontsize='small', numpoints=1)
    plt.show()
|
menu_name = "Flashlight"  # name shown in the ZPUI app menu
from zerophone_hw import RGB_LED
led = None  # RGB_LED instance; populated by init_app()
state = False  # True while the flashlight LED is lit
def init_app(i, o):
    # App entry point; `i` (input) and `o` (output) are unused here since the
    # app only drives the hardware LED — presumably the ZPUI framework
    # requires this signature; confirm against the ZPUI app API.
    global led
    led = RGB_LED()
def callback():
    """Toggle the flashlight: light the LED white when off, turn it off when on."""
    global state
    if state:
        led.set_color("none")
        state = False
    else:
        led.set_color("white")
        state = True
|
#!/usr/bin/python
#
# Code by Tony Bussieres <t.bussieres@gmail.com> inspired by
# 40-convert_to_tcx.py by Gustav Tiger <gustav@tiger.name>
#
# This helper uses GcpUploader to send the fit files to Garmin Connect
#
# To install GcpUploader:
#
# sudo pip install GcpUploader
#
# edit the file ~/.guploadrc and add the following
# [Credentials]
# username=yourgarminuser
# password=yourgarminpass
#
# Then change the gupload path (See CHANGEME in the code)
#
# Don't forget to make this script executable :
#
# chmod +x /path/to/40-upload_to_garmin_connect.py
import errno
import os
import subprocess
import sys
# CHANGE ME:
gupload = "/path/to/bin/gupload.py"  # absolute path to GcpUploader's upload script
def main(action, filename):
    """Upload a downloaded .fit file to Garmin Connect via gupload.py.

    Parameters:
        action: hook action name; anything other than "DOWNLOAD" is ignored.
        filename: path to the .fit file to upload.

    Returns:
        0 on success (or when the action is skipped), -1 on failure.
    """
    if action != "DOWNLOAD":
        return 0
    try:
        process = subprocess.Popen([gupload, filename], stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        (data, _) = process.communicate()
    except OSError as e:
        # print() with a single pre-formatted string is valid in both
        # Python 2 and 3 (the original print statements were Python-2-only).
        print("Could not send to Garmin %s - %s %s"
              % (gupload, errno.errorcode[e.errno], os.strerror(e.errno)))
        return -1
    if process.returncode != 0:
        print("gupload.py exited with error code %d" % process.returncode)
        return -1
    print("Successfully uploaded %s to Garmin Connect" % (filename,))
    return 0
if __name__ == "__main__":
    # argv[1] = hook action (e.g. "DOWNLOAD"), argv[2] = path to the .fit file
    sys.exit(main(sys.argv[1], sys.argv[2]))
|
"""Implements the File extension.
https://github.com/stac-extensions/file
"""
from enum import Enum
from typing import Any, Dict, List, Optional, Set
import pystac
from pystac.extensions.base import ExtensionManagementMixin, PropertiesExtension
from pystac.extensions.hooks import ExtensionHooks
from pystac.serialization.identify import (
OldExtensionShortIDs,
STACJSONDescription,
STACVersionID,
)
from pystac.utils import get_required
# JSON Schema URI identifying v2.0.0 of the File Info extension.
SCHEMA_URI = "https://stac-extensions.github.io/file/v2.0.0/schema.json"
# All File Info property names are namespaced with this prefix.
PREFIX = "file:"
# Fully-qualified property names defined by the extension.
BYTE_ORDER_PROP = PREFIX + "byte_order"
CHECKSUM_PROP = PREFIX + "checksum"
HEADER_SIZE_PROP = PREFIX + "header_size"
SIZE_PROP = PREFIX + "size"
VALUES_PROP = PREFIX + "values"
class ByteOrder(str, Enum):
    """List of allows values for the ``"file:byte_order"`` field defined by the
    :stac-ext:`File Info Extension <file>`."""

    def __str__(self) -> str:
        # Render as the raw JSON value (e.g. "little-endian"), not the
        # default "ByteOrder.LITTLE_ENDIAN".
        return str(self.value)

    LITTLE_ENDIAN = "little-endian"
    BIG_ENDIAN = "big-endian"
class MappingObject:
    """Represents a value map used by assets that are used as classification layers, and
    give details about the values in the asset and their meanings."""

    # Raw extension properties dict backing this object.
    properties: Dict[str, Any]

    def __init__(self, properties: Dict[str, Any]) -> None:
        self.properties = properties

    def apply(self, values: List[Any], summary: str) -> None:
        """Sets the properties for this :class:`~MappingObject` instance.

        Args:
            values : The value(s) in the file. At least one array element is required.
            summary : A short description of the value(s).
        """
        self.values = values
        self.summary = summary

    @classmethod
    def create(cls, values: List[Any], summary: str) -> "MappingObject":
        """Creates a new :class:`~MappingObject` instance.

        Args:
            values : The value(s) in the file. At least one array element is required.
            summary : A short description of the value(s).
        """
        m = cls({})
        m.apply(values=values, summary=summary)
        return m

    @property
    def values(self) -> List[Any]:
        """Gets or sets the list of value(s) in the file. At least one array element is
        required."""
        # Use .get() so a missing key is passed to get_required as None and
        # reported as a missing required property, instead of raising a bare
        # KeyError before get_required can run.
        return get_required(self.properties.get("values"), self, "values")

    @values.setter
    def values(self, v: List[Any]) -> None:
        self.properties["values"] = v

    @property
    def summary(self) -> str:
        """Gets or sets the short description of the value(s)."""
        return get_required(self.properties.get("summary"), self, "summary")

    @summary.setter
    def summary(self, v: str) -> None:
        self.properties["summary"] = v
class FileExtension(PropertiesExtension, ExtensionManagementMixin[pystac.Item]):
    """A class that can be used to extend the properties of an :class:`~pystac.Asset`
    with properties from the :stac-ext:`File Info Extension <file>`.

    To create an instance of :class:`FileExtension`, use the
    :meth:`FileExtension.ext` method. For example:

    .. code-block:: python

       >>> asset: pystac.Asset = ...
       >>> file_ext = FileExtension.ext(asset)
    """

    def __init__(self, asset: pystac.Asset):
        # href is kept only for __repr__/debugging; properties is the live
        # dict that _get_property/_set_property read and write.
        self.asset_href = asset.href
        self.properties = asset.properties
        # Reads may fall back to the owning Item's properties.
        if asset.owner and isinstance(asset.owner, pystac.Item):
            self.additional_read_properties = [asset.owner.properties]

    def __repr__(self) -> str:
        return "<AssetFileExtension Asset href={}>".format(self.asset_href)

    def apply(
        self,
        byte_order: Optional[ByteOrder] = None,
        checksum: Optional[str] = None,
        header_size: Optional[int] = None,
        size: Optional[int] = None,
        values: Optional[List[MappingObject]] = None,
    ) -> None:
        """Applies file extension properties to the extended Item.

        Args:
            byte_order : Optional byte order of integer values in the file. One of
                ``"big-endian"`` or ``"little-endian"``.
            checksum : Optional multihash for the corresponding file,
                encoded as hexadecimal (base 16) string with lowercase letters.
            header_size : Optional header size of the file, in bytes.
            size : Optional size of the file, in bytes.
            values : Optional list of :class:`~MappingObject` instances that lists the
                values that are in the file and describe their meaning. See the
                :stac-ext:`Mapping Object <file#mapping-object>` docs for an example.
                If given, at least one array element is required.
        """
        self.byte_order = byte_order
        self.checksum = checksum
        self.header_size = header_size
        self.size = size
        self.values = values

    @property
    def byte_order(self) -> Optional[ByteOrder]:
        """Gets or sets the byte order of integer values in the file. One of big-endian
        or little-endian."""
        return self._get_property(BYTE_ORDER_PROP, ByteOrder)

    @byte_order.setter
    def byte_order(self, v: Optional[ByteOrder]) -> None:
        self._set_property(BYTE_ORDER_PROP, v)

    @property
    def checksum(self) -> Optional[str]:
        """Get or sets the multihash for the corresponding file, encoded as hexadecimal
        (base 16) string with lowercase letters."""
        return self._get_property(CHECKSUM_PROP, str)

    @checksum.setter
    def checksum(self, v: Optional[str]) -> None:
        self._set_property(CHECKSUM_PROP, v)

    @property
    def header_size(self) -> Optional[int]:
        """Get or sets the header size of the file, in bytes."""
        return self._get_property(HEADER_SIZE_PROP, int)

    @header_size.setter
    def header_size(self, v: Optional[int]) -> None:
        self._set_property(HEADER_SIZE_PROP, v)

    @property
    def size(self) -> Optional[int]:
        """Get or sets the size of the file, in bytes."""
        return self._get_property(SIZE_PROP, int)

    @size.setter
    def size(self, v: Optional[int]) -> None:
        self._set_property(SIZE_PROP, v)

    @property
    def values(self) -> Optional[List[MappingObject]]:
        """Get or sets the list of :class:`~MappingObject` instances that lists the
        values that are in the file and describe their meaning. See the
        :stac-ext:`Mapping Object <file#mapping-object>` docs for an example. If given,
        at least one array element is required."""
        # NOTE(review): this returns whatever is stored under "file:values"
        # without converting raw JSON dicts into MappingObject instances —
        # confirm round-tripping from deserialized items works as intended.
        return self._get_property(VALUES_PROP, List[MappingObject])

    @values.setter
    def values(self, v: Optional[List[MappingObject]]) -> None:
        self._set_property(VALUES_PROP, v)

    @classmethod
    def get_schema_uri(cls) -> str:
        # Schema URI used by validation and by migrate() hooks.
        return SCHEMA_URI

    @classmethod
    def ext(cls, obj: pystac.Asset, add_if_missing: bool = False) -> "FileExtension":
        """Extends the given STAC Object with properties from the :stac-ext:`File Info
        Extension <file>`.

        This extension can be applied to instances of :class:`~pystac.Asset`.
        """
        if isinstance(obj, pystac.Asset):
            # add_if_missing registers the schema URI on the owning Item.
            if add_if_missing and isinstance(obj.owner, pystac.Item):
                cls.add_to(obj.owner)
            cls.validate_has_extension(obj)
            return cls(obj)
        else:
            raise pystac.ExtensionTypeError(
                f"File Info extension does not apply to type {type(obj)}"
            )
class FileExtensionHooks(ExtensionHooks):
    # Current schema plus the pre-1.0 short ID this extension was known by.
    schema_uri: str = SCHEMA_URI
    prev_extension_ids: Set[str] = set(["file"])
    stac_object_types: Set[pystac.STACObjectType] = set([pystac.STACObjectType.ITEM])

    def migrate(
        self, obj: Dict[str, Any], version: STACVersionID, info: STACJSONDescription
    ) -> None:
        # The checksum field was previously it's own extension.
        # Collect any legacy "checksum:multihash" values (item-level keyed as
        # "__item__", plus one per asset), run the normal migration, then
        # re-attach them under the new "file:checksum" property.
        old_checksum: Optional[Dict[str, str]] = None
        if info.version_range.latest_valid_version() < "v1.0.0-rc.2":
            if OldExtensionShortIDs.CHECKSUM.value in info.extensions:
                old_item_checksum = obj["properties"].get("checksum:multihash")
                if old_item_checksum is not None:
                    if old_checksum is None:
                        old_checksum = {}
                    old_checksum["__item__"] = old_item_checksum
                for asset_key, asset in obj["assets"].items():
                    old_asset_checksum = asset.get("checksum:multihash")
                    if old_asset_checksum is not None:
                        if old_checksum is None:
                            old_checksum = {}
                        old_checksum[asset_key] = old_asset_checksum
                # Drop the retired checksum extension ID; it may already be gone.
                try:
                    obj["stac_extensions"].remove(OldExtensionShortIDs.CHECKSUM.value)
                except ValueError:
                    pass
        super().migrate(obj, version, info)
        if old_checksum is not None:
            # Ensure the File Info schema is declared before writing fields.
            if SCHEMA_URI not in obj["stac_extensions"]:
                obj["stac_extensions"].append(SCHEMA_URI)
            for key in old_checksum:
                if key == "__item__":
                    obj["properties"][CHECKSUM_PROP] = old_checksum[key]
                else:
                    obj["assets"][key][CHECKSUM_PROP] = old_checksum[key]

# Singleton hooks instance registered with pystac's migration machinery.
FILE_EXTENSION_HOOKS: ExtensionHooks = FileExtensionHooks()
|
import got from 'got'
/**
 * Fetches one random cat fact from the herokuapp API.
 *
 * @returns {Promise<string>} the fact text
 * @throws {Error} when the response does not contain a fact (also covers an
 *   unexpected response shape, which previously surfaced as an opaque
 *   TypeError instead of this error)
 */
export const getCatFact = async () => {
  const json = await got('https://the-cat-fact.herokuapp.com/api/randomfact').json()
  // Guard the expected shape { data: [{ fact: "..." }, ...] } so a missing
  // or empty `data` array falls through to the explicit error below.
  const [factData] = json.data ?? []
  const factText = factData?.fact
  if (!factText) {
    throw new Error('Fact text not found.')
  }
  return factText
}
|
/*
* Copyright (c) 2017-2018 Aion foundation.
*
* This file is part of the aion network project.
*
* The aion network project is free software: you can redistribute it
* and/or modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or any later version.
*
* The aion network project is distributed in the hope that it will
* be useful, but WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with the aion network project source files.
* If not, see <https://www.gnu.org/licenses/>.
*
* Contributors:
* Aion foundation.
* Fabian Vogelsteller <fabian@frozeman.de>
*/
"use strict";
var _ = require('underscore');
var errors = require('aion-web3-core-helpers').errors;
// Pick WebSocket / base64 / URL-parsing implementations for the current
// environment (browser vs. Node.js).
var Ws = null;
var _btoa = null;
var parseURL = null;
if (typeof window !== 'undefined' && typeof window.WebSocket !== 'undefined') {
    // Browser: wrap the native WebSocket and WHATWG URL implementations.
    Ws = function(url, protocols) {
        return new window.WebSocket(url, protocols);
    };
    _btoa = btoa;
    parseURL = function(url) {
        return new URL(url);
    };
} else {
    // Node.js: use the `websocket` package's W3C-compatible client.
    Ws = require('websocket').w3cwebsocket;
    _btoa = function(str) {
        // Buffer.from() replaces the deprecated Buffer(str) constructor
        // (Node DEP0005); behavior is identical for string input.
        return Buffer.from(str).toString('base64');
    };
    var url = require('url');
    if (url.URL) {
        // Use the new Node 6+ API for parsing URLs that supports username/password
        var newURL = url.URL;
        parseURL = function(url) {
            return new newURL(url);
        };
    }
    else {
        // Web3 supports Node.js 5, so fall back to the legacy URL API if necessary
        parseURL = require('url').parse;
    }
}
// Default connection ws://localhost:8546
/**
 * JSON-RPC provider over a websocket connection.
 *
 * @param {String} url e.g. ws://localhost:8546, may embed basic-auth credentials
 * @param {Object} [options] {timeout, headers, protocol, clientConfig}
 */
var WebsocketProvider = function WebsocketProvider(url, options) {
    var _this = this;
    // Pending request callbacks keyed by JSON-RPC id, plus subscription listeners.
    this.responseCallbacks = {};
    this.notificationCallbacks = [];
    options = options || {};
    this._customTimeout = options.timeout;
    // The w3cwebsocket implementation does not support Basic Auth
    // username/password in the URL. So generate the basic auth header, and
    // pass through with any additional headers supplied in constructor
    var parsedURL = parseURL(url);
    var headers = options.headers || {};
    var protocol = options.protocol || undefined;
    if (parsedURL.username && parsedURL.password) {
        headers.authorization = 'Basic ' + _btoa(parsedURL.username + ':' + parsedURL.password);
    }
    // Allow a custom client configuration
    var clientConfig = options.clientConfig || undefined;
    // When all node core implementations that do not have the
    // WHATWG compatible URL parser go out of service this line can be removed.
    if (parsedURL.auth) {
        headers.authorization = 'Basic ' + _btoa(parsedURL.auth);
    }
    this.connection = new Ws(url, protocol, undefined, headers, undefined, clientConfig);
    this.addDefaultEvents();
    // LISTEN FOR CONNECTION RESPONSES
    this.connection.onmessage = function(e) {
        /*jshint maxcomplexity: 6 */
        var data = (typeof e.data === 'string') ? e.data : '';
        _this._parseResponse(data).forEach(function(result){
            var id = null;
            // get the id which matches the returned id
            // (batch responses are arrays: take any id we have a callback for)
            if(_.isArray(result)) {
                result.forEach(function(load){
                    if(_this.responseCallbacks[load.id])
                        id = load.id;
                });
            } else {
                id = result.id;
            }
            // notification (no id, *_subscription method): fan out to listeners
            if(!id && result && result.method && result.method.indexOf('_subscription') !== -1) {
                _this.notificationCallbacks.forEach(function(callback){
                    if(_.isFunction(callback))
                        callback(result);
                });
            // fire the callback
            } else if(_this.responseCallbacks[id]) {
                _this.responseCallbacks[id](null, result);
                delete _this.responseCallbacks[id];
            }
        });
    };
    // make property `connected` which will return the current connection status
    Object.defineProperty(this, 'connected', {
        get: function () {
            return this.connection && this.connection.readyState === this.connection.OPEN;
        },
        enumerable: true,
    });
};
/**
Will add the error and end event to timeout existing calls
@method addDefaultEvents
*/
WebsocketProvider.prototype.addDefaultEvents = function () {
    // Install the baseline error/close handlers: both fail all pending
    // requests; close additionally clears every registered callback.
    var self = this;
    this.connection.onerror = function () {
        self._timeout();
    };
    this.connection.onclose = function () {
        self._timeout();
        // reset all requests and callbacks
        self.reset();
    };
};
/**
Will parse the response and make an array out of it.
@method _parseResponse
@param {String} data
*/
WebsocketProvider.prototype._parseResponse = function(data) {
    var _this = this,
        returnValues = [];
    // DE-CHUNKER
    // A frame may contain several concatenated JSON-RPC payloads, or only a
    // fragment of one.  Insert an explicit '|--|' marker at every }{-style
    // payload boundary, then split on it.
    var dechunkedData = data
        .replace(/\}[\n\r]?\{/g,'}|--|{') // }{
        .replace(/\}\][\n\r]?\[\{/g,'}]|--|[{') // }][{
        .replace(/\}[\n\r]?\[\{/g,'}|--|[{') // }[{
        .replace(/\}\][\n\r]?\{/g,'}]|--|{') // }]{
        .split('|--|');
    dechunkedData.forEach(function(data){
        // prepend the last chunk (a fragment kept from an earlier frame)
        if(_this.lastChunk)
            data = _this.lastChunk + data;
        var result = null;
        try {
            result = JSON.parse(data);
        } catch(e) {
            // Incomplete JSON: stash the fragment and wait for the next frame.
            _this.lastChunk = data;
            // start timeout to cancel all requests if the rest never arrives
            clearTimeout(_this.lastChunkTimeout);
            _this.lastChunkTimeout = setTimeout(function(){
                _this._timeout();
                throw errors.InvalidResponse(data);
            }, 1000 * 15);
            return;
        }
        // cancel timeout and set chunk to null
        clearTimeout(_this.lastChunkTimeout);
        _this.lastChunk = null;
        if(result)
            returnValues.push(result);
    });
    return returnValues;
};
/**
Adds a callback to the responseCallbacks object,
which will be called if a response matching the response Id will arrive.
@method _addResponseCallback
*/
WebsocketProvider.prototype._addResponseCallback = function(payload, callback) {
    // Batch payloads are arrays; key on the first entry's id/method then.
    var id = payload.id || payload[0].id;
    var method = payload.method || payload[0].method;
    this.responseCallbacks[id] = callback;
    this.responseCallbacks[id].method = method;
    var _this = this;
    // schedule triggering the error response if a custom timeout is set
    if (this._customTimeout) {
        setTimeout(function () {
            // Only fire if the response has not arrived in the meantime.
            if (_this.responseCallbacks[id]) {
                _this.responseCallbacks[id](errors.ConnectionTimeout(_this._customTimeout));
                delete _this.responseCallbacks[id];
            }
        }, this._customTimeout);
    }
};
/**
Timeout all requests when the end/error event is fired
@method _timeout
*/
WebsocketProvider.prototype._timeout = function () {
    // Fail every pending request with an InvalidConnection error and
    // forget its callback.  Object.keys() visits own properties only,
    // matching the original hasOwnProperty guard.
    var pending = this.responseCallbacks;
    Object.keys(pending).forEach(function (key) {
        pending[key](errors.InvalidConnection('on WS'));
        delete pending[key];
    });
};
WebsocketProvider.prototype.send = function (payload, callback) {
    var _this = this;
    // Still connecting: poll every 10 ms until the socket settles.
    if (this.connection.readyState === this.connection.CONNECTING) {
        setTimeout(function () {
            _this.send(payload, callback);
        }, 10);
        return;
    }
    // try reconnect, when connection is gone
    // if(!this.connection.writable)
    //     this.connection.connect({url: this.url});
    // Closed/closing: report the failure both via onerror and the callback.
    if (this.connection.readyState !== this.connection.OPEN) {
        console.error('connection not open on send()');
        if (typeof this.connection.onerror === 'function') {
            this.connection.onerror(errors.ClosedConnection());
        } else {
            console.error('no error callback');
        }
        callback(errors.ClosedConnection());
        return;
    }
    // Send, then register the response callback keyed by the payload id.
    this.connection.send(JSON.stringify(payload));
    this._addResponseCallback(payload, callback);
};
/**
Subscribes to provider events.provider
@method on
@param {String} type 'notification', 'connect', 'error', 'end' or 'data'
@param {Function} callback the callback to call
*/
WebsocketProvider.prototype.on = function (type, callback) {
    // 'data' listeners accumulate; connect/end/error replace the single
    // underlying websocket handler.  Unknown types are ignored.
    if (typeof callback !== 'function') {
        throw errors.InvalidCallback('on', 'second');
    }
    if (type === 'data') {
        this.notificationCallbacks.push(callback);
    } else if (type === 'connect') {
        this.connection.onopen = callback;
    } else if (type === 'end') {
        this.connection.onclose = callback;
    } else if (type === 'error') {
        this.connection.onerror = callback;
    }
};
// TODO add once
/**
Removes event listener
@method removeListener
@param {String} type 'notification', 'connect', 'error', 'end' or 'data'
@param {Function} callback the callback to call
*/
WebsocketProvider.prototype.removeListener = function (type, callback) {
    // Only 'data' listeners can be removed individually; other types are
    // single handlers managed via on()/removeAllListeners().
    var self = this;
    if (type === 'data') {
        self.notificationCallbacks.forEach(function (registered, idx) {
            if (registered === callback) {
                self.notificationCallbacks.splice(idx, 1);
            }
        });
    }
    // TODO removing connect missing
};
/**
Removes all event listeners
@method removeAllListeners
@param {String} type 'notification', 'connect', 'error', 'end' or 'data'
*/
WebsocketProvider.prototype.removeAllListeners = function (type) {
    // Clear all subscription listeners, or detach the single handler for
    // the given connection event; anything else is a no-op.
    if (type === 'data') {
        this.notificationCallbacks = [];
    } else if (type === 'connect') {
        this.connection.onopen = null;
    } else if (type === 'end') {
        this.connection.onclose = null;
    } else if (type === 'error') {
        this.connection.onerror = null;
    }
};
/**
Resets the providers, clears all callbacks
@method reset
*/
WebsocketProvider.prototype.reset = function () {
    // Fail any in-flight requests, drop all subscription listeners, then
    // re-arm the default error/close handlers on the connection.
    this._timeout();
    this.notificationCallbacks = [];
    this.addDefaultEvents();
};
WebsocketProvider.prototype.disconnect = function () {
    // Close the underlying socket if one was ever created; the onclose
    // handler installed by addDefaultEvents performs the cleanup.
    if (!this.connection) {
        return;
    }
    this.connection.close();
};
module.exports = WebsocketProvider;
|
/* See LICENSE file for copyright and license details. */
/* appearance, colors are specified in the form 0x00bbggrr or with the RGB(r, g, b) macro */
/* NOTE: Win32 COLORREF byte order — 0x00bbggrr, i.e. blue in the high byte. */
#define normbordercolor 0x00cccccc
#define normbgcolor 0x00cccccc
#define normfgcolor 0x00000000
#define selbordercolor 0x00ff6600
#define selbgcolor 0x00ff6600
#define selfgcolor 0x00ffffff
#ifdef MMod
/* Optional build flag: resolve RegisterShellHookWindow dynamically and expose
 * a Reset() action (bound to MODKEY+R below) — presumably for reinitializing
 * the shell hook; confirm against the MMod patch. */
typedef BOOL ( *RegisterShellHookWindowProc ) ( HWND );
RegisterShellHookWindowProc RegisterShellHookWindowF;
void Reset ( );
#endif
static const unsigned int borderpx = 2; /* border pixel of windows */
static const unsigned int textmargin = 5; /* margin for the text displayed on the bar */
static bool showbar = true; /* false means no bar */
static bool topbar = true; /* false means bottom bar */
/* tagging */
static const char tags[][MAXTAGLEN] = { "1", "2", "3", "4", "5", "6", "7", "8", "9" };
static unsigned int tagset[] = {1, 1}; /* after start, first tag is selected */
/* window rules: matched on class name and title substring; `tags mask` pins
 * the window to those tags, `isfloating` exempts it from tiling */
static Rule rules[] = {
	/* class title tags mask isfloating */
	{ "MozillaUIWindowClass", "- Mozilla Firefox", 1 << 8, false },
};
/* layout(s) */
static float mfact = 0.55; /* factor of master area size [0.05..0.95] */
static Layout layouts[] = {
	/* symbol arrange function */
	{ "[]=", tile }, /* first entry is default */
	{ "><>", NULL }, /* no layout function means floating behavior */
	{ "[M]", monocle },
};
/* key definitions */
#define MODKEY (MOD_CONTROL | MOD_ALT)
/* Per-tag bindings: view, toggle view, move window, toggle window tag. */
#define TAGKEYS(KEY,TAG) \
	{ MODKEY, KEY, view, {.ui = 1 << TAG} }, \
	{ MODKEY|MOD_CONTROL, KEY, toggleview, {.ui = 1 << TAG} }, \
	{ MODKEY|MOD_SHIFT, KEY, tag, {.ui = 1 << TAG} }, \
	{ MODKEY|MOD_CONTROL|MOD_SHIFT, KEY, toggletag, {.ui = 1 << TAG} },
/* helper for spawning shell commands in the pre dwm-5.0 fashion */
#define SHCMD(cmd) { .v = (const char*[]){ "/bin/sh", "-c", cmd, NULL } }
/* commands */
static const char *termcmd[] = { "cmd.exe", NULL };
static Key keys[] = {
	/* modifier key function argument */
	{ MODKEY|MOD_SHIFT, VK_RETURN, spawn, {.v = termcmd } },
	{ MODKEY, 'B', togglebar, {0} },
	{ MODKEY, 'J', focusstack, {.i = +1 } },
	{ MODKEY, 'K', focusstack, {.i = -1 } },
	{ MODKEY, 'H', setmfact, {.f = -0.05} },
	{ MODKEY, 'L', setmfact, {.f = +0.05} },
	{ MODKEY, 'I', showclientclassname, {0} },
	{ MODKEY, VK_RETURN, zoom, {0} },
	{ MODKEY, 'R', Reset, {0} },
	{ MODKEY, VK_TAB, view, {0} },
	{ MODKEY|MOD_SHIFT, 'C', killclient, {0} },
	{ MODKEY, 'T', setlayout, {.v = &layouts[0]} },
	{ MODKEY, 'F', setlayout, {.v = &layouts[1]} },
	{ MODKEY, 'M', setlayout, {.v = &layouts[2]} },
	{ MODKEY, VK_SPACE, setlayout, {0} },
	{ MODKEY|MOD_SHIFT, VK_SPACE, togglefloating, {0} },
	{ MODKEY, 'N', toggleborder, {0} },
	{ MODKEY, 'E', toggleexplorer, {0} },
	{ MODKEY, '0', view, {.ui = ~0 } },
	{ MODKEY|MOD_SHIFT, '0', tag, {.ui = ~0 } },
	TAGKEYS( '1', 0)
	TAGKEYS( '2', 1)
	TAGKEYS( '3', 2)
	TAGKEYS( '4', 3)
	TAGKEYS( '5', 4)
	TAGKEYS( '6', 5)
	TAGKEYS( '7', 6)
	TAGKEYS( '8', 7)
	TAGKEYS( '9', 8)
	{ MODKEY, 'Q', quit, {0} },
};
/* button definitions */
/* click can be a tag number (starting at 0), ClkLtSymbol, ClkStatusText or ClkWinTitle */
static Button buttons[] = {
	/* click button event type modifier keys function argument */
	{ ClkLtSymbol, WM_LBUTTONDOWN, 0, setlayout, {0} },
	{ ClkLtSymbol, WM_RBUTTONDOWN, 0, setlayout, {.v = &layouts[2]} },
	{ ClkWinTitle, WM_MBUTTONDOWN, 0, zoom, {0} },
	{ ClkStatusText, WM_MBUTTONDOWN, 0, spawn, {.v = termcmd } },
#if 0
	{ ClkClientWin, WM_MBUTTONDOWN, MODKEY, togglefloating, {0} },
#endif
	{ ClkTagBar, WM_LBUTTONDOWN, VK_MENU, tag, {0} },
	{ ClkTagBar, WM_RBUTTONDOWN, VK_MENU, toggletag, {0} },
	{ ClkTagBar, WM_LBUTTONDOWN, 0, view, {0} },
	{ ClkTagBar, WM_RBUTTONDOWN, 0, toggleview, {0} },
};
|
// Copyright 2020 The casbin Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import React from "react";
import * as AccountBackend from "./backend/AccountBackend";
import * as NodeBackend from "./backend/NodeBackend";
import * as Setting from "./Setting";
import * as Conf from "./Conf";
import { withRouter, Link } from "react-router-dom";
import i18next from "i18next";
class Header extends React.Component {
  constructor(props) {
    super(props);
    // searchValue: current search box text; searchResShow: dropdown visibility;
    // nodes: all nodes (loaded in componentDidMount); matchNodes: search hits.
    this.state = {
      classes: props,
      searchValue: "",
      searchResShow: false,
      nodes: [],
      matchNodes: [],
    };
  }
  componentDidMount() {
    // Load the node list once mounted; getNodes() is defined later in this
    // class (not visible here) — presumably it populates this.state.nodes.
    this.getNodes();
  }
getMatchNodes(nodes, curSearchVal, matchNodes) {
if (!curSearchVal || !nodes) {
return;
}
for (let i = 0; i < nodes.length; i++) {
const name = nodes[i].name;
const id = nodes[i].id;
if (name.indexOf(curSearchVal) > -1 || id.indexOf(curSearchVal) > -1) {
matchNodes.push(nodes[i]);
}
}
}
onSearchValueChange(e) {
const nodes = this.state.nodes;
const curSearchVal = e.target.value;
const matchNodes = [];
this.getMatchNodes(nodes, curSearchVal, matchNodes);
this.setState({
searchValue: curSearchVal,
matchNodes: matchNodes,
});
}
  addSearchValue() {
    // Prefix the query with "<domain>/t " so external search engines
    // restrict results to this site's topic pages.
    this.setState({
      searchValue: `${Conf.Domain}/t ` + this.state.searchValue,
    });
  }
  onKeyup(e) {
    // On Enter (keyCode 13), open the configured external search engine with
    // a site-restricted query in a new tab.
    if (e.keyCode === 13) {
      const searchSide = Conf.DefaultSearchSite;
      switch (searchSide) {
        case "baidu":
          // NOTE(review): uses q6 (site) / q3 (terms) parameters rather than
          // the usual wd= — confirm this is the intended Baidu query format.
          window.open(
            `https://www.baidu.com/s?q6=${Conf.Domain}&q3=${this.state.searchValue}`
          );
          return;
        case "bing":
          window.open(
            `https://cn.bing.com/search?q=site:${Conf.Domain}/t ${this.state.searchValue}`
          );
          return;
        case "google":
          window.open(
            `https://www.google.com/search?q=site:${Conf.Domain}/t ${this.state.searchValue}`
          );
          return;
      }
    }
  }
signout() {
if (!window.confirm(i18next.t("signout:Are you sure to log out?"))) {
return;
}
AccountBackend.signout().then((res) => {
if (res.status === "ok") {
this.props.onSignout();
this.props.history.push("/signout");
} else {
this.props.history.push("/signout");
}
});
}
  renderItem() {
    // Top navigation links: guest links (home/sign up/sign in) when signed
    // out; member links plus an Admin entry for moderators when signed in.
    const isSignedIn =
      this.props.account !== undefined && this.props.account !== null;
    const username = this.props.account?.id;
    if (!isSignedIn) {
      return (
        <td width="570" align="right" style={{ paddingTop: "2px" }}>
          <Link to="/" className="top">
            {i18next.t("general:Home")}
          </Link>
          <Link to="/signup" className="top">
            {i18next.t("general:Sign Up")}
          </Link>
          <Link to="/signin" className="top">
            {i18next.t("general:Sign In")}
          </Link>
        </td>
      );
    } else {
      return (
        <td width="570" align="right" style={{ paddingTop: "2px" }}>
          <Link to="/" className="top">
            {i18next.t("general:Home")}
          </Link>
          <Link to={`/member/${username}`} className="top">
            {username}
          </Link>
          <Link to="/notes" className="top">
            {i18next.t("general:Note")}
          </Link>
          <Link to="/t" className="top">
            {i18next.t("general:Timeline")}
          </Link>
          <Link to="/settings" className="top">
            {i18next.t("general:Setting")}
          </Link>
          {this.props.account?.isModerator ? (
            <span>
              <Link to="/admin" className="top">
                {i18next.t("general:Admin")}
              </Link>
            </span>
          ) : null}
          <a href="#;" onClick={this.signout.bind(this)} className="top">
            {i18next.t("general:Sign Out")}
          </a>
        </td>
      );
    }
  }
renderMobileHeader() {
const isSignedIn =
this.props.account !== undefined && this.props.account !== null;
const menuStyle = this.props.showMenu
? {
"--show-dropdown": "block",
}
: null;
if (!isSignedIn) {
return (
<div id="Top">
<div className="content">
<div style={{ paddingTop: "6px" }}>
<table cellPadding="0" cellSpacing="0" border="0" width="100%">
<tr>
<td width="5" align="left"></td>
<td width="80" align="left" style={{ paddingTop: "4px" }}>
<Link to="/" name="top">
<div id="logoMobile"></div>
</Link>
</td>
<td width="auto" align="right" style={{ paddingTop: "2px" }}>
<Link to="/" className="top">
{i18next.t("general:Home")}
</Link>
<Link to="/signup" className="top">
{i18next.t("general:Sign Up")}
</Link>
<Link to="/signin" className="top">
{i18next.t("general:Sign In")}
</Link>
</td>
<td width="10" align="left"></td>
</tr>
</table>
</div>
</div>
</div>
);
} else {
return (
<header className="site-header">
<div className="site-header-logo">
<div id="logoMobile" onClick={() => this.props.history.push("/")} />
</div>
<div className="site-header-menu">
{this.renderSearch()}
<button id="menu-entry" onClick={() => this.changeShowMenuStatus()}>
{this.props.account?.avatar === "" ? (
<img
src={Setting.getUserAvatar(this.props.account?.id)}
width={24}
border={0}
style={{ borderRadius: "32px", verticalAlign: "middle" }}
width="32"
height="32"
align="absmiddle"
alt={this.props.account?.id}
/>
) : (
<img
src={this.props.account?.avatar}
width={24}
border={0}
style={{ borderRadius: "32px", verticalAlign: "middle" }}
width="32"
height="32"
align="absmiddle"
alt={this.props.account?.id}
/>
)}
</button>
<div id="user-menu" style={menuStyle}>
<div>
<Link to={`/member/${this.props.account?.id}`} className="top">
{i18next.t("general:Homepage")}
</Link>
</div>
<div>
<Link to="/my/nodes" className="top">
{i18next.t("bar:Nodes")}
</Link>
</div>
<div>
<Link to="/my/topics" className="top">
{i18next.t("bar:Topics")}
</Link>
</div>
<div>
<Link to="/settings" className="top">
{i18next.t("general:Setting")}
</Link>
</div>
<div>
<Link to="/admin" className="top">
{i18next.t("general:Admin")}
</Link>
</div>
<div className="menu_sep"></div>
<div>
<Link to="/i" className="top">
<img
src={Setting.getStatic("/static/img/neue_image.png")}
height="14"
border="0"
align="absmiddle"
/>{" "}
{i18next.t("bar:File library")}
</Link>
</div>
<div>
<Link to="/notes" className="top">
<img
src={Setting.getStatic("/static/img/neue_notepad.png")}
height="14"
border="0"
align="absmiddle"
/>{" "}
{i18next.t("general:Note")}
</Link>
</div>
<div>
<Link to="/t" className="top">
<img
src={Setting.getStatic("/static/img/neue_comment.png")}
height="14"
border="0"
align="absmiddle"
/>{" "}
{i18next.t("general:Timeline")}
</Link>
</div>
<div className="menu_sep"></div>
<div>
<Link to="/select/language" className="top">
{i18next.t("general:Language")}
</Link>
</div>
<div className="menu_sep"></div>
<div>
<Link to="/settings/night/toggle" className="top">
<img
src={Setting.getStatic("/static/img/toggle-light.png")}
align="absmiddle"
height="10"
alt="Light"
style={{ verticalAlign: "middle" }}
/>
</Link>
</div>
<div className="menu_sep"></div>
<div style={{ padding: "10px" }}>
<div className="member-activity-bar">
<div
className="member-activity-start"
style={{ width: "5%" }}
></div>
</div>
</div>
<div className="menu_sep"></div>
<div>
<Link to="/signout" className="top">
{i18next.t("general:Sign Out")}
</Link>
</div>
</div>
</div>
</header>
);
}
}
renderSearch() {
if (Setting.PcBrowser) {
return (
<div id="Search">
<div id="qbar" className="">
<input
type="text"
maxLength="40"
name="q"
id="q"
autoComplete={"off"}
value={this.state.searchValue}
onKeyUp={(event) => this.onKeyup(event)}
onSubmit={() =>
this.window.open("https://www.google.com/search?1")
}
onChange={(event) => this.onSearchValueChange(event)}
onFocus={() => {
this.setState({
searchResShow: true,
});
}}
onBlur={() => {
setTimeout(() => {
this.setState({
searchResShow: false,
});
}, 200);
}}
/>
{this.state.searchResShow && this.state.searchValue ? (
<div
id="search-result"
className="box"
style={{ display: "block" }}
>
{this.state.matchNodes.length !== 0 ? (
<div className="cell">
<span className="fade">
节点 / Nodes
</span>
{this.state.matchNodes.map((val) => {
//TODO: maybe weshould add `active` iterm
return (
<a className="search-item" href={`/go/${val.id}`}>
{val.name} / {val.id}
</a>
);
})}
</div>
) : null}
<div className="cell">
<a
className="search-item"
href={`https://www.google.com/search?q=site:${Conf.Domain}/t ${this.state.searchValue}`}
target="_blank"
>
{" "}
Google {this.state.searchValue}{" "}
</a>
</div>
</div>
) : null}
</div>
</div>
);
}
if (this.props.account === undefined || this.props.account === null) {
return null;
}
// mobile
return (
<input
type="text"
id="site-search"
value={this.state.searchValue}
onKeyUp={(event) => this.onKeyup(event)}
onChange={(event) => this.onSearchValueChange(event)}
/>
);
}
changeShowMenuStatus() {
this.props.changeMenuStatus(!this.props.showMenu);
}
  getNodes() {
    // Fetch the node list used by the search dropdown and store it in state.
    // NOTE(review): this guard reads `this.state.account`, which is never
    // assigned anywhere in this component (the account is passed via
    // `this.props.account`), so `undefined === null` is false and the early
    // return likely never fires — confirm the intended source of `account`.
    if (this.state.account === null) {
      return;
    }
    NodeBackend.getNodes().then((res) => {
      this.setState({
        nodes: res,
      });
    });
  }
  render() {
    // Mobile browsers get a dedicated compact header with its own markup.
    if (!Setting.PcBrowser) {
      return this.renderMobileHeader();
    }
    // Desktop layout: logo | search box | account links in one table row.
    return (
      <div id="Top">
        <div className="content">
          <div style={{ paddingTop: "6px" }}>
            <table cellPadding="0" cellSpacing="0" border="0" width="100%">
              <tbody>
                <tr>
                  <td width="110" align="left">
                    <Link to="/" name="top" title="way to explore">
                      <div id="logo" />
                    </Link>
                  </td>
                  <td width="auto" align="left">
                    {this.renderSearch()}
                  </td>
                  {/* renderItem returns the <td> with auth/account links. */}
                  {this.renderItem()}
                </tr>
              </tbody>
            </table>
          </div>
        </div>
      </div>
    );
  }
export default withRouter(Header);
|
'use strict'
const { app, mock, assert } = require('egg-mock/bootstrap')
// Smoke test for the home controller: the root route must answer 200 with
// the literal body "Hello World".
describe('❤️ test/controller/home.test.js:', () => {
  describe('GET /', () => {
    it('should status 200 and get the body', async () => {
      await app.httpRequest().get('/').expect(200).expect('Hello World')
    })
  })
})
|
"""
WSGI config for githubapi project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'githubapi.settings')
application = get_wsgi_application()
|
define(["require", "exports", "@fluentui/set-version"], function (require, exports, set_version_1) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
set_version_1.setVersion('@fluentui/utilities', '8.3.9');
});
//# sourceMappingURL=version.js.map
|
/**
* Marlin 3D Printer Firmware
* Copyright (C) 2016 MarlinFirmware [https://github.com/MarlinFirmware/Marlin]
*
* Based on Sprinter and grbl.
* Copyright (C) 2011 Camiel Gubbels / Erik van der Zalm
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
// PT100 with INA826 amp on Ultimaker v2.0 electronics
// The PT100 in the Ultimaker v2.0 electronics has a high sample value for a high temperature.
// This does not match the normal thermistor behaviour so we need to set the following defines
// Per the note above, the PT100 sensor (table 20) reports a *high* raw ADC
// sample at high temperature — the opposite of a normal NTC thermistor —
// so the raw endpoints used by the firmware's temperature range checks are
// overridden here for every heater configured with this table.
#if (THERMISTORHEATER_0 == 20)
#define HEATER_0_RAW_HI_TEMP 16383
#define HEATER_0_RAW_LO_TEMP 0
#endif
#if (THERMISTORHEATER_1 == 20)
#define HEATER_1_RAW_HI_TEMP 16383
#define HEATER_1_RAW_LO_TEMP 0
#endif
#if (THERMISTORHEATER_2 == 20)
#define HEATER_2_RAW_HI_TEMP 16383
#define HEATER_2_RAW_LO_TEMP 0
#endif
#if (THERMISTORHEATER_3 == 20)
#define HEATER_3_RAW_HI_TEMP 16383
#define HEATER_3_RAW_LO_TEMP 0
#endif
#if (THERMISTORBED == 20)
#define HEATER_BED_RAW_HI_TEMP 16383
#define HEATER_BED_RAW_LO_TEMP 0
#endif
// Lookup table mapping { oversampled raw ADC reading, temperature in °C }
// for the PT100 + INA826 amplifier chain. Rows are sorted by increasing
// raw value; stored in PROGMEM (AVR flash) to save RAM.
const short temptable_20[][2] PROGMEM = {
{ 0 * OVERSAMPLENR, 0 },
{ 227 * OVERSAMPLENR, 1 },
{ 236 * OVERSAMPLENR, 10 },
{ 245 * OVERSAMPLENR, 20 },
{ 253 * OVERSAMPLENR, 30 },
{ 262 * OVERSAMPLENR, 40 },
{ 270 * OVERSAMPLENR, 50 },
{ 279 * OVERSAMPLENR, 60 },
{ 287 * OVERSAMPLENR, 70 },
{ 295 * OVERSAMPLENR, 80 },
{ 304 * OVERSAMPLENR, 90 },
{ 312 * OVERSAMPLENR, 100 },
{ 320 * OVERSAMPLENR, 110 },
{ 329 * OVERSAMPLENR, 120 },
{ 337 * OVERSAMPLENR, 130 },
{ 345 * OVERSAMPLENR, 140 },
{ 353 * OVERSAMPLENR, 150 },
{ 361 * OVERSAMPLENR, 160 },
{ 369 * OVERSAMPLENR, 170 },
{ 377 * OVERSAMPLENR, 180 },
{ 385 * OVERSAMPLENR, 190 },
{ 393 * OVERSAMPLENR, 200 },
{ 401 * OVERSAMPLENR, 210 },
{ 409 * OVERSAMPLENR, 220 },
{ 417 * OVERSAMPLENR, 230 },
{ 424 * OVERSAMPLENR, 240 },
{ 432 * OVERSAMPLENR, 250 },
{ 440 * OVERSAMPLENR, 260 },
{ 447 * OVERSAMPLENR, 270 },
{ 455 * OVERSAMPLENR, 280 },
{ 463 * OVERSAMPLENR, 290 },
{ 470 * OVERSAMPLENR, 300 },
{ 478 * OVERSAMPLENR, 310 },
{ 485 * OVERSAMPLENR, 320 },
{ 493 * OVERSAMPLENR, 330 },
{ 500 * OVERSAMPLENR, 340 },
{ 507 * OVERSAMPLENR, 350 },
{ 515 * OVERSAMPLENR, 360 },
{ 522 * OVERSAMPLENR, 370 },
{ 529 * OVERSAMPLENR, 380 },
{ 537 * OVERSAMPLENR, 390 },
{ 544 * OVERSAMPLENR, 400 },
// Sparser sampling above 400 °C (beyond normal hotend operating range).
{ 614 * OVERSAMPLENR, 500 },
{ 681 * OVERSAMPLENR, 600 },
{ 744 * OVERSAMPLENR, 700 },
{ 805 * OVERSAMPLENR, 800 },
{ 862 * OVERSAMPLENR, 900 },
{ 917 * OVERSAMPLENR, 1000 },
{ 968 * OVERSAMPLENR, 1100 }
};
|
#!/usr/bin/env python
import os
import re
def get_program_parameters():
    """Read the VTK source path (-p) and application sources (-s) from argv."""
    import argparse
    arg_parser = argparse.ArgumentParser(
        description='Generate a FindPackage(VTK COMPONENTS) that lists all modules referenced by a set of files.',
        epilog='\n',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    arg_parser.add_argument('-p', '--path', help='The path to the VTK source tree.')
    arg_parser.add_argument('-s', '--source', nargs='+', help='The path to the application file or folder.')
    parsed = arg_parser.parse_args()
    return parsed.path, parsed.source
def IncludesToPaths(path):
    """
    Build a dict that maps include files to paths.

    Walks ``path`` and records every file whose name starts with ``vtk`` or
    ``QVTK`` and ends in ``.h``, mapping the file name to its directory.
    """
    header_re = re.compile(r'((?:vtk|QVTK).*\.h)')
    include_to_path = dict()
    for root, dirs, files in os.walk(path):
        for name in files:
            if not header_re.match(name):
                continue
            include_to_path[header_re.findall(name)[0]] = root
    return include_to_path
def FindModules(path):
    """
    Build a dict that maps paths to modules.

    Every directory under ``path`` holding a ``vtk.module`` file is mapped to
    the value following that file's NAME field.

    :raises RuntimeError: if a vtk.module file lacks a NAME field.
    """
    path_to_module = dict()
    module_file_re = re.compile(r'vtk\.module$')
    for root, dirs, files in os.walk(path):
        for name in files:
            if not module_file_re.match(name):
                continue
            with open(os.path.join(root, name), 'r') as fid:
                tokens = fid.read().split()
            if 'NAME' not in tokens:
                raise RuntimeError('%s is missing a NAME field' % os.path.join(root, name))
            path_to_module[root] = tokens[tokens.index('NAME') + 1]
    return path_to_module
def FindIncludes(path):
    """
    Build a set that contains vtk includes.

    Scans the file at ``path`` and collects every token that looks like a
    vtk/QVTK header name (``vtk*.h`` or ``QVTK*.h``).
    """
    include_re = re.compile(r'((?:vtk|QVTK).*\.h)')
    with open(path, 'r') as fid:
        found = include_re.findall(fid.read())
    return set(found)
def FindModuleFiles(path):
    """
    Get a list of module files in the VTK directory.

    Returns the full path of every file named exactly ``vtk.module`` under
    ``path``, in os.walk order.
    """
    found = []
    for root, dirs, files in os.walk(path):
        for name in files:
            if name == 'vtk.module':
                found.append(os.path.join(root, name))
    return found
def MakeFindPackage(modules):
    """
    Make a useful find_package command.

    Renders one sorted component per line, with the ``VTK::`` namespace
    prefix folded into the legacy ``vtk`` name.
    """
    lines = ['find_package(VTK COMPONENTS']
    lines.extend('  ' + m.replace('VTK::', 'vtk') for m in sorted(modules))
    return '\n'.join(lines) + '\n)'
def GenerateFindPackage(vtkSourceDir, sourceFiles):
    """
    Build the find_package(VTK COMPONENTS ...) report for the given sources.

    :param vtkSourceDir: Path to a VTK source tree containing vtk.module files.
    :param sourceFiles: Files and/or folders to scan for vtk/QVTK includes.
    :return: A multi-line report string, or None when no vtk includes are found.
    :raises IOError: If vtkSourceDir contains no vtk.module files.
    """
    # Generate dict's for mapping includes to modules
    includesToPaths = IncludesToPaths(vtkSourceDir)
    pathsToModules = FindModules(vtkSourceDir)
    # Test to see if VTK source is provided
    if len(pathsToModules) == 0:
        raise IOError(vtkSourceDir + ' is not a VTK source directory. It does not contain any vtk.module files.')
    valid_extensions = ['.h', '.hxx', '.cpp', '.cxx', '.cc']
    # Build a set of includes for all command line files
    allIncludes = set()
    for f in sourceFiles:
        if os.path.isfile(f):
            filename, file_extension = os.path.splitext(f)
            if file_extension in valid_extensions:
                allIncludes.update(FindIncludes(f))
        else:
            # We have a folder so look through all the files.
            for path, dirs, files in os.walk(f):
                for fn in files:
                    filename, file_extension = os.path.splitext(fn)
                    if file_extension in valid_extensions:
                        allIncludes.update(FindIncludes(os.path.join(path, fn)))
    # NOTE(review): returns None here, so main() would print "None" when no
    # vtk includes exist in the sources.
    if len(allIncludes) == 0:
        return
    # Build a set that contains all modules referenced in command line files
    allModules = set()
    for inc in allIncludes:
        if inc in includesToPaths:
            # Note: this is the *directory* containing the include, which is
            # then looked up in pathsToModules to get the module name.
            module = includesToPaths[inc]
            if module in pathsToModules:
                allModules.add(pathsToModules[includesToPaths[inc]])
    # Some modules are only referenced at runtime via factory/backend
    # mechanisms, so add their implementation counterparts explicitly.
    if 'VTK::RenderingCore' in allModules:
        allModules.add('VTK::RenderingOpenGL2')
        allModules.add('VTK::InteractionStyle')
        allModules.add('VTK::RenderingFreeType')
        allModules.add('VTK::RenderingGL2PSOpenGL2')
        allModules.add('VTK::RenderingContextOpenGL2')
    if 'VTK::DomainsChemistry' in allModules:
        allModules.add('VTK::DomainsChemistryOpenGL2')
    if 'VTK::RenderingVolume' in allModules:
        allModules.add('VTK::RenderingVolumeOpenGL2')
    if 'VTK::RenderingContext2D' in allModules:
        allModules.add('VTK::RenderingContextOpenGL2')
    if 'VTK::IOExport' in allModules:
        allModules.add('VTK::RenderingContextOpenGL2')
        allModules.add('VTK::IOExportOpenGL2')
        allModules.add('VTK::IOExportPDF')
        allModules.add('VTK::RenderingContextOpenGL2')
    modules = {'All modules referenced in the files:': allModules}
    res = list()
    for k, v in modules.items():
        res.append(k)
        res.append((MakeFindPackage(v)))
        res.append(
            'Your application code includes ' + str(len(v)) + ' of ' + str(len(pathsToModules)) + ' vtk modules.')
    return '\n'.join(res)
def main():
    """Entry point: validate the CLI arguments and print the report."""
    vtk_dir, sources = get_program_parameters()
    if vtk_dir is None or sources is None:
        raise IOError('We need a VTK source directory and the source files.')
    print(GenerateFindPackage(vtk_dir, sources))


if __name__ == '__main__':
    main()
|
import React from 'react'
import MapFilter from 'react-mapfilter'
import { ipcRenderer, remote } from 'electron'
import {
FIELD_TYPE_STRING
} from 'react-mapfilter/es5/constants'
import xor from 'lodash/xor'
import differenceBy from 'lodash/differenceBy'
import url from 'url'
import MenuItem from '@material-ui/core/MenuItem'
import randomBytes from 'randombytes'
import { MuiThemeProvider, createMuiTheme } from '@material-ui/core/styles'
import api from '../api'
import MenuItems from './MenuItems'
import ConvertDialog from './ConvertDialog'
const theme = createMuiTheme({
palette: {
primary: {
main: '#39527b'
}
}
})
const osmServerHost = 'http://' + remote.getGlobal('osmServerHost')
const styleUrl = `${osmServerHost}/styles/mapfilter-style/style.json`
class Home extends React.Component {
constructor (props) {
super(props)
var self = this
self.state = {
features: [],
mapPosition: { center: [0, 0], zoom: 0 },
showModal: false,
mapStyle: styleUrl
}
self.getFeatures()
this.handleChangeFeatures = this.handleChangeFeatures.bind(this)
this.zoomToDataRequest = this.zoomToDataRequest.bind(this)
this.zoomToDataResponse = this.zoomToDataResponse.bind(this)
this.zoomToLatLonResponse = this.zoomToLatLonResponse.bind(this)
this.refresh = this.refresh.bind(this)
ipcRenderer.on('refresh-window', this.refresh)
ipcRenderer.on('zoom-to-data-request', this.zoomToDataRequest)
ipcRenderer.on('zoom-to-data-response', self.zoomToDataResponse)
ipcRenderer.on('zoom-to-latlon-response', self.zoomToLatLonResponse)
}
refresh () {
this.getFeatures()
}
componentWillUnmount () {
ipcRenderer.removeListener('refresh-window', this.refresh)
ipcRenderer.removeListener('zoom-to-data-request', this.zoomToDataRequest)
ipcRenderer.removeListener('zoom-to-data-response', this.zoomToDataResponse)
ipcRenderer.removeListener('zoom-to-latlon-response', this.zoomToLatLonResponse)
}
zoomToLatLonResponse (_, lat, lon) {
this.setState({
mapPosition: { center: [lat, lon], zoom: 14 }
})
}
zoomToDataResponse (_, loc) {
this.setState({
mapPosition: { center: loc, zoom: 14 }
})
}
zoomToDataRequest () {
ipcRenderer.send('zoom-to-data-get-centroid')
}
handleDatasetChange () {
return (e) => {
this.setState({ formId: e.target.value })
}
}
handleConvertFeaturesClick () {
return () => {
this.setState({ showModal: 'convert' })
}
}
handleChangeFeatures (changedFeatures) {
const { features } = this.state
const xorFeatures = xor(changedFeatures, features)
const deleted = differenceBy(xorFeatures, changedFeatures, 'id')
const added = differenceBy(xorFeatures, features, 'id')
const updated = xorFeatures.filter(f => {
return added.indexOf(f) === -1 &&
deleted.indexOf(f) === -1 &&
features.indexOf(f) === -1
})
var cb = function (err, resp) {
if (err) return this.handleError(err)
}
deleted.forEach(f => api.del(f, cb))
added.forEach(f => this.createObservation(f))
updated.forEach(f => this.updateObservation(f))
this.setState({ features: changedFeatures })
}
updateObservation (f) {
const obs = this._observationsById[f.id]
const newObs = Object.assign({}, obs)
// TODO: media is currently not updated, but it will be in the future
const WHITELIST = ['fields', 'media']
Object.keys(f.properties || {}).forEach(function (key) {
if (WHITELIST.indexOf(key) > -1) return
newObs.tags[key] = f.properties[key]
})
// Mapeo Mobile currently expects field definitions as a property on tags
;(obs.tags.fields || []).forEach(function (field, i) {
if (!f.properties || f.properties[field.id] === undefined) return
newObs.tags.fields[i].answer = f.properties[field.id]
newObs.tags.fields[i].answered = true
})
api.update(newObs, (err, obs) => {
if (err) return this.handleError(err)
// Keep a reference to the updated obs
this._observationsById[obs.id] = obs
})
}
createObservation (f, cb) {
const newObs = {
id: f.id || randomBytes(8).toString('hex'),
type: 'observation',
tags: f.properties || {}
}
if (f.geometry) {
newObs.lon = f.geometry.coordinates[0]
newObs.lat = f.geometry.coordinates[1]
}
api.create(newObs, (err, obs) => {
if (err) return this.handleError(err)
// Keep a reference to the updated obs
this._observationsById[obs.id] = obs
})
}
closeModal () {
return () => { this.setState({ showModal: false }) }
}
getFeatures () {
var self = this
api.list(function (err, resp) {
if (err) return self.handleError(err)
const observations = JSON.parse(resp.body)
const byId = self._observationsById = observations.reduce(observationIdReducer, {})
// the byId reducer removes forks, so use that for the features array
const features = Object.keys(byId)
.map(key => byId[key])
.map(observationToFeature)
self.setState({ features })
})
}
handleError (err) {
// TODO: Show some kind of error message in the UI
console.error(err)
}
handleChangeMapPosition (mapPosition) {
this.setState({ mapPosition })
}
onMenuItemClick (view) {
if (view.modal) this.props.openModal(view.name)
else this.props.changeView(view.name)
}
render () {
const { features, showModal, mapPosition } = this.state
var appBarMenuItems = []
MenuItems.forEach((view, i) => {
var id = `menu-option-${view.name}`
if (view.name === 'MapFilter') return
appBarMenuItems.push(
<MenuItem
id={id}
onClick={this.onMenuItemClick.bind(this, view)}>
{view.label}
</MenuItem>
)
})
return (<div>
<MuiThemeProvider theme={theme}>
<MapFilter
mapStyle={styleUrl}
features={features}
mapPosition={mapPosition}
onChangeMapPosition={this.handleChangeMapPosition.bind(this)}
onChangeFeatures={this.handleChangeFeatures}
fieldTypes={{
notes: FIELD_TYPE_STRING
}}
datasetName='mapeo'
resizer={resizer}
appBarMenuItems={appBarMenuItems}
appBarTitle='Mapeo' />
<ConvertDialog
open={showModal === 'convert'}
onClose={() => { this.setState({ showModal: false }) }}
features={features} />
</MuiThemeProvider>
</div>)
}
}
function observationToFeature (obs, id) {
  // Convert a Mapeo observation into a GeoJSON Feature for MapFilter.
  var feature = {
    id: obs.id,
    type: 'Feature',
    geometry: null,
    properties: {}
  }
  // Use explicit null/undefined checks: the previous truthiness test
  // (`obs.lon && obs.lat`) silently dropped geometry for observations at
  // longitude 0 or latitude 0 (prime meridian / equator).
  if (obs.lon != null && obs.lat != null) {
    feature.geometry = {
      type: 'Point',
      coordinates: [obs.lon, obs.lat]
    }
  }
  // `fields` holds form definitions, not display data, so it is skipped.
  const WHITELIST = ['fields']
  Object.keys(obs.tags || {}).forEach(function (key) {
    if (WHITELIST.indexOf(key) > -1) return
    feature.properties[key] = obs.tags[key]
  })
  feature.properties.media = (obs.attachments || []).map(function (a) {
    var id = a.id || a // the phone doesn't have id property on it's attachments.
    return {
      // type: 'image' -- turns on media filtering on the sidebar.
      value: `${osmServerHost}/media/original/${id}`
    }
  })
  // MapFilter needs a non-empty notes property to render the field.
  if (!feature.properties.notes) feature.properties.notes = ' '
  return feature
}
function resizer (src, size) {
  // Local images are served directly; anything else is routed through the
  // digital-democracy resizing proxy at the requested square dimensions.
  const host = url.parse(src).hostname
  if (host === 'localhost' || host === '127.0.0.1') {
    return src
  }
  return 'https://resizer.digital-democracy.org/{width}/{height}/{url}'
    .replace('{width}', size)
    .replace('{height}', size)
    .replace('{url}', src)
}
function observationIdReducer (acc, obs) {
  // Collapse forks: when several versions share an id, keep the one with
  // the greatest timestamp (first seen wins on ties).
  const existing = acc[obs.id]
  if (existing === undefined || obs.timestamp > existing.timestamp) {
    acc[obs.id] = obs
  }
  return acc
}
module.exports = Home
|
// ##############################
// // // Cards
// #############################
import ChartCard from "./Cards/ChartCard.jsx";
import ProfileCard from "./Cards/ProfileCard.jsx";
import RegularCard from "./Cards/RegularCard.jsx";
import StatsCard from "./Cards/StatsCard.jsx";
import TasksCard from "./Cards/TasksCard.jsx";
import PostCard from "./Cards/PostCard.jsx";
import MatchCard from "./Cards/MatchCard.jsx";
import TeamCard from "./Cards/TeamCard.jsx";
import UserCard from "./Cards/UserCard.jsx";
import PostFindCard from "./Cards/PostFindCard";
// ##############################
// // // CustomButtons
// #############################
import Button from "./CustomButtons/Button.jsx";
import IconButton from "./CustomButtons/IconButton.jsx";
// ##############################
// // // CustomInput
// #############################
import CustomInput from "./CustomInput/CustomInput.jsx";
// ##############################
// // // Footer
// #############################
import Footer from "./Footer/Footer.jsx";
// ##############################
// // // Grid
// #############################
import ItemGrid from "./Grid/ItemGrid.jsx";
import ImageGridList from "./GridList/ImageGridList.jsx";
// ##############################
// // // Header
// #############################
import Header from "./Header/Header.jsx";
import HeaderLinks from "./Header/HeaderLinks.jsx";
// ##############################
// // // Sidebar
// #############################
import Sidebar from "./Sidebar/Sidebar.jsx";
// ##############################
// // // Snackbar
// #############################
import Snackbar from "./Snackbar/Snackbar.jsx";
import SnackbarContent from "./Snackbar/SnackbarContent.jsx";
// ##############################
// // // Table
// #############################
import Table from "./Table/Table.jsx";
// ##############################
// // // Table
// #############################
import SelectWrapped from "./Select/SelectWrapped.jsx";
// ##############################
// // // Tasks
// #############################
import Tasks from "./Tasks/Tasks.jsx";
// ##############################
// // // Typography
// #############################
import P from "./Typography/P.jsx";
import Quote from "./Typography/Quote.jsx";
import Muted from "./Typography/Muted.jsx";
import Primary from "./Typography/Primary.jsx";
import Info from "./Typography/Info.jsx";
import Success from "./Typography/Success.jsx";
import Warning from "./Typography/Warning.jsx";
import Danger from "./Typography/Danger.jsx";
import Small from "./Typography/Small.jsx";
import A from "./Typography/A.jsx";
// Barrel re-export: the single public entry point for all the dashboard UI
// building blocks imported above, grouped by category.
export {
  // Cards
  ChartCard,
  ProfileCard,
  RegularCard,
  StatsCard,
  TasksCard,
  PostCard,
  MatchCard,
  TeamCard,
  UserCard,
  PostFindCard,
  // CustomButtons
  Button,
  IconButton,
  // CustomInput
  CustomInput,
  // Footer
  Footer,
  // Grid
  ItemGrid,
  ImageGridList,
  // Header
  Header,
  HeaderLinks,
  // Sidebar
  Sidebar,
  //Snackbar
  Snackbar,
  SnackbarContent,
  // Table
  Table,
  // Select
  SelectWrapped,
  // Tasks
  Tasks,
  // Typography
  P,
  Quote,
  Muted,
  Primary,
  Info,
  Success,
  Warning,
  Danger,
  Small,
  A
};
|
// Copyright 2017 Benjamin Glatzel
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
namespace Intrinsic
{
namespace Core
{
namespace WorldFlags
{
// Bit flags stored in World::_flags.
enum Flags
{
  kLoadingUnloading = 0x01
};
}
// Static-only facade for the engine's world/scene state: node hierarchy
// load/save, active camera, and day/night cycle parameters.
struct World
{
  static void init();
  static void destroy();
  // <-
  // Load/save the entire world to/from a file.
  static void load(const _INTR_STRING& p_FilePath);
  static void save(const _INTR_STRING& p_FilePath);
  // <-
  // Node hierarchy operations (destroy/clone including children).
  static void destroyNodeFull(Components::NodeRef p_Ref);
  static Components::NodeRef cloneNodeFull(Components::NodeRef p_Ref);
  static void alignNodeWithGround(Components::NodeRef p_NodeRef);
  // <-
  // Serialize/deserialize a sub-hierarchy rooted at a given node.
  static void saveNodeHierarchy(const _INTR_STRING& p_FilePath,
                                Components::NodeRef p_RootNodeRef);
  static Components::NodeRef loadNodeHierarchy(const _INTR_STRING& p_FilePath);
  static void loadNodeResources(Components::NodeRef p_RootNodeRef);
  // <-
  static void updateDayNightCycle(float p_DeltaT);
  // <-
  static _INTR_STRING _filePath;       // Path the world was loaded from.
  static Components::NodeRef _rootNode;
  static Components::CameraRef _activeCamera;
  static uint32_t _flags;              // See WorldFlags::Flags.
  // <-
  // Day/night cycle state updated by updateDayNightCycle().
  static float _currentTime;
  static float _currentDayNightFactor;
  static glm::quat _currentSunLightOrientation;
  static glm::vec4 _currentSunLightColorAndIntensity;
  // Scripting interface
  _INTR_INLINE static Components::NodeRef getRootNode() { return _rootNode; }
  _INTR_INLINE static void setRootNode(Components::NodeRef p_Node)
  {
    _rootNode = p_Node;
  }
  _INTR_INLINE static const Components::CameraRef getActiveCamera()
  {
    return _activeCamera;
  }
  _INTR_INLINE static void setActiveCamera(Components::CameraRef p_Camera)
  {
    _activeCamera = p_Camera;
  }
};
}
}
|
'use strict';
var rfr = require('rfr');
var config = rfr('/config');
var logger = rfr('/stack-util/logger');
var AWS = config.AWS;
var cf = rfr('/stack-util/cloudFormation');
var path = require('path');
var fs = require('fs');
var assert = require('assert');
function createFunction(operationId, description) {
  // Resolve CloudFormation stack outputs first — they provide the deploy
  // bucket and the execution role the Lambda will run under — then create
  // (or update) the function itself.
  return cf.getStackOutputs()
    .then((cfOutputs) => {
      assert(cfOutputs.LambdaExecutionRoleArn, 'Missing LambdaExecutionRoleArn');
      const params = {
        Code: {
          S3Bucket: cfOutputs.LambdaDeployBucket,
          S3Key: config.getLambdaZipName(),
        },
        Description: description,
        FunctionName: config.getName(operationId),
        // Handler convention: "file-export" operationId -> "file.export".
        Handler: operationId.replace('-', '.'),
        Role: cfOutputs.LambdaExecutionRoleArn,
        Runtime: 'nodejs6.10',
        Timeout: 10
      };
      return params;
    })
    .then(createFunctionImpl);
}
function createFunctionImpl(params) {
  // Create the Lambda function; if it already exists, fall back to updating
  // its configuration and code.
  // Fix: the error branch previously fell through — after resolving with
  // updateFunction(params) on a conflict, execution continued to the
  // success log and a second resolve(undefined). A `return` now ensures
  // exactly one settlement path runs.
  return new Promise((resolve, reject) => {
    let lambda = new AWS.Lambda();
    lambda.createFunction(params, function(err, data) {
      if (err) {
        if (err.code === 'ResourceConflictException') {
          // Function already exists: update it and settle with that result.
          resolve(updateFunction(params));
        } else {
          reject(err);
        }
        return;
      }
      logger.info('Created/updated Lambda function', params.FunctionName);
      resolve(data);
    });
  });
}
function updateFunction(params) {
  // Apply the configuration changes first, then point the function at the
  // newly uploaded code bundle in S3.
  const configParams = {
    Description: params.Description,
    FunctionName: params.FunctionName,
    Handler: params.Handler,
    Role: params.Role,
    Runtime: params.Runtime,
    Timeout: params.Timeout
  };
  const codeParams = {
    FunctionName: params.FunctionName,
    S3Bucket: params.Code.S3Bucket,
    S3Key: params.Code.S3Key
  };
  return updateFunctionConfiguration(configParams)
    .then(() => updateFunctionCode(codeParams));
}
function updateFunctionConfiguration(params) {
  // Promise wrapper around the callback-style AWS SDK call.
  return new Promise((resolve, reject) => {
    new AWS.Lambda().updateFunctionConfiguration(params, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    });
  });
}
function updateFunctionCode(params) {
  // Promise wrapper around the callback-style AWS SDK call.
  return new Promise((resolve, reject) => {
    new AWS.Lambda().updateFunctionCode(params, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    });
  });
}
function addPermission(config) {
  // Grant API Gateway permission to invoke the given Lambda function.
  // Fix: on ResourceConflictException (permission already present) the code
  // called resolve() but then fell through to reject(err); a `return` now
  // stops execution after the conflict is treated as success.
  return new Promise((resolve, reject) => {
    let params = {
      Action: 'lambda:InvokeFunction',
      FunctionName: config.FunctionName,
      Principal: 'apigateway.amazonaws.com',
      StatementId: 'apigateway-invoke-permissions',
      // TODO(Justin): Add more optional stuff
      // EventSourceToken: 'STRING_VALUE',
      // Qualifier: 'STRING_VALUE',
      // SourceAccount: 'STRING_VALUE',
      // SourceArn: `arn:aws:execute-api:us-west-2:IAMAccountNumber://POST/event`
    };
    let lambda = new AWS.Lambda();
    lambda.addPermission(params, (err, data) => {
      if (err) {
        if (err.code === 'ResourceConflictException') {
          // Statement already exists — nothing to do.
          resolve();
          return;
        }
        reject(err);
        return;
      }
      logger.info('Permissions updated', config.FunctionName);
      resolve(data);
    });
  });
}
function createFunctionsFromSwagger() {
  // Read the swagger definition and create one Lambda function per
  // operationId, then grant API Gateway invoke permission on each.
  const swaggerFile = path.join(__dirname, '..', 'swagger', config.SWAGGER_SRC_JSON);
  const api = JSON.parse(fs.readFileSync(swaggerFile).toString());
  if (api == null) {
    logger.error("API not loaded")
  }
  const creations = [];
  // Renamed loop variables: the originals shadowed the `path` module and
  // the outer `result` binding.
  for (let apiPath in api.paths) {
    for (let method in api.paths[apiPath]) {
      const definition = api.paths[apiPath][method];
      const description = definition.description || 'Default description';
      creations.push(createFunction(definition.operationId, description));
    }
  }
  return Promise.all(creations).then((created) => {
    return Promise.all(created.map((fn) => addPermission(fn)));
  });
}
function createCustomAuthorizerFunction() {
  // Create the custom authorizer Lambda and allow API Gateway to invoke it.
  // Rewritten as a flat chain: the previous explicit-Promise-constructor
  // form never handled a rejection from addPermission, leaving the returned
  // promise pending forever; now every failure is logged and propagated.
  return createFunction('authorizer-Custom', 'Custom authorizer function for API Gateway to grant admin-only permissions')
    .then((data) => {
      // Add permissions for API Gateway to call Lambda function
      return addPermission(data).then(() => data.FunctionName);
    })
    .catch((err) => {
      logger.error(err);
      throw err;
    });
}
// Module-level accumulator filled by listFunctionsImpl across pages.
var lambdaFunctions = [];
function listFunctions() {
  // List all deployed Lambda functions whose name contains this stack's
  // resource prefix.
  // Fix: the error branch previously called reject(err) and then fell
  // through to resolve(lambdaFunctions); a `return` now separates the
  // two settlement paths.
  return new Promise((resolve, reject) => {
    lambdaFunctions = [];
    listFunctionsImpl(null, config.getResourcePrefix(), function (err) {
      if (err) {
        logger.error(err);
        reject(err);
        return;
      }
      resolve(lambdaFunctions);
    });
  });
}
function listFunctionsImpl(marker, stringPattern, callback) {
  // Fetch one page of Lambda functions, record those whose name contains
  // stringPattern, and recurse while the service reports more pages.
  let lambda = new AWS.Lambda();
  let params = {
    Marker: marker,
    MaxItems: 50
  };
  lambda.listFunctions(params, function (err, data) {
    if (err) {
      callback(err);
      return;
    }
    for (let i = 0; i < data.Functions.length; i++) {
      if (data.Functions[i].FunctionName.includes(stringPattern)) {
        lambdaFunctions.push(data.Functions[i].FunctionName);
      }
    }
    // Fix: the SDK omits NextMarker (undefined, not null) when there are no
    // more pages, so the previous strict `!== null` check could recurse
    // forever from the first page; a truthiness check handles both cases.
    if (data.NextMarker) {
      listFunctionsImpl(data.NextMarker, stringPattern, callback);
    } else {
      // All functions have been retrieved
      callback(null);
    }
  });
}
function deleteFunction(functionName) {
  // Delete a single Lambda function by name, logging any failure.
  return new Promise((resolve, reject) => {
    const request = { FunctionName: functionName };
    new AWS.Lambda().deleteFunction(request, function (err, data) {
      if (err) {
        logger.error(err);
        reject(err);
        return;
      }
      resolve(data);
    });
  });
}
function deleteFunctions() {
  // Delete every Lambda function belonging to this stack.
  // Rewritten as a flat chain: the previous Promise constructor had no
  // .catch on the inner listFunctions() call, so a listing failure left
  // the returned promise pending forever; now it rejects.
  return listFunctions().then((functionsArray) => {
    logger.info('Lambda functions for deletion', functionsArray);
    return Promise.all(functionsArray.map(deleteFunction));
  }).then((data) => {
    logger.info('Deleted Lambda functions successfully');
    return data;
  }).catch((err) => {
    logger.error(err);
    throw err;
  });
}
// Public API: Lambda provisioning helpers driven by the Swagger definition,
// the custom API Gateway authorizer, and bulk teardown.
module.exports = {
    createFunctionsFromSwagger,
    createCustomAuthorizerFunction,
    deleteFunctions
};
|
/*
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
 * bootstrap.js v3.0.0 by @fat and @mdo
 * Copyright 2013 Twitter Inc.
 * Licensed under the Apache License, Version 2.0:
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * NOTE: vendored, minified third-party code. Do not edit by hand; update by
 * replacing this block with the corresponding upstream release.
 */
if(!jQuery)throw new Error("Bootstrap requires jQuery");+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]}}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one(a.support.transition.end,function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b()})}(window.jQuery),+function(a){"use strict";var b='[data-dismiss="alert"]',c=function(c){a(c).on("click",b,this.close)};c.prototype.close=function(b){function c(){f.trigger("closed.bs.alert").remove()}var d=a(this),e=d.attr("data-target");e||(e=d.attr("href"),e=e&&e.replace(/.*(?=#[^\s]*$)/,""));var f=a(e);b&&b.preventDefault(),f.length||(f=d.hasClass("alert")?d:d.parent()),f.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one(a.support.transition.end,c).emulateTransitionEnd(150):c())};var d=a.fn.alert;a.fn.alert=function(b){return this.each(function(){var d=a(this),e=d.data("bs.alert");e||d.data("bs.alert",e=new c(this)),"string"==typeof b&&e[b].call(d)})},a.fn.alert.Constructor=c,a.fn.alert.noConflict=function(){return a.fn.alert=d,this},a(document).on("click.bs.alert.data-api",b,c.prototype.close)}(window.jQuery),+function(a){"use strict";var b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d)};b.DEFAULTS={loadingText:"loading..."},b.prototype.setState=function(a){var b="disabled",c=this.$element,d=c.is("input")?"val":"html",e=c.data();a+="Text",e.resetText||c.data("resetText",c[d]()),c[d](e[a]||this.options[a]),setTimeout(function(){"loadingText"==a?c.addClass(b).attr(b,b):c.removeClass(b).removeAttr(b)},0)},b.prototype.toggle=function(){var 
a=this.$element.closest('[data-toggle="buttons"]');if(a.length){var b=this.$element.find("input").prop("checked",!this.$element.hasClass("active")).trigger("change");"radio"===b.prop("type")&&a.find(".active").removeClass("active")}this.$element.toggleClass("active")};var c=a.fn.button;a.fn.button=function(c){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof c&&c;e||d.data("bs.button",e=new b(this,f)),"toggle"==c?e.toggle():c&&e.setState(c)})},a.fn.button.Constructor=b,a.fn.button.noConflict=function(){return a.fn.button=c,this},a(document).on("click.bs.button.data-api","[data-toggle^=button]",function(b){var c=a(b.target);c.hasClass("btn")||(c=c.closest(".btn")),c.button("toggle"),b.preventDefault()})}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=this.sliding=this.interval=this.$active=this.$items=null,"hover"==this.options.pause&&this.$element.on("mouseenter",a.proxy(this.pause,this)).on("mouseleave",a.proxy(this.cycle,this))};b.DEFAULTS={interval:5e3,pause:"hover",wrap:!0},b.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},b.prototype.getActiveIndex=function(){return this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},b.prototype.to=function(b){var c=this,d=this.getActiveIndex();return b>this.$items.length-1||0>b?void 0:this.sliding?this.$element.one("slid",function(){c.to(b)}):d==b?this.pause().cycle():this.slide(b>d?"next":"prev",a(this.$items[b]))},b.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, 
.prev").length&&a.support.transition.end&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},b.prototype.next=function(){return this.sliding?void 0:this.slide("next")},b.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},b.prototype.slide=function(b,c){var d=this.$element.find(".item.active"),e=c||d[b](),f=this.interval,g="next"==b?"left":"right",h="next"==b?"first":"last",i=this;if(!e.length){if(!this.options.wrap)return;e=this.$element.find(".item")[h]()}this.sliding=!0,f&&this.pause();var j=a.Event("slide.bs.carousel",{relatedTarget:e[0],direction:g});if(!e.hasClass("active")){if(this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid",function(){var b=a(i.$indicators.children()[i.getActiveIndex()]);b&&b.addClass("active")})),a.support.transition&&this.$element.hasClass("slide")){if(this.$element.trigger(j),j.isDefaultPrevented())return;e.addClass(b),e[0].offsetWidth,d.addClass(g),e.addClass(g),d.one(a.support.transition.end,function(){e.removeClass([b,g].join(" ")).addClass("active"),d.removeClass(["active",g].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger("slid")},0)}).emulateTransitionEnd(600)}else{if(this.$element.trigger(j),j.isDefaultPrevented())return;d.removeClass("active"),e.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return f&&this.cycle(),this}};var c=a.fn.carousel;a.fn.carousel=function(c){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},b.DEFAULTS,d.data(),"object"==typeof c&&c),g="string"==typeof c?c:f.slide;e||d.data("bs.carousel",e=new b(this,f)),"number"==typeof c?e.to(c):g?e[g]():f.interval&&e.pause().cycle()})},a.fn.carousel.Constructor=b,a.fn.carousel.noConflict=function(){return a.fn.carousel=c,this},a(document).on("click.bs.carousel.data-api","[data-slide], [data-slide-to]",function(b){var 
c,d=a(this),e=a(d.attr("data-target")||(c=d.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"")),f=a.extend({},e.data(),d.data()),g=d.attr("data-slide-to");g&&(f.interval=!1),e.carousel(f),(g=d.attr("data-slide-to"))&&e.data("bs.carousel").to(g),b.preventDefault()}),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var b=a(this);b.carousel(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d),this.transitioning=null,this.options.parent&&(this.$parent=a(this.options.parent)),this.options.toggle&&this.toggle()};b.DEFAULTS={toggle:!0},b.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},b.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b=a.Event("show.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.$parent&&this.$parent.find("> .panel > .in");if(c&&c.length){var d=c.data("bs.collapse");if(d&&d.transitioning)return;c.collapse("hide"),d||c.data("bs.collapse",null)}var e=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[e](0),this.transitioning=1;var f=function(){this.$element.removeClass("collapsing").addClass("in")[e]("auto"),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return f.call(this);var g=a.camelCase(["scroll",e].join("-"));this.$element.one(a.support.transition.end,a.proxy(f,this)).emulateTransitionEnd(350)[e](this.$element[0][g])}}},b.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse").removeClass("in"),this.transitioning=1;var 
d=function(){this.transitioning=0,this.$element.trigger("hidden.bs.collapse").removeClass("collapsing").addClass("collapse")};return a.support.transition?(this.$element[c](0).one(a.support.transition.end,a.proxy(d,this)).emulateTransitionEnd(350),void 0):d.call(this)}}},b.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()};var c=a.fn.collapse;a.fn.collapse=function(c){return this.each(function(){var d=a(this),e=d.data("bs.collapse"),f=a.extend({},b.DEFAULTS,d.data(),"object"==typeof c&&c);e||d.data("bs.collapse",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.collapse.Constructor=b,a.fn.collapse.noConflict=function(){return a.fn.collapse=c,this},a(document).on("click.bs.collapse.data-api","[data-toggle=collapse]",function(b){var c,d=a(this),e=d.attr("data-target")||b.preventDefault()||(c=d.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,""),f=a(e),g=f.data("bs.collapse"),h=g?"toggle":d.data(),i=d.attr("data-parent"),j=i&&a(i);g&&g.transitioning||(j&&j.find('[data-toggle=collapse][data-parent="'+i+'"]').not(d).addClass("collapsed"),d[f.hasClass("in")?"addClass":"removeClass"]("collapsed")),f.collapse(h)})}(window.jQuery),+function(a){"use strict";function b(){a(d).remove(),a(e).each(function(b){var d=c(a(this));d.hasClass("open")&&(d.trigger(b=a.Event("hide.bs.dropdown")),b.isDefaultPrevented()||d.removeClass("open").trigger("hidden.bs.dropdown"))})}function c(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}var d=".dropdown-backdrop",e="[data-toggle=dropdown]",f=function(b){a(b).on("click.bs.dropdown",this.toggle)};f.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=c(e),g=f.hasClass("open");if(b(),!g){if("ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a('<div 
class="dropdown-backdrop"/>').insertAfter(a(this)).on("click",b),f.trigger(d=a.Event("show.bs.dropdown")),d.isDefaultPrevented())return;f.toggleClass("open").trigger("shown.bs.dropdown"),e.focus()}return!1}},f.prototype.keydown=function(b){if(/(38|40|27)/.test(b.keyCode)){var d=a(this);if(b.preventDefault(),b.stopPropagation(),!d.is(".disabled, :disabled")){var f=c(d),g=f.hasClass("open");if(!g||g&&27==b.keyCode)return 27==b.which&&f.find(e).focus(),d.click();var h=a("[role=menu] li:not(.divider):visible a",f);if(h.length){var i=h.index(h.filter(":focus"));38==b.keyCode&&i>0&&i--,40==b.keyCode&&i<h.length-1&&i++,~i||(i=0),h.eq(i).focus()}}}};var g=a.fn.dropdown;a.fn.dropdown=function(b){return this.each(function(){var c=a(this),d=c.data("dropdown");d||c.data("dropdown",d=new f(this)),"string"==typeof b&&d[b].call(c)})},a.fn.dropdown.Constructor=f,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=g,this},a(document).on("click.bs.dropdown.data-api",b).on("click.bs.dropdown.data-api",".dropdown form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",e,f.prototype.toggle).on("keydown.bs.dropdown.data-api",e+", [role=menu]",f.prototype.keydown)}(window.jQuery),+function(a){"use strict";var b=function(b,c){this.options=c,this.$element=a(b),this.$backdrop=this.isShown=null,this.options.remote&&this.$element.load(this.options.remote)};b.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},b.prototype.toggle=function(a){return this[this.isShown?"hide":"show"](a)},b.prototype.show=function(b){var c=this,d=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(d),this.isShown||d.isDefaultPrevented()||(this.isShown=!0,this.escape(),this.$element.on("click.dismiss.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.backdrop(function(){var 
d=a.support.transition&&c.$element.hasClass("fade");c.$element.parent().length||c.$element.appendTo(document.body),c.$element.show(),d&&c.$element[0].offsetWidth,c.$element.addClass("in").attr("aria-hidden",!1),c.enforceFocus();var e=a.Event("shown.bs.modal",{relatedTarget:b});d?c.$element.find(".modal-dialog").one(a.support.transition.end,function(){c.$element.focus().trigger(e)}).emulateTransitionEnd(300):c.$element.focus().trigger(e)}))},b.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").attr("aria-hidden",!0).off("click.dismiss.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one(a.support.transition.end,a.proxy(this.hideModal,this)).emulateTransitionEnd(300):this.hideModal())},b.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.focus()},this))},b.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keyup.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keyup.dismiss.bs.modal")},b.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.removeBackdrop(),a.$element.trigger("hidden.bs.modal")})},b.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},b.prototype.backdrop=function(b){var c=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var d=a.support.transition&&c;if(this.$backdrop=a('<div class="modal-backdrop '+c+'" 
/>').appendTo(document.body),this.$element.on("click.dismiss.modal",a.proxy(function(a){a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus.call(this.$element[0]):this.hide.call(this))},this)),d&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;d?this.$backdrop.one(a.support.transition.end,b).emulateTransitionEnd(150):b()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(a.support.transition.end,b).emulateTransitionEnd(150):b()):b&&b()};var c=a.fn.modal;a.fn.modal=function(c,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},b.DEFAULTS,e.data(),"object"==typeof c&&c);f||e.data("bs.modal",f=new b(this,g)),"string"==typeof c?f[c](d):g.show&&f.show(d)})},a.fn.modal.Constructor=b,a.fn.modal.noConflict=function(){return a.fn.modal=c,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(b){var c=a(this),d=c.attr("href"),e=a(c.attr("data-target")||d&&d.replace(/.*(?=#[^\s]+$)/,"")),f=e.data("modal")?"toggle":a.extend({remote:!/#/.test(d)&&d},e.data(),c.data());b.preventDefault(),e.modal(f,this).one("hide",function(){c.is(":visible")&&c.focus()})}),a(document).on("show.bs.modal",".modal",function(){a(document.body).addClass("modal-open")}).on("hidden.bs.modal",".modal",function(){a(document.body).removeClass("modal-open")})}(window.jQuery),+function(a){"use strict";var b=function(a,b){this.type=this.options=this.enabled=this.timeout=this.hoverState=this.$element=null,this.init("tooltip",a,b)};b.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1},b.prototype.init=function(b,c,d){this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d);for(var e=this.options.trigger.split(" "),f=e.length;f--;){var 
g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focus",i="hover"==g?"mouseleave":"blur";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},b.prototype.getDefaults=function(){return b.DEFAULTS},b.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},b.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},b.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget)[this.type](this.getDelegateOptions()).data("bs."+this.type);return clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show),void 0):c.show()},b.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget)[this.type](this.getDelegateOptions()).data("bs."+this.type);return clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide),void 0):c.hide()},b.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){if(this.$element.trigger(b),b.isDefaultPrevented())return;var c=this.tip();this.setContent(),this.options.animation&&c.addClass("fade");var d="function"==typeof 
this.options.placement?this.options.placement.call(this,c[0],this.$element[0]):this.options.placement,e=/\s?auto?\s?/i,f=e.test(d);f&&(d=d.replace(e,"")||"top"),c.detach().css({top:0,left:0,display:"block"}).addClass(d),this.options.container?c.appendTo(this.options.container):c.insertAfter(this.$element);var g=this.getPosition(),h=c[0].offsetWidth,i=c[0].offsetHeight;if(f){var j=this.$element.parent(),k=d,l=document.documentElement.scrollTop||document.body.scrollTop,m="body"==this.options.container?window.innerWidth:j.outerWidth(),n="body"==this.options.container?window.innerHeight:j.outerHeight(),o="body"==this.options.container?0:j.offset().left;d="bottom"==d&&g.top+g.height+i-l>n?"top":"top"==d&&g.top-l-i<0?"bottom":"right"==d&&g.right+h>m?"left":"left"==d&&g.left-h<o?"right":d,c.removeClass(k).addClass(d)}var p=this.getCalculatedOffset(d,g,h,i);this.applyPlacement(p,d),this.$element.trigger("shown.bs."+this.type)}},b.prototype.applyPlacement=function(a,b){var c,d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),a.top=a.top+g,a.left=a.left+h,d.offset(a).addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;if("top"==b&&j!=f&&(c=!0,a.top=a.top+f-j),/bottom|top/.test(b)){var k=0;a.left<0&&(k=-2*a.left,a.left=0,d.offset(a),i=d[0].offsetWidth,j=d[0].offsetHeight),this.replaceArrow(k-e+i,i,"left")}else this.replaceArrow(j-f,j,"top");c&&d.offset(a)},b.prototype.replaceArrow=function(a,b,c){this.arrow().css(c,a?50*(1-a/b)+"%":"")},b.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},b.prototype.hide=function(){function b(){"in"!=c.hoverState&&d.detach()}var c=this,d=this.tip(),e=a.Event("hide.bs."+this.type);return this.$element.trigger(e),e.isDefaultPrevented()?void 
0:(d.removeClass("in"),a.support.transition&&this.$tip.hasClass("fade")?d.one(a.support.transition.end,b).emulateTransitionEnd(150):b(),this.$element.trigger("hidden.bs."+this.type),this)},b.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},b.prototype.hasContent=function(){return this.getTitle()},b.prototype.getPosition=function(){var b=this.$element[0];return a.extend({},"function"==typeof b.getBoundingClientRect?b.getBoundingClientRect():{width:b.offsetWidth,height:b.offsetHeight},this.$element.offset())},b.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},b.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},b.prototype.tip=function(){return this.$tip=this.$tip||a(this.options.template)},b.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},b.prototype.validate=function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},b.prototype.enable=function(){this.enabled=!0},b.prototype.disable=function(){this.enabled=!1},b.prototype.toggleEnabled=function(){this.enabled=!this.enabled},b.prototype.toggle=function(b){var c=b?a(b.currentTarget)[this.type](this.getDelegateOptions()).data("bs."+this.type):this;c.tip().hasClass("in")?c.leave(c):c.enter(c)},b.prototype.destroy=function(){this.hide().$element.off("."+this.type).removeData("bs."+this.type)};var c=a.fn.tooltip;a.fn.tooltip=function(c){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof c&&c;e||d.data("bs.tooltip",e=new b(this,f)),"string"==typeof 
c&&e[c]()})},a.fn.tooltip.Constructor=b,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=c,this}}(window.jQuery),+function(a){"use strict";var b=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");b.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),b.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),b.prototype.constructor=b,b.prototype.getDefaults=function(){return b.DEFAULTS},b.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content")[this.options.html?"html":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},b.prototype.hasContent=function(){return this.getTitle()||this.getContent()},b.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},b.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")},b.prototype.tip=function(){return this.$tip||(this.$tip=a(this.options.template)),this.$tip};var c=a.fn.popover;a.fn.popover=function(c){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof c&&c;e||d.data("bs.popover",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.popover.Constructor=b,a.fn.popover.noConflict=function(){return a.fn.popover=c,this}}(window.jQuery),+function(a){"use strict";function b(c,d){var 
e,f=a.proxy(this.process,this);this.$element=a(c).is("body")?a(window):a(c),this.$body=a("body"),this.$scrollElement=this.$element.on("scroll.bs.scroll-spy.data-api",f),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||(e=a(c).attr("href"))&&e.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.offsets=a([]),this.targets=a([]),this.activeTarget=null,this.refresh(),this.process()}b.DEFAULTS={offset:10},b.prototype.refresh=function(){var b=this.$element[0]==window?"offset":"position";this.offsets=a([]),this.targets=a([]);var c=this;this.$body.find(this.selector).map(function(){var d=a(this),e=d.data("target")||d.attr("href"),f=/^#\w/.test(e)&&a(e);return f&&f.length&&[[f[b]().top+(!a.isWindow(c.$scrollElement.get(0))&&c.$scrollElement.scrollTop()),e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){c.offsets.push(this[0]),c.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,d=c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(b>=d)return g!=(a=f.last()[0])&&this.activate(a);for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(!e[a+1]||b<=e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){this.activeTarget=b,a(this.selector).parents(".active").removeClass("active");var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),d.trigger("activate")};var c=a.fn.scrollspy;a.fn.scrollspy=function(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=c,this},a(window).on("load",function(){a('[data-spy="scroll"]').each(function(){var 
b=a(this);b.scrollspy(b.data())})})}(window.jQuery),+function(a){"use strict";var b=function(b){this.element=a(b)};b.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.attr("data-target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a")[0],f=a.Event("show.bs.tab",{relatedTarget:e});if(b.trigger(f),!f.isDefaultPrevented()){var g=a(d);this.activate(b.parent("li"),c),this.activate(g,g.parent(),function(){b.trigger({type:"shown.bs.tab",relatedTarget:e})})}}},b.prototype.activate=function(b,c,d){function e(){f.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),b.addClass("active"),g?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu")&&b.closest("li.dropdown").addClass("active"),d&&d()}var f=c.find("> .active"),g=d&&a.support.transition&&f.hasClass("fade");g?f.one(a.support.transition.end,e).emulateTransitionEnd(150):e(),f.removeClass("in")};var c=a.fn.tab;a.fn.tab=function(c){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new b(this)),"string"==typeof c&&e[c]()})},a.fn.tab.Constructor=b,a.fn.tab.noConflict=function(){return a.fn.tab=c,this},a(document).on("click.bs.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(b){b.preventDefault(),a(this).tab("show")})}(window.jQuery),+function(a){"use strict";var b=function(c,d){this.options=a.extend({},b.DEFAULTS,d),this.$window=a(window).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(c),this.affixed=this.unpin=null,this.checkPosition()};b.RESET="affix affix-top affix-bottom",b.DEFAULTS={offset:0},b.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},b.prototype.checkPosition=function(){if(this.$element.is(":visible")){var 
c=a(document).height(),d=this.$window.scrollTop(),e=this.$element.offset(),f=this.options.offset,g=f.top,h=f.bottom;"object"!=typeof f&&(h=g=f),"function"==typeof g&&(g=f.top()),"function"==typeof h&&(h=f.bottom());var i=null!=this.unpin&&d+this.unpin<=e.top?!1:null!=h&&e.top+this.$element.height()>=c-h?"bottom":null!=g&&g>=d?"top":!1;this.affixed!==i&&(this.unpin&&this.$element.css("top",""),this.affixed=i,this.unpin="bottom"==i?e.top-d:null,this.$element.removeClass(b.RESET).addClass("affix"+(i?"-"+i:"")),"bottom"==i&&this.$element.offset({top:document.body.offsetHeight-h-this.$element.height()}))}};var c=a.fn.affix;a.fn.affix=function(c){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof c&&c;e||d.data("bs.affix",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.affix.Constructor=b,a.fn.affix.noConflict=function(){return a.fn.affix=c,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var b=a(this),c=b.data();c.offset=c.offset||{},c.offsetBottom&&(c.offset.bottom=c.offsetBottom),c.offsetTop&&(c.offset.top=c.offsetTop),b.affix(c)})})}(window.jQuery);
|
import time
from django.core.management import BaseCommand
from bot.bot import bot
class Command(BaseCommand):
    """Management command that runs the development bot client, restarting
    polling after a transient failure."""

    def handle(self, *args, **options):
        print("Started Development Bot Client")
        # Loop instead of recursing into handle(): the previous recursive
        # restart grew the call stack on every failure and would eventually
        # raise RecursionError under a flaky connection.
        while True:
            try:
                bot.polling()
                break  # polling returned normally; stop the client
            except Exception as e:
                print(e)
                time.sleep(5)  # brief back-off before restarting polling
|
/*
* Academic License - for use in teaching, academic research, and meeting
* course requirements at degree granting institutions only. Not for
* government, commercial, or other organizational use.
*
* sprdmpF105.h
*
* Code generation for function 'sprdmpF105'
*
*/
/* Auto-generated (MATLAB Coder / MEX) header: include guard plus the single
 * entry-point declaration for the generated sprdmpF105 routine. */
#ifndef SPRDMPF105_H
#define SPRDMPF105_H
/* Include files */
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "tmwtypes.h"
#include "mex.h"
#include "emlrt.h"
#include "rtwtypes.h"
#include "EOM_eq_types.h"
/* Function Declarations */
/*
 * sp   - emlrt runtime stack, used by the generated code for error reporting.
 * in1  - 6-element input vector (presumably the state vector; confirm against caller).
 * in2  - 198-element input vector of model parameters/coefficients (verify).
 * out1 - 546-element output array; out2..out4 - 6-element output vectors.
 */
extern void sprdmpF105(const emlrtStack *sp, const real_T in1[6], const real_T
in2[198], real_T out1[546], real_T out2[6], real_T out3[6], real_T out4[6]);
#endif
/* End of code generation (sprdmpF105.h) */
|
from scipy.misc import imread, imresize
import numpy as np
def detect_ingrs(recipe, vocab):
    """Return vocab indices for every ingredient of `recipe` present in
    `vocab`, terminated by the end-of-ingredients token '</i>'.

    recipe: dict expected to hold an 'ingredients' list of {'text': ...} entries.
    vocab: dict mapping ingredient-name strings (spaces as '_') to indices.
    """
    try:
        ingr_names = [ingr['text'] for ingr in recipe['ingredients'] if ingr['text']]
    except (KeyError, TypeError):
        # Malformed recipe entry: treat it as having no ingredients.
        # (Previously a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.)
        ingr_names = []
        print("Could not load ingredients! Moving on...")
    detected = set()
    for name in ingr_names:
        name = name.replace(' ', '_')
        name_ind = vocab.get(name)
        if name_ind:
            detected.add(name_ind)
    return list(detected) + [vocab['</i>']]
def process_image(impath, imsize):
    """Load the image at `impath` and resize so its shorter side equals `imsize`.

    Returns (img, fail): fail is 0 on success; on any load/resize failure,
    fail is 1 and img is an all-black (imsize, imsize, 3) array.
    """
    try:
        img = imread(impath)
        if img.ndim == 2:  # grayscale -> replicate channel to fake RGB
            img = img[:, :, None][:, :, [0, 0, 0]]
        H0, W0 = img.shape[0], img.shape[1]
        # Scale factor chosen so min(H, W) becomes imsize (aspect preserved).
        img = imresize(img, float(imsize) / min(H0, W0))
        fail = 0
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        print("Could not load image...Using black one instead.")
        img = np.zeros((imsize, imsize, 3))
        fail = 1
    return img, fail
def read_image(filename):
    """Load `filename` and return it resized to 224x224.

    Grayscale inputs are replicated across three channels before resizing so
    downstream code always sees an RGB-shaped array.
    """
    image = imread(filename)
    if image.ndim == 2:
        # H x W -> H x W x 1, then repeat the single channel three times.
        image = image[:, :, None][:, :, [0, 0, 0]]
    return imresize(image, (224, 224))
|
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
from main.models import Todo
from django.http import HttpResponseRedirect
# Create your views here.
def home(request):
    """Render the index page listing all todos, newest first."""
    context = {"todo_items": Todo.objects.all().order_by("-added_date")}
    return render(request, 'main/index.html', context)
@csrf_exempt
def add_todo(request):
    """Create a Todo from the POSTed `content` field, timestamped now,
    then redirect to the index page."""
    current_date = timezone.now()
    content = request.POST["content"]
    # The previous unused `length_of_todos` count() was removed: its result
    # was never read and it cost an extra DB round-trip per request.
    Todo.objects.create(added_date=current_date, text=content)
    return HttpResponseRedirect("/")
@csrf_exempt
def delete_todo(request, todo_id):
    """Delete the todo with the given id, then redirect to the index.

    Uses filter().delete() so a missing or already-deleted id is a harmless
    no-op instead of raising Todo.DoesNotExist (an HTTP 500 for the client).
    """
    Todo.objects.filter(id=todo_id).delete()
    return HttpResponseRedirect("/")
|
"use strict";
const mergeSort = require("../code-challenges/challenge-21/mergeSort.js");
describe(" insertion Function Checking", () => {
test("should ", () => {
let arr1 = [8, 4, 23, 42, 16, 15];
let arr2 = [20, 18, 12, 8, 5, -2];
mergeSort(arr1);
mergeSort(arr2);
expect(arr1).toStrictEqual([4, 8, 15, 16, 23, 42]);
expect(arr2).toStrictEqual([-2, 5, 8, 12, 18, 20]);
});
});
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the listtransactions API
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, COIN
from io import BytesIO
def txFromHex(hexstring):
    """Deserialize a hex-encoded raw transaction into a CTransaction."""
    stream = BytesIO(hex_str_to_bytes(hexstring))
    transaction = CTransaction()
    transaction.deserialize(stream)
    return transaction
class ListTransactionsTest(BitcoinTestFramework):
    """Exercise the listtransactions RPC across sends, receives,
    sendmany, watch-only imports and (disabled here) BIP125 RBF flags."""
    def __init__(self):
        super().__init__()
        self.num_nodes = 4
        self.setup_clean_chain = False
    def setup_nodes(self):
        #This test requires mocktime
        enable_mocktime()
        return start_nodes(self.num_nodes, self.options.tmpdir)
    def run_test(self):
        """Drive a series of transactions and check listtransactions output."""
        # Simple send, 0 to 1:
        txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
        self.sync_all()
        # Unconfirmed: sender sees "send", receiver sees "receive", 0 confs.
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid":txid},
                            {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"txid":txid},
                            {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
        # mine a block, confirmations should change:
        self.nodes[0].generate(1)
        self.sync_all()
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid":txid},
                            {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"txid":txid},
                            {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})
        # send-to-self:
        # A self-send shows up as BOTH a "send" and a "receive" entry.
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
        assert_array_result(self.nodes[0].listtransactions(),
                           {"txid":txid, "category":"send"},
                           {"amount":Decimal("-0.2")})
        assert_array_result(self.nodes[0].listtransactions(),
                           {"txid":txid, "category":"receive"},
                           {"amount":Decimal("0.2")})
        # sendmany from node1: twice to self, twice to node2:
        send_to = { self.nodes[0].getnewaddress() : 0.11,
                    self.nodes[1].getnewaddress() : 0.22,
                    self.nodes[0].getaccountaddress("from1") : 0.33,
                    self.nodes[1].getaccountaddress("toself") : 0.44 }
        txid = self.nodes[1].sendmany("", send_to)
        self.sync_all()
        # Each output of the single sendmany tx is reported individually,
        # with the correct account label on the receiving side.
        assert_array_result(self.nodes[1].listtransactions(),
                           {"category":"send","amount":Decimal("-0.11")},
                           {"txid":txid} )
        assert_array_result(self.nodes[0].listtransactions(),
                           {"category":"receive","amount":Decimal("0.11")},
                           {"txid":txid} )
        assert_array_result(self.nodes[1].listtransactions(),
                           {"category":"send","amount":Decimal("-0.22")},
                           {"txid":txid} )
        assert_array_result(self.nodes[1].listtransactions(),
                           {"category":"receive","amount":Decimal("0.22")},
                           {"txid":txid} )
        assert_array_result(self.nodes[1].listtransactions(),
                           {"category":"send","amount":Decimal("-0.33")},
                           {"txid":txid} )
        assert_array_result(self.nodes[0].listtransactions(),
                           {"category":"receive","amount":Decimal("0.33")},
                           {"txid":txid, "account" : "from1"} )
        assert_array_result(self.nodes[1].listtransactions(),
                           {"category":"send","amount":Decimal("-0.44")},
                           {"txid":txid, "account" : ""} )
        assert_array_result(self.nodes[1].listtransactions(),
                           {"category":"receive","amount":Decimal("0.44")},
                           {"txid":txid, "account" : "toself"} )
        # Watch-only: import a multisig redeemScript on node0 and check that
        # its transactions only appear when includeWatchonly=True.
        multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
        txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
        self.nodes[1].generate(1)
        self.sync_all()
        assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
        assert_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
                           {"category":"receive","amount":Decimal("0.1")},
                           {"txid":txid, "account" : "watchonly"} )
        #DigitalGoldCoin: Disabled RBF
        #self.run_rbf_opt_in_test()
    # Check that the opt-in-rbf flag works properly, for sent and received
    # transactions.
    def run_rbf_opt_in_test(self):
        """Verify the bip125-replaceable field for a chain of transactions.

        NOTE: currently not invoked from run_test (RBF disabled above).
        """
        # Check whether a transaction signals opt-in RBF itself
        def is_opt_in(node, txid):
            rawtx = node.getrawtransaction(txid, 1)
            for x in rawtx["vin"]:
                # BIP125: any input sequence below 0xfffffffe signals RBF.
                if x["sequence"] < 0xfffffffe:
                    return True
            return False
        # Find an unconfirmed output matching a certain txid
        def get_unconfirmed_utxo_entry(node, txid_to_match):
            utxo = node.listunspent(0, 0)
            for i in utxo:
                if i["txid"] == txid_to_match:
                    return i
            return None
        # 1. Chain a few transactions that don't opt-in.
        txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
        assert(not is_opt_in(self.nodes[0], txid_1))
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
        # Tx2 will build off txid_1, still not opting in to RBF.
        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
        # Create tx2 using createrawtransaction
        inputs = [{"txid":utxo_to_use["txid"], "vout":utxo_to_use["vout"]}]
        outputs = {self.nodes[0].getnewaddress(): 0.999}
        tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
        tx2_signed = self.nodes[1].signrawtransaction(tx2)["hex"]
        txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)
        # ...and check the result
        assert(not is_opt_in(self.nodes[1], txid_2))
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
        # Tx3 will opt-in to RBF
        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
        inputs = [{"txid": txid_2, "vout":utxo_to_use["vout"]}]
        outputs = {self.nodes[1].getnewaddress(): 0.998}
        tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
        tx3_modified = txFromHex(tx3)
        # Setting nSequence = 0 (< 0xfffffffe) signals opt-in RBF.
        tx3_modified.vin[0].nSequence = 0
        tx3 = bytes_to_hex_str(tx3_modified.serialize())
        tx3_signed = self.nodes[0].signrawtransaction(tx3)['hex']
        txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)
        assert(is_opt_in(self.nodes[0], txid_3))
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
        # Tx4 will chain off tx3.  Doesn't signal itself, but depends on one
        # that does.
        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
        inputs = [{"txid": txid_3, "vout":utxo_to_use["vout"]}]
        outputs = {self.nodes[0].getnewaddress(): 0.997}
        tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
        tx4_signed = self.nodes[1].signrawtransaction(tx4)["hex"]
        txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)
        # tx4 inherits replaceability from its unconfirmed RBF ancestor tx3.
        assert(not is_opt_in(self.nodes[1], txid_4))
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
        # Replace tx3, and check that tx4 becomes unknown
        tx3_b = tx3_modified
        tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN) # bump the fee
        tx3_b = bytes_to_hex_str(tx3_b.serialize())
        tx3_b_signed = self.nodes[0].signrawtransaction(tx3_b)['hex']
        txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
        assert(is_opt_in(self.nodes[0], txid_3b))
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
        # Check gettransaction as well:
        for n in self.nodes[0:2]:
            assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no")
            assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no")
            assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes")
            assert_equal(n.gettransaction(txid_3b)["bip125-replaceable"], "yes")
            assert_equal(n.gettransaction(txid_4)["bip125-replaceable"], "unknown")
        # After mining a transaction, it's no longer BIP125-replaceable
        self.nodes[0].generate(1)
        assert(txid_3b not in self.nodes[0].getrawmempool())
        assert_equal(self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no")
        assert_equal(self.nodes[0].gettransaction(txid_4)["bip125-replaceable"], "unknown")
# Allow running this test directly from the command line.
if __name__ == '__main__':
    ListTransactionsTest().main()
|
var EventEmitter = require('events').EventEmitter;
var util = require('util');
var heartbeats = require('heartbeats');
var routeController = require('./lib/controllers/routeController');
var connectionController = require('./lib/controllers/connectionController');
var executionController = require('./lib/controllers/executionController');
var helper = require('./lib/utils/helper');
var parser = require('./lib/utils/parser');
var middleware = require('./lib/utils/middleware');
var heart; // module-wide heartbeat timer, created in initialize()
// Samsaara inherits EventEmitter so consumers can listen for events
// such as 'initialized' and 'stateChange'.
util.inherits(Samsaara, EventEmitter);
// Constructor: wires the shared controllers and middleware together.
function Samsaara() {
    EventEmitter.call(this);
    routeController.setParser(parser);
    connectionController.initialize(this, null);
    executionController.initialize(this);
    middleware.initialize(this);
}
// Look up an existing namespace by name (delegates to executionController).
Samsaara.prototype.nameSpace = function(namespaceName) {
    var ns = executionController.nameSpace(namespaceName);
    return ns;
};
// Create a namespace exposing the given methods (delegates to executionController).
Samsaara.prototype.createNamespace = function(namespaceName, methods) {
    var created = executionController.createNamespace(namespaceName, methods);
    return created;
};
// Expose a set of callable methods to the remote side.
Samsaara.prototype.expose = function(set) {
    var result = executionController.expose(set);
    return result;
};
// Register a middleware module with optional configuration.
Samsaara.prototype.use = function(module, options) {
    var result = middleware.use(module, options);
    return result;
};
// Update state on the core connection; cb receives the outcome.
Samsaara.prototype.setState = function(state, cb) {
    var result = this.core.setState(state, cb);
    return result;
};
// Boot the client: create the 2s heartbeat timer, select the transport,
// open the core connection, load middleware, and register routes.
Samsaara.prototype.initialize = function(opts) {
    var options = opts || {};
    heart = heartbeats.createHeart(2000, 'samsaara');
    connectionController.setTransport(options.socketType || 'ws', true);
    this.core = connectionController.newConnection(options.socket, 'core');
    middleware.load();
    initializeClient(this, this.core, options);
    return this;
};
// Read-only view of the core connection's state (null before initialize()).
Object.defineProperty(Samsaara.prototype, 'state', {
    get: function() {
        if (!this.core) {
            return null;
        }
        return this.core.state;
    }
});
// Initialize client instance
function initializeClient(samsaara, core, opts) {
routeController.addRoute('INIT', initializationRouteHandler);
exposeStateHandler(samsaara);
}
// Route Handlers
function initializationRouteHandler(connection, headerbits, incomingPacket) {
var parsedPacket = parser.parsePacket(incomingPacket);
var connectionOwner;
var connectionRouteID;
var heartbeatInterval;
if (typeof parsedPacket === 'object') {
connectionOwner = parsedPacket.connectionOwner;
connectionRouteID = parsedPacket.connectionRouteID;
heartbeatInterval = parsedPacket.heartbeatInterval;
helper.addReadOnlyBaseProperty(connection, 'routeID', connectionOwner);
routeController.addRoute(connectionRouteID, executionRouteHandler);
connection.queue.emptyToRoute(connectionOwner);
setHeartbeats(connection, heartbeatInterval);
}
}
// Dispatches an incoming execution packet to the exposed function it names,
// tagging it with the sending connection's id first.
function executionRouteHandler(connection, headerbits, incomingPacket) {
    var packet = parser.parsePacket(incomingPacket);
    if (packet === undefined || packet.func === undefined) {
        return;
    }
    packet.sender = connection.id;
    executionController.executeFunction(connection, connection, packet);
}
// State Change Handler
// Exposes the built-in 'internal' namespace that the server invokes to
// push state updates ('setState') and to signal startup ('initialized').
function exposeStateHandler(samsaara) {
    var internalMethods = {
        setState: function(state, cb) {
            var connection = this;
            for (var key in state) {
                connection.state[key] = state[key];
            }
            samsaara.emit('stateChange', connection.state, connection);
            cb(true);
        },
        initialized: function(success, cb) {
            samsaara.emit('initialized', success);
            if (typeof cb === 'function') {
                cb(true);
            }
        }
    };
    samsaara.nameSpace('internal').expose(internalMethods);
}
// Heartbeats
function setHeartbeats(connection, heartbeatInterval) {
heart.setHeartrate(heartbeatInterval);
heart.createEvent(1, function() {
if (connection.outgoingPulse.missedBeats > 0) {
connection.socket.send('H');
}
});
}
// Export a shared singleton instance.
module.exports = new Samsaara();
|
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2008-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import binascii
import os
import os.path
import pathlib
import platform
import shutil
import subprocess
import textwrap
import setuptools
from setuptools import extension as setuptools_extension
from setuptools.command import build_ext as setuptools_build_ext
from setuptools.command import develop as setuptools_develop
import distutils
from distutils.command import build as distutils_build
try:
import setuptools_rust
except ImportError:
setuptools_rust = None
from typing import List
# Pinned Cython requirement shared by BUILD_DEPS and the build_ext check.
CYTHON_DEPENDENCY = 'Cython(>=0.29.24,<0.30.0)'
# Dependencies needed both at build- and run-time
COMMON_DEPS = [
    'edgedb==0.22.0',
    'parsing~=2.0',
]
# Runtime-only dependencies (installed with the package).
RUNTIME_DEPS = [
    'asyncpg~=0.25.0',
    'httptools>=0.3.0',
    'immutables>=0.16',
    'uvloop~=0.16.0',
    'click~=7.1',
    'cryptography~=35.0',
    'graphql-core~=3.1.5',
    'psutil~=5.8',
    'setproctitle~=1.2',
    'wcwidth~=0.2',
] + COMMON_DEPS
# Extra dependencies for building the documentation.
DOCS_DEPS = [
    'docutils~=0.17.0',
    'lxml~=4.6.3',
    'Pygments~=2.10.0',
    'Sphinx~=4.2.0',
    'sphinxcontrib-asyncio~=0.3.0',
]
# Extra dependencies for running the test suite and code QA.
TEST_DEPS = [
    # Code QA
    'black~=21.7b0',
    'coverage~=5.5',
    'flake8~=3.9.2',
    'flake8-bugbear~=21.4.3',
    'pycodestyle~=2.7.0',
    'pyflakes~=2.3.1',
    # Needed for test_docs_sphinx_ext
    'requests-xml~=0.2.3',
    # For rebuilding GHA workflows
    'Jinja2~=2.11',
    'MarkupSafe~=1.1',
    'PyYAML~=5.4',
    'mypy==0.941',
    # mypy stub packages; when updating, you can use mypy --install-types
    # to install stub packages and then pip freeze to read out the specifier
    'types-click~=7.1',
    'types-docutils~=0.17.0,<0.17.6',  # incomplete nodes.document.__init__
    'types-Jinja2~=2.11',
    'types-MarkupSafe~=1.1',
    'types-pkg-resources~=0.1.3',
    'types-typed-ast~=1.4.2',
    'types-requests~=2.25.6',
    'prometheus_client~=0.11.0',
] + DOCS_DEPS
# Dependencies needed only while building (setup_requires).
BUILD_DEPS = [
    CYTHON_DEPENDENCY,
    'packaging>=21.0',
    'setuptools-rust~=0.12.1',
    'wheel',  # needed by PyYAML and immutables, refs pypa/pip#5865
] + COMMON_DEPS
# Minimum Rust toolchain version required by _check_rust().
RUST_VERSION = '1.53.0'  # Also update docs/internal/dev.rst
EDGEDBCLI_REPO = 'https://github.com/edgedb/edgedb-cli'
# This can be a branch, tag, or commit
EDGEDBCLI_COMMIT = 'master'
# Mapping of pip "extras" names to their dependency lists.
EXTRA_DEPS = {
    'test': TEST_DEPS,
    'docs': DOCS_DEPS,
    'build': BUILD_DEPS,
}
# Flags for compiling the Cython extension modules.
EXT_CFLAGS = ['-O2']
EXT_LDFLAGS: List[str] = []
ROOT_PATH = pathlib.Path(__file__).parent.resolve()
if platform.uname().system != 'Windows':
    EXT_CFLAGS.extend([
        '-std=c99', '-fsigned-char', '-Wall', '-Wsign-compare', '-Wconversion'
    ])
def _compile_parsers(build_lib, inplace=False):
    """Generate pickled parser specs for the EdgeQL/SDL grammars.

    Pickles are written under *build_lib*, mirroring each grammar module's
    path relative to the repo root; with *inplace* they are also copied
    back into the source tree.
    """
    import parsing
    import edb.edgeql.parser.grammar.single as edgeql_spec
    import edb.edgeql.parser.grammar.block as edgeql_spec2
    import edb.edgeql.parser.grammar.sdldocument as schema_spec
    for spec in (edgeql_spec, edgeql_spec2, schema_spec):
        spec_path = pathlib.Path(spec.__file__).parent
        # Grammar module directory relative to the repository root.
        subpath = pathlib.Path(str(spec_path)[len(str(ROOT_PATH)) + 1:])
        pickle_name = spec.__name__.rpartition('.')[2] + '.pickle'
        pickle_path = subpath / pickle_name
        cache = build_lib / pickle_path
        cache.parent.mkdir(parents=True, exist_ok=True)
        parsing.Spec(spec, pickleFile=str(cache), verbose=True)
        if inplace:
            shutil.copy2(cache, ROOT_PATH / pickle_path)
def _compile_build_meta(build_lib, version, pg_config, runstate_dir,
                        shared_dir, version_suffix):
    """Generate edb/_buildmeta.py under *build_lib*.

    Records the parsed version (optionally extended with *version_suffix*),
    the pg_config path, the runstate directory, and the shared-data
    directory.  Relative paths are written as expressions anchored at the
    installed package root (``_ROOT``).
    """
    from edb.common import verutils
    parsed_version = verutils.parse_version(version)
    vertuple = list(parsed_version._asdict().values())
    vertuple[2] = int(vertuple[2])
    if version_suffix:
        # Local version segment, e.g. "1.2" -> ('1', '2').
        vertuple[4] = tuple(version_suffix.split('.'))
    vertuple = tuple(vertuple)
    pg_config_path = pathlib.Path(pg_config)
    if not pg_config_path.is_absolute():
        pg_config_path = f"_ROOT / {str(pg_config_path)!r}"
    else:
        pg_config_path = repr(str(pg_config_path))
    if runstate_dir:
        runstate_dir_path = pathlib.Path(runstate_dir)
        if not runstate_dir_path.is_absolute():
            runstate_dir_path = f"_ROOT / {str(runstate_dir_path)!r}"
        else:
            runstate_dir_path = repr(str(runstate_dir_path))
    else:
        runstate_dir_path = "None  # default to <data-dir>"
    shared_dir_path = pathlib.Path(shared_dir)
    if not shared_dir_path.is_absolute():
        shared_dir_path = f"_ROOT / {str(shared_dir_path)!r}"
    else:
        shared_dir_path = repr(str(shared_dir_path))
    content = textwrap.dedent('''\
        #
        # This source file is part of the EdgeDB open source project.
        #
        # Copyright 2008-present MagicStack Inc. and the EdgeDB authors.
        #
        # Licensed under the Apache License, Version 2.0 (the "License");
        #
        # THIS FILE HAS BEEN AUTOMATICALLY GENERATED.
        #
        import pathlib
        _ROOT = pathlib.Path(__file__).parent
        PG_CONFIG_PATH = {pg_config_path}
        RUNSTATE_DIR = {runstate_dir_path}
        SHARED_DATA_DIR = {shared_dir_path}
        VERSION = {version!r}
    ''').format(
        version=vertuple,
        pg_config_path=pg_config_path,
        runstate_dir_path=runstate_dir_path,
        shared_dir_path=shared_dir_path,
    )
    directory = build_lib / 'edb'
    if not directory.exists():
        directory.mkdir(parents=True)
    with open(directory / '_buildmeta.py', 'w+t') as f:
        f.write(content)
def _get_env_with_openssl_flags():
    """Return a copy of os.environ with OpenSSL compile/link flags set.

    Flags come from EDGEDB_BUILD_OPENSSL_CFLAGS/LDFLAGS if set; otherwise,
    on macOS only, they are discovered via Homebrew and pkg-config.  If
    nothing can be discovered the environment is returned unchanged.
    """
    env = dict(os.environ)
    cflags = env.get('EDGEDB_BUILD_OPENSSL_CFLAGS')
    ldflags = env.get('EDGEDB_BUILD_OPENSSL_LDFLAGS')
    if not (cflags or ldflags) and platform.system() == 'Darwin':
        try:
            openssl_prefix = pathlib.Path(subprocess.check_output(
                ['brew', '--prefix', 'openssl'], text=True
            ).strip())
        except (FileNotFoundError, subprocess.CalledProcessError):
            openssl_prefix = None
        else:
            # Let pkg-config see Homebrew's openssl .pc files.
            pc_path = str(openssl_prefix / 'lib' / 'pkgconfig')
            if 'PKG_CONFIG_PATH' in env:
                env['PKG_CONFIG_PATH'] += f':{pc_path}'
            else:
                env['PKG_CONFIG_PATH'] = pc_path
        try:
            cflags = subprocess.check_output(
                ['pkg-config', '--cflags', 'openssl'], text=True, env=env
            ).strip()
            ldflags = subprocess.check_output(
                ['pkg-config', '--libs', 'openssl'], text=True, env=env
            ).strip()
        except FileNotFoundError:
            # pkg-config is not installed
            if openssl_prefix:
                # Fall back to the Homebrew prefix directly.
                cflags = f'-I{openssl_prefix / "include"!s}'
                ldflags = f'-L{openssl_prefix / "lib"!s}'
            else:
                return env
        except subprocess.CalledProcessError:
            # Cannot find flags with pkg-config
            return env
    if cflags:
        # Prefer appending to whichever flag variable is already in use.
        if 'CPPFLAGS' in env:
            env['CPPFLAGS'] += f' {cflags}'
        elif 'CFLAGS' in env:
            env['CFLAGS'] += f' {cflags}'
        else:
            env['CPPFLAGS'] = cflags
    if ldflags:
        if 'LDFLAGS' in env:
            env['LDFLAGS'] += f' {ldflags}'
        else:
            env['LDFLAGS'] = ldflags
    return env
def _compile_postgres(build_base, *,
                      force_build=False, fresh_build=True,
                      run_configure=True, build_contrib=True):
    """Configure, build and install the bundled postgres submodule.

    The build is skipped when the stamp recorded from the previous build
    matches the current submodule revision, unless *force_build* is set.
    """
    proc = subprocess.run(
        ['git', 'submodule', 'status', 'postgres'],
        stdout=subprocess.PIPE, universal_newlines=True, check=True)
    status = proc.stdout
    # A leading '-' in `git submodule status` means "not initialized".
    if status[0] == '-':
        print('postgres submodule not initialized, '
              'run `git submodule init; git submodule update`')
        exit(1)
    source_stamp = _get_pg_source_stamp()
    postgres_build = (build_base / 'postgres').resolve()
    postgres_src = ROOT_PATH / 'postgres'
    postgres_build_stamp = postgres_build / 'stamp'
    if postgres_build_stamp.exists():
        with open(postgres_build_stamp, 'r') as f:
            build_stamp = f.read()
    else:
        build_stamp = None
    is_outdated = source_stamp != build_stamp
    if is_outdated or force_build:
        system = platform.system()
        if system == 'Darwin':
            uuidlib = 'e2fs'
        elif system == 'Linux':
            uuidlib = 'e2fs'
        else:
            raise NotImplementedError('unsupported system: {}'.format(system))
        if fresh_build and postgres_build.exists():
            shutil.rmtree(postgres_build)
        build_dir = postgres_build / 'build'
        if not build_dir.exists():
            build_dir.mkdir(parents=True)
        if run_configure or fresh_build or is_outdated:
            env = _get_env_with_openssl_flags()
            subprocess.run([
                str(postgres_src / 'configure'),
                '--prefix=' + str(postgres_build / 'install'),
                '--with-openssl',
                '--with-uuid=' + uuidlib,
            ], check=True, cwd=str(build_dir), env=env)
        # Parallel make, leaving one CPU free for the rest of the system.
        subprocess.run(
            ['make', 'MAKELEVEL=0', '-j', str(max(os.cpu_count() - 1, 1))],
            cwd=str(build_dir), check=True)
        if build_contrib or fresh_build or is_outdated:
            subprocess.run(
                [
                    'make', '-C', 'contrib', 'MAKELEVEL=0', '-j',
                    str(max(os.cpu_count() - 1, 1))
                ],
                cwd=str(build_dir), check=True)
        subprocess.run(
            ['make', 'MAKELEVEL=0', 'install'],
            cwd=str(build_dir), check=True)
        if build_contrib or fresh_build or is_outdated:
            subprocess.run(
                ['make', '-C', 'contrib', 'MAKELEVEL=0', 'install'],
                cwd=str(build_dir), check=True)
        # Record the submodule revision so unchanged rebuilds are skipped.
        with open(postgres_build_stamp, 'w') as f:
            f.write(source_stamp)
def _check_rust():
    """Verify that a suitable Rust toolchain (>= RUST_VERSION) is installed.

    Raises RuntimeError when rustc is missing or too old.
    """
    import packaging.version
    try:
        rustc_ver = (
            subprocess.check_output(["rustc", '-V'], text=True).split()[1]
        )
        # Strip a "-nightly" suffix explicitly.  The original used
        # str.rstrip("-nightly"), which treats the argument as a character
        # SET and can eat trailing version characters that happen to be in
        # {'-', 'n', 'i', 'g', 'h', 't', 'l', 'y'}.
        if rustc_ver.endswith("-nightly"):
            rustc_ver = rustc_ver[:-len("-nightly")]
        if (
            packaging.version.parse(rustc_ver)
            < packaging.version.parse(RUST_VERSION)
        ):
            raise RuntimeError(
                f'please upgrade Rust to {RUST_VERSION} to compile '
                f'edgedb from source')
    except FileNotFoundError:
        raise RuntimeError(
            f'please install rustc >= {RUST_VERSION} to compile '
            f'edgedb from source (see https://rustup.rs/)')
def _get_edgedbcli_rev(name):
    """Resolve a branch/tag *name* in the edgedb-cli repo to a commit hash.

    Returns None when the ref does not exist on the remote.
    """
    output = subprocess.check_output(
        ['git', 'ls-remote', EDGEDBCLI_REPO, name],
        universal_newlines=True,
    ).strip()
    if output:
        # `git ls-remote` prints "<sha>\t<refname>".
        rev, _ = output.split()
        return rev
    return None
def _get_pg_source_stamp():
    """Return a stamp string identifying the postgres submodule checkout."""
    output = subprocess.check_output(
        ['git', 'submodule', 'status', 'postgres'], universal_newlines=True,
    )
    revision = output[1:].partition(' ')[0]
    # The leading status character is kept in the stamp on purpose: older
    # builds included it, and dropping it now would invalidate existing
    # stamps and force everyone to rebuild postgres.
    return output[0] + revision
def _compile_cli(build_base, build_temp):
    """Build the edgedb CLI from the edgedb-cli repo via cargo and copy the
    resulting binary into edb/cli/edgedb."""
    _check_rust()
    rust_root = build_base / 'cli'
    env = dict(os.environ)
    env['CARGO_TARGET_DIR'] = str(build_temp / 'rust' / 'cli')
    env['PSQL_DEFAULT_PATH'] = build_base / 'postgres' / 'install' / 'bin'
    # EDGEDBCLI_GIT_REV overrides the default pinned commit/branch.
    git_name = env.get("EDGEDBCLI_GIT_REV")
    if not git_name:
        git_name = EDGEDBCLI_COMMIT
    # The name can be a branch or tag, so we attempt to look it up
    # with ls-remote. If we don't find anything, we assume it's a
    # commit hash.
    git_rev = _get_edgedbcli_rev(git_name)
    if not git_rev:
        git_rev = git_name
    subprocess.run(
        [
            'cargo', 'install',
            '--verbose', '--verbose',
            '--git', EDGEDBCLI_REPO,
            '--rev', git_rev,
            '--bin', 'edgedb',
            '--root', rust_root,
            '--features=dev_mode',
            '--locked',
            '--debug',
        ],
        env=env,
        check=True,
    )
    cli_dest = ROOT_PATH / 'edb' / 'cli' / 'edgedb'
    # Delete the target first, to avoid "Text file busy" errors during
    # the copy if the CLI is currently running.
    try:
        cli_dest.unlink()
    except FileNotFoundError:
        pass
    shutil.copy(
        rust_root / 'bin' / 'edgedb',
        cli_dest,
    )
def _build_ui(build_base, build_temp):
    """Clone/update edgedb-studio, build it with yarn, and install the
    result into the shared data directory under 'ui'."""
    from edb import buildmeta
    ui_root = build_base / 'edgedb-studio'
    if not ui_root.exists():
        subprocess.run(
            [
                'git',
                'clone',
                'https://github.com/edgedb/edgedb-studio.git',
                ui_root,
            ],
            check=True
        )
    else:
        subprocess.run(
            ['git', 'pull'],
            check=True,
            cwd=ui_root,
        )
    dest = buildmeta.get_shared_data_dir_path() / 'ui'
    if dest.exists():
        shutil.rmtree(dest)
    # install deps
    subprocess.run(['yarn'], check=True, cwd=ui_root)
    # run build
    env = dict(os.environ)
    # With CI=true (set in GH CI) `yarn build` fails if there are any
    # warnings. We don't need this check in our build so we're disabling
    # this behavior.
    env['CI'] = ''
    subprocess.run(
        ['yarn', 'build'],
        check=True,
        cwd=ui_root / 'web',
        env=env
    )
    shutil.copytree(ui_root / 'web' / 'build', dest)
class build(distutils_build.build):
    """Custom `build` command: adds EdgeDB-specific options and compiles
    the grammar pickles (and optionally _buildmeta.py) after the stock
    build steps."""
    user_options = distutils_build.build.user_options + [
        ('pg-config=', None, 'path to pg_config to use with this build'),
        ('runstatedir=', None, 'directory to use for the runtime state'),
        ('shared-dir=', None, 'directory to use for shared data'),
        ('version-suffix=', None, 'dot-separated local version suffix'),
    ]
    def initialize_options(self):
        super().initialize_options()
        self.pg_config = None
        self.runstatedir = None
        self.shared_dir = None
        self.version_suffix = None
    def finalize_options(self):
        # Fall back to EDGEDB_BUILD_* environment variables for any option
        # not given on the command line.
        super().finalize_options()
        if self.pg_config is None:
            self.pg_config = os.environ.get("EDGEDB_BUILD_PG_CONFIG")
        if self.runstatedir is None:
            self.runstatedir = os.environ.get("EDGEDB_BUILD_RUNSTATEDIR")
        if self.shared_dir is None:
            self.shared_dir = os.environ.get("EDGEDB_BUILD_SHARED_DIR")
        if self.version_suffix is None:
            self.version_suffix = os.environ.get("EDGEDB_BUILD_VERSION_SUFFIX")
    def run(self, *args, **kwargs):
        super().run(*args, **kwargs)
        build_lib = pathlib.Path(self.build_lib)
        _compile_parsers(build_lib)
        # Only write _buildmeta.py when at least one option was customized.
        if (
            self.pg_config
            or self.runstatedir
            or self.shared_dir
            or self.version_suffix
        ):
            _compile_build_meta(
                build_lib,
                self.distribution.metadata.version,
                self.pg_config,
                self.runstatedir,
                self.shared_dir,
                self.version_suffix,
            )
class develop(setuptools_develop.develop):
    """Custom `develop` command: builds the CLI, parsers, postgres and the
    UI, and rewrites console scripts to their _dev variants."""
    def run(self, *args, **kwargs):
        from edb import buildmeta
        from edb.common import devmode
        try:
            buildmeta.get_build_metadata_value("SHARED_DATA_DIR")
        except buildmeta.MetadataError:
            # buildmeta path resolution needs this
            devmode.enable_dev_mode()
        build = self.get_finalized_command('build')
        build_temp = pathlib.Path(build.build_temp).resolve()
        build_base = pathlib.Path(build.build_base).resolve()
        _compile_cli(build_base, build_temp)
        scripts = self.distribution.entry_points['console_scripts']
        patched_scripts = []
        # Suffix each entry point with "_dev" and add dev-only commands.
        for s in scripts:
            s = f'{s}_dev'
            patched_scripts.append(s)
        patched_scripts.append('edb = edb.tools.edb:edbcommands')
        patched_scripts.append('edgedb = edb.cli:rustcli')
        self.distribution.entry_points['console_scripts'] = patched_scripts
        super().run(*args, **kwargs)
        _compile_parsers(build_base / 'lib', inplace=True)
        _compile_postgres(build_base)
        _build_ui(build_base, build_temp)
class ci_helper(setuptools.Command):
    """Print a cache hash or build path for the given --type, for use by CI.

    Fix: the original error message concatenated two adjacent string
    literals without a separating space, producing "parsers,build_temp".
    """
    description = "echo specified hash or build info for CI"
    user_options = [
        ('type=', None,
         'one of: cli, rust, ext, parsers, postgres, bootstrap, '
         'build_temp, build_lib'),
    ]
    def run(self):
        import edb as _edb
        from edb.buildmeta import hash_dirs, get_cache_src_dirs
        build = self.get_finalized_command('build')
        pkg_dir = pathlib.Path(_edb.__path__[0])
        if self.type == 'parsers':
            # Hash of the grammar sources (plus the Rust keyword table).
            parser_hash = hash_dirs(
                [(pkg_dir / 'edgeql/parser/grammar', '.py')],
                extra_files=[pkg_dir / 'edgeql-parser/src/keywords.rs'],
            )
            print(binascii.hexlify(parser_hash).decode())
        elif self.type == 'postgres':
            print(_get_pg_source_stamp().strip())
        elif self.type == 'bootstrap':
            bootstrap_hash = hash_dirs(
                get_cache_src_dirs(),
                extra_files=[pkg_dir / 'server/bootstrap.py'],
            )
            print(binascii.hexlify(bootstrap_hash).decode())
        elif self.type == 'rust':
            rust_hash = hash_dirs([
                (pkg_dir / 'edgeql-parser', '.rs'),
                (pkg_dir / 'edgeql-rust', '.rs'),
                (pkg_dir / 'graphql-rewrite', '.rs'),
            ], extra_files=[
                pkg_dir / 'edgeql-parser/Cargo.toml',
                pkg_dir / 'edgeql-rust/Cargo.toml',
                pkg_dir / 'graphql-rewrite/Cargo.toml',
            ])
            print(binascii.hexlify(rust_hash).decode())
        elif self.type == 'ext':
            # Hash of all Cython sources in the package.
            ext_hash = hash_dirs([
                (pkg_dir, '.pyx'),
                (pkg_dir, '.pyi'),
                (pkg_dir, '.pxd'),
                (pkg_dir, '.pxi'),
            ])
            print(binascii.hexlify(ext_hash).decode())
        elif self.type == 'cli':
            print(_get_edgedbcli_rev(EDGEDBCLI_COMMIT) or EDGEDBCLI_COMMIT)
        elif self.type == 'build_temp':
            print(pathlib.Path(build.build_temp).resolve())
        elif self.type == 'build_lib':
            print(pathlib.Path(build.build_lib).resolve())
        else:
            raise RuntimeError(
                f'Illegal --type={self.type}; can only be: '
                'cli, rust, ext, postgres, bootstrap, parsers, '
                'build_temp or build_lib'
            )
    def initialize_options(self):
        self.type = None
    def finalize_options(self):
        pass
class build_postgres(setuptools.Command):
    """Standalone command to (re)build the bundled postgres."""
    description = "build postgres"
    user_options = [
        ('configure', None, 'run ./configure'),
        ('build-contrib', None, 'build contrib'),
        ('fresh-build', None, 'rebuild from scratch'),
    ]
    def initialize_options(self):
        self.configure = False
        self.build_contrib = False
        self.fresh_build = False
    def finalize_options(self):
        pass
    def run(self, *args, **kwargs):
        build = self.get_finalized_command('build')
        # force_build=True: always build, regardless of the source stamp.
        _compile_postgres(
            pathlib.Path(build.build_base).resolve(),
            force_build=True,
            fresh_build=self.fresh_build,
            run_configure=self.configure,
            build_contrib=self.build_contrib)
class build_ext(setuptools_build_ext.build_ext):
    """Custom build_ext: Cython options, EDGEDB_DEBUG support, and a
    BUILD_EXT_MODE switch ('both', 'py-only', 'rust-only', 'skip').

    Fix: the BUILD_EXT_MODE error message omitted the valid "rust-only"
    mode even though the check accepts it.
    """
    user_options = setuptools_build_ext.build_ext.user_options + [
        ('cython-annotate', None,
            'Produce a colorized HTML version of the Cython source.'),
        ('cython-directives=', None,
            'Cython compiler directives'),
    ]
    def initialize_options(self):
        # initialize_options() may be called multiple times on the
        # same command object, so make sure not to override previously
        # set options.
        if getattr(self, '_initialized', False):
            return
        super(build_ext, self).initialize_options()
        if os.environ.get('EDGEDB_DEBUG'):
            # Debug build: trace-enabled Cython with annotations.
            self.cython_always = True
            self.cython_annotate = True
            self.cython_directives = "linetrace=True"
            self.define = 'PG_DEBUG,CYTHON_TRACE,CYTHON_TRACE_NOGIL'
            self.debug = True
        else:
            self.cython_always = False
            self.cython_annotate = None
            self.cython_directives = None
            self.debug = False
        self.build_mode = os.environ.get('BUILD_EXT_MODE', 'both')
    def finalize_options(self):
        # finalize_options() may be called multiple times on the
        # same command object, so make sure not to override previously
        # set options.
        if getattr(self, '_initialized', False):
            return
        if self.build_mode not in {'both', 'py-only', 'rust-only', 'skip'}:
            raise RuntimeError(f'Illegal BUILD_EXT_MODE={self.build_mode}; '
                               f'can only be "both", "py-only", "rust-only" '
                               f'or "skip".')
        if self.build_mode not in {'both', 'py-only'}:
            # Not building the Python extensions; skip the Cython setup.
            super(build_ext, self).finalize_options()
            return
        import pkg_resources
        # Double check Cython presence in case setup_requires
        # didn't go into effect (most likely because someone
        # imported Cython before setup_requires injected the
        # correct egg into sys.path.
        try:
            import Cython
        except ImportError:
            raise RuntimeError(
                'please install {} to compile edgedb from source'.format(
                    CYTHON_DEPENDENCY))
        cython_dep = pkg_resources.Requirement.parse(CYTHON_DEPENDENCY)
        if Cython.__version__ not in cython_dep:
            raise RuntimeError(
                'edgedb requires {}, got Cython=={}'.format(
                    CYTHON_DEPENDENCY, Cython.__version__
                ))
        from Cython.Build import cythonize
        directives = {
            'language_level': '3'
        }
        if self.cython_directives:
            # Parse "k=v,k=v" into a directives dict, mapping the strings
            # 'true'/'false' to booleans.
            for directive in self.cython_directives.split(','):
                k, _, v = directive.partition('=')
                if v.lower() == 'false':
                    v = False
                if v.lower() == 'true':
                    v = True
                directives[k] = v
        self.distribution.ext_modules[:] = cythonize(
            self.distribution.ext_modules,
            compiler_directives=directives,
            annotate=self.cython_annotate,
            include_path=["edb/server/pgproto/"])
        super(build_ext, self).finalize_options()
    def run(self):
        if self.build_mode != 'skip':
            super().run()
        else:
            distutils.log.info(f'Skipping build_ext because '
                               f'BUILD_EXT_MODE={self.build_mode}')
class build_cli(setuptools.Command):
    """Standalone command to build only the EdgeDB CLI binary."""
    description = "build the EdgeDB CLI"
    user_options: List[str] = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    def run(self, *args, **kwargs):
        build = self.get_finalized_command('build')
        _compile_cli(
            pathlib.Path(build.build_base).resolve(),
            pathlib.Path(build.build_temp).resolve(),
        )
class build_ui(setuptools.Command):
    """Standalone command to build only the EdgeDB UI."""
    description = "build EdgeDB UI"
    user_options: List[str] = []
    def initialize_options(self):
        pass
    def finalize_options(self):
        pass
    def run(self, *args, **kwargs):
        from edb import buildmeta
        from edb.common import devmode
        try:
            buildmeta.get_build_metadata_value("SHARED_DATA_DIR")
        except buildmeta.MetadataError:
            # buildmeta path resolution needs this
            devmode.enable_dev_mode()
        build = self.get_finalized_command('build')
        _build_ui(
            pathlib.Path(build.build_base).resolve(),
            pathlib.Path(build.build_temp).resolve(),
        )
class build_parsers(setuptools.Command):
    """Standalone command to build only the pickled grammar parsers."""
    description = "build the parsers"
    user_options = [
        ('inplace', None,
         'ignore build-lib and put compiled parsers into the source directory '
         'alongside your pure Python modules')]
    def initialize_options(self):
        self.inplace = None
    def finalize_options(self):
        pass
    def run(self, *args, **kwargs):
        build = self.get_finalized_command('build')
        if self.inplace:
            # Mirror the develop command: cache under build_base/lib and
            # copy the pickles back into the source tree.
            build_base = pathlib.Path(build.build_base).resolve()
            _compile_parsers(build_base / 'lib', inplace=True)
        else:
            build_lib = pathlib.Path(build.build_lib)
            _compile_parsers(build_lib)
# Command-name -> command-class mapping passed to setuptools as cmdclass.
# build_rust is added below only when setuptools_rust is importable.
COMMAND_CLASSES = {
    'build': build,
    'build_ext': build_ext,
    'develop': develop,
    'build_postgres': build_postgres,
    'build_cli': build_cli,
    'build_parsers': build_parsers,
    'build_ui': build_ui,
    'ci_helper': ci_helper,
}
if setuptools_rust is not None:
    # Rust extension modules, built only when setuptools_rust is available.
    rust_extensions = [
        setuptools_rust.RustExtension(
            "edb._edgeql_rust",
            path="edb/edgeql-rust/Cargo.toml",
            binding=setuptools_rust.Binding.RustCPython,
        ),
        setuptools_rust.RustExtension(
            "edb._graphql_rewrite",
            path="edb/graphql-rewrite/Cargo.toml",
            binding=setuptools_rust.Binding.RustCPython,
        ),
    ]
    class build_rust(setuptools_rust.build.build_rust):
        """Custom build_rust honoring BUILD_EXT_MODE and working around a
        setuptools-rust copy bug."""
        def run(self):
            _check_rust()
            build_ext = self.get_finalized_command("build_ext")
            if build_ext.build_mode not in {'both', 'rust-only'}:
                distutils.log.info(f'Skipping build_rust because '
                                   f'BUILD_EXT_MODE={build_ext.build_mode}')
                return
            self.plat_name = build_ext.plat_name
            copy_list = []
            if not build_ext.inplace:
                for ext in self.distribution.rust_extensions:
                    # Always build in-place because later stages of the build
                    # may depend on the modules having been built
                    dylib_path = pathlib.Path(
                        build_ext.get_ext_fullpath(ext.name))
                    build_ext.inplace = True
                    target_path = pathlib.Path(
                        build_ext.get_ext_fullpath(ext.name))
                    build_ext.inplace = False
                    copy_list.append((dylib_path, target_path))
                    # Workaround a bug in setuptools-rust: it uses
                    # shutil.copyfile(), which is not safe w.r.t mmap,
                    # so if the target module has been previously loaded
                    # bad things will happen.
                    if target_path.exists():
                        target_path.unlink()
                    target_path.parent.mkdir(parents=True, exist_ok=True)
            os.environ['CARGO_TARGET_DIR'] = str(
                pathlib.Path(build_ext.build_temp) / 'rust' / 'extensions',
            )
            super().run()
            for src, dst in copy_list:
                shutil.copyfile(src, dst)
    COMMAND_CLASSES['build_rust'] = build_rust
else:
    # setuptools_rust not installed: build without the Rust extensions.
    rust_extensions = []
def _version():
    """Compute the package version from source control metadata."""
    from edb import buildmeta

    version = buildmeta.get_version_from_scm(ROOT_PATH)
    return version
def _cython_extension(module):
    """Return a Cython Extension whose .pyx path mirrors its dotted name.

    Every Cython module in this project follows the convention
    ``a.b.c`` -> ``a/b/c.pyx``, so the source path can be derived
    instead of being spelled out eleven times.
    """
    return setuptools_extension.Extension(
        module,
        [module.replace('.', '/') + '.pyx'],
        extra_compile_args=EXT_CFLAGS,
        extra_link_args=EXT_LDFLAGS,
    )


# Dotted names of all Cython extension modules (see _cython_extension).
_CYTHON_EXT_MODULES = [
    'edb.server.cache.stmt_cache',
    'edb.protocol.protocol',
    'edb.server.pgproto.pgproto',
    'edb.server.dbview.dbview',
    'edb.server.protocol.binary',
    'edb.server.protocol.notebook_ext',
    'edb.server.protocol.ui_ext',
    'edb.server.protocol.edgeql_ext',
    'edb.server.protocol.protocol',
    'edb.server.pgcon.pgcon',
    'edb.graphql.extension',
]

setuptools.setup(
    version=_version(),
    setup_requires=BUILD_DEPS,
    python_requires='>=3.9.0',
    name='edgedb-server',
    description='EdgeDB Server',
    author='MagicStack Inc.',
    author_email='hello@magic.io',
    packages=['edb'],
    include_package_data=True,
    cmdclass=COMMAND_CLASSES,
    entry_points={
        'console_scripts': [
            'edgedb-server = edb.server.main:main',
        ],
    },
    ext_modules=[_cython_extension(mod) for mod in _CYTHON_EXT_MODULES],
    # Populated above only when setuptools-rust is available.
    rust_extensions=rust_extensions,
    install_requires=RUNTIME_DEPS,
    extras_require=EXTRA_DEPS,
)
|
import argparse
import git
from .utils import github_url_to_owner_repo
from autorelease import GitHubUser, ProjectOptions
class AutoreleaseParsingHelper(object):
    """Assemble an argparse parser with optional GitHub/project/repo options.

    Each ``add_*_parsing`` call registers the corresponding CLI flags and,
    where applicable, records the object class to construct after parsing.
    """

    def __init__(self, parser=None, disable_defaults=False):
        self.parser = argparse.ArgumentParser() if parser is None else parser
        self.parser.add_argument('-q', '--quiet', action='store_true')
        self.make_objects = []

    def add_github_parsing(self):
        """Register GitHub credential options (-u/--username, --token)."""
        self.parser.add_argument('-u', '--username', type=str,
                                 help='GitHub username')
        self.parser.add_argument('--token', type=str,
                                 help='authorization token')
        self.make_objects.append(GitHubUser)

    def add_project_parsing(self):
        """Register project identification options."""
        self.parser.add_argument('--repo_owner', type=str)
        self.parser.add_argument('--repo_name', type=str)
        self.parser.add_argument('--project_name', type=str)
        self.make_objects.append(ProjectOptions)

    def add_repo_parsing(self):
        """Register the local repository path option (defaults to cwd)."""
        self.parser.add_argument('--repo', type=str, default='.')

    def parse_args(self, args=None):
        """Parse arguments and wrap them for lazy object construction."""
        parsed = self.parser.parse_args(args=args)
        return AutoreleaseParsedArguments(parsed, self.make_objects)
class AutoreleaseParsedArguments(object):
    """Wraps parsed argparse options and lazily derives git/GitHub objects.

    Attribute lookups that are not defined here fall through to the
    underlying argparse namespace via ``__getattr__``.
    """

    def __init__(self, opts, make_objects):
        self.opts = opts
        self.make_objects = make_objects
        # Backing fields for the lazily-computed read-only properties.
        self._upstream = None
        self._origin = None
        self._github_user = None
        self._project = None
        self._repo = None

    def __getattr__(self, name):
        # Delegate unknown attributes to the argparse namespace.
        return getattr(self.opts, name)

    def _remotes(self, internal):
        # Resolve remotes on first access of either property.
        if internal is None and self.repo is not None:
            self.set_upstream_origin()

    @property
    def upstream(self):
        """The 'upstream' remote (falls back to 'origin'); read-only."""
        self._remotes(self._upstream)
        return self._upstream

    @property
    def origin(self):
        """The 'origin' remote; read-only."""
        self._remotes(self._origin)
        return self._origin

    def set_upstream_origin(self, repo=None):
        """Resolve the 'upstream' and 'origin' remotes of a repository.

        :param repo: a git.Repo object; defaults to ``self.repo``
        :raises RuntimeError: if neither remote exists
        """
        if repo is None:
            repo = self.repo
        # input is git.Repo object
        upstream = [r for r in repo.remotes if r.name == 'upstream']
        origin = [r for r in repo.remotes if r.name == 'origin']
        if not upstream:
            upstream = origin
        if upstream == origin == []:
            raise RuntimeError("Can't guess data for this repository")
        assert len(upstream) == len(origin) == 1
        # Bug fix: 'upstream' and 'origin' are read-only properties, so
        # assigning to them raised "AttributeError: can't set attribute".
        # Write the backing fields instead.
        self._upstream = upstream[0]
        self._origin = origin[0]

    def guess_project(self):
        """Guess project options from the upstream remote URL."""
        guess = {k: None
                 for k in ['repo_owner', 'repo_name', 'project_name']}
        if self.repo is not None:
            (owner, name) = github_url_to_owner_repo(self.upstream.url)
            guess = {'repo_owner': owner,
                     'repo_name': name,
                     'project_name': name}
        return guess

    def guess_github_user(self):
        """Guess the GitHub username from the origin remote URL."""
        guess = {k: None for k in ['username', 'token']}
        if self.repo is not None:
            (user, _) = github_url_to_owner_repo(self.origin.url)
            guess.update({'username': user})
        return guess

    @property
    def repo(self):
        """Lazily-opened git.Repo (with origin fetched); None if no --repo."""
        if self._repo is None and hasattr(self.opts, 'repo'):
            self._repo = git.Repo(self.opts.repo)
            self._repo.remotes.origin.fetch()
        return self._repo

    @property
    def github_user(self):
        """GitHubUser built from options/guesses, if requested; else None."""
        if self._github_user is None and GitHubUser in self.make_objects:
            self._github_user = self._make_object(GitHubUser,
                                                  self.guess_github_user,
                                                  opts=self.opts)
        return self._github_user

    @property
    def project(self):
        """ProjectOptions built from options/guesses, if requested; else None."""
        if self._project is None and ProjectOptions in self.make_objects:
            self._project = self._make_object(ProjectOptions,
                                              self.guess_project,
                                              self.opts)
        return self._project

    @staticmethod
    def _make_object(obj_cls, guesser, opts):
        """Instantiate obj_cls from guessed values overridden by CLI options.

        Returns None when required keyword arguments are missing
        (obj_cls raises TypeError).
        """
        if opts is None:
            raise RuntimeError(
                "Can't retrieve object before parsing arguments. "
                + "Run AutoreleaseParsingHelper.parse_args() first.")
        kwargs = guesser()
        opts_dct = vars(opts)
        # Explicit CLI options win over guessed values.
        kwargs_opts = {k: opts_dct[k] for k in kwargs
                       if opts_dct[k] is not None}
        kwargs.update(kwargs_opts)
        kwargs = {k: v for (k, v) in kwargs.items() if v is not None}
        try:
            return obj_cls(**kwargs)
        except TypeError:
            return None
|
import presence from "../../utils/presences.js";
import tempmute from "../../utils/tempmute.js";
import poll from "../../utils/poll.js";
import banners from '../../utils/banner.js';
import MessageModel2 from '../../database/models/mutedmembers.js';
import MessageModel3 from '../../database/models/poll.js';
import discordbl from '../../utils/discordbotlist.js';
//Start ready event
// Ready event handler: runs once per shard when the client comes online.
export default async bot => {
  // Rotate the bot's presence every 15 minutes.
  setInterval(presence, 900000, bot);

  // Post stats to bot lists only on the public ("external") deployment.
  if (process.env.EXTERNAL === "yes") {
    discordbl(bot);
    setInterval(discordbl, 1800000, bot);
  }

  // "De-restriction": resume pending unmutes whose penalty time expired.
  const mutedDoc = await MessageModel2.findOne();
  if (mutedDoc) {
    tempmute(bot);
  }

  // Polls have a limit; re-schedule editing them to show "Poll completed".
  const pollDoc = await MessageModel3.findOne();
  if (pollDoc) {
    poll(bot);
  }

  if (bot.guilds.cache.get("402555684849451028")) banners(bot);

  // Cache invite counts so welcome messages can show the inviter.
  // Luckily, fetching invites has no rate-limit.
  try {
    const guildsToFetch = bot.guilds.cache.filter(e => e.me.hasPermission("MANAGE_GUILD")).array();
    for (const guild of guildsToFetch) {
      guild.inviteCount = await guild.getInviteCount().catch(err => {
        console.log(err);
        return {};
      });
    }
  } catch (err) {
    // Bug fix: `||` treated shard id 0 as falsy; `??` only falls back
    // when `id` is null/undefined.
    console.error(`In shard ${bot.shard.id ?? bot.shard.ids[0]} there was an error fetching invites.`);
  }

  // All internal startup operations finished.
  presence(bot);
  console.log(`Gidget is alive! Version ${bot.botVersion} from shard ${bot.shard.id ?? bot.shard.ids[0]}`);
};
|
from authentication.account.models import EmailAddress
from authentication.socialaccount import providers
from authentication.socialaccount.providers.base import AuthAction, ProviderAccount
from authentication.socialaccount.providers.oauth2.provider import OAuth2Provider
class SalesforceAccount(ProviderAccount):
    """Social-account wrapper exposing Salesforce profile metadata."""

    def get_profile_url(self):
        # 'link' carries the profile URL in the provider's extra data.
        return self.account.extra_data.get('link')

    def get_avatar_url(self):
        return self.account.extra_data.get('picture')

    def to_str(self):
        # Prefer the display name from extra data; otherwise the default.
        fallback = super(SalesforceAccount, self).to_str()
        return self.account.extra_data.get('name', fallback)
class SalesforceProvider(OAuth2Provider):
    """OAuth2 provider definition for Salesforce login."""

    id = 'salesforce'
    name = 'Salesforce'
    package = 'authentication.socialaccount.providers.salesforce'
    account_class = SalesforceAccount

    def get_default_scope(self):
        return ['id', 'openid']

    def get_auth_params(self, request, action):
        params = super(SalesforceProvider, self).get_auth_params(request, action)
        if action == AuthAction.REAUTHENTICATE:
            # Force the consent screen so the user re-approves access.
            params['approval_prompt'] = 'force'
        return params

    def extract_uid(self, data):
        return str(data['user_id'])

    def extract_common_fields(self, data):
        return {
            'email': data.get('email'),
            'last_name': data.get('family_name'),
            'first_name': data.get('given_name'),
            'username': data.get('preferred_username'),
        }

    def extract_email_addresses(self, data):
        # a salesforce user must have an email, but it might not be verified
        address = EmailAddress(email=data.get('email'),
                               primary=True,
                               verified=data.get('email_verified'))
        return [address]
providers.registry.register(SalesforceProvider)
|
// CocoaPods-generated umbrella header for the Pods_BWTest_Example target.
// Exposes the framework's version number and string; regenerated by
// `pod install` — do not edit by hand.
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif

FOUNDATION_EXPORT double Pods_BWTest_ExampleVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_BWTest_ExampleVersionString[];
|
# Celery application bootstrap for the meiduo_mall project.
# 1. Imports
from celery import Celery
# 2. Point Celery at the Django settings module before Django is touched.
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "meiduo_mall.settings.dev")
# 3. Create the Celery application instance.
app = Celery('celer_tasks')
# 4. Load the message-queue/broker configuration.
app.config_from_object('celery_tasks.config')
# 5. Auto-discover task modules in the listed packages.
app.autodiscover_tasks(['celery_tasks.sms','celery_tasks.email'])
|
#!/usr/bin/env python3
"""Common functions needed for warmups."""
from math import sqrt
from functools import reduce
def prime(n):
    """
    Return True if n is prime, False otherwise.

    Uses trial division by odd numbers up to sqrt(n).  This fixes two
    problems with the old factors()-based check: negative odd inputs
    raised a math domain error, and primality testing needlessly built
    the complete factor set.

    :param n: integer n
    :return: True or False
    """
    if n < 2:
        # 0, 1 and all negatives are not prime.
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False
    for candidate in range(3, int(sqrt(n)) + 1, 2):
        if n % candidate == 0:
            return False
    return True
def factors(n):
    """
    Find the set of factors for n.

    :param n: positive integer n
    :return: set of factors.
    :raises ValueError: if n < 1 (the old implementation crashed with an
        opaque TypeError for 0 and a math domain error for negatives)
    """
    if n < 1:
        raise ValueError("factors() requires a positive integer, got %r" % (n,))
    result = set()
    # Each divisor i <= sqrt(n) pairs with the co-divisor n // i.
    for i in range(1, int(sqrt(n)) + 1):
        if n % i == 0:
            result.add(i)
            result.add(n // i)
    return result
|
macDetailCallback("0018a6000000/24",[{"d":"2006-06-13","t":"add","a":"118 N. Howard St. #72\nBaltimore MD 21201\n\n","c":"UNITED STATES","o":"Persistent Systems, LLC"},{"d":"2015-08-27","t":"change","a":"118 N. Howard St. #72 Baltimore MD US 21201","c":"US","o":"Persistent Systems, LLC"}]);
|