repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
Matt-Crow/SmallPythonPrograms
|
torchlight2statCalc.py
|
<reponame>Matt-Crow/SmallPythonPrograms<filename>torchlight2statCalc.py
# Credit, stat calculations: torchlight.wikia.com/wiki/Stats_(T2)
# todo: HP and Mana
def perc(num):
    """
    Return num formatted as a percentage string.

    Example:
        perc(0.05) returns "5%"
    """
    # Bug fix: str(num * 100) produced "5.0%" for perc(0.05), contradicting
    # the documented "5%". %g drops the trailing ".0" while still showing
    # fractional results such as perc(0.125) -> "12.5%".
    return '%g%%' % (num * 100)
def calcWeaponBonus(strength):
    """Weapon damage bonus: 0.5% per point of strength."""
    BONUS_PER_POINT = 0.005
    return BONUS_PER_POINT * strength
def calcCritMult(strength):
    """Critical-hit damage multiplier: base 1.5, +0.004 per strength point, capped at 4.5."""
    return min(1.5 + 0.004 * strength, 4.5)
def calcCritChance(dexterity):
    """Critical-hit chance from dexterity (quadratic curve), capped at 50%."""
    raw = (0.002002 - 0.000002 * dexterity) * dexterity
    return min(raw, 0.5)
# n/d
def calcDodgeChance(dexterity):
    """
    Dodge chance from dexterity, capped at 75%.

    The curve is the same guess as the crit-chance formula (the original
    author marked it "n/d" — not documented on the wiki).
    """
    raw = (0.002002 - 0.000002 * dexterity) * dexterity
    return min(raw, 0.75)
#n/d
def calcFumblePen(dexterity):
    """Fumble penalty — formula unknown (marked n/d in the source); always 0 for now."""
    return 0
def calcEleBonus(focus):
    """Elemental (magic) damage bonus: 0.5% per point of focus."""
    BONUS_PER_POINT = 0.005
    return BONUS_PER_POINT * focus
#n/d. Cap?
def calcExecuteChance(focus):
    """
    Chance to attack with two weapons at the same time.

    9.8% base plus a quadratic focus term; no cap is applied (the original
    author was unsure whether one exists).
    """
    base = 0.098
    return base + (0.002002 - 0.000002 * focus) * focus
def calcArmorBonus(vitality):
    """Armor bonus: 0.25% per point of vitality."""
    BONUS_PER_POINT = 0.0025
    return BONUS_PER_POINT * vitality
def calcBlockChance(vitality):
    """Shield block chance from vitality (quadratic curve), capped at 75%."""
    raw = (0.002002 - 0.000002 * vitality) * vitality
    return min(raw, 0.75)
# Maps each displayed stat name to the function that derives it from its
# governing attribute (strength, dexterity, focus or vitality).
# Used both for the interactive menu and for reverse lookups in calcForValue.
statFunctions = {
    "Weapon Bonus": calcWeaponBonus,
    "Critical Hit Multiplier": calcCritMult,
    "Critical Hit Chance": calcCritChance,
    "Dodge Chance": calcDodgeChance,
    "Fumble Penalty": calcFumblePen,
    "Elemental Damage Bonus": calcEleBonus,
    "Execute Chance": calcExecuteChance,
    "Armor Bonus": calcArmorBonus,
    "Block Chance": calcBlockChance
}
def calcForValue(functionName, value):
    """
    Calculate how many points must be invested in the governing attribute
    before the stat named functionName reaches value, print a summary,
    and return the point count (the search is capped at 1000 points).
    """
    statFunc = statFunctions[functionName]
    statPoints = 0
    # Linear search: stat curves are monotonic in practice, so the first
    # point count reaching the target is the answer.
    while statFunc(statPoints) < value and statPoints < 1000:
        statPoints += 1
    print('\n')
    print("In order to have a " + functionName)
    print("value of " + perc(value))
    print("you would have to invest " + str(statPoints))
    print("into its respective stat.")
    print('\n')
    return statPoints
def askStatAndValue():
    """
    Interactively prompt for a stat (chosen by menu number) and a desired
    percentage, then report the required attribute investment via
    calcForValue.
    """
    statList = list(statFunctions.keys())

    # Re-print the menu until a valid index is entered.
    inpStat = -1
    while not (0 <= inpStat < len(statList)):
        for i, statName in enumerate(statList):
            print("#" + str(i) + ": " + statName)
        response = raw_input("Enter the number next to the stat you want to calculate:")
        try:
            inpStat = int(float(response))
        except:
            inpStat = -1

    # Re-prompt until a non-negative whole percentage is entered.
    inpVal = -1
    while inpVal < 0:
        response = raw_input("Enter the desired value for that stat: (for example, use 50 for 50%)")
        try:
            inpVal = int(float(response))
        except:
            inpVal = -1

    calcForValue(statList[inpStat], float(inpVal) / 100)
def askAndCalcStats():
    """Prompt for the four attributes and print every stat derived from them."""
    statNames = ("strength", "dexterity", "focus", "vitality")
    values = [float(raw_input("Enter " + name + ":")) for name in statNames]
    displayAllStats(*values)
#n/d
def displayAllStats(s, d, f, v):
    """
    Print the given attribute values (strength, dexterity, focus, vitality)
    followed by every derived stat, one per line.
    """
    lines = [
        "Given stats:",
        "Strength: " + str(s),
        "Dexterity: " + str(d),
        "Focus: " + str(f),
        "Vitality: " + str(v),
        " ",
        "Weapon damage bonus: " + perc(calcWeaponBonus(s)),
        "Critical hit multiplier: " + perc(calcCritMult(s)),
        "Critical hit chance: " + perc(calcCritChance(d)),
        "Dodge chance: " + perc(calcDodgeChance(d)),
        "Fumble penalty: " + "TODO",
        "Magic damage bonus: " + perc(calcEleBonus(f)),
        "Execute chance: " + perc(calcExecuteChance(f)),
        "Armor bonus: " + perc(calcArmorBonus(v)),
        "Block chance: " + perc(calcBlockChance(v)),
    ]
    for outputLine in lines:
        print(outputLine)
def run():
    """
    Top-level menu loop.

    Repeats until the user enters -1. Option 0 computes stats from entered
    attribute values; option 1 finds the attribute investment needed for a
    target stat value.
    """
    while True:
        print("OPTIONS:")
        print("Enter -1 to quit")
        print("Enter 0 to calculate stats based on attribute values")
        print("Enter 1 to calculate attribute values needed for a stat value")
        inp = raw_input("Type your answer and press enter/return")
        if inp == "-1":
            # Bug fix: raw_input returns a string, so the original loop
            # condition `inp != -1` (int) was never false — entering -1 fell
            # through to the error branch instead of quitting cleanly.
            break
        elif inp == "0":
            askAndCalcStats()
        elif inp == "1":
            askStatAndValue()
        else:
            # Bug fix: unrecognised input now re-prompts instead of
            # silently terminating the program.
            print("GLEHH!")
run()
|
v-yves-es/jdpe2
|
src/main/java/chapter10/AbstractVehicleOption.java
|
/*
* Java Design Pattern Essentials - Second Edition, by <NAME>
* Copyright 2012, Ability First Limited
*
* This source code is provided to accompany the book and is provided AS-IS without warranty of any kind.
* It is intended for educational and illustrative purposes only, and may not be re-published
* without the express written permission of the publisher.
*/
package chapter10;
/**
 * Decorator base class for optional vehicle extras.
 *
 * Wraps another {@link Vehicle} and inherits its engine and colour, so a
 * concrete option only needs to add its own behaviour on top of the
 * vehicle it decorates.
 */
public abstract class AbstractVehicleOption extends AbstractVehicle {
    /** The vehicle instance this option wraps. */
    protected Vehicle decoratedVehicle;

    public AbstractVehicleOption(Vehicle vehicle) {
        super(vehicle.getEngine(), vehicle.getColour());
        this.decoratedVehicle = vehicle;
    }
}
|
galin-kostadinov/Software-Engineering
|
C++/Programming Basics with C++/13. Exercise - Nested loops/simple_task/NumberPyramid.cpp
|
<gh_stars>1-10
#include <iostream>
using namespace std;
int main() {
    int n;
    std::cin >> n;

    // Print the numbers 1..n as a pyramid: row r holds up to r numbers,
    // stopping mid-row as soon as n values have been written.
    int printed = 0;
    for (int row = 1; printed < n; ++row) {
        for (int col = 1; col <= row && printed < n; ++col) {
            std::cout << ++printed << " ";
        }
        std::cout << std::endl;
    }
    return 0;
}
|
skystebnicki/chameleon
|
test/unit/icons/font/SubArrowLefticon.test.js
|
import React from 'react';
import ReactShallowRenderer from 'react-test-renderer/shallow';
import SubArrowLeftIcon from 'chamel/icons/font/SubArrowLeftIcon';
/**
* Test rendering the SubArrowLefticon
*/
describe("SubArrowLeftIcon Component", () => {
// Basic validation that render works in edit mode and returns children
it("Should render", () => {
const renderer = new ReactShallowRenderer();
const renderedDocument = renderer.render(
<SubArrowLeftIcon/>
);
expect(renderedDocument.props.children).toBe('subdirectory_arrow_left');
});
});
|
SoldierAb/k-view-next
|
build/webpack.dev.js
|
const path = require('path')
const { merge } = require('webpack-merge')
const baseConfig = require('./webpack.base')

// Dev build: extend the shared base config with the dev-site entry point.
const devConfig = {
  entry: {
    main: path.resolve(__dirname, '../site/pages/dev/main.js')
  }
}

module.exports = merge(baseConfig, devConfig)
|
jsalt2019-diadet/hyperion
|
hyperion/bin/segments-to-bin-vad.py
|
#!/usr/bin/env python
# Copyright 2019 Johns Hopkins University (Author: <NAME>)
# Apache 2.0.
#
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from six.moves import xrange
import sys
import os
import argparse
import time
import logging
import numpy as np
import pandas as pd
from hyperion.hyp_defs import config_logger
from hyperion.utils import SegmentList
from hyperion.io import DataWriterFactory as DWF
def segments_to_bin_vad(segments_file, num_frames_file, frame_shift, output_path, part_idx, num_parts):
    """Convert a Kaldi segments file into per-recording binary VAD vectors.

    Args:
        segments_file: Kaldi-format segments file.
        num_frames_file: optional two-column file mapping file_id to the
            number of frames in its feature matrix; None to infer lengths.
        frame_shift: frame shift of the feature matrix in ms.
        output_path: wspecifier for the binary VAD writer.
        part_idx: 1-based index of the list partition to process.
        num_parts: number of partitions to split the segment list into.
    """
    utt2num_frames = None
    if num_frames_file is not None:
        # raw string: '\s' in a plain literal is an invalid escape in py3.
        utt2num_frames = pd.read_csv(num_frames_file, sep=r'\s+', header=None,
                                     names=['file_id', 'num_frames'], index_col=0)

    segments = SegmentList.load(segments_file)
    if num_parts > 1:
        segments = segments.split(part_idx, num_parts)

    with DWF.create(output_path) as writer:
        for file_id in segments.uniq_file_id:
            logging.info('processing VAD for %s' % (file_id))
            num_frames = None
            if utt2num_frames is not None:
                num_frames = int(utt2num_frames.loc[file_id]['num_frames'])
            vad = segments.to_bin_vad(file_id, frame_shift=frame_shift,
                                      num_frames=num_frames)
            num_speech_frames = np.sum(vad)
            # Bug fix: when --num-frames is not given, num_frames is None and
            # the original percentage computation raised TypeError; fall back
            # to the length of the VAD vector actually produced.
            total_frames = num_frames if num_frames is not None else len(vad)
            logging.info('for %s detected %d/%d (%.2f %%) speech frames' % (
                file_id, num_speech_frames, total_frames,
                num_speech_frames / total_frames * 100))
            writer.write(file_id, vad)
if __name__ == "__main__":
    # Command-line front end for segments_to_bin_vad.
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        fromfile_prefix_chars='@',
        description='Segments file to binary vad')

    parser.add_argument('--segments', dest='segments_file', required=True,
                        help='kaldi format segments file')
    parser.add_argument('--num-frames', dest='num_frames_file', default=None,
                        help='num. frames in feature matrix')
    parser.add_argument('--frame-shift', dest='frame_shift', default=10, type=float,
                        help='frame shift of feature matrix in ms.')
    parser.add_argument('--output-path', dest='output_path', required=True,
                        help='wspecifier for binary vad file')
    parser.add_argument('--part-idx', dest='part_idx', type=int, default=1,
                        help='splits the list of files in num-parts and process part_idx')
    parser.add_argument('--num-parts', dest='num_parts', type=int, default=1,
                        help='splits the list of files in num-parts and process part_idx')
    parser.add_argument('-v', '--verbose', dest='verbose', default=1,
                        choices=[0, 1, 2, 3], type=int,
                        help='Verbose level')

    args = parser.parse_args()
    config_logger(args.verbose)
    # verbose configures logging only; drop it so the remaining namespace
    # matches segments_to_bin_vad's signature exactly.
    del args.verbose
    logging.debug(args)

    segments_to_bin_vad(**vars(args))
|
VincentWei/mgallery
|
src/ebook_display.c
|
<reponame>VincentWei/mgallery
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <string.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <assert.h>
#include <minigui/common.h>
#include <minigui/minigui.h>
#include <minigui/gdi.h>
#include <minigui/window.h>
#include <minigui/control.h>
#include <minigui/mgext.h>
#include <minigui/mywindows.h>
#include <minigui/filedlg.h>
#include "../include/pmp.h"
#include "../include/text_id.h"
#include "ebook_scrolled.h"
#include "ebook_text.h"
#include "ebook_scrollview_impl.h"
#include "ebook_textedit_impl.h"
#include "ebook_listmodel.h"
/* Timer id used for automatic page turning. */
#define ID_TIMER 100
/* Shorthand for the scrolled-window data embedded in a text-edit PTEDATA. */
#define ptescr (&ptedata->svdata.scrdata)
/* Base id for this app's menu items (added to menu_item_t values). */
#define MIID_BASE_EBOOK 1

/* Option-menu item indices, offset by MIID_BASE_EBOOK. */
typedef enum
{
    AUTO_PAGE = 0,
    PAGE_TIME,
    MARK_TAG,
}menu_item_t;

/* Reader state: whether pages turn automatically, and the interval
 * (presumably in seconds — the timer is armed with 100 * page_time ticks). */
typedef struct{
    BOOL auto_page;
    int page_time;
}book_status_t;

extern struct msg_domain *pmp_domains[];
/* Text/bookmark backend (implemented elsewhere in the project):
 * opens a file, then serves it page by page and records a bookmark line. */
extern int pmp_ebook_start_read(char* file_name, LOGFONT* font, int* line);
extern void pmp_ebook_end_read(void);
extern char* pmp_ebook_pre_read(void);
extern char* pmp_ebook_next_read(void);
extern char* pmp_ebook_cur_read(void);
extern int parse_file_to_blk(LOGFONT* font);
extern void pmp_ebook_tag(int line);
//extern int select_page_time(HWND h_book);
//static BOOL open_file(HWND hMLEditWnd, char* filefullname);

/* Scroll-position helpers defined below. */
BOOL is_scroll_top(HWND hwnd);
BOOL is_scroll_bottom(HWND hwnd);
void set_scroll_bottom(HWND hMLEditWnd);
static BOOL have_scoll_bar(HWND hwnd);
static int pmp_edit_proc(HWND hwnd, int message, WPARAM wparam, LPARAM lparam);
static void _ebook_show_menu(HWND parent, HWND hMLEditWnd, book_status_t* book_status);
static PMP_MEDIA_FILE_LIST* _ebook_show_files(HWND hwnd);

/* Bookmark line restored when a file is reopened. */
static int line;
/* Original window procedure of the embedded edit control (see pmp_edit_proc). */
static WNDPROC old_edit_proc;
/* Full path of the currently opened text file. */
static char fullname[PATH_MAX];
/* Default reader state: auto-page off, 5 ticks per page. */
static book_status_t book_status =
{
    FALSE,
    5,
};
/*
 * Main window procedure of the e-book reader.
 *
 * MSG_CREATE: lets the user pick a .txt file, creates a read-only
 * multi-line edit control, loads the current page through the pmp_ebook_*
 * backend and restores the bookmark position.  Arrow keys scroll within
 * the page and flip to the previous/next page at the edges; F1 opens the
 * options menu; MSG_TIMER drives automatic page turning; MSG_CLOSE
 * releases everything.
 */
static int ebookWinProc(HWND hwnd, int message, WPARAM wparam, LPARAM lparam)
{
    static HWND hMLEditWnd;            /* embedded text control */
    static char* buffer;               /* text of the page being shown */
#if 0
    static PLOGFONT font_gb;
    static PLOGFONT font_utf8;
    static PLOGFONT font_big5;
    static PLOGFONT font_arry[3];
    static int font_flag;
#endif
    static int close_stat = 0;         /* non-zero: closing before a book was opened */
    static PMP_MEDIA_FILE_LIST* file_list = NULL;

    switch (message)
    {
    case MSG_CREATE:
        pmp_app_notify_opened(PMP_APP_EBOOK, hwnd);
        /* Ask the user for a file; close immediately if nothing was chosen. */
        file_list = _ebook_show_files(hwnd);
        if (file_list == NULL)
        {
            SendMessage(hwnd, MSG_CLOSE, 0, 0);
            return 0;
        }
        /*
        while (file_list->file_names == NULL
                ||*(file_list->file_names) == '\0')
        {
            PMP_RELEASE_MEDIA_FILE_LIST(file_list);
            file_list = _ebook_show_files(hwnd);
        }
        */
        /* Build the full path from directory + first selected name.
         * NOTE(review): no bounds check against PATH_MAX here. */
        strcpy(fullname, file_list->dir);
        strcat(fullname, file_list->file_names);
        printf("%s\n", fullname);
        hMLEditWnd = CreateWindowEx (CTRL_MEDIT, "",
                // WS_CHILD|ES_BASELINE|WS_VSCROLL|ES_AUTOWRAP|ES_READONLY,
                WS_BORDER|WS_CHILD|WS_VISIBLE|ES_BASELINE|WS_VSCROLL|ES_AUTOWRAP|ES_READONLY,
                0,
                0,
                0, 0, 320, 218,
                hwnd, 0);
        /* Hook the edit control so pmp_edit_proc can double-buffer its
         * painting; the original procedure is kept in old_edit_proc. */
        old_edit_proc = SetWindowCallbackProc(hMLEditWnd, pmp_edit_proc);
        SendMessage(hMLEditWnd, EM_SETLINEHEIGHT, 20, 0);
#if 0
        font_utf8 = GetSystemFont(SYSLOGFONT_WCHAR_DEF);
        font_gb = CreateLogFont (NULL, "song", "GB2312",
                FONT_WEIGHT_REGULAR, FONT_SLANT_ROMAN,
                FONT_FLIP_NIL, FONT_OTHER_NIL,
                FONT_UNDERLINE_NONE, FONT_STRUCKOUT_NONE,
                4, 0);
        font_utf8 = CreateLogFont (NULL, "song", "utf-8",
                FONT_WEIGHT_REGULAR, FONT_SLANT_ROMAN,
                FONT_FLIP_NIL, FONT_OTHER_NIL,
                FONT_UNDERLINE_NONE, FONT_STRUCKOUT_NONE,
                4, 0);
        font_big5 = CreateLogFont (NULL, "song", "big5",
                FONT_WEIGHT_REGULAR, FONT_SLANT_ROMAN,
                FONT_FLIP_NIL, FONT_OTHER_NIL,
                FONT_UNDERLINE_NONE, FONT_STRUCKOUT_NONE,
                4, 0);
        font_arry[0]=font_gb;
        font_arry[1]=font_big5;
        font_arry[2]=font_utf8;
        font_flag = 0;
        SetWindowFont(hMLEditWnd, font_arry[font_flag]);
#endif
        /* Open the book; `line` receives the saved bookmark position. */
        if (pmp_ebook_start_read(fullname, GetSystemFont(SYSLOGFONT_WCHAR_DEF), &line) == -1)
        {
            close_stat = 1;
            pmp_show_info(hwnd, _(IDS_PMP_EBOOK_ERROR), _(IDS_PMP_EBOOK_ERRORINFO),MB_ICONHAND);
            printf("pmp_ebook_start_read error\n");
            SendMessage(hwnd, MSG_CLOSE, 0, 0);
            return 0;
        }
        if ((buffer = pmp_ebook_cur_read()) == NULL)
        {
            close_stat = 1;
            printf("pmp_ebook_next_read error\n");
            SendMessage(hwnd, MSG_CLOSE, 0, 0);
            return 0;
        }
        /* NOTE(review): casting a pointer through UINT truncates on LP64. */
        SendMessage(hMLEditWnd, MSG_SETTEXT, 0, (UINT)buffer);
        EnableScrollBar(hMLEditWnd, SB_VERT, TRUE);
        /* Restore the bookmark by scrolling to the saved content offset. */
        if (have_scoll_bar(hMLEditWnd))
        {
            int svm_x = SendMessage(hMLEditWnd, SVM_GETCONTENTX, 0, 0);
            SendNotifyMessage(hMLEditWnd, SVM_SETCONTPOS, svm_x, line);
        }
        printf("\n\n");
        break;

    case MSG_SETFOCUS:
        pmp_set_captionbar_title(_(IDS_PMP_EBOOK_TITLE)/*"ebook"*/);
        break;

    case MSG_KEYDOWN:
        if (file_list == NULL) return 0;
        switch(wparam)
        {
        /* Up: scroll one line, or flip to the previous page when already
         * at the top (then land at that page's bottom). */
        case SCANCODE_CURSORBLOCKUP:
            if (!have_scoll_bar(hMLEditWnd)
                    ||is_scroll_top(hMLEditWnd))
            {
#ifdef DEBUG
                printf("scroll to top\n");
#endif
                buffer = pmp_ebook_pre_read();
                SendMessage(hMLEditWnd, MSG_SETTEXT, 0, (UINT)buffer);
                set_scroll_bottom(hMLEditWnd);
            }
            else
            {
                SendMessage(hMLEditWnd, MSG_VSCROLL, SB_LINEUP, 0);
            }
            return 0;
            break;
        /* Down: scroll one line, or flip to the next page at the bottom. */
        case SCANCODE_CURSORBLOCKDOWN:
            if (!have_scoll_bar(hMLEditWnd)
                    ||is_scroll_bottom(hMLEditWnd))
            {
#ifdef DEBUG
                printf("scroll to bottom\n");
#endif
                buffer = pmp_ebook_next_read();
                SendMessage(hMLEditWnd, MSG_SETTEXT, 0, (UINT)buffer);
            }
            else
            {
                SendMessage(hMLEditWnd, MSG_VSCROLL, SB_LINEDOWN, 0);
            }
            return 0;
            break;
        /* Left: like Up, but scrolls a whole screen at a time. */
        case SCANCODE_CURSORBLOCKLEFT:
            if (!have_scoll_bar(hMLEditWnd)
                    ||is_scroll_top(hMLEditWnd))
            {
#ifdef DEBUG
                printf("scroll to top\n");
#endif
                buffer = pmp_ebook_pre_read();
                SendMessage(hMLEditWnd, MSG_SETTEXT, 0, (UINT)buffer);
                set_scroll_bottom(hMLEditWnd);
            }
            else
            {
                SendMessage(hMLEditWnd, MSG_VSCROLL, SB_PAGEUP, 0);
            }
            return 0;
            break;
        /* Right: like Down, but scrolls a whole screen at a time; also the
         * action simulated by the auto-page timer below. */
        case SCANCODE_CURSORBLOCKRIGHT:
            if (!have_scoll_bar(hMLEditWnd)
                    ||is_scroll_bottom(hMLEditWnd))
            {
#ifdef DEBUG
                printf("scroll to top\n");
#endif
                buffer = pmp_ebook_next_read();
                SendMessage(hMLEditWnd, MSG_SETTEXT, 0, (UINT)buffer);
            }
            else
            {
                SendMessage(hMLEditWnd, MSG_VSCROLL, SB_PAGEDOWN, 0);
            }
            return 0;
            break;
        case SCANCODE_F1:
            {
                _ebook_show_menu(hwnd, hMLEditWnd, &book_status);
            }
            break;
        case SCANCODE_ESCAPE:
            PostMessage(hwnd, MSG_CLOSE, 0, 0);
            return 0;
            break;
        default:
            break;
        }
        return 0;
        break; //MSG_KEYDOWN

    case MSG_TIMER:
        /* Auto-page: behave exactly like pressing the Right arrow. */
        if (wparam == ID_TIMER)
        {
            SendMessage(hwnd, MSG_KEYDOWN, SCANCODE_CURSORBLOCKRIGHT, 0);
        }
        break;

    case MSG_ERASEBKGND:
        /* Children repaint the whole area; skip background erase to avoid flicker. */
        return 0;

    case MSG_CLOSE:
        {
            printf("get MSG_CLOSE+++++++++++++++\n");
            if (file_list)
            {
                PMP_RELEASE_MEDIA_FILE_LIST (file_list);
            }
            /* Only close the backend if a book was actually opened. */
            if (close_stat == 0)
            {
                pmp_ebook_end_read();
            }
            if (IsTimerInstalled(hwnd, ID_TIMER))
            {
                KillTimer(hwnd, ID_TIMER);
            }
            book_status.auto_page = FALSE;
            DestroyAllControls(hwnd);
            DestroyMainWindow(hwnd);
            MainWindowThreadCleanup(hwnd);
            pmp_app_notify_closed(PMP_APP_EBOOK);
            return 0;
        }
        break;
    }
    /* NOTE(review): clears the window's additional data on every message
     * before delegating — intent unclear, verify against pmp_def_wnd_proc. */
    SetWindowAdditionalData2(hwnd, 0);
    return pmp_def_wnd_proc(hwnd, message, wparam, lparam);
}
/*
 * Fill in the MAINWINCREATE template for the reader's main window:
 * a 320x220 bordered window below the caption bar (y = 20..240), using
 * ebookWinProc, hosted by h_host.  add_date (presumably "add_data") is
 * stored as the window's additional data.
 */
static void InitebookInfo (PMAINWINCREATE pCreateInfo, UINT add_date, HWND h_host)
{
    pCreateInfo->dwStyle = WS_BORDER | WS_VISIBLE;
    pCreateInfo->dwExStyle = 0;
    pCreateInfo->spCaption = "";
    pCreateInfo->hMenu = 0;
    pCreateInfo->hCursor = GetSystemCursor(0);
    pCreateInfo->hIcon = 0;
    pCreateInfo->MainWindowProc = ebookWinProc;
    pCreateInfo->lx = 0;
    pCreateInfo->ty = 20;
    pCreateInfo->rx = 320;
    pCreateInfo->by = 240;
    pCreateInfo->iBkColor = COLOR_lightgray;
    pCreateInfo->dwAddData = add_date;
    pCreateInfo->hHosting = h_host;
}
/* Window-creation template, filled by InitebookInfo() just before use. */
static MAINWINCREATE create_info;

/*
 * Public entry point: create the e-book reader main window hosted by
 * h_host.  The static `fullname` buffer is passed as additional data.
 */
void ex_ebook_display_book(HWND h_host)
{
    HWND hwnd;  /* NOTE(review): creation result is never checked or used */
    InitebookInfo(&create_info, (UINT)fullname, h_host);
    hwnd = CreateMainWindow(&create_info);
}
/* Last vertical scroll position seen; shared with is_scroll_bottom() so it
 * can detect a scroll request that made no progress. */
static int nPos_old = -1;

/*
 * Return TRUE when the vertical scroll position of hwnd is at the very top.
 * Also records the position in nPos_old for is_scroll_bottom().
 */
BOOL is_scroll_top(HWND hwnd)
{
    int pos;

    GetScrollPos(hwnd, SB_VERT, &pos);
#if 0 //def DEBUG
    printf("sv_info = %d\n", pos);
#endif
    nPos_old = pos;
    return (pos == 0);
}
/*
 * Heuristic bottom-of-content test: TRUE when the vertical scroll position
 * is non-zero and unchanged since the last query, i.e. the previous scroll
 * request made no progress.  Relies on the shared static nPos_old (also
 * written by is_scroll_top), so it only works in the alternating
 * query/scroll pattern used by ebookWinProc's key handling.
 */
BOOL is_scroll_bottom(HWND hwnd)
{
    int nPos;
    GetScrollPos(hwnd, SB_VERT, &nPos);
#if 0 //def DEBUG
    printf("sv_info = %d\n", nPos);
#endif
    if (nPos == nPos_old && nPos != 0)
    {
        return TRUE;
    }
    else
    {
        /* Not at the bottom yet: remember this position for the next call. */
        nPos_old = nPos;
        return FALSE;
    }
}
/*
 * Scroll the edit/scrollview window to its last item, i.e. to the bottom
 * of the current page (used after flipping back a page).
 */
void set_scroll_bottom(HWND hwnd)
{
    int item_count = SendMessage(hwnd, SVM_GETITEMCOUNT, 0, 0);
#if 0 //def DEBUG
    printf("the count of edit is %d\n", item_count);
#endif
    SendMessage(hwnd, SVM_SHOWITEM, item_count - 1, 0);
}
/*
 * TRUE when the window's content is taller than its visible area, i.e.
 * when a vertical scroll bar is actually needed.
 */
static BOOL have_scoll_bar(HWND hwnd)
{
    int content_h = SendMessage(hwnd, SVM_GETCONTHEIGHT, 0, 0);
    int visible_h = SendMessage(hwnd, SVM_GETVISIBLEHEIGHT, 0, 0);

    return (visible_h < content_h) ? TRUE : FALSE;
}
/*
 * Draw the horizontal baseline rules of the text edit (ES_BASELINE style):
 * one dotted line per text line across rcDraw, with the first line
 * indented by the control's title indent.
 */
static void tePaint(HWND hwnd, HDC hdc, RECT *rcDraw)
{
    RECT *rc = rcDraw;
    PTEDATA ptedata;
    int h, indent = 0;
    ptedata = (PTEDATA)GetWindowAdditionalData2(hwnd);
    /* Baselines sit one pixel above each line boundary. */
    h = ptedata->nLineHeight - 1;
    if (GetWindowStyle(hwnd) & ES_BASELINE) {
        SetPenColor (hdc, GetWindowElementColorEx (hwnd, FGC_CONTROL_NORMAL));
        while (h < RECTHP(rc)) {
            /* Only the first rule gets the title indent. */
            indent = (h == ptedata->nLineHeight - 1) ? ptedata->titleIndent : 0;
            DrawHDotLine (hdc, rc->left + indent, rc->top+h, RECTWP(rc)-indent);
            h += ptedata->nLineHeight;
        }
    }
}
/*
 * Paint every visible item of a scrollview: walk the item list, advance a
 * running rectangle by each item's height, and invoke the list's drawItem
 * callback for items intersecting the visible rectangle.
 * (psvscr is presumably a macro over psvdata's scrolled-window data,
 * defined in the included scrollview headers.)
 */
void scrollview_draw (HWND hwnd, HDC hdc, PSVDATA psvdata)
{
    list_t *me;
    PSVITEMDATA pci;
    RECT rcDraw;
    int h = 0;                 /* height of the previously processed item */
    RECT rcVis;
    PSVLIST psvlist = &psvdata->svlist;
    /* Start from the full content rectangle, mapped to window coords. */
    rcDraw.left = 0;
    rcDraw.top = 0;
    rcDraw.right = psvscr->nContWidth;
    rcDraw.bottom = psvscr->nContHeight;
    scrolled_content_to_window (psvscr, &rcDraw.left, &rcDraw.top);
    scrolled_content_to_window (psvscr, &rcDraw.right, &rcDraw.bottom);
    /* Clip all drawing to the visible portion. */
    scrolled_get_visible_rect (psvscr, &rcVis);
    ClipRectIntersect (hdc, &rcVis);
    mglist_for_each (me, psvlist) {
        pci = (PSVITEMDATA)mglist_entry (me);
        /* Slide the rectangle down past the previous item. */
        rcDraw.top += h;
        rcDraw.bottom = rcDraw.top + pci->nItemHeight;
        if (rcDraw.bottom < rcVis.top) {
            /* Item entirely above the viewport: skip it. */
            h = pci->nItemHeight;
            continue;
        }
        if (rcDraw.top > rcVis.bottom)
            break;              /* everything further down is off-screen */
        if (((MgList *)psvlist)->iop.drawItem && pci->nItemHeight > 0) {
            ((MgList *)psvlist)->iop.drawItem (hwnd, (HSVITEM)pci, hdc, &rcDraw);
        }
        h = pci->nItemHeight;
    }
}
/*
 * Replacement window procedure for the embedded edit control: paints the
 * control into an off-screen memory DC (baseline rules + scrollview items)
 * and blits the result in one step to avoid flicker.  All other messages
 * are forwarded to the control's original procedure (old_edit_proc).
 */
static int pmp_edit_proc(HWND hwnd, int message, WPARAM wparam, LPARAM lparam)
{
    if (message == MSG_PAINT)
    {
        RECT rcDraw, rcVis;
        PTEDATA ptedata = NULL;
        HDC hdc = BeginPaint (hwnd);
        HDC mem_dc = CreateCompatibleDC(hdc);
        /* Clear the back buffer to the page background colour. */
        SetBrushColor(mem_dc, COLOR_lightwhite);
        FillBox(mem_dc, 0, 0, 320, 220);
        ptedata = (PTEDATA) GetWindowAdditionalData2 (hwnd);
        scrolled_get_visible_rect (ptescr, &rcVis);
        ClipRectIntersect (mem_dc, &rcVis);
        scrolled_get_content_rect (ptescr, &rcDraw);
        scrolled_visible_to_window (ptescr, &rcDraw.left, &rcDraw.top);
        scrolled_visible_to_window (ptescr, &rcDraw.right, &rcDraw.bottom);
        /* Baseline rules first, then the text items on top. */
        tePaint (hwnd, mem_dc, &rcDraw);
        scrollview_draw (hwnd, mem_dc, &ptedata->svdata);
        /* Consume pending focus-change flags without extra redraw work. */
        if ((ptedata->ex_flags & TEST_EX_SETFOCUS)){
            ptedata->ex_flags &= ~TEST_EX_SETFOCUS;
        }
        else if (ptedata->ex_flags & TEST_EX_KILLFOCUS) {
            ptedata->ex_flags &= ~TEST_EX_KILLFOCUS;
        }
        /* Blit the back buffer to the screen (w/h of 0 presumably means
         * "whole DC" in MiniGUI's BitBlt — verify against the API docs). */
        BitBlt(mem_dc, 0, 0 ,0, 0, hdc, 0, 0, 0);
        DeleteMemDC(mem_dc);
        EndPaint (hwnd, hdc);
        return 0;
    }
    else if (message == MSG_ERASEBKGND)
    {
        /* The back buffer repaints everything; skip background erase. */
        return 0;
    }
    return old_edit_proc(hwnd, message, wparam, lparam);
}
/* String-resource ids for the auto-page menu label, indexed by the
 * BOOL auto_page flag (FALSE -> OFF, TRUE -> ON). */
static int auto_page_str[] = {
    IDS_PMP_EBOOK_OFF,
    IDS_PMP_EBOOK_ON,
}; /*N_("OFF"), N_("ON") };*/
static int _ebook_autopage_callback (void* context, int id, int op, char *text_buff)
{
book_status_t* book_status = (book_status_t*)context;
switch (op) {
case PMP_MI_OP_DEF:
sprintf(text_buff, _(IDS_PMP_EBOOK_AUTOPAGE)/*"auto page\t%s"*/, _(auto_page_str[book_status->auto_page]));
return PMP_MI_ST_UPDATED;
case PMP_MI_OP_PREV:
book_status->auto_page = (book_status->auto_page + 2 - 1) % 2;
sprintf(text_buff, _(IDS_PMP_EBOOK_AUTOPAGE)/*"auto page\t%s"*/, _(auto_page_str[book_status->auto_page]));
return PMP_MI_ST_UPDATED;
case PMP_MI_OP_NEXT:
book_status->auto_page = (book_status->auto_page + 1) % 2;
sprintf(text_buff, _(IDS_PMP_EBOOK_AUTOPAGE)/*"auto page\t%s"*/, _(auto_page_str[book_status->auto_page]));
return PMP_MI_ST_UPDATED;
case PMP_MI_OP_ENTER:
return PMP_MI_ST_SELECTED;
}
return PMP_MI_ST_NONE;
}
static int _ebook_pagetime_callback(void* context, int id, int op, char* text_buff)
{
book_status_t* book_status = (book_status_t*)context;
switch (op) {
case PMP_MI_OP_DEF:
sprintf(text_buff, _(IDS_PMP_EBOOK_PAGETIME)/*"page time\t%ds"*/, book_status->page_time);
return PMP_MI_ST_UPDATED;
case PMP_MI_OP_PREV:
if (book_status->page_time > 5)
book_status->page_time--;
sprintf(text_buff, _(IDS_PMP_EBOOK_PAGETIME)/*"page time\t%ds"*/, book_status->page_time);
return PMP_MI_ST_UPDATED;
case PMP_MI_OP_NEXT:
if (book_status->page_time < 20)
book_status->page_time++;
sprintf(text_buff, _(IDS_PMP_EBOOK_PAGETIME)/*"page time\t%ds"*/, book_status->page_time);
return PMP_MI_ST_UPDATED;
case PMP_MI_OP_ENTER:
return PMP_MI_ST_SELECTED;
}
return PMP_MI_ST_NONE;
}
/*
 * Menu callback for "mark tag": a plain action item with no adjustable
 * state — DEF sets its label, ENTER selects it, anything else is ignored.
 */
static int _ebook_marktag_callback(void* context, int id, int op, char* text_buff)
{
    if (op == PMP_MI_OP_DEF) {
        strcpy(text_buff, _(IDS_PMP_EBOOK_MARKTAG)); /*mark tag*/
        return PMP_MI_ST_UPDATED;
    }
    if (op == PMP_MI_OP_ENTER)
        return PMP_MI_ST_SELECTED;
    return PMP_MI_ST_NONE;
}
/* Option-menu table: item id (menu_item_t + MIID_BASE_EBOOK) and its
 * callback; terminated by a {0, NULL} sentinel. */
static PMP_MENU_ITEM __ebook_menu_items[] =
{
    {AUTO_PAGE + MIID_BASE_EBOOK, _ebook_autopage_callback},
    {PAGE_TIME + MIID_BASE_EBOOK, _ebook_pagetime_callback},
    {MARK_TAG + MIID_BASE_EBOOK, _ebook_marktag_callback},
    {0, NULL},
};
/*
 * Show the option menu and apply the chosen change.
 *
 * The menu edits a local copy (menu_context) so that cancelling leaves
 * book_status untouched; only the setting belonging to the item that was
 * actually selected is copied back.  AUTO_PAGE installs/removes the
 * page-turn timer; PAGE_TIME re-arms it with the new interval; MARK_TAG
 * stores the current scroll offset as a bookmark.
 */
static void _ebook_show_menu(HWND parent, HWND hMLEditWnd, book_status_t* book_status)
{
    int i;
    int line;
    book_status_t menu_context = *book_status;
    i = pmp_show_menu(parent, _(IDS_PMP_EBOOK_OPTION)/*"ebook options"*/, __ebook_menu_items, &menu_context);
    /* Convert the returned menu id back to a menu_item_t index. */
    i -= MIID_BASE_EBOOK;
    switch (i)
    {
    case AUTO_PAGE:
        book_status->auto_page = menu_context.auto_page;
        if (book_status->auto_page == TRUE)
        {
            if (!IsTimerInstalled(parent, ID_TIMER))
            {
                /* Timer interval is in 10ms ticks: 100 * seconds. */
                SetTimer(parent, ID_TIMER, 100*(book_status->page_time));
            }
        }
        else
        {
            if (IsTimerInstalled(parent, ID_TIMER))
            {
                KillTimer(parent, ID_TIMER);
            }
        }
        break;
    case PAGE_TIME:
        book_status->page_time = menu_context.page_time;
        if (book_status->auto_page == TRUE)
        {
            /* Re-arm (or newly arm) the timer with the updated interval. */
            if (IsTimerInstalled(parent, ID_TIMER))
            {
                ResetTimer(parent, ID_TIMER, 100*(book_status->page_time));
            }
            else
            {
                SetTimer(parent, ID_TIMER, 100*(book_status->page_time));
            }
        }
        else
        {
            if (IsTimerInstalled(parent, ID_TIMER))
            {
                KillTimer(parent, ID_TIMER);
            }
        }
        break;
    case MARK_TAG:
        /* Bookmark the current vertical content offset. */
        line = SendMessage(hMLEditWnd, SVM_GETCONTENTY, 0, 0);
        printf("ebook--to mark a tag\n");
        pmp_ebook_tag(line);
        break;
    }
}
/*
 * File filter for the media browser: accept only file names whose last
 * extension is ".txt" (case-insensitive).  `path` is unused but required
 * by the filter-callback signature.
 */
static BOOL _ebook_filter(const char *path, const char* file_name)
{
    char* filter_str = ".txt";
    /* Bug fix: rindex() is a legacy BSD function removed from
     * POSIX.1-2008; strrchr() is the standard equivalent with
     * identical behaviour. */
    const char* postfix = strrchr(file_name, '.');
    if (postfix)
    {
        if (strcasecmp(postfix, filter_str) == 0)
        {
            return TRUE;
        }
    }
    return FALSE;
}
/* Icon shown next to .txt entries in the file browser; loaded in
 * _ebook_show_files() and released there after the dialog closes. */
static BITMAP* _ebook_file_icon;

/* Icon callback for the file browser: every file gets the same txt icon. */
static const BITMAP* _ebook_get_file_icon (const char *path, const char* file_name, DWORD mode)
{
    return _ebook_file_icon;
}
/*
 * Open the media-file browser rooted at the player's top directory and let
 * the user pick a text file.  Known top-level directories get localized
 * aliases and a shared folder icon; files are filtered to .txt and shown
 * with the txt icon.  Returns the selection list (caller releases it with
 * PMP_RELEASE_MEDIA_FILE_LIST) or NULL when nothing was chosen.
 */
static PMP_MEDIA_FILE_LIST* _ebook_show_files(HWND hwnd)
{
    BITMAP* _ebook_folder_icon;
    PMP_MEDIA_FILE_LIST* my_list;
    /* Directory aliases: "*" is the catch-all entry, NULL terminates. */
    PMP_DIR_SHOW_INFO dir_show_info [] =
    {
        {"music", _(IDS_PMP_MUSIC_DIRNAME), NULL},
        {"video", _(IDS_PMP_VIDEO_DIR_ALIAS), NULL},
        {"ebook", _(IDS_PMP_EBOOK_DIRNAME), NULL},
        {"picture", _(IDS_PMP_PIC_DIR_ALIAS), NULL},
        {"recorder", _(IDS_PMP_RECORDER_DIRNAME), NULL},
        {"..", _(IDS_PMP_DOTDOT), NULL},
        {"*", "*", NULL},
        {NULL, NULL, NULL},
    };
    _ebook_folder_icon = pmp_image_res_acquire_bitmap("/res/ebook/folder.png");
    _ebook_file_icon = pmp_image_res_acquire_bitmap("/res/ebook/txt.png");
    /* All browsable entries share the folder icon. */
    dir_show_info[0].icon = _ebook_folder_icon;
    dir_show_info[1].icon = _ebook_folder_icon;
    dir_show_info[2].icon = _ebook_folder_icon;
    dir_show_info[3].icon = _ebook_folder_icon;
    dir_show_info[4].icon = _ebook_folder_icon;
    dir_show_info[5].icon = _ebook_folder_icon;
    dir_show_info[6].icon = _ebook_folder_icon;
    my_list = pmp_select_media_files (hwnd, PMP_MEDIA_TOP_DIR, PMP_MEDIA_NAME_EBOOK, _ebook_filter, _ebook_get_file_icon, dir_show_info, PMP_BROWSER_MODE_LIST);
    pmp_image_res_release_bitmap("/res/ebook/folder.png", _ebook_folder_icon);
    pmp_image_res_release_bitmap("/res/ebook/txt.png", _ebook_file_icon);
    return my_list;
}
|
Mithras11/SoftUni-Software-Engineering
|
JavaScript/JS_Advanced/Prototypes/5.ClassHierarchy.js
|
function solve() {
    // Base figure: stores the selected measurement unit and converts the
    // centimetre values held by subclasses into that unit on access.
    class Figure {
        constructor(units = 'cm') {
            this.units = units;
        }

        changeUnits(newUnits) {
            this.units = newUnits;
        }

        toString() {
            return `Figures units: ${this.units}`;
        }

        // Convert a stored cm value into the current unit.
        _convertInput(value) {
            switch (this.units) {
                case 'm':
                    return value / 100;
                case 'mm':
                    return value * 10;
                default:
                    return value;
            }
        }
    }

    // Circle defined by its radius (stored in cm).
    class Circle extends Figure {
        constructor(radius, units) {
            super(units);
            this._radius = radius;
        }

        get radius() {
            return this._convertInput(this._radius);
        }

        get area() {
            return Math.PI * this.radius ** 2;
        }

        toString() {
            return super.toString() + ` Area: ${this.area} - radius: ${this.radius}`
        }
    }

    // Rectangle defined by width and height (stored in cm).
    class Rectangle extends Figure {
        constructor(width, height, units) {
            super(units);
            this._width = width;
            this._height = height;
        }

        get width() {
            return this._convertInput(this._width);
        }

        get height() {
            return this._convertInput(this._height);
        }

        get area() {
            return this.width * this.height;
        }

        toString() {
            return super.toString() + ` Area: ${this.area} - width: ${this.width}, height: ${this.height}`;
        }
    }

    return {
        Figure,
        Circle,
        Rectangle
    }
}
|
chgzm/design-pattern
|
Bridge/Novel.java
|
<filename>Bridge/Novel.java
/**
 * Concrete implementor of the Bridge pattern representing a novel.
 */
public class Novel implements BookImpl {
    @Override
    public void showContent() {
        final String content = "I'm Novel.";
        System.out.println(content);
    }
}
|
wcnnkh/framework
|
context/src/main/java/io/basc/framework/context/support/DefaultContext.java
|
package io.basc.framework.context.support;
import io.basc.framework.factory.NoArgsInstanceFactory;
/**
 * Default {@code Context} implementation that delegates instance creation
 * to a caller-supplied {@link NoArgsInstanceFactory}.
 */
public class DefaultContext extends AbstractConfigurableContext {
	// Factory used by the superclass to construct no-argument instances.
	private final NoArgsInstanceFactory instanceFactory;

	/**
	 * @param cache           whether the superclass should cache (forwarded to super)
	 * @param instanceFactory factory backing getTargetInstanceFactory()
	 */
	public DefaultContext(boolean cache, NoArgsInstanceFactory instanceFactory) {
		super(cache);
		this.instanceFactory = instanceFactory;
	}

	@Override
	protected NoArgsInstanceFactory getTargetInstanceFactory() {
		return instanceFactory;
	}
}
|
uk-gov-mirror/ONSdigital.census-worth-self-help
|
site/cypress/integration/ui_tests/e2e_tests/06-videos.spec.js
|
<filename>site/cypress/integration/ui_tests/e2e_tests/06-videos.spec.js
/// <reference types="Cypress" />
const globalTestData = require('../../../fixtures/globalTestData');
// pages
const homepage = require('../../../fixtures/pages/homepagePage');
// End-to-end checks that article pages embed (or omit) their video.
describe("Videos in articles", function() {
  beforeEach(function () {
    // Allow flaky UI steps to retry, then start each test from the homepage.
    Cypress.env('RETRIES', 2);
    cy.visit('');
    cy.get(homepage.homepageLogo).should('be.visible');
  });

  it('The field officer can see a video on an article [ONS-169]', function () {
    const articlePath = globalTestData.videoArticlePath;
    cy.visit(articlePath);
    cy.url().should('include', articlePath);
    cy.get('source').should('have.attr', 'type', 'video/mp4');
  });

  it('The field officer should not see a video on an article if one has not been uploaded [ONS-169]', function () {
    cy.visit(globalTestData.deepArticlePath);
    cy.get('source').should('not.be.visible');
  })
});
|
poyang31/hw_2021_12
|
main.py
|
import uvicorn
from src.analysis import Analysis
from src.crawler import Crawler
from src.kernel import Config
from src.web_api import app
if __name__ == "__main__":
    config = Config()

    # Background worker classes to run alongside the web API.
    background_tasks = [
        Analysis,
        Crawler
    ]
    # Instance subprocesses.
    # Bug fix: the original used map(), which returns a one-shot iterator in
    # Python 3 — the start() loop exhausted it, so the terminate() loop below
    # iterated over nothing and the workers were never stopped. A list can be
    # iterated twice.
    subprocesses = [task(config) for task in background_tasks]
    # Start subprocesses
    for proc in subprocesses:
        proc.start()
    # Block main process until the API server exits
    uvicorn.run(app, host="0.0.0.0", port=7351)
    # Stop subprocesses
    for proc in subprocesses:
        proc.terminate()
|
MultivacX/letcode2020
|
algorithms/easy/0961. N-Repeated Element in Size 2N Array.h
|
<filename>algorithms/easy/0961. N-Repeated Element in Size 2N Array.h
// 961. N-Repeated Element in Size 2N Array
// https://leetcode.com/problems/n-repeated-element-in-size-2n-array/
// Runtime: 40 ms, faster than 96.01% of C++ online submissions for N-Repeated Element in Size 2N Array.
// Memory Usage: 24.6 MB, less than 98.05% of C++ online submissions for N-Repeated Element in Size 2N Array.
// Finds the element that appears N times in an array of length 2N
// (all other elements are distinct), i.e. the first value seen twice.
class Solution {
public:
    int repeatedNTimes(vector<int>& A) {
        unordered_set<int> seen;
        for (size_t i = 0; i < A.size(); ++i) {
            if (seen.count(A[i]))
                return A[i];        // second occurrence -> the repeated value
            seen.insert(A[i]);
        }
        return 0;                   // unreachable for valid inputs
    }
};
|
RutgersUniversityVirtualWorlds/minecraftworlds
|
work/decompile-82634944/net/minecraft/server/PlayerSelector.java
|
package net.minecraft.server;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class PlayerSelector {
    // Selector syntax: "@p|a|r|e" optionally followed by "[key=value,...]".
    private static final Pattern a = Pattern.compile("^@([pare])(?:\\[([\\w=,!-]*)\\])?$");
    // Bare argument tokens (positional form, possibly negated with '!').
    private static final Pattern b = Pattern.compile("\\G([-!]?[\\w-]*)(?:$|,)");
    // key=value argument tokens.
    private static final Pattern c = Pattern.compile("\\G(\\w+)=([-!]?[\\w-]*)(?:$|,)");
    // Argument keys that may also be given positionally (coordinates/radii).
    private static final Set<String> d = Sets.newHashSet(new String[] { "x", "y", "z", "dx", "dy", "dz", "rm", "r"});
    /**
     * Resolve a selector string to a single player, or null when it does
     * not match exactly one player (see getEntity).
     */
    public static EntityPlayer getPlayer(ICommandListener icommandlistener, String s) {
        return (EntityPlayer) getEntity(icommandlistener, s, EntityPlayer.class);
    }
    /**
     * Resolve a selector string to a single entity of the given class;
     * returns null unless the selector matches exactly one entity.
     * (Raw-typed cast is a decompiler artifact — relies on unchecked
     * conversion to T.)
     */
    public static <T extends Entity> T getEntity(ICommandListener icommandlistener, String s, Class<? extends T> oclass) {
        List list = getPlayers(icommandlistener, s, oclass);
        return list.size() == 1 ? (Entity) list.get(0) : null;
    }
    /**
     * Resolve a selector to the display names of all matched entities,
     * joined into one chat component; null when nothing matches.
     */
    public static IChatBaseComponent getPlayerNames(ICommandListener icommandlistener, String s) {
        List list = getPlayers(icommandlistener, s, Entity.class);
        if (list.isEmpty()) {
            return null;
        } else {
            ArrayList arraylist = Lists.newArrayList();
            Iterator iterator = list.iterator();
            while (iterator.hasNext()) {
                Entity entity = (Entity) iterator.next();
                arraylist.add(entity.getScoreboardDisplayName());
            }
            // CommandAbstract.a joins the components (comma-style listing).
            return CommandAbstract.a((List) arraylist);
        }
    }
    /**
     * Core selector evaluation: parse the "@X[args]" string, validate the
     * type argument, build the predicate chain (type, level, gamemode,
     * team, score, name, radius, rotation — the obfuscated a()/b()/...
     * helpers each contribute one group), apply it per world, and finally
     * sort/limit the result.  Returns an empty list when the string is not
     * a selector or the listener lacks permission level 1.
     */
    public static <T extends Entity> List<T> getPlayers(ICommandListener icommandlistener, String s, Class<? extends T> oclass) {
        Matcher matcher = PlayerSelector.a.matcher(s);
        if (matcher.matches() && icommandlistener.a(1, "@")) {
            // group(2) holds the bracketed argument string (may be null).
            Map map = c(matcher.group(2));
            if (!b(icommandlistener, map)) {
                // Invalid "type" argument — an error was already reported.
                return Collections.emptyList();
            } else {
                // group(1) is the selector letter: p, a, r or e.
                String s1 = matcher.group(1);
                BlockPosition blockposition = a(map, icommandlistener.getChunkCoordinates());
                Vec3D vec3d = b(map, icommandlistener.d());
                List list = a(icommandlistener, map);
                ArrayList arraylist = Lists.newArrayList();
                Iterator iterator = list.iterator();

                while (iterator.hasNext()) {
                    World world = (World) iterator.next();

                    if (world != null) {
                        // Collect every predicate group for this world.
                        ArrayList arraylist1 = Lists.newArrayList();

                        arraylist1.addAll(a(map, s1));
                        arraylist1.addAll(b(map));
                        arraylist1.addAll(c(map));
                        arraylist1.addAll(d(map));
                        arraylist1.addAll(c(icommandlistener, map));
                        arraylist1.addAll(e(map));
                        arraylist1.addAll(f(map));
                        arraylist1.addAll(a(map, vec3d));
                        arraylist1.addAll(g(map));
                        arraylist.addAll(a(map, oclass, (List) arraylist1, s1, world, blockposition));
                    }
                }

                // Sort by distance / randomize and apply the count limit.
                return a((List) arraylist, map, icommandlistener, oclass, s1, vec3d);
            }
        } else {
            return Collections.emptyList();
        }
    }
    /**
     * Choose the worlds to search: only the sender's world when the
     * arguments restrict the search spatially (h(map)), otherwise every
     * world on the server.
     */
    private static List<World> a(ICommandListener icommandlistener, Map<String, String> map) {
        ArrayList arraylist = Lists.newArrayList();

        if (h(map)) {
            arraylist.add(icommandlistener.getWorld());
        } else {
            Collections.addAll(arraylist, icommandlistener.h().worldServer);
        }

        return arraylist;
    }
// Validates the "type" selector argument (with optional leading "!" negation).
// Sends a red error message to the sender and returns false when the entity
// type is unknown; returns true when absent or valid.
private static <T extends Entity> boolean b(ICommandListener icommandlistener, Map<String, String> map) {
    String s = b(map, "type");
    s = s != null && s.startsWith("!") ? s.substring(1) : s;
    if (s != null && !EntityTypes.b(s)) {
        ChatMessage chatmessage = new ChatMessage("commands.generic.entity.invalidType", new Object[] { s});
        chatmessage.getChatModifier().setColor(EnumChatFormat.RED);
        icommandlistener.sendMessage(chatmessage);
        return false;
    } else {
        return true;
    }
}
// Builds the entity-type filter predicates for the selector.
// Fixes two decompiler artifacts: the original reassigned the final local s1
// (compile error) and the type predicate tested the selector letter "s"
// instead of the stripped type name.
private static List<Predicate<Entity>> a(Map<String, String> map, String s) {
    ArrayList arraylist = Lists.newArrayList();
    String s1 = b(map, "type");
    final boolean flag = s1 != null && s1.startsWith("!");
    if (flag) {
        s1 = s1.substring(1);
    }
    // Effectively-final copy of the stripped type name for the anonymous class.
    final String s2 = s1;
    boolean flag1 = !s.equals("e");
    boolean flag2 = s.equals("r") && s2 != null;
    if ((s2 == null || !s.equals("e")) && !flag2) {
        if (flag1) {
            // Player-only selectors match EntityHuman instances.
            arraylist.add(new Predicate() {
                public boolean a(Entity entity) {
                    return entity instanceof EntityHuman;
                }
                public boolean apply(Object object) {
                    return this.a((Entity) object);
                }
            });
        }
    } else {
        arraylist.add(new Predicate() {
            public boolean a(Entity entity) {
                // Was "EntityTypes.a(entity, s)": s is the selector letter,
                // not the type name to match against.
                return EntityTypes.a(entity, s2) != flag;
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        });
    }
    return arraylist;
}
// Builds the experience-level filter from "lm" (minimum level) and "l"
// (maximum level); -1 means unbounded.  Only players can match.
private static List<Predicate<Entity>> b(Map<String, String> map) {
    ArrayList arraylist = Lists.newArrayList();
    final int i = a(map, "lm", -1); // minimum XP level, -1 = unbounded
    final int j = a(map, "l", -1);  // maximum XP level, -1 = unbounded
    if (i > -1 || j > -1) {
        arraylist.add(new Predicate() {
            public boolean a(Entity entity) {
                if (!(entity instanceof EntityPlayer)) {
                    return false;
                } else {
                    EntityPlayer entityplayer = (EntityPlayer) entity;
                    return (i <= -1 || entityplayer.expLevel >= i) && (j <= -1 || entityplayer.expLevel <= j);
                }
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        });
    }
    return arraylist;
}
// Builds the gamemode filter from "m" (numeric id or name, optional "!").
// Fixes decompiler artifacts: the blank final was assigned in both try and
// catch (illegal), and the predicate referenced an undeclared variable
// "worldsettings_enumgamemode1".
private static List<Predicate<Entity>> c(Map<String, String> map) {
    ArrayList arraylist = Lists.newArrayList();
    String s = b(map, "m");
    if (s == null) {
        return arraylist;
    } else {
        final boolean flag = s.startsWith("!");
        if (flag) {
            s = s.substring(1);
        }
        // Parse numerically first, falling back to the name-based lookup.
        WorldSettings.EnumGamemode gamemode;
        try {
            int i = Integer.parseInt(s);
            gamemode = WorldSettings.EnumGamemode.a(i, WorldSettings.EnumGamemode.NOT_SET);
        } catch (Throwable throwable) {
            gamemode = WorldSettings.EnumGamemode.a(s, WorldSettings.EnumGamemode.NOT_SET);
        }
        final WorldSettings.EnumGamemode worldsettings_enumgamemode1 = gamemode;
        arraylist.add(new Predicate() {
            public boolean a(Entity entity) {
                if (!(entity instanceof EntityPlayer)) {
                    return false;
                } else {
                    EntityPlayer entityplayer = (EntityPlayer) entity;
                    WorldSettings.EnumGamemode worldsettings_enumgamemode = entityplayer.playerInteractManager.getGameMode();
                    return flag ? worldsettings_enumgamemode != worldsettings_enumgamemode1 : worldsettings_enumgamemode == worldsettings_enumgamemode1;
                }
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        });
        return arraylist;
    }
}
// Builds the team filter from "team" (optional leading "!" negation).
// Fixes decompiler artifacts: the original reassigned the final local s
// (compile error) and the predicate referenced an undeclared variable s1.
private static List<Predicate<Entity>> d(Map<String, String> map) {
    ArrayList arraylist = Lists.newArrayList();
    String s = b(map, "team");
    final boolean flag = s != null && s.startsWith("!");
    if (flag) {
        s = s.substring(1);
    }
    // Effectively-final copy of the stripped team name for the anonymous class.
    final String s1 = s;
    if (s1 != null) {
        arraylist.add(new Predicate() {
            public boolean a(Entity entity) {
                if (!(entity instanceof EntityLiving)) {
                    return false;
                } else {
                    EntityLiving entityliving = (EntityLiving) entity;
                    ScoreboardTeamBase scoreboardteambase = entityliving.aO();
                    // Entities with no team compare against the empty string,
                    // so team="" matches teamless entities.
                    String s2 = scoreboardteambase == null ? "" : scoreboardteambase.getName();
                    return s2.equals(s1) != flag;
                }
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        });
    }
    return arraylist;
}
// Builds the scoreboard-score filter from "score_<objective>[_min]" arguments.
// Fixes a decompiler artifact: the predicate iterated the raw selector
// argument map instead of the parsed score map (map1), which also made it
// capture the non-final parameter "map" inside the anonymous class.
private static List<Predicate<Entity>> c(final ICommandListener icommandlistener, Map<String, String> map) {
    final Map map1 = a(map);
    return (List) (map1.isEmpty() ? Collections.emptyList() : Lists.newArrayList(new Predicate[] { new Predicate() {
        public boolean a(Entity entity) {
            if (entity == null) {
                return false;
            } else {
                Scoreboard scoreboard = icommandlistener.h().getWorldServer(0).getScoreboard();
                // Iterate the objective -> required-score map; was "map".
                Iterator iterator = map1.entrySet().iterator();
                Entry entry;
                boolean flag;
                int i;
                do {
                    if (!iterator.hasNext()) {
                        // Every score condition passed.
                        return true;
                    }
                    entry = (Entry) iterator.next();
                    String s = (String) entry.getKey();
                    flag = false;
                    // "_min" suffix marks a lower-bound check.
                    if (s.endsWith("_min") && s.length() > 4) {
                        flag = true;
                        s = s.substring(0, s.length() - 4);
                    }
                    ScoreboardObjective scoreboardobjective = scoreboard.getObjective(s);
                    if (scoreboardobjective == null) {
                        return false;
                    }
                    // Players are tracked by name, other entities by UUID.
                    String s1 = entity instanceof EntityPlayer ? entity.getName() : entity.getUniqueID().toString();
                    if (!scoreboard.b(s1, scoreboardobjective)) {
                        return false;
                    }
                    ScoreboardScore scoreboardscore = scoreboard.getPlayerScoreForObjective(s1, scoreboardobjective);
                    i = scoreboardscore.getScore();
                    if (i < ((Integer) entry.getValue()).intValue() && flag) {
                        return false;
                    }
                } while (i <= ((Integer) entry.getValue()).intValue() || flag);
                return false;
            }
        }
        public boolean apply(Object object) {
            return this.a((Entity) object);
        }
    }}));
}
// Builds the name filter from "name" (optional leading "!" negation).
// Fixes a decompiler artifact: the original reassigned the final local s,
// which does not compile.
private static List<Predicate<Entity>> e(Map<String, String> map) {
    ArrayList arraylist = Lists.newArrayList();
    String s = b(map, "name");
    final boolean flag = s != null && s.startsWith("!");
    if (flag) {
        s = s.substring(1);
    }
    // Effectively-final copy of the stripped name for the anonymous class.
    final String s1 = s;
    if (s1 != null) {
        arraylist.add(new Predicate() {
            public boolean a(Entity entity) {
                return entity != null && entity.getName().equals(s1) != flag;
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        });
    }
    return arraylist;
}
// Builds the scoreboard-tag filter from "tag" (optional leading "!").
// tag="" matches entities with no tags at all.
// Fixes a decompiler artifact: the original reassigned the final local s,
// which does not compile.
private static List<Predicate<Entity>> f(Map<String, String> map) {
    ArrayList arraylist = Lists.newArrayList();
    String s = b(map, "tag");
    final boolean flag = s != null && s.startsWith("!");
    if (flag) {
        s = s.substring(1);
    }
    // Effectively-final copy of the stripped tag for the anonymous class.
    final String s1 = s;
    if (s1 != null) {
        arraylist.add(new Predicate() {
            public boolean a(Entity entity) {
                return entity == null ? false : ("".equals(s1) ? entity.P().isEmpty() != flag : entity.P().contains(s1) != flag);
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        });
    }
    return arraylist;
}
// Builds the radius filter from "rm" (minimum) and "r" (maximum radius).
// Fixes a decompiler artifact: the predicate compared the squared distance
// against the un-squared (and non-final, hence uncapturable) d1/d2 instead
// of the precomputed squared bounds d3/d5.
private static List<Predicate<Entity>> a(Map<String, String> map, final Vec3D vec3d) {
    double d0 = (double) a(map, "rm", -1);
    double d1 = (double) a(map, "r", -1);
    final boolean flag = d0 < -0.5D;  // no minimum radius given
    final boolean flag1 = d1 < -0.5D; // no maximum radius given
    if (flag && flag1) {
        return Collections.emptyList();
    } else {
        double d2 = Math.max(d0, 1.0E-4D);
        final double d3 = d2 * d2; // squared minimum radius
        double d4 = Math.max(d1, 1.0E-4D);
        final double d5 = d4 * d4; // squared maximum radius
        return Lists.newArrayList(new Predicate[] { new Predicate() {
            public boolean a(Entity entity) {
                if (entity == null) {
                    return false;
                } else {
                    // Squared distance from the selector origin to the entity.
                    double d6 = vec3d.c(entity.locX, entity.locY, entity.locZ);
                    return (flag || d6 >= d3) && (flag1 || d6 <= d5);
                }
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        }});
    }
}
// Builds yaw ("rym"/"ry") and pitch ("rxm"/"rx") rotation filters.  When the
// minimum exceeds the maximum the accepted interval wraps around 360 degrees.
// Fixes decompiler artifacts: the blank finals i/j were assigned in both if
// blocks (illegal double assignment) and both predicates referenced an
// undeclared variable k.
private static List<Predicate<Entity>> g(Map<String, String> map) {
    ArrayList arraylist = Lists.newArrayList();
    if (map.containsKey("rym") || map.containsKey("ry")) {
        final int i = MathHelper.b(a(map, "rym", 0));  // minimum yaw
        final int j = MathHelper.b(a(map, "ry", 359)); // maximum yaw
        arraylist.add(new Predicate() {
            public boolean a(Entity entity) {
                if (entity == null) {
                    return false;
                } else {
                    int k = MathHelper.b(MathHelper.d(entity.yaw));
                    // Wrapped range test: min > max means the arc crosses 0/360.
                    return i > j ? k >= i || k <= j : k >= i && k <= j;
                }
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        });
    }
    if (map.containsKey("rxm") || map.containsKey("rx")) {
        final int i = MathHelper.b(a(map, "rxm", 0));  // minimum pitch
        final int j = MathHelper.b(a(map, "rx", 359)); // maximum pitch
        arraylist.add(new Predicate() {
            public boolean a(Entity entity) {
                if (entity == null) {
                    return false;
                } else {
                    int k = MathHelper.b(MathHelper.d(entity.pitch));
                    return i > j ? k >= i || k <= j : k >= i && k <= j;
                }
            }
            public boolean apply(Object object) {
                return this.a((Entity) object);
            }
        });
    }
    return arraylist;
}
// Collects candidate entities from one world and applies the combined filter
// predicates.  Chooses between scanning the (usually small) player list and a
// bounding-box entity query depending on the selector letter, the dx/dy/dz
// volume arguments and a players-vs-entities population heuristic.
private static <T extends Entity> List<T> a(Map<String, String> map, Class<? extends T> oclass, List<Predicate<Entity>> list, String s, World world, BlockPosition blockposition) {
    ArrayList arraylist = Lists.newArrayList();
    String s1 = b(map, "type");
    s1 = s1 != null && s1.startsWith("!") ? s1.substring(1) : s1;
    boolean flag = !s.equals("e");               // player-only selector
    boolean flag1 = s.equals("r") && s1 != null; // @r with an explicit type
    int i = a(map, "dx", 0);
    int j = a(map, "dy", 0);
    int k = a(map, "dz", 0);
    int l = a(map, "r", -1);
    Predicate predicate = Predicates.and(list);
    Predicate predicate1 = Predicates.and(IEntitySelector.a, predicate);
    int i1 = world.players.size();
    int j1 = world.entityList.size();
    // Heuristic: with few players it is cheaper to scan the player list than
    // to run a spatial query.  The radius itself is enforced by the distance
    // predicates built in a(Map, Vec3D), so skipping the box here is safe.
    boolean flag2 = i1 < j1 / 16;
    final AxisAlignedBB axisalignedbb;
    if (!map.containsKey("dx") && !map.containsKey("dy") && !map.containsKey("dz")) {
        if (l >= 0) {
            // Cubic search box of radius l around the selector origin.
            axisalignedbb = new AxisAlignedBB((double) (blockposition.getX() - l), (double) (blockposition.getY() - l), (double) (blockposition.getZ() - l), (double) (blockposition.getX() + l + 1), (double) (blockposition.getY() + l + 1), (double) (blockposition.getZ() + l + 1));
            if (flag && flag2 && !flag1) {
                arraylist.addAll(world.b(oclass, predicate1));
            } else {
                arraylist.addAll(world.a(oclass, axisalignedbb, predicate1));
            }
        } else if (s.equals("a")) {
            // @a also matches entities IEntitySelector.a would exclude.
            arraylist.addAll(world.b(oclass, predicate));
        } else if (!s.equals("p") && (!s.equals("r") || flag1)) {
            // @e (or typed @r): scan the full entity list.
            arraylist.addAll(world.a(oclass, predicate1));
        } else {
            arraylist.addAll(world.b(oclass, predicate1));
        }
    } else {
        // Explicit dx/dy/dz volume relative to the origin position.
        axisalignedbb = a(blockposition, i, j, k);
        if (flag && flag2 && !flag1) {
            Predicate predicate2 = new Predicate() {
                public boolean a(Entity entity) {
                    return entity != null && axisalignedbb.b(entity.getBoundingBox());
                }
                public boolean apply(Object object) {
                    return this.a((Entity) object);
                }
            };
            arraylist.addAll(world.b(oclass, Predicates.and(predicate1, predicate2)));
        } else {
            arraylist.addAll(world.a(oclass, axisalignedbb, predicate1));
        }
    }
    return arraylist;
}
// Applies ordering and the "c" (count) limit to the matched entities:
// @p/@a sort nearest-first relative to the selector origin, @r shuffles,
// and a negative count selects from the far end of the ordering.
private static <T extends Entity> List<T> a(List<T> list, Map<String, String> map, ICommandListener icommandlistener, Class<? extends T> oclass, String s, final Vec3D vec3d) {
    // Default count: 1 for single-target selectors, 0 (= unlimited) for @a/@e.
    int i = a(map, "c", !s.equals("a") && !s.equals("e") ? 1 : 0);
    if (!s.equals("p") && !s.equals("a") && !s.equals("e")) {
        if (s.equals("r")) {
            Collections.shuffle((List) list);
        }
    } else {
        // Nearest-first ordering by squared distance to the origin.
        Collections.sort((List) list, new Comparator() {
            public int a(Entity entity, Entity entity1) {
                return ComparisonChain.start().compare(entity.e(vec3d.x, vec3d.y, vec3d.z), entity1.e(vec3d.x, vec3d.y, vec3d.z)).result();
            }
            public int compare(Object object, Object object1) {
                return this.a((Entity) object, (Entity) object1);
            }
        });
    }
    // A non-random single-target selector that matches the sender collapses
    // to the sender itself.
    Entity entity = icommandlistener.f();
    if (entity != null && oclass.isAssignableFrom(entity.getClass()) && i == 1 && ((List) list).contains(entity) && !"r".equals(s)) {
        list = Lists.newArrayList(new Entity[] { entity});
    }
    if (i != 0) {
        if (i < 0) {
            // Negative count: take from the end of the ordering.
            Collections.reverse((List) list);
        }
        list = ((List) list).subList(0, Math.min(Math.abs(i), ((List) list).size()));
    }
    return (List) list;
}
private static AxisAlignedBB a(BlockPosition blockposition, int i, int j, int k) {
    // Build the box spanning from blockposition along the (possibly negative)
    // dx/dy/dz offsets; the max corner is exclusive, hence +1 on each axis.
    int minX = blockposition.getX() + Math.min(i, 0);
    int minY = blockposition.getY() + Math.min(j, 0);
    int minZ = blockposition.getZ() + Math.min(k, 0);
    int maxX = blockposition.getX() + Math.max(i, 0) + 1;
    int maxY = blockposition.getY() + Math.max(j, 0) + 1;
    int maxZ = blockposition.getZ() + Math.max(k, 0) + 1;
    return new AxisAlignedBB((double) minX, (double) minY, (double) minZ, (double) maxX, (double) maxY, (double) maxZ);
}
// Resolves the selector origin block position, overriding the sender's
// coordinates with explicit "x"/"y"/"z" arguments when present.
private static BlockPosition a(Map<String, String> map, BlockPosition blockposition) {
    return new BlockPosition(a(map, "x", blockposition.getX()), a(map, "y", blockposition.getY()), a(map, "z", blockposition.getZ()));
}
// Resolves the exact selector origin vector; explicit x/z overrides are
// centered on the block (+0.5) while y is used as-is.
private static Vec3D b(Map<String, String> map, Vec3D vec3d) {
    return new Vec3D(a(map, "x", vec3d.x, true), a(map, "y", vec3d.y, false), a(map, "z", vec3d.z, true));
}
// Reads a coordinate argument as a double, defaulting to d0 when absent.
// When flag is set, integer input is centered on the block by adding 0.5.
private static double a(Map<String, String> map, String s, double d0, boolean flag) {
    return map.containsKey(s) ? (double) MathHelper.a((String) map.get(s), MathHelper.floor(d0)) + (flag ? 0.5D : 0.0D) : d0;
}
private static boolean h(Map<String, String> map) {
    // True when the selector arguments contain any of the position-binding
    // keys listed in PlayerSelector.d (which restrict the search to one world).
    for (Object object : PlayerSelector.d) {
        if (map.containsKey(object)) {
            return true;
        }
    }
    return false;
}
// Reads an integer selector argument, defaulting to i when absent or unparsable.
private static int a(Map<String, String> map, String s, int i) {
    return map.containsKey(s) ? MathHelper.a((String) map.get(s), i) : i;
}
// Reads a raw string selector argument (null when absent).
private static String b(Map<String, String> map, String s) {
    return (String) map.get(s);
}
public static Map<String, Integer> a(Map<String, String> map) {
    // Extracts "score_<objective>" arguments into an objective -> required
    // score map; unparsable values default to 1.
    HashMap hashmap = Maps.newHashMap();
    String prefix = "score_";
    for (Object object : map.entrySet()) {
        Entry entry = (Entry) object;
        String key = (String) entry.getKey();
        if (key.startsWith(prefix) && key.length() > prefix.length()) {
            hashmap.put(key.substring(prefix.length()), Integer.valueOf(MathHelper.a((String) entry.getValue(), 1)));
        }
    }
    return hashmap;
}
// Returns true when the selector can match more than one target:
// @a/@e default to unlimited (c = 0), and any explicit c != 1 counts.
public static boolean isList(String s) {
    Matcher matcher = PlayerSelector.a.matcher(s);
    if (!matcher.matches()) {
        return false;
    } else {
        Map map = c(matcher.group(2));
        String s1 = matcher.group(1);
        int i = !"a".equals(s1) && !"e".equals(s1) ? 1 : 0;
        return a(map, "c", i) != 1;
    }
}
// Returns true when the string is syntactically a target selector ("@x[...]").
public static boolean isPattern(String s) {
    return PlayerSelector.a.matcher(s).matches();
}
// Parses the bracketed argument string of a selector into a key -> value map.
// Supports the legacy positional shorthand "[x,y,z,r]" (first regex pass,
// PlayerSelector.b) followed by explicit "key=value" pairs (PlayerSelector.c).
private static Map<String, String> c(String s) {
    HashMap hashmap = Maps.newHashMap();
    if (s == null) {
        return hashmap;
    } else {
        int i = 0;
        int j = -1; // end offset of the last positional match, -1 = none
        for (Matcher matcher = PlayerSelector.b.matcher(s); matcher.find(); j = matcher.end()) {
            String s1 = null;
            // Positional arguments map, in order, to x, y, z, r.
            switch (i++) {
            case 0:
                s1 = "x";
                break;
            case 1:
                s1 = "y";
                break;
            case 2:
                s1 = "z";
                break;
            case 3:
                s1 = "r";
            }
            if (s1 != null && !matcher.group(1).isEmpty()) {
                hashmap.put(s1, matcher.group(1));
            }
        }
        if (j < s.length()) {
            // Remaining text holds explicit key=value pairs.
            Matcher matcher1 = PlayerSelector.c.matcher(j == -1 ? s : s.substring(j));
            while (matcher1.find()) {
                hashmap.put(matcher1.group(1), matcher1.group(2));
            }
        }
        return hashmap;
    }
}
}
|
Galeria-Kaufhof/ets-elasticsearch-rest-connector
|
ets-elasticsearch-rest-connector-core/src/main/scala/de/kaufhof/ets/elasticsearchrestconnector/core/client/model/mapping/MappingProperty.scala
|
package de.kaufhof.ets.elasticsearchrestconnector.core.client.model.mapping
import play.api.libs.json._
import scala.util.Try
// Marker trait for Elasticsearch mapping property descriptors.
trait MappingProperty

object MappingProperty {

  /**
   * Converts a [[MappingProperty]] into its (field-name, JSON) representation
   * by dispatching to the matching companion serializer.
   */
  def apply(mappingProperty: MappingProperty): (String, JsValue) = {
    mappingProperty match {
      // NOTE(review): the percolator case wraps twice — presumably the inner
      // call builds a TypeMappingProperty from the enum and the outer call
      // serializes it; confirm TypeMappingProperty.apply supports both.
      case PercolatorMappingProperty => TypeMappingProperty(TypeMappingProperty(EnumMappingTypes.Percolator))
      case s: StringMappingProperty => StringMappingProperty(s)
      case l: LongMappingProperty => LongMappingProperty(l)
      case b: BooleanMappingProperty => BooleanMappingProperty(b)
      case o: ObjectMappingProperty => ObjectMappingProperty(o)
      case t: TypeMappingProperty => TypeMappingProperty(t)
      case k: KeywordMappingProperty => KeywordMappingProperty(k)
      case q: QueryMappingProperty => QueryMappingProperty(q)
      case j: JoinMappingProperty => JoinMappingProperty(j)
    }
  }
}
|
TheAbbay/vima
|
server/src/services/preferences/preferences.class.js
|
const { Service } = require('feathers-mongoose')
// Mongoose-backed Feathers service for user preferences; all CRUD behavior
// comes from the feathers-mongoose base Service (no custom overrides yet).
exports.Preferences = class Preferences extends Service {
}
|
StudentUniverse/su-datepicker-angular
|
demo/defaultDatepicker/defaultDatepickerExampleCtrl.js
|
<reponame>StudentUniverse/su-datepicker-angular
// Demo controller: tracks a selected date and flags the matching calendar
// cell with the 'active-date' CSS class.
function defaultDatepickerExampleCtrl($scope) {
  $scope.date = new Date();
  $scope.calendarDate = new Date();

  // True when both dates fall on the same calendar day.
  var isSameDay = function (a, b) {
    return a.getFullYear() === b.getFullYear() &&
      a.getMonth() === b.getMonth() &&
      a.getDate() === b.getDate();
  };

  // Returns 'active-date' for the currently selected day, undefined otherwise.
  $scope.customClass = function (candidate) {
    if (angular.isDate(candidate) && angular.isDate($scope.date) && isSameDay($scope.date, candidate)) {
      return 'active-date';
    }
  };

  $scope.selectDate = function (newDate) {
    $scope.date = newDate;
  };
}
|
Sambitcr-7/DSA-C-
|
16.4.knapsack.c++
|
<filename>16.4.knapsack.c++
#include <iostream>
using namespace std;
// 0/1 knapsack, plain recursion: best total value achievable using the first
// n items (value[], wt[]) within remaining capacity w.
int knapasck(int value[], int wt[], int n, int w){
    // No items left or no capacity left: nothing more can be packed.
    if (n == 0 || w == 0) {
        return 0;
    }
    // Option 1: leave item n-1 behind.
    int without_item = knapasck(value, wt, n - 1, w);
    // If the item cannot fit, skipping it is the only choice.
    if (wt[n - 1] > w) {
        return without_item;
    }
    // Option 2: take item n-1 and solve for the reduced capacity.
    int with_item = value[n - 1] + knapasck(value, wt, n - 1, w - wt[n - 1]);
    return std::max(without_item, with_item);
}
int main()
{
    int wt[] = {10, 20, 30};      // item weights
    int value[] = {100, 50, 150}; // item values
    int w = 50;                   // knapsack capacity
    // BUG FIX: the original "cout << knapasck << (wt, value, 3, w)" printed the
    // function pointer (as bool, "1") followed by the comma expression's last
    // operand (50), and also swapped the value/weight arrays relative to
    // knapasck's (value, wt, n, w) signature.  Correct call prints 250.
    std::cout << knapasck(value, wt, 3, w) << std::endl;
    return 0;
}
|
code-dot-org/code-dot-org
|
dashboard/test/models/pd/application/application_base_test.rb
|
require 'test_helper'
module Pd::Application
class ApplicationBaseTest < ActiveSupport::TestCase
include ApplicationConstants
include Pd::Application::ActiveApplicationModels
include Pd::SharedApplicationConstants
freeze_time
test 'required fields' do
application = ApplicationBase.new
refute application.valid?
assert_equal(
[
'Form data is required',
'User is required',
'Application type is not included in the list',
'Application year is not included in the list',
'Type is required'
],
application.errors.full_messages
)
end
test 'derived classes override type and year' do
application = TEACHER_APPLICATION_CLASS.new
assert_equal TEACHER_APPLICATION, application.application_type
assert_equal APPLICATION_CURRENT_YEAR, application.application_year
end
test 'default status is unreviewed' do
application = ApplicationBase.new
assert_equal 'unreviewed', application.status
assert application.unreviewed?
end
test 'can update status' do
application = create FACILITATOR_APPLICATION_FACTORY
assert application.unreviewed?
application.update(status: 'pending')
assert application.pending?
application.reload
assert application.pending?
end
test 'regional partner name' do
partner = build :regional_partner
application = build FACILITATOR_APPLICATION_FACTORY, regional_partner: partner
assert_equal partner.name, application.regional_partner_name
end
test 'school name' do
school_info = build :school_info
teacher = build :teacher, school_info: school_info
application = build FACILITATOR_APPLICATION_FACTORY, user: teacher
assert_equal school_info.effective_school_name.titleize, application.school_name
end
test 'district name' do
school_info = create :school_info
teacher = build :teacher, school_info: school_info
application = build FACILITATOR_APPLICATION_FACTORY, user: teacher
assert_equal school_info.effective_school_district_name.titleize, application.district_name
end
test 'total score' do
application = ApplicationBase.new
# initially nil
assert_nil application.total_score
# non-numeric only, still nil
# Also handles nil values
application.response_scores = {
q1: 'Yes',
q2: nil,
}.to_json
assert_nil application.total_score
# Numeric with 0
application.response_scores = {
q1: 'Yes',
q2: nil,
q3: '0',
q4: 0
}.to_json
assert_equal 0, application.total_score
# Numeric non-zero
application.response_scores = {
q1: 'Yes',
q2: nil,
q3: '1',
q4: 2,
q5: '0',
}.to_json
assert_equal 3, application.total_score
end
test 'answer_with_additional_text for a string answer' do
answer_hash = {
string_question: 'Other:',
string_question_other: 'my explanation'
}
full_answer = ApplicationBase.answer_with_additional_text answer_hash, :string_question
assert_equal 'Other: my explanation', full_answer
end
test 'answer_with_additional_text for a string answer and custom other text' do
answer_hash = {
string_question: 'A custom answer:',
string_question_explanation: 'my custom explanation'
}
full_answer = ApplicationBase.answer_with_additional_text answer_hash,
:string_question, 'A custom answer:', :string_question_explanation
assert_equal 'A custom answer: my custom explanation', full_answer
end
test 'answer_with_additional_text for an array answer' do
answer_hash = {
array_question: [
'An answer',
'Other:'
],
array_question_other: 'my explanation'
}
full_answer = ApplicationBase.answer_with_additional_text answer_hash, :array_question
assert_equal(
[
'An answer',
'Other: my explanation',
],
full_answer
)
end
test 'answer_with_additional_text for an array answer and custom other text' do
answer_hash = {
array_question: [
'A supplied answer',
'A custom answer:'
],
array_question_other: 'my custom explanation'
}
full_answer = ApplicationBase.answer_with_additional_text answer_hash,
:array_question, 'A custom answer:', :array_question_other
assert_equal(
[
'A supplied answer',
'A custom answer: my custom explanation',
],
full_answer
)
end
test 'full answers' do
application = ApplicationBase.new
application.stubs(additional_text_fields:
[
[:string_question_with_extra],
[:array_question_with_extra]
]
)
form_data = {
regular_string_question: 'regular string answer',
regular_array_question: ['regular array answer'],
string_question_with_extra: 'Other:',
string_question_with_extra_other: 'my other string answer',
array_question_with_extra: ['Other:'],
array_question_with_extra_other: 'my other array answer',
filtered_question: 'to be removed'
}
application.stubs(sanitize_form_data_hash: form_data)
ApplicationBase.stubs(filtered_labels: form_data.except(:filtered_question))
expected_full_answers = {
regular_string_question: 'regular string answer',
regular_array_question: ['regular array answer'],
string_question_with_extra: 'Other: my other string answer',
array_question_with_extra: ['Other: my other array answer'],
}
assert_equal expected_full_answers, application.full_answers
end
test 'date_accepted formats the accepted date as iso8601' do
application = ApplicationBase.new
assert_nil application.date_accepted
# March 9, 2018 10:15am
application.accepted_at = DateTime.new(2018, 3, 9, 10, 15)
assert_equal '2018-03-09', application.date_accepted
end
test 'memoized full_answers' do
application = ApplicationBase.new
application.stubs(additional_text_fields:
[
[:string_question_with_extra]
]
)
form_data = {
regular_string_question: 'regular string answer',
string_question_with_extra: 'Other:',
string_question_with_extra_other: 'my other string answer',
}
application.stubs(sanitize_form_data_hash: form_data)
ApplicationBase.stubs(filtered_labels: form_data)
expected_full_answers = {
regular_string_question: 'regular string answer',
string_question_with_extra: 'Other: my other string answer',
}
assert_nil application.instance_variable_get(:@full_answers)
assert_equal expected_full_answers, application.full_answers
assert_equal expected_full_answers, application.instance_variable_get(:@full_answers)
application.form_data = nil
assert_nil application.instance_variable_get(:@full_answers)
end
test 'queue_email creates an associated unsent Email record' do
application = create TEACHER_APPLICATION_FACTORY
application.expects(:deliver_email).never
assert_creates Email do
application.queue_email :test_email
end
email = Email.last
assert_equal application, email.application
assert_equal 'test_email', email.email_type
assert_equal application.status, email.application_status
assert_nil email.sent_at
end
test 'queue_email with deliver_now sends email and creates an associated sent Email record' do
application = create TEACHER_APPLICATION_FACTORY
application.expects(:deliver_email)
assert_creates Email do
application.queue_email :test_email, deliver_now: true
end
email = Email.last
assert_equal application, email.application
assert_equal 'test_email', email.email_type
assert_equal application.status, email.application_status
assert_not_nil email.sent_at
end
test 'record status change with user' do
application = create TEACHER_APPLICATION_FACTORY
workshop_admin = create :workshop_admin
application.update(status: 'pending')
application.update_status_timestamp_change_log(workshop_admin)
expected_entry = {
title: 'pending',
changing_user_id: workshop_admin.id,
changing_user_name: workshop_admin.name,
time: Time.now
}
assert_equal(
[expected_entry],
(application.sanitize_status_timestamp_change_log)
)
application.update(status: 'approved')
application.update_status_timestamp_change_log(workshop_admin)
assert_equal(
[
expected_entry,
expected_entry.dup.update({title: 'approved'})
], application.sanitize_status_timestamp_change_log
)
end
test 'record status change without user' do
application = create TEACHER_APPLICATION_FACTORY
application.update(status: 'accepted')
application.update_status_timestamp_change_log(nil)
assert_equal(
[{
title: 'accepted',
changing_user_id: nil,
changing_user_name: nil,
time: Time.now
}],
application.sanitize_status_timestamp_change_log
)
end
test 'formatted_partner_contact_email' do
application = create :pd_facilitator1920_application
partner = create :regional_partner
# no partner
assert_nil application.formatted_partner_contact_email
# partner w no contact info
application.regional_partner = partner
assert_nil application.formatted_partner_contact_email
# name only? still nil
partner.contact_name = '<NAME>'
assert_nil application.formatted_partner_contact_email
# email only? still nil
partner.contact_name = nil
assert_nil application.formatted_partner_contact_email
# program manager but no contact_name or contact_email
program_manager = (create :regional_partner_program_manager, regional_partner: partner).program_manager
assert_equal "\"#{program_manager.name}\" <#{program_manager.email}>", application.formatted_partner_contact_email
# name and email
partner.contact_name = '<NAME>'
partner.contact_email = '<EMAIL>'
assert_equal "\"We Teach Code\" <<EMAIL>>", application.formatted_partner_contact_email
end
test 'formatted_applicant_email uses user account email' do
application = create :pd_teacher_application
assert application.user.email.present?
formatted_email = "\"#{application.applicant_full_name}\" <#{application.user.email}>"
assert_equal formatted_email, application.formatted_applicant_email
end
test 'formatted_applicant_email uses alternate email if no user account email' do
teacher_without_email = create :teacher, :with_school_info, :demigrated
teacher_without_email.update_attribute(:email, '')
teacher_without_email.update_attribute(:hashed_email, '')
application = create :pd_teacher_application, user: teacher_without_email
assert teacher_without_email.email.blank?
formatted_alternate_email = "\"#{application.applicant_full_name}\" <#{application.sanitize_form_data_hash[:alternate_email]}>"
assert_equal formatted_alternate_email, application.formatted_applicant_email
end
test 'formatted_applicant_email raises error if no user email or alternate email' do
teacher_without_email = create :teacher, :with_school_info, :demigrated
teacher_without_email.update_attribute(:email, '')
teacher_without_email.update_attribute(:hashed_email, '')
application_hash_without_email = build :pd_teacher_application_hash, alternate_email: ''
application_without_email = create :pd_teacher_application, user: teacher_without_email, form_data: application_hash_without_email.to_json
assert teacher_without_email.email.blank?
assert application_without_email.sanitize_form_data_hash[:alternate_email].blank?
assert_raises_matching("invalid email address for application #{application_without_email.id}") do
application_without_email.formatted_applicant_email
end
end
test 'deleting an application also deletes its unsent emails' do
# Create two applications, each with sent and unsent email
application_a = create TEACHER_APPLICATION_FACTORY
application_b = create TEACHER_APPLICATION_FACTORY
[application_a, application_b].each do |application|
application.stubs(:deliver_email)
application.queue_email :test_email
application.queue_email :test_email, deliver_now: true
assert_equal 2, application.emails.count
assert_equal 1, application.emails.unsent.count
end
# Destroy one of the applications
application_a.destroy
# Unsent email for that application was destroyed
assert_equal 0, application_a.emails.unsent.count
# Sent email for that application was not destroyed
assert_equal 1, application_a.emails.count
# Email for the other application was not destroyed
assert_equal 2, application_b.emails.count
ensure
application_a.emails.destroy_all
application_b.emails.destroy_all
end
end
end
|
yeji0407/democratization-expertise
|
de-community/src/main/java/com/de/enterprise/Enterprises.java
|
package com.de.enterprise;
import java.sql.Timestamp;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
@Getter
@Setter
@ToString
@Entity
@Table(name = "tb_enterprises")
public class Enterprises {

    @Id
    @Column
    @GeneratedValue(strategy= GenerationType.IDENTITY)
    /* Enterprise primary key (auto-increment) */
    private Integer enterpriseno;

    /* Business registration number */
    private Integer enterprisebizno;

    /* Enterprise login id */
    private String enterpriseid;

    /* Enterprise name */
    private String enterprisename;

    /* Enterprise password */
    private String enterprisepassword;

    /* Enterprise profile image */
    private String enterpriseimg;

    /* Enterprise introduction text */
    private String enterpriseabout;

    /* Enterprise website URL */
    private String enterpriseurl;

    /* Enterprise email address */
    private String enterpriseemail;

    /* Enterprise approval flag */
    private Integer enterpriseat;

    /* Enterprise region */
    private String enterprisearea;

    /* Registration timestamp (set on insert) */
    @CreationTimestamp
    private Timestamp registerdate;

    /* Last-update timestamp (set on update) */
    @UpdateTimestamp
    private Timestamp updatedate;

    /* users no (seq) */
    private Integer userno;

    /* User approval status: none, pending 0, approved 1, rejected 2 */
    private Integer userat;

    /* User active status: active 0, inactive 1 */
    private Integer activeat;

    /* Company representative account: none, regular 0, representative 1 */
    private Integer representat;
}
|
SysadminWorld/MExInt
|
mexint@karel.gudera/server/send_unsent_messages.js
|
// Sends unsent messages through Exchange Web Services (EWS).  Input arrives
// on stdin as three newline-separated base64 fields: auth data, server item
// IDs, and locally stored raw MIME messages (see the 'end' handler below).
var ews = require('ews-javascript-api');
ews.EwsLogging.DebugLogEnabled = false;

// Command-line arguments (node + script path stripped; currently unused).
var args = process.argv.slice(2);
var messages = []; // EmailMessage objects built from local MIME content
var IDs = [];      // ItemIds of unsent messages already stored on the server
var data = "";     // accumulated stdin payload

process.stdin.setEncoding('utf8');
// Accumulate stdin chunks until the stream ends.
process.stdin.on('readable', () => {
  var chunk = process.stdin.read();
  if (chunk !== null)
    data += chunk;
});
// Main entry: fires once stdin closes.  Parses the three base64 fields,
// connects to EWS, then sends both locally supplied messages and the
// server-stored unsent messages.  Any failure prints "ERROR" and exits 1.
process.stdin.on('end', () => {
  var [authData_base64, IDs_base64, messages_base64] = data.split('\n');
  // Auth data is itself five newline-separated fields.
  var [ URL,
        username,
        password,
        authType,
        TLS ] = Buffer.from(authData_base64, "base64").toString("utf-8").split('\n');
  var IDsArray = (IDs_base64 == "") ? [] : Buffer.from(IDs_base64, "base64").toString("utf-8").split('\n');
  var messagesArray = (messages_base64 == "") ? [] : Buffer.from(messages_base64, "base64").toString("utf-8").split('\n');
  // Accept self-signed certificates when TLS verification is disabled.
  if ( TLS == "false" )
    process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
  for ( var i = 0; i < IDsArray.length; i++ )
    IDs.push(new ews.ItemId(IDsArray[i]));
  try
  {
    var exch = new ews.ExchangeService(ews.ExchangeVersion.Exchange2007_SP1);
    exch.Url = new ews.Uri(URL);
    if ( authType == "Basic" )
    {
      exch.Credentials = new ews.ExchangeCredentials(username, password);
    }
    else if ( authType == "NTLM" )
    {
      // Placeholder credentials: the real NTLM auth is performed by the
      // custom XHR API installed below.
      exch.Credentials = new ews.ExchangeCredentials("xxx", "xxx");
      var ntlmXHR = require("./ntlmXHRApi");
      var ntlmXHRApi = new ntlmXHR.ntlmXHRApi(username, password);
      exch.XHRApi = ntlmXHRApi;
    }
    // messagesArray alternates [bcc-recipient-list, mime-content] pairs.
    for ( var i = 0; i < messagesArray.length; i += 2 )
    {
      var message = new ews.EmailMessage(exch);
      var mimeContent = new ews.MimeContent();
      var bccRecipients = (messagesArray[i] == "") ? [] : messagesArray[i].split(',');
      mimeContent.Content = messagesArray[i+1];
      message.MimeContent = mimeContent;
      for ( var j = 0; j < bccRecipients.length; j++ )
        message.BccRecipients.Add(bccRecipients[j]);
      messages.push(message);
    }
  }
  catch (ex)
  {
    process.stdout.write("ERROR");
    process.exit(1);
  }
  // Route any asynchronous error through a domain so the process reports a
  // single "ERROR" regardless of which send fails.
  var d = require('domain').create()
  d.on('error', function (error) {
    process.stdout.write("ERROR");
    process.exit(1);
  });
  // (An earlier, commented-out sequential bluebird implementation that sent
  // messages one at a time was removed here; the current version fires all
  // sends concurrently.)
  d.run(function () {
    // Send the locally built messages.
    for ( var i = 0; i < messages.length; i++ )
    {
      messages[i].SendAndSaveCopy().then(function () {
      }, function (error) {
        process.stdout.write("ERROR");
        process.exit(1);
      });
    }
    // Bind to and send the server-stored unsent messages.
    for ( var i = 0; i < IDs.length; i++ )
    {
      ews.EmailMessage.Bind(exch, IDs[i], new ews.PropertySet(ews.BasePropertySet.IdOnly,
        [ews.EmailMessageSchema.MimeContent])).then(function (response) {
        response.SendAndSaveCopy().then(function () {
        }, function (error) {
          process.stdout.write("ERROR");
          process.exit(1);
        });
      }, function (error) {
        process.stdout.write("ERROR");
        process.exit(1);
      });
    }
  });
});
|
bcloutier/PSNM
|
PythonPrograms/Programs/PythonCode/Heat_Eq_1D_Spectral_FE.py
|
#!/usr/bin/env python
r"""
Solving Heat Equation using pseudo-spectral and Forward Euler
    u_t = \alpha*u_xx
BC: u(0)=0, u(2*pi)=0
IC: sin(x)

Note: the docstring is raw so ``\alpha`` is not mangled by the ``\a``
escape sequence.
"""
import math
import numpy
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from matplotlib.ticker import LinearLocator

# Grid
N = 64                       # number of Fourier modes / spatial points
h = 2 * math.pi / N          # step size
x = h * numpy.arange(0, N)   # discretize x-direction
alpha = 0.5                  # thermal diffusivity constant
t = 0
dt = .001                    # time step

# Initial conditions
v = numpy.sin(x)
I = complex(0, 1)
# Wavenumbers in FFT ordering: 0..N/2-1, 0 (Nyquist zeroed), -N/2+1..-1.
# Use integer division (//) and explicit list(...) so this works on
# Python 3, where N/2 is a float and range objects cannot be concatenated.
k = numpy.array([I * y for y in list(range(0, N // 2)) + [0] + list(range(-N // 2 + 1, 0))])
k2 = k ** 2

# Setting up plot
tmax = 5
tplot = .1
plotgap = int(round(tplot / dt))   # timesteps between stored snapshots
nplots = int(round(tmax / tplot))  # number of stored snapshots
data = numpy.zeros((nplots + 1, N))
data[0, :] = v
tdata = [t]

for i in range(nplots):            # range instead of Python-2-only xrange
    v_hat = numpy.fft.fft(v)       # move to Fourier space
    for n in range(plotgap):
        v_hat = v_hat + dt * alpha * k2 * v_hat  # Forward Euler timestepping
    v = numpy.real(numpy.fft.ifft(v_hat))        # back to real space
    data[i + 1, :] = v
    # real time vector
    t = t + plotgap * dt
    tdata.append(t)

# Plot using mesh. plot_surface accepts plain ndarrays, so the deprecated
# numpy.mat wrapper is unnecessary.
xx, tt = numpy.meshgrid(x, tdata)
fig = plt.figure()
# fig.gca(projection='3d') was deprecated in Matplotlib 3.4 and removed in 3.6
ax = fig.add_subplot(projection='3d')
surf = ax.plot_surface(xx, tt, data, rstride=1, cstride=1, cmap=cm.jet,
                       linewidth=0, antialiased=False)
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.xlabel('x')
plt.ylabel('t')
plt.show()
|
zhaofeng092/python_auto_office
|
B站/Python自动化办公 · 一课通(适合小白)/Chapter1/S1-1-2/LessonCode/1.2templete.py
|
<filename>B站/Python自动化办公 · 一课通(适合小白)/Chapter1/S1-1-2/LessonCode/1.2templete.py<gh_stars>10-100
from xlutils.copy import copy
import xlrd
import xlwt

# Open the template workbook; formatting_info=True keeps existing styles.
template_book = xlrd.open_workbook('D:/日统计.xls', formatting_info=True)
template_sheet = template_book.sheet_by_index(0)

# Make a writable copy of the template and grab its first sheet.
output_book = copy(template_book)
output_sheet = output_book.get_sheet(0)

# Build the cell style: bold 18pt font, thin borders, centered text.
cell_style = xlwt.XFStyle()

cell_font = xlwt.Font()
cell_font.name = '微软雅黑'
cell_font.bold = True
cell_font.height = 360  # xlwt font height is in twentieths of a point (360 = 18pt)
cell_style.font = cell_font

cell_borders = xlwt.Borders()
cell_borders.top = xlwt.Borders.THIN
cell_borders.bottom = xlwt.Borders.THIN
cell_borders.left = xlwt.Borders.THIN
cell_borders.right = xlwt.Borders.THIN
cell_style.borders = cell_borders

cell_alignment = xlwt.Alignment()
cell_alignment.horz = xlwt.Alignment.HORZ_CENTER
cell_alignment.vert = xlwt.Alignment.VERT_CENTER
cell_style.alignment = cell_alignment

# Unstyled version, kept for comparison with the styled writes below:
# output_sheet.write(2, 1, 12)
# output_sheet.write(3, 1, 18)
# output_sheet.write(4, 1, 19)
# output_sheet.write(5, 1, 15)

# Write the counts into column B (rows 3-6), applying the style to each cell.
for row_index, value in ((2, 12), (3, 18), (4, 19), (5, 15)):
    output_sheet.write(row_index, 1, value, cell_style)

output_book.save('D:/填写.xls')
|
chaabni/unomi
|
api/src/main/java/org/oasis_open/contextserver/api/PropertyMergeStrategyType.java
|
<reponame>chaabni/unomi<gh_stars>1-10
package org.oasis_open.contextserver.api;
/*
* #%L
* context-server-api
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2014 - 2015 Jahia Solutions
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import javax.xml.bind.annotation.XmlTransient;
/**
 * Plain bean describing a property merge strategy contributed by a plugin.
 * Holds an identifier, a filter expression and the id of the contributing
 * plugin; the plugin id is excluded from XML serialization.
 */
public class PropertyMergeStrategyType implements PluginType {
    // Unique identifier of this merge strategy type.
    private String id;
    // Filter expression associated with this strategy
    // (exact semantics defined by the consumers — not visible here).
    private String filter;
    // Identifier of the plugin that registered this type.
    private long pluginId;
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public String getFilter() {
        return filter;
    }
    public void setFilter(String filter) {
        this.filter = filter;
    }
    // Marked transient so the plugin id never leaks into XML output.
    @XmlTransient
    public long getPluginId() {
        return pluginId;
    }
    public void setPluginId(long pluginId) {
        this.pluginId = pluginId;
    }
}
|
mkubliniak/XChange
|
xchange-gateio/src/main/java/org/knowm/xchange/gateio/dto/trade/GateioPlaceOrderReturn.java
|
package org.knowm.xchange.gateio.dto.trade;
import org.knowm.xchange.gateio.dto.GateioBaseResponse;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Response returned by Gateio after placing an order. Extends the base
 * response (result flag + message) with the exchange-assigned order number.
 * Instances are created by Jackson via the annotated constructor.
 */
public class GateioPlaceOrderReturn extends GateioBaseResponse {
    // Exchange-assigned identifier of the newly placed order.
    private final String orderNumber;
    /**
     * Constructor
     *
     * @param result      whether the request succeeded
     * @param orderNumber exchange-assigned order identifier
     * @param message     status/error message from the exchange
     */
    private GateioPlaceOrderReturn(@JsonProperty("result") boolean result, @JsonProperty("orderNumber") String orderNumber,
            @JsonProperty("msg") String message) {
        super(result, message);
        this.orderNumber = orderNumber;
    }
    /** @return the exchange-assigned order identifier */
    public String getOrderId() {
        return orderNumber;
    }
    @Override
    public String toString() {
        return "GateioPlaceOrderReturn [orderNumber=" + orderNumber + "]";
    }
}
|
gianiaco/NAPPA
|
android_prefetching_lib/src/main/java/nl/vu/cs/s2group/prefetch/PrefetchStrategyImpl4.java
|
package nl.vu.cs.s2group.prefetch;
import androidx.annotation.NonNull;
import android.util.Log;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import nl.vu.cs.s2group.PrefetchingLib;
import nl.vu.cs.s2group.graph.ActivityNode;
import nl.vu.cs.s2group.prefetchurl.ParameteredUrl;
import nl.vu.cs.s2group.room.dao.SessionDao;
/**
 * Prefetching strategy that ranks the probable successor activities of the
 * current activity (0-order Markov model over recent sessions) and prefetches
 * the candidate URLs of the top fraction of them.
 */
public class PrefetchStrategyImpl4 implements PrefetchStrategy {
    // Fraction of the probable successor nodes whose URLs get prefetched.
    private float threshold;
    // Maps activity ids back to activity names (inverse of PrefetchingLib.activityMap).
    private HashMap<Long, String> reversedHashMap = new HashMap<>();
    private static ScheduledThreadPoolExecutor poolExecutor = new ScheduledThreadPoolExecutor(1);
    public PrefetchStrategyImpl4(float threshold) {
        this.threshold = threshold;
    }
    // Number of most recent sessions taken into account when aggregating hits.
    public static int lastN = 2;
    @NonNull
    @Override
    public List<String> getTopNUrlToPrefetchForNode(ActivityNode node, Integer maxNumber) {
        Map<String, Long> activityMap = PrefetchingLib.activityMap;
        for (String key : activityMap.keySet()) {
            reversedHashMap.put(activityMap.get(key), key);
        }
        List<ActivityNode> probableNodes = getMostProbableNodes(node);
        // Sort candidates by descending probability. The previous hand-rolled
        // selection sort used lastIndexOf() on the list it was mutating, which
        // is O(n^3) and fragile when two nodes compare equal.
        probableNodes.sort((a, b) -> Float.compare(b.prob, a.prob));
        List<String> listUrlToPrefetch = new LinkedList<>();
        // Only prefetch for the top fraction of candidates. Clamp to the list
        // size: without the clamp, threshold >= 1 (or an empty candidate list,
        // where the "+ 1" still yields 1) made probableNodes.get(i) throw
        // IndexOutOfBoundsException.
        maxNumber = Math.min((int) (threshold * probableNodes.size() + 1), probableNodes.size());
        for (int i = 0; i < maxNumber; i++) {
            listUrlToPrefetch.addAll(computeCandidateUrl2(probableNodes.get(i), node));
            Log.e("PREFSTRAT9", "SELECTED --> " + probableNodes.get(i).activityName + " index: " + probableNodes.get(i).prob);
        }
        //return computeCandidateUrl(node);
        return listUrlToPrefetch;
    }
    /**
     * Calculates the total probability of access for each individual successor of {@code node}.
     * Recursively walks the successors of each successor, summing counts when the same
     * successor is reachable from several paths (independent events):
     *
     *   node->successorA->...->successorN
     *   if (successorN is known) -> sum probabilities
     *
     * @param node Current activity to be considered for prefetching
     * @return The list of probable successor nodes with their {@code prob} field filled in
     */
    private List<ActivityNode> getMostProbableNodes(ActivityNode node) {
        HashMap<Long, Integer> successorCountMap = new HashMap<>();
        List<ActivityNode> probableNodes = new LinkedList<>();
        successorCountMap = zeroContextNodes(node, successorCountMap);
        // Total number of hits across all candidates; denominator of the probabilities.
        int total = 0;
        for (Long candidate : successorCountMap.keySet()) {
            total += successorCountMap.get(candidate);
            Log.d("PREFSTRAT4", "actName :" + reversedHashMap.get(candidate) + " hit: " + successorCountMap.get(candidate));
        }
        // Probability of accessing a node by partial match based on a 0-order markov-model
        // https://pdfs.semanticscholar.org/f9dc/bf7b0c900335932d9a651b9c21d8a59c3679.pdf
        for (Long succ : successorCountMap.keySet()) {
            float prob = 0;
            if (total > 0) prob = (float) successorCountMap.get(succ) / total; // * succ.pageRank or others
            ActivityNode node1 = PrefetchingLib.getActivityGraph().getByName(reversedHashMap.get(succ));
            node1.prob = prob;
            probableNodes.add(node1);
            Log.e("PREFSTRAT4", "Computed probability: " + prob + " for " + node1.activityName);
        }
        return probableNodes;
    }
    // Recursively accumulates, per destination activity id, the hit counts from the
    // lastN sessions, following successor chains and summing counts reached from
    // multiple paths.
    private HashMap<Long, Integer> zeroContextNodes(ActivityNode node, HashMap<Long, Integer> successorCountMap) {
        List<SessionDao.SessionAggregate> sessionAggregate = node.getSessionAggregateList(lastN);
        //Log.d("PREFSTRAT4 visit",node.activityName);
        for (SessionDao.SessionAggregate succ : sessionAggregate) {
            //Log.d("PREFSTRAT4 parent of",succ.actName);
            successorCountMap = zeroContextNodes(PrefetchingLib.getActivityGraph().getByName(reversedHashMap.get(succ.idActDest)), successorCountMap);
            if (successorCountMap.containsKey(succ.idActDest)) {
                //Log.d("PREFSTRAT4 update count",succ.countSource2Dest+" "+successorCountMap.get(succ.idActDest)+"");
                successorCountMap.put(succ.idActDest, succ.countSource2Dest.intValue() + successorCountMap.get(succ.idActDest));
            }
            else { successorCountMap.put(succ.idActDest, succ.countSource2Dest.intValue()); } //Log.d("PREFSTRAT4 insert count",succ.countSource2Dest+"");}
        }
        return successorCountMap;
    }
    // Returns the candidate URLs of toBeChecked whose parameters can all be
    // filled from the extras captured for the current activity (node).
    private List<String> computeCandidateUrl2(ActivityNode toBeChecked, ActivityNode node) {
        node.parameteredUrlMap.keySet();
        List<String> candidates = new LinkedList<>();
        Map<String, String> extrasMap = PrefetchingLib.getExtrasMap().get(PrefetchingLib.getActivityIdFromName(node.activityName));
        // No extras captured yet for this activity: no URL can be instantiated.
        // (Previously this dereferenced a possible null map and crashed.)
        if (extrasMap == null) {
            return candidates;
        }
        for (ParameteredUrl parameteredUrl : toBeChecked.parameteredUrlList) {
            if (extrasMap.keySet().containsAll(parameteredUrl.getParamKeys())) {
                candidates.add(
                        parameteredUrl.fillParams(extrasMap)
                );
            }
        }
        //}
        for (String candidate : candidates) {
            Log.e("PREFSTRAT4", candidate + " for: " + toBeChecked.activityName);
        }
        return candidates;
    }
}
|
noms-digital-studio/ndelius2
|
test/assets/javascripts/offendersummary/reducers/offenderConvictionsTest.js
|
import {
INCREMENT_MAX_CONVICTIONS_VISIBLE,
OFFENDER_CONVICTIONS_LOAD_ERROR,
RECEIVE_OFFENDER_CONVICTIONS
} from '../constants/ActionTypes'
import offenderConvictions from './offenderConvictions'
import { expect } from 'chai'
// Unit tests for the offenderConvictions reducer: initial state, success,
// error, and pagination (increment-visible) transitions.
describe('offenderConvictionsReducer', () => {
  let state
  // An unknown action type must yield the reducer's default state.
  describe('when in default state', () => {
    beforeEach(() => {
      state = offenderConvictions(undefined, { type: '"@@redux/INIT"' })
    })
    it('fetching is true', () => {
      expect(state.fetching).to.equal(true)
    })
    it('load error not set', () => {
      expect(state.loadError).to.equal(false)
    })
    it('maxConvictionsVisible defaults to 3', () => {
      expect(state.maxConvictionsVisible).to.equal(3)
    })
  })
  // Successful load: convictions stored, fetching/loadError reset.
  describe('when RECEIVE_OFFENDER_CONVICTIONS action received', () => {
    beforeEach(() => {
      state = offenderConvictions({ fetching: true, loadError: true }, {
        type: RECEIVE_OFFENDER_CONVICTIONS,
        convictions: [{ type: 'bad' }]
      })
    })
    it('details set', () => {
      expect(state.convictions).to.have.length(1)
    })
    it('fetching toggled off', () => {
      expect(state.fetching).to.equal(false)
    })
    it('load error is cleared', () => {
      expect(state.loadError).to.equal(false)
    })
  })
  // Failed load: loadError raised, fetching cleared.
  describe('when OFFENDER_CONVICTIONS_LOAD_ERROR action received', () => {
    beforeEach(() => {
      state = offenderConvictions({ fetching: true }, {
        type: OFFENDER_CONVICTIONS_LOAD_ERROR,
        error: new Error('Boom!')
      })
    })
    it('load error set', () => {
      expect(state.loadError).to.equal(true)
    })
    it('fetching toggled off', () => {
      expect(state.fetching).to.equal(false)
    })
  })
  // "Show more" behaviour: visible count grows by the supplied increment.
  describe('when INCREMENT_MAX_CONVICTIONS_VISIBLE action received', () => {
    beforeEach(() => {
      state = offenderConvictions({ maxConvictionsVisible: 3 }, {
        type: INCREMENT_MAX_CONVICTIONS_VISIBLE,
        incrementBy: 10
      })
    })
    it('maxConvictionsVisible incremented by count supplied', () => {
      expect(state.maxConvictionsVisible).to.equal(13)
    })
  })
})
|
Lab41/faunus
|
src/test/java/com/thinkaurelius/faunus/formats/edgelist/rdf/RDFBlueprintsHandlerTest.java
|
package com.thinkaurelius.faunus.formats.edgelist.rdf;
import com.thinkaurelius.faunus.FaunusEdge;
import com.thinkaurelius.faunus.FaunusElement;
import com.thinkaurelius.faunus.FaunusVertex;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
/**
* @author <NAME> (http://markorodriguez.com)
*/
public class RDFBlueprintsHandlerTest extends TestCase {
    // Hashing 1M random URIs with MD5 and keeping the first 8 digest bytes as
    // a long must produce no collisions (ids are derived from URIs this way).
    public void testMD5HashUniqueness() throws NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("MD5");
        final Set<Long> ids = new HashSet<Long>();
        int loops = 1000000;
        for (int i = 0; i < loops; i++) {
            ids.add(ByteBuffer.wrap(md.digest(("http://test#" + UUID.randomUUID().toString()).getBytes())).getLong());
        }
        assertEquals(ids.size(), loops);
    }
    // Same uniqueness requirement for the project's CRC64 digest.
    public void testCrc64HashUniqueness() throws NoSuchAlgorithmException {
        final Set<Long> ids = new HashSet<Long>();
        int loops = 1000000;
        for (int i = 0; i < loops; i++) {
            ids.add(Crc64.digest(("http://test#" + UUID.randomUUID().toString()).getBytes()));
        }
        assertEquals(ids.size(), loops);
    }
    // With USE_LOCALNAME enabled, names come from the local part of the URI:
    // the '#' fragment or, failing that, the last path segment.
    public void testUseFragments() throws Exception {
        Configuration config = new Configuration();
        config.setBoolean(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_USE_LOCALNAME, true);
        config.setStrings(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_FORMAT, "n-triples");
        RDFBlueprintsHandler handler = new RDFBlueprintsHandler(config);
        handler.parse("<http://tinkerpop.com#josh> <http://tinkerpop.com#created> <http://tinkerpop.com#ripple> .");
        handler.next();
        handler.next();
        assertEquals(((FaunusEdge) handler.next()).getLabel(), "created");
        handler.parse("<http://tinkerpop.com#josh> <http://tinkerpop.com/created> <http://tinkerpop.com#ripple> .");
        handler.next();
        handler.next();
        assertEquals(((FaunusEdge) handler.next()).getLabel(), "created");
        handler.parse("<http://dbpedia.org/resource/Abraham_Lincoln> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://dbpedia.org/ontology/Person> .");
        assertEquals(handler.next().getProperty("name"), "Abraham_Lincoln");
        assertEquals(handler.next().getProperty("name"), "Person");
        assertEquals(((FaunusEdge) handler.next()).getLabel(), "type");
        assertFalse(handler.hasNext());
    }
    // Predicates listed in AS_PROPERTIES are folded into the subject vertex
    // as properties instead of being emitted as edges; other predicates still
    // yield subject vertex, object vertex and edge (hence the hasNext dance).
    public void testAsProperties() throws Exception {
        Configuration config = new Configuration();
        config.setBoolean(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_USE_LOCALNAME, true);
        config.setStrings(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_AS_PROPERTIES, "http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
        config.setStrings(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_FORMAT, "n-triples");
        RDFBlueprintsHandler handler = new RDFBlueprintsHandler(config);
        handler.parse("<http://tinkerpop.com#josh> <http://tinkerpop.com#created> <http://tinkerpop.com#ripple> .");
        assertTrue(handler.hasNext());
        assertTrue(handler.hasNext());
        handler.next();
        assertTrue(handler.hasNext());
        handler.next();
        assertTrue(handler.hasNext());
        assertEquals(((FaunusEdge) handler.next()).getLabel(), "created");
        assertFalse(handler.hasNext());
        handler.parse("<http://dbpedia.org/resource/Abraham_Lincoln> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://dbpedia.org/ontology/Person> .");
        FaunusVertex subject = (FaunusVertex) handler.next();
        assertEquals(subject.getProperty("name"), "Abraham_Lincoln");
        assertEquals(subject.getProperty("type"), "Person");
        assertFalse(handler.hasNext());
    }
    // Typed RDF literals become native Java property values on the subject
    // vertex: int, string, double, boolean, long and float are all covered.
    public void testLiteralProperties() throws Exception {
        Configuration config = new Configuration();
        config.setBoolean(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_USE_LOCALNAME, true);
        config.setBoolean(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_LITERAL_AS_PROPERTY, true);
        config.set(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_FORMAT, "n-triples");
        RDFBlueprintsHandler handler = new RDFBlueprintsHandler(config);
        handler.parse("<http://tinkerpop.com#josh> <http://tinkerpop.com#age> \"32\"^^<http://www.w3.org/2001/XMLSchema#int> .");
        FaunusElement subject = handler.next();
        assertEquals(subject.getProperty("name"), "josh");
        assertEquals(subject.getProperty("age"), 32);
        assertFalse(handler.hasNext());
        handler.parse("<http://tinkerpop.com#marko> <http://tinkerpop.com#firstname> \"marko\"^^<http://www.w3.org/2001/XMLSchema#string> .");
        subject = handler.next();
        assertEquals(subject.getProperty("name"), "marko");
        assertEquals(subject.getProperty("firstname"), "marko");
        assertFalse(handler.hasNext());
        handler.parse("<http://tinkerpop.com#stephen> <http://tinkerpop.com#location> \"1.023\"^^<http://www.w3.org/2001/XMLSchema#double> .");
        subject = handler.next();
        assertEquals(subject.getProperty("name"), "stephen");
        assertEquals(subject.getProperty("location"), 1.023d);
        assertFalse(handler.hasNext());
        handler.parse("<http://tinkerpop.com#stephen> <http://tinkerpop.com#alive> \"true\"^^<http://www.w3.org/2001/XMLSchema#boolean> .");
        subject = handler.next();
        assertEquals(subject.getProperty("name"), "stephen");
        assertEquals(subject.getProperty("alive"), true);
        assertFalse(handler.hasNext());
        handler.parse("<http://tinkerpop.com#stephen> <http://tinkerpop.com#ttl> \"1234567890005543\"^^<http://www.w3.org/2001/XMLSchema#long> .");
        subject = handler.next();
        assertEquals(subject.getProperty("name"), "stephen");
        assertEquals(subject.getProperty("ttl"), 1234567890005543l);
        assertFalse(handler.hasNext());
    }
    // Several parse() calls buffer their elements; next() drains them in
    // order across the parses.
    public void testMultiLineParse() throws Exception {
        Configuration config = new Configuration();
        config.setBoolean(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_USE_LOCALNAME, true);
        config.setBoolean(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_LITERAL_AS_PROPERTY, true);
        config.set(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_FORMAT, "n-triples");
        RDFBlueprintsHandler handler = new RDFBlueprintsHandler(config);
        handler.parse("<http://tinkerpop.com#josh> <http://tinkerpop.com#age> \"32\"^^<http://www.w3.org/2001/XMLSchema#int> .");
        handler.parse("<http://tinkerpop.com#josh> <http://tinkerpop.com#knows> <http://tinkerpop.com#marko> .");
        FaunusVertex josh = (FaunusVertex) handler.next();
        assertEquals(josh.getProperty("age"), 32);
        assertEquals(josh.getProperty("name"), "josh");
        assertEquals(josh.getPropertyKeys().size(), 3);
        josh = (FaunusVertex) handler.next();
        assertEquals(josh.getProperty("name"), "josh");
        assertEquals(josh.getPropertyKeys().size(), 2);
        FaunusVertex marko = (FaunusVertex) handler.next();
        assertEquals(marko.getProperty("name"), "marko");
        assertEquals(marko.getPropertyKeys().size(), 2);
        FaunusEdge knows = (FaunusEdge) handler.next();
        assertEquals(knows.getLabel(), "knows");
        assertEquals(knows.getPropertyKeys().size(), 1);
        assertFalse(handler.hasNext());
    }
    /*
    TODO: Make multiline work with buffering
    public void testMultiLineTriple() throws Exception {
        Configuration config = new Configuration();
        config.setBoolean(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_USE_LOCALNAME, true);
        config.setBoolean(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_LITERAL_AS_PROPERTY, true);
        config.set(RDFInputFormat.FAUNUS_GRAPH_INPUT_RDF_FORMAT, "n-triples");
        RDFBlueprintsHandler handler = new RDFBlueprintsHandler(config);
        handler.parse("<http://tinkerpop.com#josh> <http://tinkerpop.com#age> ");
        handler.parse("\"32\"^^<http://www.w3.org/2001/XMLSchema#int> .");
        FaunusVertex josh = (FaunusVertex) handler.next();
        assertEquals(josh.getProperty("age"), 32);
        assertEquals(josh.getProperty("name"), "josh");
        assertEquals(josh.getPropertyKeys().size(), 3);
        assertFalse(handler.hasNext());
    }*/
}
|
kit-algo/ch_potentials
|
code/routingkit2/src/str.cpp
|
#include "str.h"
#include <string.h>
namespace RoutingKit2{
// Returns true exactly when the two NUL-terminated strings are equal.
bool str_eq(const char*l, const char*r)noexcept{
	return strcmp(l, r) == 0;
}
// Compares two strings of equal length where '?' (in either string) matches
// any single character. Strings of different lengths never match.
bool str_wild_char_eq(const char*l, const char*r)noexcept{
	for(; *l != '\0' && *r != '\0'; ++l, ++r){
		bool wildcard = (*l == '?') || (*r == '?');
		if(!wildcard && *l != *r)
			return false;
	}
	// both strings must end at the same position
	return *l == '\0' && *r == '\0';
}
// Returns true when str begins with prefix (an empty prefix always matches).
bool starts_with(const char*prefix, const char*str)noexcept{
	for(; *prefix != '\0'; ++prefix, ++str){
		if(*str != *prefix)
			return false;
	}
	return true;
}
// Copies `in` into `out` (capacity out_size, including the terminator),
// lower-casing ASCII letters and truncating if necessary. The ASCII-only
// transformation is intentional: it is locale-independent.
void copy_str_and_make_lower_case(const char*in, char*out, unsigned out_size)noexcept{
	// A zero-sized buffer has no room even for the terminator; the previous
	// code computed out_end = out - 1 and wrote past the buffer.
	if(out_size == 0)
		return;
	char*out_end = out + out_size-1;
	while(*in && out != out_end){
		if('A' <= *in && *in <= 'Z')
			*out = *in - 'A' + 'a';
		else
			*out = *in;
		++in;
		++out;
	}
	*out = '\0';
}
}
|
CelestialAmber/tobutobugirl-dx
|
data/palettes/minigame_score.h
|
#ifndef MINIGAME_SCORE_PALETTE_H
#define MINIGAME_SCORE_PALETTE_H
/* Number of palettes stored in minigame_score_palette_data (4 entries each). */
#define minigame_score_palette_data_length 2U
/* Two 4-entry palettes for the minigame score display. Values fit 15-bit
 * color range (max 32767) — presumably BGR555 as used by GBC-style hardware;
 * TODO confirm against the palette loader. */
const unsigned int minigame_score_palette_data[] = {
	23911, 14498, 32767, 0,
	32767, 22197, 10570, 0
};
#endif
|
cycloidio/cycloid-cli
|
printer/table/printer.go
|
<gh_stars>10-100
package table
import (
"fmt"
"io"
"reflect"
"strconv"
"strings"
"time"
"github.com/olekukonko/tablewriter"
"github.com/cycloidio/cycloid-cli/client/models"
"github.com/cycloidio/cycloid-cli/printer"
)
// Table is a stateless printer that renders objects as plain-text tables
// (see its Print method below).
type Table struct{}
// entryFromStruct renders one table row: for each column title in h it looks
// up the matching exported field of obj and converts its value to a string.
// (h comes from headersFromStruct, so every name resolves.)
func entryFromStruct(obj reflect.Value, h []string) []string {
	// we can't use obj.NumField() because it will
	// create a too big slice since it will include
	// the unexported field
	values := make([]string, 0)
	for _, header := range h {
		value := obj.FieldByName(header)
		switch value.Kind() {
		case reflect.String:
			values = append(values, value.String())
		case reflect.Uint32:
			values = append(values, strconv.FormatInt(int64(value.Uint()), 10))
		case reflect.Slice:
			// Render Slice of strings
			typ := value.Type().Elem()
			if typ.Kind() == reflect.String {
				stringSlice := make([]string, value.Len())
				for i := 0; i < value.Len(); i++ {
					stringSlice[i] = value.Index(i).String()
				}
				values = append(values, strings.Join(stringSlice[:], "\n"))
			} else {
				// non-string slices are summarised by their length only
				values = append(values, strconv.Itoa(value.Len()))
			}
		case reflect.Ptr:
			elt := value.Elem()
			switch elt.Kind() {
			case reflect.String:
				values = append(values, elt.String())
			case reflect.Uint32:
				values = append(values, strconv.FormatInt(int64(elt.Uint()), 10))
			case reflect.Int64:
				// *int64 fields are rendered as RFC3339 timestamps — they are
				// presumably Unix epoch seconds; TODO confirm with the models.
				t := time.Unix(elt.Int(), 0)
				values = append(values, t.Format(time.RFC3339))
			default:
				// in the case we don't support the type, we print it
				// for further integration
				values = append(values, elt.Kind().String())
			}
		default:
			// in the case we don't support the type, we print it
			// for further integration
			values = append(values, value.Kind().String())
		}
	}
	return values
}
// headersFromStruct derives the table column titles from the exported,
// non-struct-pointer fields of the given struct value. opts is currently
// unused but kept for signature symmetry with the other helpers.
func headersFromStruct(obj reflect.Value, opts printer.Options) []string {
	// NumField also counts unexported fields, so grow from an empty slice
	// instead of preallocating to NumField.
	headers := make([]string, 0)
	structType := obj.Type()
	for i := 0; i < structType.NumField(); i++ {
		field := structType.Field(i)
		// a non-empty PkgPath marks an unexported field: skip it
		if field.PkgPath != "" {
			continue
		}
		// skip pointers to structs so nested objects don't become columns
		if fieldValue := obj.FieldByName(field.Name); fieldValue.Kind() == reflect.Ptr {
			if fieldValue.Elem().Kind() == reflect.Struct {
				continue
			}
		}
		headers = append(headers, field.Name)
	}
	return headers
}
// generate builds the table headers and row entries for obj, which must be
// either a pointer to a struct or a slice/array of pointers to structs.
// Any other kind yields an error.
func generate(obj interface{}, opts printer.Options) ([]string, [][]string, error) {
	var (
		headers []string
		entries [][]string
		err     error
	)
	// obj can be a list of pointers or a pointer to struct;
	// we need to handle both cases
	rObj := reflect.ValueOf(obj)
	switch rObj.Kind() {
	// the object is a pointer to a struct,
	// example: *models.ExternalBackend
	case reflect.Ptr:
		// we need to get the Value targetted by this pointer
		elt := rObj.Elem()
		headers = headersFromStruct(elt, opts)
		// Length 0, capacity 1: the previous make([][]string, 1) combined
		// with append produced a leading empty row in the rendered table.
		entries = make([][]string, 0, 1)
		entry := entryFromStruct(elt, headers)
		entries = append(entries, entry)
	// the object is a slice of pointers to a struct,
	// example: []*models.ExternalBackend
	case reflect.Slice, reflect.Array:
		if rObj.Len() > 0 {
			// it's supposed to be a uniform slice, so deriving the headers
			// from the first element is enough
			elt := rObj.Index(0).Elem()
			headers = headersFromStruct(elt, opts)
		}
		// Length 0, capacity Len(): make([][]string, rObj.Len()) + append
		// used to emit Len() empty rows before the real ones.
		entries = make([][]string, 0, rObj.Len())
		for i := 0; i < rObj.Len(); i++ {
			elt := rObj.Index(i).Elem()
			entry := entryFromStruct(elt, headers)
			entries = append(entries, entry)
		}
	// default: return an error to help with further object types
	default:
		err = fmt.Errorf("unable to get headers for object type: %v", rObj.Kind())
	}
	return headers, entries, err
}
// Print renders obj on w as a borderless, left-aligned, tab-padded text
// table. API errors carrying a models.ErrorPayload are unwrapped so their
// error details are printed instead of the wrapper.
func (t Table) Print(obj interface{}, opts printer.Options, w io.Writer) error {
	// TODO: init the array using the opts
	// given by the user
	table := tablewriter.NewWriter(w)
	table.SetAutoWrapText(false)
	table.SetAutoFormatHeaders(true)
	table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
	table.SetAlignment(tablewriter.ALIGN_LEFT)
	table.SetCenterSeparator("")
	table.SetColumnSeparator("")
	table.SetRowSeparator("")
	table.SetHeaderLine(false)
	table.SetBorder(false)
	table.SetTablePadding("\t") // pad with tabs
	table.SetNoWhiteSpace(true)
	// Check if the obj is an API error with payload containing errors details
	apiErr, ok := obj.(interface {
		GetPayload() *models.ErrorPayload
	})
	if ok {
		errP := apiErr.GetPayload()
		// only substitute when the payload really is an *ErrorPayload
		if reflect.TypeOf(errP) == reflect.TypeOf(&models.ErrorPayload{}) {
			obj = errP.Errors
		}
	}
	// Print our obj
	headers, entries, err := generate(obj, opts)
	if err != nil {
		return err
	}
	table.SetHeader(headers)
	table.AppendBulk(entries)
	table.Render()
	return nil
}
|
Ophien/HyperGraph
|
include/MyGraphEngine/ADMatrixOperations.h
|
#ifndef ADMATRIXOPERATIONS_H
#define ADMATRIXOPERATIONS_H
#include "graphOperations.h"
#include "AdjacencyMatrix.h"
#include "ADMatrixComponent.h"
#include <vector>
/*
 * Adjacency-matrix based implementation of graphOperations.
 * The "...M" methods are the matrix-specific operations (defined elsewhere);
 * the adjacency-list oriented methods inherited from the base class are
 * stubbed out because they are meaningless for this representation.
 */
class ADMatrixOperations : public graphOperations
{
public:
	ADMatrixOperations(void);
	~ADMatrixOperations(void);
public:
	// Matrix-specific graph operations (implemented in the .cpp file).
	Graph* creatEmptyGraph(Graph* graph, int size);
	void insertVertex(Graph* graph);
	void insertEdge(Graph* graph, int outID, int inID);
	int* getVertexM(Graph* graph, int ID);
	bool findEdge(Graph* graph, int outID, int inID);
	std::list<int>* getAdjacentVertexM(Graph* graph, int ID);
	void printAdjacentVertexList(Graph* graph, std::list<Vertex*> *list, int ID);
	void removeEdge(Graph* graph, int outID, int inID);
	void printGraph(Graph* graph);
	int getVertexCount(Graph* graph);
	Graph* getTransposedGraph(Graph* graph);
	int getLessWeightEdgeM(Graph* graph);
	int getVertexDegree(Graph* graph, int ID);
	bool checkIfIsDirected(Graph* graph);
	Graph* getUnion(Graph* graph, Graph* value);
	Graph* getAdd(Graph* graph, Graph* value);
	bool completeCheck(Graph* graph);
	bool checkIfIsIsomorf(Graph* graph, Graph value);
	Graph* getComplementary(Graph* graph);
	/*
	 * Empty implementations of the base-class interface.
	 */
	Graph* creatEmptyGraph(){return NULL;}
	void insertVertex(Graph* graph, std::string name){}
	void insertEdge(Graph* graph, std::string name, int length, int outID, int inID){}
	AdjacencyListComponent* getVertex(Graph* graph, int ID){return NULL;}
	std::list<Vertex*> *getAdjacentVertex(Graph* graph, int ID){return NULL;}
	Graph* getTransposedGraph(Graph* graph, bool debugPrint){return NULL;}
	GraphEdge* getLessWeightEdge(Graph* graph){return NULL;}
	// Fixed: returned NULL (a pointer constant) from a bool-returning stub.
	bool checkIsomorf(Graph* graph, Graph* graphB, std::vector<int> sequence){return false;}
	// Allocates a vertexCount x vertexCount matrix with every cell zeroed and
	// coloured "white". The caller owns the result (see unalocMatrix).
	ADMatrixComponent** initMatrix(int vertexCount)
	{
		ADMatrixComponent** Matrix = new ADMatrixComponent*[vertexCount];
		for(int i = 0; i < vertexCount; i++)
			Matrix[i] = new ADMatrixComponent[vertexCount];
		for(int i = 0; i < vertexCount; i++)
			for(int j = 0; j < vertexCount; j++)
			{
				Matrix[i][j].value = 0;
				Matrix[i][j].lenght = 0;
				Matrix[i][j].color = "white";
			}
		return Matrix;
	}
	// Prints the adjacency-matrix values of graph to stdout, one row per line.
	void printMatrix(Graph* graph)
	{
		AdjacencyMatrix* a = dynamic_cast<AdjacencyMatrix*>(graph);
		int linhas = a->getVertexCount();   // rows
		int colunas = linhas;               // columns (matrix is square)
		std::cout << std::endl;
		for (int i = 0; i < linhas; i++)
		{
			for (int j = 0; j < colunas; j++)
				std::cout << a->ADMatrix[i][j].value << " ";
			std::cout<<"\n";
		}
		std::cout << std::endl;
	}
	// Releases a matrix created by initMatrix. colunas is unused but kept
	// for call-site compatibility.
	void unalocMatrix(ADMatrixComponent** Matrix, int linhas, int colunas)
	{
		for (int i = 0; i < linhas; i++)
			delete []Matrix[i];
		delete []Matrix;
	}
};
#endif
|
IPA380/OpenIGTLink4J
|
OpenIGTLink4J_v2/src/examples/OIGTL_SensorStreamServer.java
|
import java.io.IOException;
import msg.OIGTL_RTSMessage;
import msg.OIGTL_STTMessage;
import msg.sensor.RTSSensorMessage;
import msg.sensor.STPSensorMessage;
import msg.sensor.STTSensorMessage;
import msg.sensor.SensorMessage;
import network.IOpenIGTMessageSender;
import network.stream.OpenIGTLinkStreamingServer;
import network.stream.StreamRunner;
import protocol.MessageParser;
import util.RTSMessageStatus;
/**
* Example class to illustrate the use of {@link OpenIGTLinkStreamingServer}
* as a base class for a server that implements the OpenIGTLink streaming
* mechanism for {@link SensorMessage}
*
* @author <NAME>
*
*/
public class OIGTL_SensorStreamServer extends OpenIGTLinkStreamingServer {
	/**
	 * Constructor.
	 *
	 * @param ipAdress
	 *            NOTE(review): this parameter is currently unused — only
	 *            {@code port} is passed to the superclass; kept for
	 *            signature compatibility with callers
	 * @param port
	 *            port the server will listen on
	 */
	public OIGTL_SensorStreamServer(String ipAdress, int port) {
		super(port, new MessageParser(false));
	}
	/**
	 * Handles STT (start streaming) requests: for sensor streams, schedules a
	 * periodic sender and acknowledges with an RTS success message; all other
	 * message types are ignored (returns null).
	 */
	@Override
	public OIGTL_RTSMessage sttMessageReceived(OIGTL_STTMessage message, IOpenIGTMessageSender replyTo) {
		/* Log the message */
		log.debug("Message received: " + message.toString());
		/* filter by message type */
		if ( message instanceof STTSensorMessage) {
			/* react to STT sensor message
			 * Create a new implementation of @StreamRunner */
			StreamRunner sensorStreamRunner = new StreamRunner(message.getDeviceName(), replyTo) {
				/* Override method send reply (this will be called regularly) */
				@Override
				protected void sendReply() {
					/* send a reply message
					 * be sure to catch Exceptions that might occur because a
					 * uncaught Exception will terminate the scheduled execution*/
					try {
						replyTo.send(new SensorMessage(deviceName));
					} catch (IOException e) {e.printStackTrace(); }
				}
			};
			/* schedule the new instance of stream runner to be called regularly with a period of
			 * message.getResolution();*/
			scheduleStreamRunner(message.getResolution(), sensorStreamRunner);
			return new RTSSensorMessage(message.getDeviceName(), RTSMessageStatus.Success);
		}
		return null;
	}
	/** Advertises support for starting (STT) and stopping (STP) sensor streams. */
	@Override
	public String[] getCapability() {
		return new String[] {
				STTSensorMessage.DATA_TYPE,
				STPSensorMessage.DATA_TYPE
		};
	}
}
|
kogarashisan/LiquidLava
|
src/Scope/DataBinding.class.js
|
/**
* Value of this DataBinding instance has changed
* @event Lava.scope.DataBinding#changed
*/
Lava.define(
'Lava.scope.DataBinding',
/**
 * Binding to a property of a JavaScript object with special support for {@link Lava.mixin.Properties}
 * and {@link Lava.system.Enumerable} instances
 *
 * @lends Lava.scope.DataBinding#
 * @extends Lava.scope.Abstract
 * @implements _iValueContainer
 */
{

	Extends: 'Lava.scope.Abstract',

	/**
	 * This instance supports two-way data binding
	 * @type {boolean}
	 * @const
	 */
	isSetValue: true,

	/**
	 * Global unique identifier of this instance
	 * @type {_tGUID}
	 */
	guid: null,

	/**
	 * The name of property to which this scope is bound
	 * @type {string}
	 */
	_property_name: null,

	/**
	 * Scope, that provides data source for this instance
	 * @type {_iValueContainer}
	 */
	_value_container: null,

	/**
	 * Listener for "changed" event in `_value_container`
	 * @type {_tListener}
	 */
	_container_changed_listener: null,

	/**
	 * Listener for onPropertyChanged in data source of this scope (if data source is instance of {@link Lava.mixin.Properties})
	 * @type {_tListener}
	 */
	_property_changed_listener: null,

	/**
	 * Listener for {@link Lava.system.Enumerable#event:collection_changed} in data source of this scope
	 * (if data source is instance of {@link Lava.system.Enumerable})
	 * @type {_tListener}
	 */
	_enumerable_changed_listener: null,

	/**
	 * Data source for this scope, from which this scope gets it's value. Also, value of the `_value_container`
	 * @type {*}
	 */
	_property_container: null,

	/**
	 * Is `_property_container` an existing object, or this scope is not bound to an existing value
	 * @type {boolean}
	 */
	_is_connected: false,

	/**
	 * Create DataBinding instance
	 * @param {_iValueContainer} value_container The scope, which provides the data source for this instance
	 * @param {string} property_name
	 */
	init: function(value_container, property_name) {

		this.guid = Lava.guid++;
		this._value_container = value_container;
		this._property_name = property_name;
		this.level = value_container.level + 1;
		this._container_changed_listener = value_container.on('changed', this.onParentDataSourceChanged, this);
		this._refreshValue();

		Lava.schema.DEBUG && Lava.ScopeManager.debugTrackScope(this);

	},

	/**
	 * Get `_property_container` from `_value_container`, and get `_property_name` from `_property_container`
	 */
	_refreshValue: function() {

		var property_container = this._value_container.getValue(),
			value = null,
			is_connected = false;

		if (property_container != null) {

			// Collection implements Properties, so if _property_name is not a number - then `get` will be called
			if (property_container.isCollection && /^\d+$/.test(this._property_name)) {

				if (this._enumerable_changed_listener == null) {

					this._enumerable_changed_listener = property_container.on('collection_changed', this.onValueChanged, this);
					this._property_container = property_container;

				}

				value = property_container.getValueAt(+this._property_name);

			} else if (property_container.isProperties) {

				if (this._property_changed_listener == null) {

					this._property_changed_listener = property_container.onPropertyChanged(this._property_name, this.onValueChanged, this);
					this._property_container = property_container;

				}

				value = property_container.get(this._property_name);

			} else {

				value = property_container[this._property_name];

			}

			is_connected = true;

		}

		if (value !== this._value || this._is_connected != is_connected) {

			this._value = value;
			this._is_connected = is_connected;
			this._fire('changed');

		}

	},

	/**
	 * Get `_is_connected`
	 * @returns {boolean}
	 */
	isConnected: function() {

		return this._is_connected;

	},

	/**
	 * Data source for this instance has changed. Remove listeners to old data source and schedule refresh
	 */
	onParentDataSourceChanged: function() {

		// Fix: the old condition tested only `_property_changed_listener`, so
		// when this scope was bound to an Enumerable index (only
		// `_enumerable_changed_listener` is set) the stale listener on the
		// previous data source was never removed.
		if (
			(this._property_changed_listener || this._enumerable_changed_listener)
			&& (this._value_container.getValue() != this._property_container)
		) {

			// currently listening to the parent's old data source
			this._property_changed_listener && this._property_container.removePropertyListener(this._property_changed_listener);
			this._enumerable_changed_listener && this._property_container.removeListener(this._enumerable_changed_listener);
			this._property_changed_listener = null;
			this._enumerable_changed_listener = null;
			this._property_container = null;

		}

		this._queueForRefresh();

	},

	/**
	 * Perform the scheduled refresh: re-read the value from the data source
	 */
	_doRefresh: function() {

		this._refreshValue();

	},

	/**
	 * Data source remains the same, but it's property has changed (property we are currently bound to)
	 */
	onValueChanged: function() {

		this._queueForRefresh();

	},

	/**
	 * If this instance is bound to existing object - set object's property value
	 * @param {*} value
	 */
	setValue: function(value) {

		var property_container = this._value_container.getValue();

		if (property_container) {

			if (this._property_changed_listener) {

				// suspend our own listener so setting the value does not
				// immediately bounce back as a change notification
				Lava.suspendListener(this._property_changed_listener);
				property_container.set(this._property_name, value);
				Lava.resumeListener(this._property_changed_listener);

			} else if (this._enumerable_changed_listener) {

				Lava.suspendListener(this._enumerable_changed_listener);
				property_container.replaceAt(+this._property_name, value);
				Lava.resumeListener(this._enumerable_changed_listener);

			} else if (property_container.isProperties) {

				property_container.set(this._property_name, value);

			} else {

				property_container[this._property_name] = value;

			}

			this._queueForRefresh();

		}

	},

	getValue: function() {

		return this._value;

	},

	destroy: function() {

		this._value_container.removeListener(this._container_changed_listener);
		this._property_changed_listener && this._property_container.removePropertyListener(this._property_changed_listener);
		this._enumerable_changed_listener && this._property_container.removeListener(this._enumerable_changed_listener);
		this._property_container = null;

		Lava.schema.DEBUG && Lava.ScopeManager.debugStopTracking(this);

		this.Abstract$destroy();

	}

});
|
jnouyang/palacios
|
linux_module/iface-console.c
|
<filename>linux_module/iface-console.c
/*
* VM Console
* (c) <NAME>, 2010
*/
#include <linux/device.h>
#include <linux/cdev.h>
#include <linux/errno.h>
#include <linux/fs.h>
#include <linux/string.h>
#include <linux/uaccess.h>
#include <linux/poll.h>
#include <linux/anon_inodes.h>
#include <linux/file.h>
#include <linux/sched.h>

#include <interfaces/vmm_console.h>
#include <palacios/vmm_host_events.h>

#include "mm.h"
#include "vm.h"
#include "palacios.h"
#include "util-queue.h"
#include "linux-exts.h"
/* Operation codes for the messages streamed to the userspace console
 * client via console_read(). */
typedef enum { CONSOLE_CURS_SET = 1,
	       CONSOLE_CHAR_SET = 2,
	       CONSOLE_SCROLL = 3,
	       CONSOLE_UPDATE = 4,
	       CONSOLE_RESOLUTION = 5} console_op_t;

/* Per-guest console state, created by palacios_tty_open(). */
struct palacios_console {
    struct gen_queue * queue;      /* outbound cons_msg queue to the client */
    spinlock_t lock;               /* guards `connected` (see console_connect) */

    int open;                      /* console exists and accepts events */
    int connected;                 /* a userspace client currently holds the fd */

    wait_queue_head_t intr_queue;  /* wakes readers/pollers when msgs arrive */

    unsigned int width;            /* text-mode dimensions supplied at open */
    unsigned int height;

    struct v3_guest * guest;
};

/* The structs below form the wire format copied verbatim to userspace by
 * console_read(), hence the packed attributes. */
struct cursor_msg {
    int x;
    int y;
} __attribute__((packed));

struct character_msg {
    int x;
    int y;
    char c;
    unsigned char style;
} __attribute__((packed));

struct scroll_msg {
    int lines;
} __attribute__((packed));

struct resolution_msg {
    int cols;
    int rows;
} __attribute__((packed));

struct cons_msg {
    unsigned char op;              /* console_op_t selecting the union arm */
    union {
	struct cursor_msg cursor;
	struct character_msg character;
	struct scroll_msg scroll;
	struct resolution_msg resolution;
    };
} __attribute__((packed));

/* This is overkill...*/
#define CONSOLE_QUEUE_LEN 8096
/* Deliver one queued cons_msg to the userspace console client.
 * Returns sizeof(struct cons_msg) on success, 0 at EOF (console closed),
 * or -EFAULT on error. */
static ssize_t
console_read(struct file * filp,
	     char __user * buf,
	     size_t size,
	     loff_t * offset)
{
    struct palacios_console * cons = filp->private_data;
    struct cons_msg * msg = NULL;
    unsigned long flags;
    int entries = 0;

    /* Console was torn down on the guest side: report EOF. */
    if (cons->open == 0) {
	return 0;
    }

    /* The client must provide room for at least one full message. */
    if (size < sizeof(struct cons_msg)) {
	ERROR("Invalid Read operation size: %lu\n", size);
	return -EFAULT;
    }

    msg = dequeue(cons->queue);

    if (msg == NULL) {
	/* NOTE(review): -EFAULT for an empty queue is unusual (-EAGAIN is
	 * conventional), but userspace may depend on it, so left as-is. */
	ERROR("ERROR: Null console message\n");
	return -EFAULT;
    }

    /* Copy exactly one message. The old code copied `size` bytes, reading
     * past the end of the allocated cons_msg whenever the caller supplied
     * a larger buffer. */
    if (copy_to_user(buf, msg, sizeof(struct cons_msg))) {
	ERROR("Read Fault\n");
	palacios_kfree(msg);   /* was leaked on this error path */
	return -EFAULT;
    }

    palacios_kfree(msg);

    spin_lock_irqsave(&(cons->queue->lock), flags);
    {
	entries = cons->queue->num_entries;
    }
    spin_unlock_irqrestore(&(cons->queue->lock), flags);

    /* More messages pending: wake any other readers/pollers. */
    if (entries > 0) {
	wake_up_interruptible(&(cons->intr_queue));
    }

    /* Report the number of bytes actually copied. */
    return sizeof(struct cons_msg);
}
/* Accept raw keyboard scan codes from the userspace client, one byte per
 * code, and deliver each to the guest as a keyboard event. */
static ssize_t
console_write(struct file * filp,
	      const char __user * buf,
	      size_t size,
	      loff_t * offset)
{
    struct palacios_console * cons = filp->private_data;
    struct v3_keyboard_event event = {0, 0};
    int idx;

    if (!cons->open) {
	return 0;
    }

    for (idx = 0; idx < size; idx++) {

	/* Pull one scan code at a time from the user buffer. */
	if (copy_from_user(&(event.scan_code), buf + idx, 1) != 0) {
	    ERROR("Console Write fault\n");
	    return -EFAULT;
	}

	v3_deliver_keyboard_event(cons->guest->v3_ctx, &event);
    }

    return size;
}
/* poll/select support: the console fd is readable whenever the message
 * queue is non-empty. Writes are always allowed, so no POLLOUT handling. */
static unsigned int
console_poll(struct file * filp,
	     struct poll_table_struct * poll_tb)
{
    struct palacios_console * cons = filp->private_data;
    unsigned int readable          = POLLIN | POLLRDNORM;
    unsigned long flags;
    int pending;

    /* Register with the wait queue before sampling the count so a wakeup
     * between the check and the caller's sleep is not lost. */
    poll_wait(filp, &(cons->intr_queue), poll_tb);

    spin_lock_irqsave(&(cons->queue->lock), flags);
    pending = cons->queue->num_entries;
    spin_unlock_irqrestore(&(cons->queue->lock), flags);

    return (pending > 0) ? readable : 0;
}
/* Userspace client dropped its fd: mark the console disconnected and
 * discard any messages still queued for the departed client. */
static int
console_release(struct inode * i,
		struct file * filp)
{
    struct palacios_console * cons = filp->private_data;
    struct cons_msg * msg = NULL;
    unsigned long flags;

    DEBUG("Releasing the Console File desc\n");

    /* `connected` is guarded by cons->lock in console_connect; use the
     * same lock here (the old code took the queue lock instead, so the
     * flag was not consistently protected). */
    spin_lock_irqsave(&(cons->lock), flags);
    {
	cons->connected = 0;
    }
    spin_unlock_irqrestore(&(cons->lock), flags);

    /* Drain and free pending messages; nobody will read them now. */
    while ((msg = dequeue(cons->queue))) {
	palacios_kfree(msg);
    }

    return 0;
}
/* File operations backing the anonymous inode handed to the userspace
 * client by console_connect(). There is no .open: the file is created
 * already-open via anon_inode_getfd(). */
static struct file_operations cons_fops = {
    .read = console_read,
    .write = console_write,
    .poll = console_poll,
    .release = console_release,
};
/* Guest-ctrl ioctl handler (V3_VM_CONSOLE_CONNECT): attach a single
 * userspace client to this console and return a new fd for it.
 * Returns the fd on success, or a negative value on failure. */
static int
console_connect(struct v3_guest * guest,
		unsigned int cmd,
		unsigned long arg,
		void * priv_data)
{
    struct palacios_console * cons = priv_data;
    int cons_fd = 0;
    int acquired = 0;
    unsigned long flags;

    if (cons->open == 0) {
	ERROR("Attempted to connect to unopened console\n");
	return -1;
    }

    /* Atomically claim the single client slot. */
    spin_lock_irqsave(&(cons->lock), flags);
    {
	if (cons->connected == 0) {
	    cons->connected = 1;
	    acquired = 1;
	}
    }
    spin_unlock_irqrestore(&(cons->lock), flags);

    if (acquired == 0) {
	ERROR("Console already connected\n");
	return -1;
    }

    cons_fd = anon_inode_getfd("v3-cons", &cons_fops, cons, O_RDWR);

    if (cons_fd < 0) {
	ERROR("Error creating console inode\n");
	/* Roll back the connected flag; the old code left it set, which
	 * permanently blocked every future connection attempt. */
	spin_lock_irqsave(&(cons->lock), flags);
	{
	    cons->connected = 0;
	}
	spin_unlock_irqrestore(&(cons->lock), flags);
	return cons_fd;
    }

    /* Notify the guest a console client attached (presumably triggers a
     * full redraw for the new client -- confirm in vmm_console). */
    v3_deliver_console_event(guest->v3_ctx, NULL);
    v3_lnx_printk("Console connected\n");

    return cons_fd;
}
static void *
palacios_tty_open(void * private_data,
unsigned int width,
unsigned int height)
{
struct v3_guest * guest = (struct v3_guest *)private_data;
struct palacios_console * cons = palacios_kmalloc(sizeof(struct palacios_console), GFP_KERNEL);
if (!cons) {
ERROR("Cannot allocate memory for console\n");
return NULL;
}
v3_lnx_printk("Guest initialized virtual console (Guest=%s)\n", guest->name);
if (guest == NULL) {
ERROR("ERROR: Cannot open a console on a NULL guest\n");
palacios_kfree(cons);
return NULL;
}
/*
if (cons->open == 1) {
ERROR("Console already open\n");
palacios_kfree(cons);
return NULL;
}
*/
/* Initialize state fields */
cons->queue = create_queue(CONSOLE_QUEUE_LEN);
cons->guest = guest;
cons->connected = 0;
cons->width = width;
cons->height = height;
cons->open = 1;
spin_lock_init(&(cons->lock));
init_waitqueue_head(&(cons->intr_queue));
add_guest_ctrl(guest, V3_VM_CONSOLE_CONNECT, console_connect, cons);
return cons;
}
/* Enqueue a message for the userspace client, then wake any waiting
 * reader/poller. Ownership of `msg` transfers to the queue. */
static int
post_msg(struct palacios_console * cons,
	 struct cons_msg * msg)
{
    /* If the queue is full, nudge readers awake and yield the CPU until
     * space frees up. */
    for (;;) {
	if (enqueue(cons->queue, msg) != -1) {
	    break;
	}
	wake_up_interruptible(&(cons->intr_queue));
	schedule();
    }

    wake_up_interruptible(&(cons->intr_queue));

    return 0;
}
/* Console-hook: queue a cursor-position update for the client.
 * Returns 0 (including when no client is attached), -1 on alloc failure. */
static int
palacios_tty_cursor_set(void * console,
			int x,
			int y)
{
    struct palacios_console * cons = (struct palacios_console *)console;
    struct cons_msg * msg = NULL;

    /* No client attached: silently drop the update. */
    if (cons->connected == 0) {
	return 0;
    }

    msg = palacios_kmalloc(sizeof(struct cons_msg), GFP_KERNEL);

    if (!msg) {
	ERROR("Cannot allocate cursor set message in console\n");
	return -1;
    }

    /* Zero the whole message: console_read copies the full struct to
     * userspace, so unused union members/padding would otherwise leak
     * uninitialized kernel heap memory. */
    memset(msg, 0, sizeof(struct cons_msg));

    msg->op       = CONSOLE_CURS_SET;
    msg->cursor.x = x;
    msg->cursor.y = y;

    return post_msg(cons, msg);
}
/* Console-hook: queue a single character-cell update for the client.
 * Returns 0 (including when no client is attached), -1 on alloc failure. */
static int
palacios_tty_character_set(void * console,
			   int x,
			   int y,
			   char c,
			   unsigned char style)
{
    struct palacios_console * cons = (struct palacios_console *) console;
    struct cons_msg * msg = NULL;

    /* No client attached: silently drop the update. */
    if (cons->connected == 0) {
	return 0;
    }

    msg = palacios_kmalloc(sizeof(struct cons_msg), GFP_KERNEL);

    if (!msg) {
	ERROR("Cannot allocate character set message in console\n");
	return -1;
    }

    /* Zero the whole message: console_read copies the full struct to
     * userspace, so unused union members/padding would otherwise leak
     * uninitialized kernel heap memory. */
    memset(msg, 0, sizeof(struct cons_msg));

    msg->op              = CONSOLE_CHAR_SET;
    msg->character.x     = x;
    msg->character.y     = y;
    msg->character.c     = c;
    msg->character.style = style;

    return post_msg(cons, msg);
}
/* Console-hook: queue a scroll request for the client.
 * Returns 0 (including when no client is attached), -1 on alloc failure. */
static int
palacios_tty_scroll(void * console,
		    int lines)
{
    struct palacios_console * cons = (struct palacios_console *) console;
    struct cons_msg * msg = NULL;

    /* No client attached: silently drop the update. */
    if (cons->connected == 0) {
	return 0;
    }

    msg = palacios_kmalloc(sizeof(struct cons_msg), GFP_KERNEL);

    if (!msg) {
	ERROR("Cannot allocate scroll message in console\n");
	return -1;
    }

    /* Zero the whole message: console_read copies the full struct to
     * userspace, so unused union members/padding would otherwise leak
     * uninitialized kernel heap memory. */
    memset(msg, 0, sizeof(struct cons_msg));

    msg->op           = CONSOLE_SCROLL;
    msg->scroll.lines = lines;

    return post_msg(cons, msg);
}
/* Console-hook: queue a text-mode resolution change for the client.
 * Returns 0 (including when no client is attached), -1 on alloc failure. */
static int
palacios_set_text_resolution(void * console,
			     int cols,
			     int rows)
{
    struct palacios_console * cons = (struct palacios_console *)console;
    struct cons_msg * msg = NULL;

    /* No client attached: silently drop the update. */
    if (cons->connected == 0) {
	return 0;
    }

    msg = palacios_kmalloc(sizeof(struct cons_msg), GFP_KERNEL);

    if (!msg) {
	ERROR("Cannot allocate text resolution message in console\n");
	return -1;
    }

    /* Zero the whole message: console_read copies the full struct to
     * userspace, so unused union members/padding would otherwise leak
     * uninitialized kernel heap memory. */
    memset(msg, 0, sizeof(struct cons_msg));

    msg->op              = CONSOLE_RESOLUTION;
    msg->resolution.cols = cols;
    msg->resolution.rows = rows;

    return post_msg(cons, msg);
}
/* Console-hook: queue a "repaint now" marker for the client (no payload).
 * Returns 0 (including when no client is attached), -1 on alloc failure. */
static int
palacios_tty_update(void * console)
{
    struct palacios_console * cons = (struct palacios_console *) console;
    struct cons_msg * msg = NULL;

    /* No client attached: silently drop the update. */
    if (cons->connected == 0) {
	return 0;
    }

    msg = palacios_kmalloc(sizeof(struct cons_msg), GFP_KERNEL);

    if (!msg) {
	ERROR("Cannot allocate update message in console\n");
	return -1;
    }

    /* Zero the whole message: console_read copies the full struct to
     * userspace, so the unused union/padding would otherwise leak
     * uninitialized kernel heap memory. */
    memset(msg, 0, sizeof(struct cons_msg));

    msg->op = CONSOLE_UPDATE;

    return post_msg(cons, msg);
}
/* Console-hook: tear down the console created by palacios_tty_open. */
static void
palacios_tty_close(void * console)
{
    struct palacios_console * cons = (struct palacios_console *) console;

    /* Mark closed so in-flight read/write calls bail out early. */
    cons->open = 0;

    /* NOTE(review): pollers blocked on cons->intr_queue are not woken here,
     * and nothing guards against a concurrent reader touching cons->queue
     * while it is torn down -- presumably the client has already detached
     * when this runs; confirm against the caller. */
    remove_guest_ctrl(cons->guest, V3_VM_CONSOLE_CONNECT);
    deinit_queue(cons->queue);
    palacios_kfree(cons->queue);
    palacios_kfree(cons);
}
/* Hook table registered with the Palacios VMM core; the VMM calls these
 * to drive the host-side console. */
static struct v3_console_hooks palacios_console_hooks = {
    .open = palacios_tty_open,
    .set_cursor = palacios_tty_cursor_set,
    .set_character = palacios_tty_character_set,
    .scroll = palacios_tty_scroll,
    .set_text_resolution = palacios_set_text_resolution,
    .update = palacios_tty_update,
    .close = palacios_tty_close,
};
/* Extension init: register the console hook table with the VMM core.
 * Always succeeds. */
static int
console_init( void )
{
    V3_Init_Console(&palacios_console_hooks);
    return 0;
}
/* Module-level extension descriptor; only global init is needed -- the
 * per-guest setup happens lazily in palacios_tty_open(). */
static struct linux_ext console_ext = {
    .name = "CONSOLE",
    .init = console_init,
    .deinit = NULL,
    .guest_init = NULL,
    .guest_deinit = NULL
};

register_extension(&console_ext);
|
direktspeed/truffle-phpparser
|
trufflephp-parser/org.eclipse.php.core/src/main/java/org/eclipse/php/core/ast/nodes/ASTNode.java
|
/*******************************************************************************
* Copyright (c) 2009-2019 IBM Corporation and others.
*
* This program and the accompanying materials are made
* available under the terms of the Eclipse Public License 2.0
* which is available at https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* IBM Corporation - initial API and implementation
* Zend Technologies
*******************************************************************************/
package org.eclipse.php.core.ast.nodes;
import java.util.*;
import org.eclipse.php.core.PHPVersion;
import org.eclipse.php.core.ast.match.ASTMatcher;
import org.eclipse.php.core.ast.visitor.Visitor;
/**
* Abstract superclass of all Abstract Syntax Tree (AST) node types.
* <p>
* An AST node represents a PHP source code construct, such as a name, type,
* expression, statement, or declaration.
* <p>
* ASTs do not contain cycles.
* <p>
*
* @see Visitable
* @author <NAME>., <NAME>. ,2007
* </p>
*/
public abstract class ASTNode implements Visitable {
/**
* ASTNode Types
*/
public static final int ARRAY_ACCESS = 0;
public static final int ARRAY_CREATION = 1;
public static final int ARRAY_ELEMENT = 2;
public static final int ASSIGNMENT = 3;
public static final int AST_ERROR = 4;
public static final int BACK_TICK_EXPRESSION = 5;
public static final int BLOCK = 6;
public static final int BREAK_STATEMENT = 7;
public static final int CAST_EXPRESSION = 8;
public static final int CATCH_CLAUSE = 9;
public static final int STATIC_CONSTANT_ACCESS = 10;
public static final int CONSTANT_DECLARATION = 11;
public static final int CLASS_DECLARATION = 12;
public static final int CLASS_INSTANCE_CREATION = 13;
public static final int CLASS_NAME = 14;
public static final int CLONE_EXPRESSION = 15;
public static final int COMMENT = 16;
public static final int CONDITIONAL_EXPRESSION = 17;
public static final int CONTINUE_STATEMENT = 18;
public static final int DECLARE_STATEMENT = 19;
public static final int DO_STATEMENT = 20;
public static final int ECHO_STATEMENT = 21;
public static final int EMPTY_STATEMENT = 22;
public static final int EXPRESSION_STATEMENT = 23;
public static final int FIELD_ACCESS = 24;
public static final int FIELD_DECLARATION = 25;
public static final int FOR_EACH_STATEMENT = 26;
public static final int FORMAL_PARAMETER = 27;
public static final int FOR_STATEMENT = 28;
public static final int FUNCTION_DECLARATION = 29;
public static final int FUNCTION_INVOCATION = 30;
public static final int FUNCTION_NAME = 31;
public static final int GLOBAL_STATEMENT = 32;
public static final int IDENTIFIER = 33;
public static final int IF_STATEMENT = 34;
public static final int IGNORE_ERROR = 35;
public static final int INCLUDE = 36;
public static final int INFIX_EXPRESSION = 37;
public static final int IN_LINE_HTML = 38;
public static final int INSTANCE_OF_EXPRESSION = 39;
public static final int INTERFACE_DECLARATION = 40;
public static final int LIST_VARIABLE = 41;
public static final int METHOD_DECLARATION = 42;
public static final int METHOD_INVOCATION = 43;
public static final int POSTFIX_EXPRESSION = 44;
public static final int PREFIX_EXPRESSION = 45;
public static final int PROGRAM = 46;
public static final int QUOTE = 47;
public static final int REFERENCE = 48;
public static final int REFLECTION_VARIABLE = 49;
public static final int RETURN_STATEMENT = 50;
public static final int SCALAR = 51;
public static final int STATIC_FIELD_ACCESS = 52;
public static final int STATIC_METHOD_INVOCATION = 53;
public static final int STATIC_STATEMENT = 54;
public static final int SWITCH_CASE = 55;
public static final int SWITCH_STATEMENT = 56;
public static final int THROW_STATEMENT = 57;
public static final int TRY_STATEMENT = 58;
public static final int UNARY_OPERATION = 59;
public static final int VARIABLE = 60;
public static final int WHILE_STATEMENT = 61;
public static final int PARENTHESIS_EXPRESSION = 62;
public static final int SINGLE_FIELD_DECLARATION = 63;
public static final int NAMESPACE = 64;
public static final int NAMESPACE_NAME = 65;
public static final int USE_STATEMENT_PART = 66;
public static final int USE_STATEMENT = 67;
public static final int GOTO_LABEL = 68;
public static final int GOTO_STATEMENT = 69;
public static final int LAMBDA_FUNCTION_DECLARATION = 70;
public static final int TRAIT_USE_STATEMENT = 71;
/**
* @deprecated
*/
public static final int TRAIT_DECLARATION = 72;
public static final int FULLY_QUALIFIED_TRAIT_METHOD_REFERENCE = 73;
public static final int TRAIT_ALIAS = 74;
public static final int YIELD_STATEMENT = 75;
public static final int FINALLY_CLAUSE = 76;
public static final int ANONYMOUS_CLASS_DECLARATION = 77;
public static final int RETURN_TYPE = 78;
public static final int EMPTY_EXPRESSION = 79;
public static final int ARROW_FUNCTION_DECLARATION = 80;
public static final int ARRAY_SPREAD_ELEMENT = 81;
/**
* Internal convenience constant indicating that there is definite risk of
* cycles.
*/
static final boolean CYCLE_RISK = true;
/**
* Internal convenience constant indicating that there is no risk of cycles.
*/
static final boolean NO_CYCLE_RISK = false;
/**
* Internal convenience constant indicating that a structural property is
* mandatory.
*/
static final boolean MANDATORY = true;
/**
* Internal convenience constant indicating that a structural property is
* optional.
*/
static final boolean OPTIONAL = false;
/**
* Flag constant (bit mask, value 1) indicating that there is something not
* quite right with this AST node.
* <p>
* The standard parser (<code>ASTParser</code>) sets this flag on a node to
* indicate a syntax error detected in the vicinity.
* </p>
*/
public static final char MALFORMED = 1;
/**
* Flag constant (bit mask, value 2) indicating that this is a node that was
* created by the parser (as opposed to one created by another party).
* <p>
* The standard parser (<code>ASTParser</code>) sets this flag on the nodes
* it creates.
* </p>
*/
public static final char ORIGINAL = 2;
/**
* Flag constant (bit mask, value 4) indicating that this node is
* unmodifiable. When a node is marked unmodifiable, the following
* operations result in a runtime exception:
* <ul>
* <li>Change a simple property of this node.</li>
* <li>Add or remove a child node from this node.</li>
* <li>Parent (or re-parent) this node.</li>
* </ul>
* <p>
* The standard parser (<code>ASTParser</code>) does not set this flag on
* the nodes it creates. However, clients may set this flag on a node to
* prevent further modification of the its structural properties.
* </p>
*/
public static final char PROTECT = 4;
/**
* Flag constant (bit mask, value 8) indicating that this node or a part of
* this node is recovered from source that contains a syntax error detected
* in the vicinity.
* <p>
* The standard parser (<code>ASTParser</code>) sets this flag on a node to
* indicate a recovered node.
* </p>
*/
public static final char RECOVERED = 8;
/**
* Source range
*/
private int start = -1;
private int length = 0;
/**
* character containing flags; none set by default.
* <p>
* N.B. This is a private field, but declared as package-visible for more
* efficient access from inner classes.
* </p>
*
* @see #MALFORMED, #PROTECT, #RECOVERED, #ORIGINAL
*/
int flags = 0;
/**
* Property of parent in which this node is a child, or <code>null</code> if
* this node is a root. Initially <code>null</code>.
*
* @see #getLocationInParent
*/
private StructuralPropertyDescriptor location = null;
/**
* Owning AST.
* <p>
* N.B. This is a private field, but declared as package-visible for more
* efficient access from inner classes.
* </p>
*/
final AST ast;
/**
* Parent AST node, or <code>null</code> if this node is a root. Initially
* <code>null</code>.
*/
private ASTNode parent = null;
/**
* An unmodifiable empty map (used to implement <code>properties()</code>).
*/
private static final Map<String, Object> UNMODIFIABLE_EMPTY_MAP = Collections.unmodifiableMap(new HashMap<>(1));
/**
* Primary field used in representing node properties efficiently. If
* <code>null</code>, this node has no properties. If a <code>String</code>,
* this is the name of this node's sole property, and <code>property2</code>
* contains its value. If a <code>HashMap</code>, this is the table of
* property name-value mappings; <code>property2</code>, if non-null is its
* unmodifiable equivalent. Initially <code>null</code>.
*
* @see #property2
*/
private Object property1 = null;
/**
* Auxiliary field used in representing node properties efficiently.
*
* @see #property1
*/
private Object property2 = null;
/**
 * Construct an empty ASTNode and attach it with the given AST.
 *
 * @param ast
 *            the owning AST; must not be <code>null</code>
 * @throws IllegalArgumentException
 *             if <code>ast</code> is <code>null</code>
 */
public ASTNode(AST ast) {
	if (ast == null) {
		throw new IllegalArgumentException();
	}
	this.ast = ast;
	// New nodes inherit whatever default flags the owning AST prescribes.
	setFlags(ast.getDefaultNodeFlag());
}
/**
 * Construct a ranged ASTNode and attach it with the given AST.
 *
 * @param start
 *            offset of the first character of this node in the source
 * @param end
 *            offset just past the last character (exclusive); the length
 *            is stored as <code>end - start</code>
 * @param ast
 *            the owning AST; must not be <code>null</code>
 */
public ASTNode(int start, int end, AST ast) {
	this(ast);
	this.start = start;
	this.length = end - start;
}
/**
 * Accepts the given visitor on a visit of the current node.
 * <p>
 * Template method: wraps the type-specific {@link #accept0(Visitor)} with
 * the generic <code>preVisit</code>/<code>postVisit</code> callbacks.
 * </p>
 *
 * @param visitor
 *            the visitor object
 * @exception IllegalArgumentException
 *                if the visitor is null
 */
@Override
public final void accept(Visitor visitor) {
	if (visitor == null) {
		throw new IllegalArgumentException();
	}
	// begin with the generic pre-visit
	visitor.preVisit(this);
	// dynamic dispatch to internal method for type-specific visit/endVisit
	accept0(visitor);
	// end with the generic post-visit
	visitor.postVisit(this);
}
/**
* Accepts the given visitor on a type-specific visit of the current node.
* This method must be implemented in all concrete AST node types.
* <p>
* General template for implementation on each concrete ASTNode class:
*
* <pre>
* <code>
* boolean visitChildren = visitor.visit(this);
* if (visitChildren) {
* // visit children in normal left to right reading order
* ... acceptChild();
* }
* visitor.endVisit(this);
* </code>
* </pre>
*
* Note that the caller (<code>accept</code>) take cares of invoking
* <code>visitor.preVisit(this)</code> and
* <code>visitor.postVisit(this)</code>.
* </p>
*
* @param visitor
* the visitor object
*/
abstract void accept0(Visitor visitor);
/**
* Returns whether the subtree rooted at the given node matches the given
* other object as decided by the given matcher.
* <p>
* This internal method is implemented in each of the concrete node
* subclasses.
* </p>
*
* @param matcher
* the matcher
* @param other
* the other object, or <code>null</code>
* @return <code>true</code> if the subtree matches, or <code>false</code>
* if they do not match
*/
public abstract boolean subtreeMatch(ASTMatcher matcher, Object other);
/**
* Returns an integer value identifying the type of this concrete AST node.
* The values are small positive integers, suitable for use in switch
* statements.
* <p>
* For each concrete node type there is a unique node type constant (name
* and value).
* </p>
*
* @return one of the node type constants
*/
public abstract int getType();
/**
* Returns the location of this node within its parent, or <code>null</code>
* if this is a root node.
* <p>
*
* <pre>
* ASTNode node = ...;
* ASTNode parent = node.getParent();
* StructuralPropertyDescriptor location = node.getLocationInParent();
* assert (parent != null) == (location != null);
* if ((location != null) && location.isChildProperty())
* assert parent.getStructuralProperty(location) == node;
* if ((location != null) && location.isChildListProperty())
* assert ((List) parent.getStructuralProperty(location)).contains(node);
* </pre>
*
* </p>
* <p>
* Note that the relationship between an AST node and its parent node may
* change over the lifetime of a node.
* </p>
*
* @return the location of this node in its parent, or <code>null</code> if
* this node has no parent
*/
public final StructuralPropertyDescriptor getLocationInParent() {
return this.location;
}
/**
 * Returns the value of the given structural property for this node. The
 * value returned depends on the kind of property:
 * <ul>
 * <li>{@link SimplePropertyDescriptor} - the value of the given simple
 * property, or <code>null</code> if none; primitive values are "boxed"</li>
 * <li>{@link ChildPropertyDescriptor} - the child node (type
 * <code>ASTNode</code>), or <code>null</code> if none</li>
 * <li>{@link ChildListPropertyDescriptor} - the list (element type:
 * {@link ASTNode})</li>
 * </ul>
 *
 * @param property
 *            the property
 * @return the value, or <code>null</code> if none
 * @exception RuntimeException
 *                if this node does not have the given property
 */
public final Object getStructuralProperty(StructuralPropertyDescriptor property) {
	if (property instanceof SimplePropertyDescriptor) {
		SimplePropertyDescriptor p = (SimplePropertyDescriptor) property;
		// NOTE(review): this getter compares the value type against the
		// wrapper classes (Integer.class / Boolean.class), while
		// setStructuralProperty below compares against the primitive
		// classes (int.class / boolean.class). Only one form can match
		// the types the SimplePropertyDescriptors actually declare, so
		// one of the two methods likely has dead branches -- verify
		// against the descriptor declarations.
		if (p.getValueType() == Integer.class) {
			int result = internalGetSetIntProperty(p, true, 0);
			return Integer.valueOf(result);
		} else if (p.getValueType() == Boolean.class) {
			boolean result = internalGetSetBooleanProperty(p, true, false);
			return Boolean.valueOf(result);
		} else {
			return internalGetSetObjectProperty(p, true, null);
		}
	}
	if (property instanceof ChildPropertyDescriptor) {
		return internalGetSetChildProperty((ChildPropertyDescriptor) property, true, null);
	}
	if (property instanceof ChildListPropertyDescriptor) {
		return internalGetChildListProperty((ChildListPropertyDescriptor) property);
	}
	throw new IllegalArgumentException();
}
/**
 * Sets the value of the given structural property for this node. The value
 * passed depends on the kind of property:
 * <ul>
 * <li>{@link SimplePropertyDescriptor} - the new value of the given simple
 * property, or <code>null</code> if none; primitive values are "boxed"</li>
 * <li>{@link ChildPropertyDescriptor} - the new child node (type
 * <code>ASTNode</code>), or <code>null</code> if none</li>
 * <li>{@link ChildListPropertyDescriptor} - not allowed</li>
 * </ul>
 *
 * @param property
 *            the property
 * @param value
 *            the property value
 * @exception RuntimeException
 *                if this node does not have the given property, or if the
 *                given property cannot be set
 */
public final void setStructuralProperty(StructuralPropertyDescriptor property, Object value) {
	if (property instanceof SimplePropertyDescriptor) {
		SimplePropertyDescriptor p = (SimplePropertyDescriptor) property;
		// NOTE(review): this setter compares the value type against the
		// primitive classes (int.class / boolean.class), while
		// getStructuralProperty compares against the wrapper classes
		// (Integer.class / Boolean.class). Only one form can match the
		// types the SimplePropertyDescriptors actually declare -- verify
		// against the descriptor declarations.
		if (p.getValueType() == int.class) {
			int arg = ((Integer) value).intValue();
			internalGetSetIntProperty(p, false, arg);
			return;
		} else if (p.getValueType() == boolean.class) {
			boolean arg = ((Boolean) value).booleanValue();
			internalGetSetBooleanProperty(p, false, arg);
			return;
		} else {
			// null is only legal for optional properties
			if (value == null && p.isMandatory()) {
				throw new IllegalArgumentException();
			}
			internalGetSetObjectProperty(p, false, value);
			return;
		}
	}
	if (property instanceof ChildPropertyDescriptor) {
		ChildPropertyDescriptor p = (ChildPropertyDescriptor) property;
		ASTNode child = (ASTNode) value;
		if (child == null && p.isMandatory()) {
			throw new IllegalArgumentException();
		}
		internalGetSetChildProperty(p, false, child);
		return;
	}
	if (property instanceof ChildListPropertyDescriptor) {
		// child lists are mutated through the list itself, never replaced
		throw new IllegalArgumentException("Cannot set the list of child list property"); //$NON-NLS-1$
	}
}
/**
 * Returns a list of structural property descriptors for nodes of the same
 * type as this node. Clients must not modify the result.
 * <p>
 * Note that property descriptors are a meta-level mechanism for
 * manipulating ASTNodes in a generic way. They are unrelated to
 * <code>get/setProperty</code>.
 * </p>
 *
 * @return a list of property descriptors (element type:
 *         {@link StructuralPropertyDescriptor})
 */
public final List<StructuralPropertyDescriptor> structuralPropertiesForType() {
	// Delegates to the node-type-specific implementation, keyed by the
	// owning AST's PHP API level.
	return internalStructuralPropertiesForType(this.ast.apiLevel);
}
/**
* Returns a list of property descriptors for this node type. Clients must
* not modify the result. This abstract method must be implemented in each
* concrete AST node type.
* <p>
* N.B. This method is package-private, so that the implementations of this
* method in each of the concrete AST node types do not clutter up the API
* doc.
* </p>
*
* @param apiLevel
* the API level; one of the <code>AST.JLS*</code> constants
* @return a list of property descriptors (element type:
* {@link StructuralPropertyDescriptor})
*/
abstract List<StructuralPropertyDescriptor> internalStructuralPropertiesForType(PHPVersion apiLevel);
/**
 * Returns the AST that owns this node (never <code>null</code>; fixed at
 * construction time).
 *
 * @return the related AST
 */
public AST getAST() {
	return this.ast;
}
/**
 * Returns this node's parent node, or <code>null</code> if this is the root
 * node.
 * <p>
 * Note that the relationship between an AST node and its parent node may
 * change over the lifetime of a node.
 * </p>
 *
 * @return the parent of this node, or <code>null</code> if none
 */
public ASTNode getParent() {
	return parent;
}
/**
 * Sets or clears this node's parent node and location.
 * <p>
 * NOTE(review): this does not enforce the PROTECT flag documented on this
 * class -- presumably enforcement happens in the callers; confirm.
 *
 * @param parent
 *            the new parent of this node, or <code>null</code> if none
 * @param location
 *            the property of <code>parent</code> in which this node now
 *            lives, or <code>null</code> when clearing the parent
 * @see #getParent
 */
public void setParent(ASTNode parent, StructuralPropertyDescriptor location) {
	this.parent = parent;
	this.location = location;
}
/**
 * @return the length, in characters, of this node's source range
 */
public final int getLength() {
	return length;
}

/**
 * @return the 0-based character index where this node's source range begins
 */
public final int getStart() {
	return start;
}

/**
 * @return the 0-based character index just past this node's source range
 *         (i.e. start + length)
 */
public final int getEnd() {
	return start + length;
}
/**
 * Returns the node class for the corresponding node type.
 *
 * @param nodeType
 *            AST node type
 * @return the corresponding <code>ASTNode</code> subclass
 * @exception IllegalArgumentException
 *                if <code>nodeType</code> is not a legal AST node type
 * @see #getNodeType()
 */
public static Class<? extends ASTNode> nodeClassForType(int nodeType) {
	switch (nodeType) {
	case ARRAY_ACCESS:
		return ArrayAccess.class;
	case ARRAY_CREATION:
		return ArrayCreation.class;
	case ARRAY_ELEMENT:
		return ArrayElement.class;
	case ARRAY_SPREAD_ELEMENT:
		return ArraySpreadElement.class;
	case ASSIGNMENT:
		return Assignment.class;
	case AST_ERROR:
		return ASTError.class;
	case BACK_TICK_EXPRESSION:
		return BackTickExpression.class;
	case BLOCK:
		return Block.class;
	case BREAK_STATEMENT:
		return BreakStatement.class;
	case CAST_EXPRESSION:
		return CastExpression.class;
	case CATCH_CLAUSE:
		return CatchClause.class;
	case STATIC_CONSTANT_ACCESS:
		return StaticConstantAccess.class;
	case CONSTANT_DECLARATION:
		return ConstantDeclaration.class;
	case CLASS_DECLARATION:
		return ClassDeclaration.class;
	case CLASS_INSTANCE_CREATION:
		return ClassInstanceCreation.class;
	case CLASS_NAME:
		return ClassName.class;
	case CLONE_EXPRESSION:
		return CloneExpression.class;
	case COMMENT:
		return Comment.class;
	case CONDITIONAL_EXPRESSION:
		return ConditionalExpression.class;
	case CONTINUE_STATEMENT:
		return ContinueStatement.class;
	case DECLARE_STATEMENT:
		return DeclareStatement.class;
	case DO_STATEMENT:
		return DoStatement.class;
	case ECHO_STATEMENT:
		return EchoStatement.class;
	case EMPTY_STATEMENT:
		return EmptyStatement.class;
	case EMPTY_EXPRESSION:
		return EmptyExpression.class;
	case EXPRESSION_STATEMENT:
		return ExpressionStatement.class;
	case FIELD_ACCESS:
		return FieldAccess.class;
	case FIELD_DECLARATION:
		return FieldsDeclaration.class;
	case FOR_EACH_STATEMENT:
		return ForEachStatement.class;
	case FORMAL_PARAMETER:
		return FormalParameter.class;
	case FOR_STATEMENT:
		return ForStatement.class;
	case FUNCTION_DECLARATION:
		return FunctionDeclaration.class;
	case FUNCTION_INVOCATION:
		return FunctionInvocation.class;
	case FUNCTION_NAME:
		return FunctionName.class;
	case GLOBAL_STATEMENT:
		return GlobalStatement.class;
	case GOTO_LABEL:
		return GotoLabel.class;
	case GOTO_STATEMENT:
		return GotoStatement.class;
	case IDENTIFIER:
		return Identifier.class;
	case IF_STATEMENT:
		return IfStatement.class;
	case IGNORE_ERROR:
		return IgnoreError.class;
	case INCLUDE:
		return Include.class;
	case INFIX_EXPRESSION:
		return InfixExpression.class;
	case IN_LINE_HTML:
		return InLineHtml.class;
	case INSTANCE_OF_EXPRESSION:
		return InstanceOfExpression.class;
	case INTERFACE_DECLARATION:
		return InterfaceDeclaration.class;
	case LAMBDA_FUNCTION_DECLARATION:
		return LambdaFunctionDeclaration.class;
	case ARROW_FUNCTION_DECLARATION:
		return ArrowFunctionDeclaration.class;
	case LIST_VARIABLE:
		return ListVariable.class;
	case METHOD_DECLARATION:
		return MethodDeclaration.class;
	case METHOD_INVOCATION:
		return MethodInvocation.class;
	case NAMESPACE:
		return NamespaceDeclaration.class;
	case NAMESPACE_NAME:
		return NamespaceName.class;
	case POSTFIX_EXPRESSION:
		return PostfixExpression.class;
	case PREFIX_EXPRESSION:
		return PrefixExpression.class;
	case PROGRAM:
		return Program.class;
	case QUOTE:
		return Quote.class;
	case REFERENCE:
		return Reference.class;
	case REFLECTION_VARIABLE:
		return ReflectionVariable.class;
	case RETURN_STATEMENT:
		return ReturnStatement.class;
	case YIELD_STATEMENT:
		return YieldExpression.class;
	case SCALAR:
		return Scalar.class;
	case STATIC_FIELD_ACCESS:
		return StaticFieldAccess.class;
	case STATIC_METHOD_INVOCATION:
		return StaticMethodInvocation.class;
	case STATIC_STATEMENT:
		return StaticStatement.class;
	case SWITCH_CASE:
		return SwitchCase.class;
	case SWITCH_STATEMENT:
		return SwitchStatement.class;
	case THROW_STATEMENT:
		return ThrowStatement.class;
	case TRY_STATEMENT:
		return TryStatement.class;
	case UNARY_OPERATION:
		return UnaryOperation.class;
	case USE_STATEMENT:
		return UseStatement.class;
	case USE_STATEMENT_PART:
		return UseStatementPart.class;
	case VARIABLE:
		return Variable.class;
	case WHILE_STATEMENT:
		return WhileStatement.class;
	case PARENTHESIS_EXPRESSION:
		return ParenthesisExpression.class;
	case FINALLY_CLAUSE:
		return FinallyClause.class;
	}
	// Include the offending value so callers can diagnose illegal node types.
	throw new IllegalArgumentException("Unknown AST node type: " + nodeType); //$NON-NLS-1$
}
/**
 * Returns a string representation of this node by delegating to the
 * indenting {@code toString(StringBuilder, String)} variant.
 */
@Override
public String toString() {
	StringBuilder sb = new StringBuilder();
	toString(sb, ""); //$NON-NLS-1$
	return sb.toString();
}
/**
 * Appends this node's source-range attributes (start, length) to the given
 * buffer, in XML-attribute form.
 */
protected void appendInterval(StringBuilder buffer) {
	buffer.append(" start='"); //$NON-NLS-1$
	buffer.append(start);
	buffer.append("' length='"); //$NON-NLS-1$
	buffer.append(length);
	buffer.append("'"); //$NON-NLS-1$
}
/**
 * Escapes a given string for inclusion in an XML file.
 * <p>
 * The ampersand is escaped first so that the ampersands introduced by the
 * other entity replacements are not themselves re-escaped.
 * </p>
 *
 * @param input
 *            the raw text
 * @return String the escaped string
 */
protected static String getXmlStringValue(String input) {
	// Use String.replace (literal) instead of replaceAll (regex): no regex
	// features are needed, and replace avoids pattern compilation per call.
	String escapedString = input;
	escapedString = escapedString.replace("&", "&amp;"); //$NON-NLS-1$ //$NON-NLS-2$
	escapedString = escapedString.replace(">", "&gt;"); //$NON-NLS-1$ //$NON-NLS-2$
	escapedString = escapedString.replace("<", "&lt;"); //$NON-NLS-1$ //$NON-NLS-2$
	escapedString = escapedString.replace("'", "&apos;"); //$NON-NLS-1$ //$NON-NLS-2$
	return escapedString;
}
/**
 * Walks up the parent chain looking for the enclosing {@link Program} node.
 *
 * @return the Program root for this node, or <code>null</code> if this
 *         node is not attached to a Program
 */
public Program getProgramRoot() {
	for (ASTNode current = this; current != null; current = current.getParent()) {
		if (current.getType() == ASTNode.PROGRAM) {
			return (Program) current;
		}
	}
	return null;
}
/**
 * For a given node, returns the outer node that surrounds it: the nearest
 * enclosing function declaration or program. A field declaration is an
 * explicit stop - nodes inside one have no enclosing body.
 *
 * @return the enclosing node for this node, or <code>null</code> if none
 */
public ASTNode getEnclosingBodyNode() {
	ASTNode current = this;
	while (current != null) {
		int type = current.getType();
		if (type == ASTNode.FUNCTION_DECLARATION || type == ASTNode.PROGRAM) {
			return current;
		}
		if (type == ASTNode.FIELD_DECLARATION) {
			return null;
		}
		current = current.getParent();
	}
	return null;
}
/**
 * Returns the root node at or above this node; returns this node if it is a
 * root.
 *
 * @return the root node at or above this node
 */
public final ASTNode getRoot() {
	ASTNode node = this;
	ASTNode up;
	// climb until a node with no parent is found - that one is the root
	while ((up = node.getParent()) != null) {
		node = up;
	}
	return node;
}
/**
 * Sets the source range of the original source file where the source
 * fragment corresponding to this node was found.
 * <p>
 * See {@link ASTParser#setKind(int)} for details on precisely where source
 * ranges are supposed to begin and end.
 * </p>
 *
 * @param startPosition
 *            a 0-based character index, or <code>-1</code> if no source
 *            position information is available for this node
 * @param length
 *            a (possibly 0) length, or <code>0</code> if no source position
 *            information is recorded for this node
 * @see #getStartPosition()
 * @see #getLength()
 * @see ASTParser
 */
public final void setSourceRange(int startPosition, int length) {
	// a valid start must come with a non-negative length ...
	boolean badLengthForValidStart = startPosition >= 0 && length < 0;
	// ... and "no position" (start < 0) must come with a zero length
	boolean badLengthForNoStart = startPosition < 0 && length != 0;
	if (badLengthForValidStart || badLengthForNoStart) {
		throw new IllegalArgumentException();
	}
	this.start = startPosition;
	this.length = length;
}
/**
 * Removes this node from its parent. Has no effect if this node is
 * unparented. If this node appears as an element of a child list property
 * of its parent, then this node is removed from the list using
 * <code>List.remove</code>. If this node appears as the value of a child
 * property of its parent, then this node is detached from its parent by
 * passing <code>null</code> to the appropriate setter method; this
 * operation fails if this node is in a mandatory property.
 *
 */
public final void delete() {
	StructuralPropertyDescriptor locationInParent = getLocationInParent();
	if (locationInParent == null) {
		// unparented node: nothing to detach from
		return;
	}
	if (locationInParent.isChildProperty()) {
		// simple child slot: clear it on the parent
		getParent().setStructuralProperty(this.location, null);
	} else if (locationInParent.isChildListProperty()) {
		// list slot: remove this node from the parent's child list
		List<?> siblings = (List<?>) getParent().getStructuralProperty(this.location);
		siblings.remove(this);
	}
}
/**
 * Prelude portion of the "3 step program" for replacing the old child of
 * this node with another node. Here is the code pattern found in all AST
 * node subclasses:
 *
 * <pre>
 * ASTNode oldChild = this.foo;
 * preReplaceChild(oldChild, newFoo, FOO_PROPERTY);
 * this.foo = newFoo;
 * postReplaceChild(oldChild, newFoo, FOO_PROPERTY);
 * </pre>
 *
 * The first part (preReplaceChild) does all the precondition checks,
 * reports pre-delete events, and changes parent links. The old child is
 * delinked from its parent (making it a root node), and the new child node
 * is linked to its parent. The new child node must be a root node in the
 * same AST as its new parent, and must not be an ancestor of this node. All
 * three nodes must be modifiable (not PROTECTED). The replace operation
 * must fail atomically; so it is crucial that all precondition checks be
 * done before any linking and delinking happens. The final part
 * (postReplaceChild) reports post-add events.
 * <p>
 * This method calls <code>ast.modifying()</code> for the nodes affected.
 * </p>
 *
 * @param oldChild
 *            the old child of this node, or <code>null</code> if there was
 *            no old child to replace
 * @param newChild
 *            the new child of this node, or <code>null</code> if there is
 *            no replacement child
 * @param property
 *            the property descriptor of this node describing the
 *            relationship between node and child
 * @exception RuntimeException
 *                if:
 *                <ul>
 *                <li>the node belongs to a different AST</li>
 *                <li>the node already has a parent</li>
 *                <li>a cycle would be created</li>
 *                <li>any of the nodes involved are unmodifiable</li>
 *                </ul>
 */
final void preReplaceChild(ASTNode oldChild, ASTNode newChild, ChildPropertyDescriptor property) {
	// Precondition checks first, so that failure leaves all links intact.
	if ((this.flags & PROTECT) != 0) {
		// this node is protected => cannot gain or lose children
		throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
	}
	if (newChild != null) {
		checkNewChild(this, newChild, property.cycleRisk, null);
	}
	// delink old child from parent
	if (oldChild != null) {
		if ((oldChild.flags & PROTECT) != 0) {
			// old child node is protected => cannot be unparented
			throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
		}
		// fire the appropriate pre-event (replace vs. remove) before any
		// parent link is touched
		if (newChild != null) {
			this.ast.preReplaceChildEvent(this, oldChild, newChild, property);
		} else {
			this.ast.preRemoveChildEvent(this, oldChild, property);
		}
		oldChild.setParent(null, null);
	} else {
		if (newChild != null) {
			this.ast.preAddChildEvent(this, newChild, property);
		}
	}
	// link new child to parent
	if (newChild != null) {
		newChild.setParent(this, property);
		// cannot notify postAddChildEvent until parent is linked to child
		// too
	}
}
/**
 * Postlude portion of the "3 step program" for replacing the old child of
 * this node with another node. Fires the post-event matching what actually
 * happened: replace, add, or remove. See
 * {@link #preReplaceChild(ASTNode, ASTNode, ChildPropertyDescriptor)} for
 * details.
 */
final void postReplaceChild(ASTNode oldChild, ASTNode newChild, ChildPropertyDescriptor property) {
	if (newChild == null) {
		// child was removed
		this.ast.postRemoveChildEvent(this, oldChild, property);
	} else if (oldChild == null) {
		// child was added where there was none before
		this.ast.postAddChildEvent(this, newChild, property);
	} else {
		// one child replaced another
		this.ast.postReplaceChildEvent(this, oldChild, newChild, property);
	}
}
/**
 * Prelude portion of the "3 step program" for changing the value of a
 * simple property of this node. Here is the code pattern found in all AST
 * node subclasses:
 *
 * <pre>
 * preValueChange(FOO_PROPERTY);
 * this.foo = newFoo;
 * postValueChange(FOO_PROPERTY);
 * </pre>
 *
 * The first part (preValueChange) does the precondition check to make sure
 * the node is modifiable (not PROTECTED). The change operation must fail
 * atomically; so it is crucial that the precondition checks are done before
 * the field is hammered. The final part (postValueChange) reports
 * post-change events.
 * <p>
 * This method calls <code>ast.modifying()</code> for the node affected.
 * </p>
 *
 * @param property
 *            the property descriptor of this node
 * @exception RuntimeException
 *                if:
 *                <ul>
 *                <li>this node is unmodifiable</li>
 *                </ul>
 */
final void preValueChange(SimplePropertyDescriptor property) {
	if ((this.flags & PROTECT) != 0) {
		// this node is protected => cannot change value of properties
		throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
	}
	this.ast.preValueChangeEvent(this, property);
	this.ast.modifying();
}
/**
 * Postlude portion of the "3 step program" for changing the value of a
 * simple property of this node. See
 * {@link #preValueChange(SimplePropertyDescriptor)} for details.
 */
final void postValueChange(SimplePropertyDescriptor property) {
	this.ast.postValueChangeEvent(this, property);
}
/**
 * Ensures that this node is modifiable (that is, not marked PROTECTED). If
 * successful, calls ast.modifying().
 *
 * @exception RuntimeException
 *                if this node is not modifiable
 */
final void checkModifiable() {
	if ((this.flags & PROTECT) != 0) {
		throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
	}
	this.ast.modifying();
}
/**
 * Begin lazy initialization of this node. Here is the code pattern found in
 * all AST node subclasses:
 *
 * <pre>
 * if (this.foo == null) {
 * 	// lazy init must be thread-safe for readers
 * 	synchronized (this) {
 * 		if (this.foo == null) {
 * 			preLazyInit();
 * 			this.foo = ...; // code to create new node
 * 			postLazyInit(this.foo, FOO_PROPERTY);
 * 		}
 * 	}
 * }
 * </pre>
 *
 * @since 3.0
 */
final void preLazyInit() {
	// IMPORTANT: this method is called by readers
	// ASTNode.this is locked at this point
	// events are suppressed because lazy init is not a real modification
	this.ast.disableEvents();
	// will turn events back on in postLazyInit
}
/**
 * End lazy initialization of this node: parents the freshly created child
 * and re-enables AST change events.
 *
 * @param newChild
 *            the new child of this node, or <code>null</code> if there is
 *            no replacement child
 * @param property
 *            the property descriptor of this node describing the
 *            relationship between node and child
 * @since 3.0
 */
final void postLazyInit(ASTNode newChild, ChildPropertyDescriptor property) {
	// IMPORTANT: this method is called by readers
	// ASTNode.this is locked at this point
	// newChild is brand new (so no chance of concurrent access)
	newChild.setParent(this, property);
	// turn events back on (they were turned off in corresponding
	// preLazyInit)
	this.ast.reenableEvents();
}
/**
 * Returns the named property of this node, or <code>null</code> if none.
 * <p>
 * Properties use a compact dual representation: when there is exactly one
 * property, <code>property1</code> holds its name and
 * <code>property2</code> its value; with two or more, <code>property1</code>
 * holds a map of name to value.
 * </p>
 *
 * @param propertyName
 *            the property name
 * @return the property value, or <code>null</code> if none
 * @see #setProperty(String,Object)
 */
public final Object getProperty(String propertyName) {
	if (propertyName == null) {
		throw new IllegalArgumentException();
	}
	Object holder = this.property1;
	if (holder == null) {
		// no properties at all
		return null;
	}
	if (holder instanceof String) {
		// single-property representation
		return propertyName.equals(holder) ? this.property2 : null;
	}
	// multi-property representation: holder is a table of properties
	return ((Map<?, ?>) holder).get(propertyName);
}
/**
 * Sets the named property of this node to the given value, or to
 * <code>null</code> to clear it.
 * <p>
 * Clients should employ property names that are sufficiently unique to
 * avoid inadvertent conflicts with other clients that might also be setting
 * properties on the same node.
 * </p>
 * <p>
 * Note that modifying a property is not considered a modification to the
 * AST itself. This is to allow clients to decorate existing nodes with
 * their own properties without jeopardizing certain things (like the
 * validity of bindings), which rely on the underlying tree remaining
 * static.
 * </p>
 * <p>
 * Implementation note: properties use a compact dual representation. Zero
 * properties: both fields null. One property: <code>property1</code> is the
 * name, <code>property2</code> the value. Two or more: <code>property1</code>
 * is a HashMap, <code>property2</code> unused. This method transitions
 * between those representations as properties are added and removed.
 * </p>
 *
 * @param propertyName
 *            the property name
 * @param data
 *            the new property value, or <code>null</code> if none
 * @see #getProperty(String)
 */
public final void setProperty(String propertyName, Object data) {
	if (propertyName == null) {
		throw new IllegalArgumentException();
	}
	// N.B. DO NOT CALL ast.modifying();
	if (this.property1 == null) {
		// node has no properties at all
		if (data == null) {
			// we already know this
			return;
		}
		// node gets its first property
		this.property1 = propertyName;
		this.property2 = data;
		return;
	}
	if (this.property1 instanceof String) {
		// node has only a single property
		if (propertyName.equals(this.property1)) {
			// we're in luck
			this.property2 = data;
			if (data == null) {
				// just deleted last property
				this.property1 = null;
				this.property2 = null;
			}
			return;
		}
		if (data == null) {
			// we already know this
			return;
		}
		// node already has one property - getting its second
		// convert to more flexible representation
		HashMap<String, Object> m = new HashMap<>(2);
		m.put((String) this.property1, this.property2);
		m.put(propertyName, data);
		this.property1 = m;
		this.property2 = null;
		return;
	}
	// node has two or more properties
	// unchecked cast is safe: property1 is only ever a String or this map
	HashMap<String, Object> m = (HashMap<String, Object>) this.property1;
	if (data == null) {
		m.remove(propertyName);
		// check for just one property left
		if (m.size() == 1) {
			// convert to more efficient representation
			Map.Entry<String, Object>[] entries = m.entrySet().toArray(new Map.Entry[1]);
			this.property1 = entries[0].getKey();
			this.property2 = entries[0].getValue();
		}
		return;
	} else {
		m.put(propertyName, data);
		// still has two or more properties
		return;
	}
}
/**
 * Returns an unmodifiable table of the properties of this node with non-
 * <code>null</code> values.
 *
 * @return the table of property values keyed by property name (key type:
 *         <code>String</code>; value type: <code>Object</code>)
 */
public final Map<String, Object> properties() {
	if (this.property1 == null) {
		// node has no properties at all
		return UNMODIFIABLE_EMPTY_MAP;
	}
	if (this.property1 instanceof String) {
		// node has a single property
		return Collections.singletonMap((String) this.property1, this.property2);
	}
	// node has two or more properties; in this representation property2 is
	// otherwise unused, so it lazily caches the unmodifiable wrapper for
	// the map stored in property1
	if (this.property2 == null) {
		this.property2 = Collections.unmodifiableMap((Map<String, Object>) this.property1);
	}
	// property2 is unmodifiable wrapper for map in property1
	return (Map<String, Object>) this.property2;
}
/**
 * Returns the flags associated with this node.
 * <p>
 * No flags are associated with newly created nodes.
 * </p>
 * <p>
 * The flags are the bitwise-or of individual flags. The following flags are
 * currently defined:
 * <ul>
 * <li>{@link #MALFORMED} - indicates node is syntactically malformed</li>
 * <li>{@link #ORIGINAL} - indicates original node created by ASTParser</li>
 * <li>{@link #PROTECT} - indicates node is protected from further
 * modification</li>
 * <li>{@link #RECOVERED} - indicates node or a part of this node is
 * recovered from source that contains a syntax error</li>
 * </ul>
 * Other bit positions are reserved for future use.
 * </p>
 *
 * @return the bitwise-or of individual flags
 * @see #setFlags(int)
 */
public final int getFlags() {
	// only the low 16 bits are API-visible flags; the high bits are reserved
	return this.flags & 0xFFFF;
}
/**
 * Sets the flags associated with this node to the given value.
 * <p>
 * The flags are the bitwise-or of individual flags. The following flags are
 * currently defined:
 * <ul>
 * <li>{@link #MALFORMED} - indicates node is syntactically malformed</li>
 * <li>{@link #ORIGINAL} - indicates original node created by ASTParser</li>
 * <li>{@link #PROTECT} - indicates node is protected from further
 * modification</li>
 * <li>{@link #RECOVERED} - indicates node or a part of this node is
 * recovered from source that contains a syntax error</li>
 * </ul>
 * Other bit positions are reserved for future use.
 * </p>
 * <p>
 * Note that the flags are <em>not</em> considered a structural property of
 * the node, and can be changed even if the node is marked as protected.
 * </p>
 *
 * @param flags
 *            the bitwise-or of individual flags
 * @see #getFlags()
 */
public final void setFlags(int flags) {
	this.ast.modifying();
	// Replace the low 16 (API-visible) flag bits with the given value while
	// preserving the reserved high bits. Assignment - rather than the
	// previous "this.flags |= flags" - lets callers clear flags too, which
	// the documented "sets ... to the given value" contract (and the 0xFFFF
	// mask in getFlags()) requires.
	this.flags = (this.flags & ~0xFFFF) | (flags & 0xFFFF);
}
/**
 * Returns a deep copy of the subtree of AST nodes rooted at the given node.
 * The resulting nodes are owned by the given AST, which may be different
 * from the ASTs of the given node. Even if the given node has a parent, the
 * result node will be unparented.
 * <p>
 * Source range information on the original nodes is automatically copied to
 * the new nodes. Client properties (<code>properties</code>) are not
 * carried over.
 * </p>
 * <p>
 * The node's <code>AST</code> and the target <code>AST</code> must support
 * the same API level.
 * </p>
 *
 * @param target
 *            the AST that is to own the nodes in the result
 * @param node
 *            the node to copy, or <code>null</code> if none
 * @return the copied node, or <code>null</code> if <code>node</code> is
 *         <code>null</code>
 */
public static <T extends ASTNode> T copySubtree(AST target, T node) {
	if (node == null) {
		return null;
	}
	if (target == null) {
		throw new IllegalArgumentException();
	}
	// both ASTs must speak the same API level
	if (node.getAST().apiLevel() != target.apiLevel()) {
		throw new UnsupportedOperationException();
	}
	@SuppressWarnings("unchecked")
	T copy = (T) node.clone(target);
	return copy;
}
/**
 * Returns a deep copy of the subtrees of AST nodes rooted at the given list
 * of nodes. The resulting nodes are owned by the given AST, which may be
 * different from the ASTs of the nodes in the list. Even if the nodes in
 * the list have parents, the nodes in the result will be unparented.
 * <p>
 * Source range information on the original nodes is automatically copied to
 * the new nodes. Client properties (<code>properties</code>) are not
 * carried over.
 * </p>
 *
 * @param target
 *            the AST that is to own the nodes in the result
 * @param nodes
 *            the list of nodes to copy (element type: <code>ASTNode</code>)
 * @return the list of copied subtrees (element type: <code>ASTNode</code>)
 */
@SuppressWarnings("unchecked")
public static <T extends ASTNode> List<T> copySubtrees(AST target, List<? extends T> nodes) {
	List<T> result = new ArrayList<>(nodes.size());
	// enhanced-for replaces the raw explicit-Iterator loop
	for (ASTNode oldNode : nodes) {
		result.add((T) oldNode.clone(target));
	}
	return result;
}
/**
 * Returns a deep copy of the subtree of AST nodes rooted at this node. The
 * resulting nodes are owned by the given AST, which may be different from
 * the AST of this node. Even if this node has a parent, the result node
 * will be unparented.
 * <p>
 * This method reports pre- and post-clone events, and dispatches to
 * <code>clone0(AST)</code> which is reimplemented in node subclasses.
 * </p>
 *
 * @param target
 *            the AST that is to own the nodes in the result
 * @return the root node of the copied subtree
 */
final ASTNode clone(AST target) {
	this.ast.preCloneNodeEvent(this);
	ASTNode c = this.clone0(target);
	this.ast.postCloneNodeEvent(this, c);
	return c;
}
/**
 * Returns a deep copy of the subtree of AST nodes rooted at this node. The
 * resulting nodes are owned by the given AST, which may be different from
 * the AST of this node. Even if this node has a parent, the result node
 * will be unparented.
 * <p>
 * This method must be implemented in subclasses.
 * </p>
 * <p>
 * This method does not report pre- and post-clone events. All callers
 * should instead call <code>clone(AST)</code> to ensure that pre- and
 * post-clone events are reported.
 * </p>
 * <p>
 * N.B. This method is package-private, so that the implementations of this
 * method in each of the concrete AST node types do not clutter up the API
 * doc.
 * </p>
 *
 * @param target
 *            the AST that is to own the nodes in the result
 * @return the root node of the copied subtree
 */
abstract ASTNode clone0(AST target);
/**
 * Checks whether the given new child node is a node in a different AST from
 * its parent-to-be, whether it already has a parent, whether adding it
 * to its parent-to-be would create a cycle, and whether the child is of the
 * right type. The parent-to-be is the enclosing instance.
 *
 * @param node
 *            the parent-to-be node
 * @param newChild
 *            the new child of the parent
 * @param cycleCheck
 *            <code>true</code> if cycles are possible and need to be
 *            checked, <code>false</code> if cycles are impossible and do
 *            not need to be checked
 * @param nodeType
 *            a type constraint on child nodes, or <code>null</code> if no
 *            special check is required
 * @exception IllegalArgumentException
 *                if:
 *                <ul>
 *                <li>the child is null</li>
 *                <li>the node belongs to a different AST</li>
 *                <li>the child has the incorrect node type</li>
 *                <li>the node already has a parent</li>
 *                <li>a cycle would be created</li>
 *                </ul>
 */
static void checkNewChild(ASTNode node, ASTNode newChild, boolean cycleCheck, Class<?> nodeType) {
	// NOTE: the order of these checks determines which exception callers
	// see first; do not reorder.
	if (newChild.ast != node.ast) {
		// new child is from a different AST
		throw new IllegalArgumentException();
	}
	if (newChild.getParent() != null) {
		// new child currently has a different parent
		throw new IllegalArgumentException();
	}
	if (cycleCheck && newChild == node.getProgramRoot()) {
		// inserting new child would create a cycle
		throw new IllegalArgumentException();
	}
	Class<?> childClass = newChild.getClass();
	if (nodeType != null && !nodeType.isAssignableFrom(childClass)) {
		// new child is not of the right type
		throw new ClassCastException();
	}
	if ((newChild.flags & PROTECT) != 0) {
		// new child node is protected => cannot be parented
		throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
	}
}
/**
 * Sets the value of the given int-valued property for this node. The
 * default implementation of this method throws an exception explaining that
 * this node does not have such a property. This method should be extended
 * in subclasses that have at least one simple property whose value type is
 * int.
 *
 * @param property
 *            the property
 * @param get
 *            <code>true</code> for a get operation, and <code>false</code>
 *            for a set operation
 * @param value
 *            the new property value; ignored for get operations
 * @return the value; always returns <code>0</code> for set operations
 * @exception RuntimeException
 *                if this node does not have the given property, or if the
 *                given value cannot be set as specified
 */
int internalGetSetIntProperty(SimplePropertyDescriptor property, boolean get, int value) {
	throw new RuntimeException("Node does not have this property"); //$NON-NLS-1$
}
/**
 * Sets the value of the given boolean-valued property for this node. The
 * default implementation of this method throws an exception explaining that
 * this node does not have such a property. This method should be extended
 * in subclasses that have at least one simple property whose value type is
 * boolean.
 *
 * @param property
 *            the property
 * @param get
 *            <code>true</code> for a get operation, and <code>false</code>
 *            for a set operation
 * @param value
 *            the new property value; ignored for get operations
 * @return the value; always returns <code>false</code> for set operations
 * @exception RuntimeException
 *                if this node does not have the given property, or if the
 *                given value cannot be set as specified
 */
boolean internalGetSetBooleanProperty(SimplePropertyDescriptor property, boolean get, boolean value) {
	throw new RuntimeException("Node does not have this property"); //$NON-NLS-1$
}
/**
 * Sets the value of the given property for this node. The default
 * implementation of this method throws an exception explaining that this
 * node does not have such a property. This method should be extended in
 * subclasses that have at least one simple property whose value type is a
 * reference type.
 *
 * @param property
 *            the property
 * @param get
 *            <code>true</code> for a get operation, and <code>false</code>
 *            for a set operation
 * @param value
 *            the new property value, or <code>null</code> if none; ignored
 *            for get operations
 * @return the value, or <code>null</code> if none; always returns
 *         <code>null</code> for set operations
 * @exception RuntimeException
 *                if this node does not have the given property, or if the
 *                given value cannot be set as specified
 */
Object internalGetSetObjectProperty(SimplePropertyDescriptor property, boolean get, Object value) {
	throw new RuntimeException("Node does not have this property"); //$NON-NLS-1$
}
/**
 * Sets the child value of the given property for this node. The default
 * implementation of this method throws an exception explaining that this
 * node does not have such a property. This method should be extended in
 * subclasses that have at least one child property.
 *
 * @param property
 *            the property
 * @param get
 *            <code>true</code> for a get operation, and <code>false</code>
 *            for a set operation
 * @param child
 *            the new child value, or <code>null</code> if none; always
 *            <code>null</code> for get operations
 * @return the child, or <code>null</code> if none; always returns
 *         <code>null</code> for set operations
 * @exception RuntimeException
 *                if this node does not have the given property, or if the
 *                given child cannot be set as specified
 */
ASTNode internalGetSetChildProperty(ChildPropertyDescriptor property, boolean get, ASTNode child) {
	throw new RuntimeException("Node does not have this property"); //$NON-NLS-1$
}
/**
 * Returns the list value of the given property for this node. The default
 * implementation of this method throws an exception explaining that this
 * node does not have such a property. This method should be extended in
 * subclasses that have at least one child list property.
 *
 * @param property
 *            the property
 * @return the list (element type: {@link ASTNode})
 * @exception RuntimeException
 *                if the given node does not have the given property
 */
List<? extends ASTNode> internalGetChildListProperty(ChildListPropertyDescriptor property) {
	throw new RuntimeException("Node does not have this property"); //$NON-NLS-1$
}
/**
* A specialized implementation of a list of ASTNodes. The implementation is
* based on an ArrayList.
*/
class NodeList<T extends ASTNode> extends AbstractList<T> {
/**
* The underlying list in which the nodes of this list are stored
* (element type: <code>ASTNode</code>).
* <p>
* Be stingy on storage - assume that list will be empty.
* </p>
* <p>
* This field declared default visibility (rather than private) so that
* accesses from <code>NodeList.Cursor</code> do not require a synthetic
* accessor method.
* </p>
*/
ArrayList<T> store = new ArrayList<>(0);
/**
* The property descriptor for this list.
*/
ChildListPropertyDescriptor propertyDescriptor;
/**
 * A cursor for iterating over the elements of the list. Does not lose
 * its position if the list is changed during the iteration.
 */
class Cursor implements Iterator<T> {
	/**
	 * The position of the cursor between elements. If the value is N,
	 * then the cursor sits between the element at positions N-1 and N.
	 * Initially just before the first element of the list.
	 */
	private int position = 0;

	/*
	 * (non-Javadoc) Method declared on <code>Iterator</code>.
	 */
	@Override
	public boolean hasNext() {
		return this.position < NodeList.this.store.size();
	}

	/*
	 * (non-Javadoc) Method declared on <code>Iterator</code>.
	 */
	@Override
	public T next() {
		T result = NodeList.this.store.get(this.position);
		this.position++;
		return result;
	}

	/*
	 * (non-Javadoc) Method declared on <code>Iterator</code>.
	 * Removal is done through the owning list, never through a cursor.
	 */
	@Override
	public void remove() {
		throw new UnsupportedOperationException();
	}

	/**
	 * Adjusts this cursor to accommodate an add/remove at the given
	 * index.
	 *
	 * @param index
	 *            the position at which the element was added or removed
	 * @param delta
	 *            +1 for add, and -1 for remove
	 */
	void update(int index, int delta) {
		if (this.position > index) {
			// the cursor has passed the added or removed element
			this.position += delta;
		}
	}
}
/**
 * A list of currently active cursors (element type: <code>Cursor</code>
 * ), or <code>null</code> if there are no active cursors.
 * <p>
 * It is important for storage considerations to maintain the
 * null-means-empty invariant; otherwise, every NodeList instance will
 * waste a lot of space. A cursor is needed only for the duration of a
 * visit to the child nodes. Under normal circumstances, only a single
 * cursor is needed; multiple cursors are only required if there are
 * multiple visits going on at the same time.
 * </p>
 */
private List<Cursor> cursors = null;

/**
 * Creates a new empty list of nodes owned by this node. This node will
 * be the common parent of all nodes added to this list.
 *
 * @param property
 *            the property descriptor identifying this child list
 * @since 3.0
 */
NodeList(ChildListPropertyDescriptor property) {
    super();
    this.propertyDescriptor = property;
}
/**
 * Returns the number of child nodes currently in this list.
 *
 * @see java.util.AbstractCollection#size()
 */
@Override
public int size() {
    return this.store.size();
}
/**
 * Returns the child node at the given position.
 *
 * @see AbstractList#get(int)
 */
@Override
public T get(int index) {
    return this.store.get(index);
}
/**
 * Replaces the child at the given position with a new node, keeping
 * parent links and AST pre/post change events consistent. The event
 * and setParent ordering below is deliberate; do not reorder.
 *
 * @see List#set(int, java.lang.Object)
 */
@Override
public T set(int index, T element) {
    if (element == null) {
        throw new IllegalArgumentException();
    }
    if ((ASTNode.this.flags & PROTECT) != 0) {
        // this node is protected => cannot gain or lose children
        throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
    }
    // delink old child from parent, and link new child to parent
    ASTNode newChild = element;
    ASTNode oldChild = this.store.get(index);
    if (oldChild == newChild) {
        // no-op replacement: skip events and reparenting entirely
        return (T) oldChild;
    }
    if ((oldChild.flags & PROTECT) != 0) {
        // old child is protected => cannot be unparented
        throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
    }
    ASTNode.checkNewChild(ASTNode.this, newChild, this.propertyDescriptor.cycleRisk,
            this.propertyDescriptor.elementType);
    ASTNode.this.ast.preReplaceChildEvent(ASTNode.this, oldChild, newChild, this.propertyDescriptor);
    T result = this.store.set(index, (T) newChild);
    // n.b. setParent will call ast.modifying()
    oldChild.setParent(null, null);
    newChild.setParent(ASTNode.this, this.propertyDescriptor);
    ASTNode.this.ast.postReplaceChildEvent(ASTNode.this, oldChild, newChild, this.propertyDescriptor);
    return result;
}
/**
 * Inserts a new child node at the given position, firing the AST
 * pre/post add events, reparenting the child, and shifting any active
 * cursors. The ordering of these steps is deliberate; do not reorder.
 *
 * @see List#add(int, java.lang.Object)
 */
@Override
public void add(int index, T element) {
    if (element == null) {
        throw new IllegalArgumentException();
    }
    if ((ASTNode.this.flags & PROTECT) != 0) {
        // this node is protected => cannot gain or lose children
        throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
    }
    // link new child to parent
    ASTNode newChild = element;
    ASTNode.checkNewChild(ASTNode.this, newChild, this.propertyDescriptor.cycleRisk,
            this.propertyDescriptor.elementType);
    ASTNode.this.ast.preAddChildEvent(ASTNode.this, newChild, this.propertyDescriptor);
    this.store.add(index, element);
    // keep any in-progress iterations pointing at the right elements
    updateCursors(index, +1);
    // n.b. setParent will call ast.modifying()
    newChild.setParent(ASTNode.this, this.propertyDescriptor);
    ASTNode.this.ast.postAddChildEvent(ASTNode.this, newChild, this.propertyDescriptor);
}
/**
 * Removes the child node at the given position, firing the AST
 * pre/post remove events, unparenting the child, and shifting any
 * active cursors. The ordering of these steps is deliberate.
 *
 * @see List#remove(int)
 */
@Override
public T remove(int index) {
    if ((ASTNode.this.flags & PROTECT) != 0) {
        // this node is protected => cannot gain or lose children
        throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
    }
    // delink old child from parent
    ASTNode oldChild = this.store.get(index);
    if ((oldChild.flags & PROTECT) != 0) {
        // old child is protected => cannot be unparented
        throw new IllegalArgumentException("AST node cannot be modified"); //$NON-NLS-1$
    }
    ASTNode.this.ast.preRemoveChildEvent(ASTNode.this, oldChild, this.propertyDescriptor);
    // n.b. setParent will call ast.modifying()
    oldChild.setParent(null, null);
    T result = this.store.remove(index);
    updateCursors(index, -1);
    ASTNode.this.ast.postRemoveChildEvent(ASTNode.this, oldChild, this.propertyDescriptor);
    return result;
}
/**
 * Allocates a cursor for use during a visit; the caller must hand it
 * back via <code>releaseCursor</code> when the visit completes.
 * <p>
 * Cursor bookkeeping is serialized on this NodeList, so it is
 * thread-safe to create a cursor.
 * </p>
 *
 * @return a new cursor positioned before the first element of the list
 */
Cursor newCursor() {
    synchronized (this) {
        // serialize cursor management on this NodeList
        Cursor cursor = new Cursor();
        if (this.cursors == null) {
            // lazily create the registry (null means "no active cursors")
            this.cursors = new ArrayList<>(1);
        }
        this.cursors.add(cursor);
        return cursor;
    }
}
/**
 * Releases a cursor previously handed out by <code>newCursor</code>.
 * <p>
 * Cursor bookkeeping is serialized on this NodeList, so it is
 * thread-safe to release a cursor.
 * </p>
 *
 * @param cursor
 *            the cursor to release
 */
void releaseCursor(Cursor cursor) {
    synchronized (this) {
        // serialize cursor management on this NodeList
        List<Cursor> active = this.cursors;
        active.remove(cursor);
        if (active.isEmpty()) {
            // restore the null-means-empty invariant so the node does
            // not hang on to an empty ArrayList between visits
            this.cursors = null;
        }
    }
}
/**
 * Adjusts all active cursors to accommodate an add/remove at the given
 * index.
 * <p>
 * Only called while the list is being modified; the AST is not
 * thread-safe under concurrent modification, so no locking is needed.
 * </p>
 *
 * @param index
 *            the position at which the element was added or removed
 * @param delta
 *            +1 for add, and -1 for remove
 */
private void updateCursors(int index, int delta) {
    if (this.cursors == null) {
        // no active cursors to adjust
        return;
    }
    for (Cursor cursor : this.cursors) {
        cursor.update(index, delta);
    }
}
}
}
|
brian-kelley/seacas
|
docs/apr_html/apr__builtin_8cc.js
|
<reponame>brian-kelley/seacas
// Auto-generated Doxygen navigation data for apr_builtin.cc: each entry maps
// a documented symbol name to its HTML anchor in apr__builtin_8cc.html.
// Regenerated by Doxygen - do not edit by hand.
var apr__builtin_8cc =
[
  [ "d2r", "apr__builtin_8cc.html#aaeeb6e3399ffad67be7d1a4263077ace", null ],
  [ "max", "apr__builtin_8cc.html#ac39d9cef6a5e030ba8d9e11121054268", null ],
  [ "min", "apr__builtin_8cc.html#abb702d8b501669a23aa0ab3b281b9384", null ],
  [ "PI", "apr__builtin_8cc.html#a598a3330b3c21701223ee0ca14316eca", null ],
  [ "r2d", "apr__builtin_8cc.html#a11cb28cf4bfecf5bfbd98ab9e4f2123b", null ],
  [ "do_acos", "apr__builtin_8cc.html#a06f0c456a8af7237d7d05cb5bcb001a2", null ],
  [ "do_acosd", "apr__builtin_8cc.html#aec24d390e44b224f3290eda89a960c82", null ],
  [ "do_acosh", "apr__builtin_8cc.html#a60a1de3be4b2cfafd1389084a48f12b3", null ],
  [ "do_angle", "apr__builtin_8cc.html#aeba6ec7e62e4adbae14d4c3ce66d7cc9", null ],
  [ "do_angled", "apr__builtin_8cc.html#addee6b07ceaa5f8e52e1ae1d570fee1f", null ],
  [ "do_append", "apr__builtin_8cc.html#ac1246f392c8cc83c9a692168fb783377", null ],
  [ "do_array_from_string", "apr__builtin_8cc.html#a29492cc6bfd2ca12c898b677586635fa", null ],
  [ "do_asin", "apr__builtin_8cc.html#ae0e88b0d240f83b134a4de01189cd464", null ],
  [ "do_asind", "apr__builtin_8cc.html#a5ad99f8129a512c1e6d6918418cfe06e", null ],
  [ "do_asinh", "apr__builtin_8cc.html#a60cc0f4955230944854a0e9620e4c593", null ],
  [ "do_atan", "apr__builtin_8cc.html#abe8f1689473432f01ca9d3cb8aefc82d", null ],
  [ "do_atan2", "apr__builtin_8cc.html#add6ad0eca512a884eb9219f85714ef9c", null ],
  [ "do_atan2d", "apr__builtin_8cc.html#a5c56d448e95cffa55323ce2d5c1749b1", null ],
  [ "do_atand", "apr__builtin_8cc.html#a1ff18aed240463b04283d61de3485304", null ],
  [ "do_atanh", "apr__builtin_8cc.html#acc4bad484a49e2da5ee17fb6f1e1720c", null ],
  [ "do_case", "apr__builtin_8cc.html#af2d3232f7987dc0cf240c15eba43d97a", null ],
  [ "do_cbrt", "apr__builtin_8cc.html#a36d0526a5e532db1b0390b24dd54d2ce", null ],
  [ "do_ceil", "apr__builtin_8cc.html#aac9098c9b30cb685803c94f21c7a0913", null ],
  [ "do_cols", "apr__builtin_8cc.html#a99bf5187e84623fd8c823e1203277fd5", null ],
  [ "do_cos", "apr__builtin_8cc.html#ac7e2e12b48b3dca6d1807156b5765b30", null ],
  [ "do_cosd", "apr__builtin_8cc.html#afb3f1985a9af440a7e1c2f7b1f694cd1", null ],
  [ "do_cosh", "apr__builtin_8cc.html#a107ba1d1a552b06c1a76079d0e998f4c", null ],
  [ "do_csv_array", "apr__builtin_8cc.html#ac2dd297bf24147460cd66c0ca5521fe5", null ],
  [ "do_csv_array1", "apr__builtin_8cc.html#a29f56fc474e8c3453359e147b5c37794", null ],
  [ "do_csv_array2", "apr__builtin_8cc.html#a631aa2d07915b9883f9e87049d56b7f8", null ],
  [ "do_d2r", "apr__builtin_8cc.html#a23a72e5919febdc3d93e46050a321f8d", null ],
  [ "do_delete", "apr__builtin_8cc.html#a86206bd72831a96ab5324696f6f446e2", null ],
  [ "do_dim", "apr__builtin_8cc.html#a5a6a70b43460035a15372c2bdcbcc098", null ],
  [ "do_dist", "apr__builtin_8cc.html#a323f45204f6f41fd77fc3ca9178fd38d", null ],
  [ "do_dumpfunc", "apr__builtin_8cc.html#a44cd486ae529189868adce2338669ec2", null ],
  [ "do_dumpfunc1", "apr__builtin_8cc.html#a3236d5633743cef584ad09fc89ef9227", null ],
  [ "do_dumpsym", "apr__builtin_8cc.html#ab169f8c42da364074afef398d7fc41e5", null ],
  [ "do_dumpsym1", "apr__builtin_8cc.html#a91ee3cd38cb04762eb5d8328aab89dd9", null ],
  [ "do_dumpvar", "apr__builtin_8cc.html#a6384c97695216021b7495e9a6401c123", null ],
  [ "do_dumpvar1", "apr__builtin_8cc.html#a1a0216226b61a207cece5317065d1d62", null ],
  [ "do_elseif", "apr__builtin_8cc.html#adeb0a281429d350bb91e264298dad23d", null ],
  [ "do_erf", "apr__builtin_8cc.html#acb8df463df07cf68c84ba7a704a8889e", null ],
  [ "do_erfc", "apr__builtin_8cc.html#a6cb6e1212a28dbbe78b45ba9527c468c", null ],
  [ "do_error", "apr__builtin_8cc.html#a1ed5c0f0d644d10b8f417069bc4de540", null ],
  [ "do_execute", "apr__builtin_8cc.html#aee42615c86d66273c4925ed8b0818b94", null ],
  [ "do_exp", "apr__builtin_8cc.html#a74fbb678108b6c6fc6ef2f777db65c09", null ],
  [ "do_expm1", "apr__builtin_8cc.html#a61a07df615db97b725953068019c2c6d", null ],
  [ "do_extract", "apr__builtin_8cc.html#aa0c11568985712afa14888ee1404521e", null ],
  [ "do_fabs", "apr__builtin_8cc.html#a14e32b38901271c2ad41df03018f7dc5", null ],
  [ "do_file_to_string", "apr__builtin_8cc.html#afc426a192bbd26b63948dc9ac920c7d1", null ],
  [ "do_find_word", "apr__builtin_8cc.html#a99b633016ae438918eb1be7229432fc9", null ],
  [ "do_floor", "apr__builtin_8cc.html#a885fd993251109f432d15806c15b669e", null ],
  [ "do_fmod", "apr__builtin_8cc.html#a9a8638311f650d15dd8f0d4d75dfafa3", null ],
  [ "do_get_date", "apr__builtin_8cc.html#a004535129dec760726b02b1364e3f2a7", null ],
  [ "do_get_iso_date", "apr__builtin_8cc.html#a25c318acf1d87e65f14ce4bb09685642", null ],
  [ "do_get_temp_filename", "apr__builtin_8cc.html#a7769883737de372afda807b0dd5390a7", null ],
  [ "do_get_time", "apr__builtin_8cc.html#aa34be71e259689e7db16fab669d6b83c", null ],
  [ "do_get_word", "apr__builtin_8cc.html#abaff31d01779e024a02e0c18e896d8f0", null ],
  [ "do_getenv", "apr__builtin_8cc.html#aea32abd6ff2dbe511fd59cc692c5fc2c", null ],
  [ "do_hypot", "apr__builtin_8cc.html#aaea81a89b6d09b1d773b902351a67c3e", null ],
  [ "do_identity", "apr__builtin_8cc.html#a8b49a4ee41c394ed31acefbe49b3ce5f", null ],
  [ "do_if", "apr__builtin_8cc.html#ae2baf4ef350f9e070bb9575542b5db69", null ],
  [ "do_include_path", "apr__builtin_8cc.html#aef198ffd70cf5928486e352fcc97b00c", null ],
  [ "do_int", "apr__builtin_8cc.html#a8a49ddcf35a11f4ae03d4c4ba4e7274b", null ],
  [ "do_intout", "apr__builtin_8cc.html#a316ae4cf755f95c286f7542ff393e37f", null ],
  [ "do_julday", "apr__builtin_8cc.html#a277080a462e51d049b4900f2398af13f", null ],
  [ "do_juldayhms", "apr__builtin_8cc.html#a72772e5c75c1673473c1739225ed25dd", null ],
  [ "do_lgamma", "apr__builtin_8cc.html#aa40a6ac840b21e49e0f1e8f12d85e61e", null ],
  [ "do_linear_array", "apr__builtin_8cc.html#a5d576fccf6fa0fdc70360514a49bb415", null ],
  [ "do_log", "apr__builtin_8cc.html#ac40969ab8401cfaa631b3f854071ab39", null ],
  [ "do_log10", "apr__builtin_8cc.html#a4bd44e2f34c68fa7c1003acca4bd3059", null ],
  [ "do_log1p", "apr__builtin_8cc.html#a87e9c1572a6a08d574a5a3de7b874587", null ],
  [ "do_make_array", "apr__builtin_8cc.html#a9a94f9cc46aff41ee8ad34830c830607", null ],
  [ "do_make_array_init", "apr__builtin_8cc.html#a9f16c2fd46c50c7234bbd0e3bf03e38e", null ],
  [ "do_max", "apr__builtin_8cc.html#a935f2b8a2f7015c8bf6a538ec9757e11", null ],
  [ "do_min", "apr__builtin_8cc.html#a8505e7bb450e197ccd6fba2f76f354ed", null ],
  [ "do_nint", "apr__builtin_8cc.html#a547896363d5215f6062dfcb75f838ea3", null ],
  [ "do_notif", "apr__builtin_8cc.html#a49ce28c59d9674d93e668d0f31207221", null ],
  [ "do_option", "apr__builtin_8cc.html#aa17aedebcd9b5674535f3495410d57cf", null ],
  [ "do_output", "apr__builtin_8cc.html#a1a03e4a25e3e4700958c4d696791c193", null ],
  [ "do_polarX", "apr__builtin_8cc.html#a1e4c75a82d86a0e62327b0254308587f", null ],
  [ "do_polarY", "apr__builtin_8cc.html#a6cf65e69f2f3f4b5120db7b984887339", null ],
  [ "do_pow", "apr__builtin_8cc.html#a5fbf8f01752cf2e1aa1ed94eaee03f9b", null ],
  [ "do_print_array", "apr__builtin_8cc.html#a94d7479298c988a04768cbb5eb540847", null ],
  [ "do_r2d", "apr__builtin_8cc.html#a3bc26e885a4acc02e6d70895ef41e21a", null ],
  [ "do_rand", "apr__builtin_8cc.html#a3d4fb2ac982efa7277cfe3dd35dbd0dc", null ],
  [ "do_rand_lognormal", "apr__builtin_8cc.html#a770837e9af161d186759f957ebfe9783", null ],
  [ "do_rand_normal", "apr__builtin_8cc.html#a2363fb0db936f3b915106ae46816186e", null ],
  [ "do_rand_weibull", "apr__builtin_8cc.html#a388b69eea4c2f53215e9965876f1d9d5", null ],
  [ "do_rescan", "apr__builtin_8cc.html#a69d89d7a0ce20649bcf27999421d8629", null ],
  [ "do_rows", "apr__builtin_8cc.html#abc3fd394b3538a30c353e561c1616dea", null ],
  [ "do_sign", "apr__builtin_8cc.html#a14795244966bd576fe472b7e9d1183f0", null ],
  [ "do_sin", "apr__builtin_8cc.html#af0aaf5493e1dee3e0df5b0c7abeaf3d6", null ],
  [ "do_sind", "apr__builtin_8cc.html#a68725f896fb03bba9d2b6dc4eb595c9d", null ],
  [ "do_sinh", "apr__builtin_8cc.html#a0cb8d779d74c4c9ed873e487f209d82e", null ],
  [ "do_sqrt", "apr__builtin_8cc.html#ab4f66d4d58dc4140eddba058d0185574", null ],
  [ "do_srand", "apr__builtin_8cc.html#a2ead35a7a56e8bf656f8127afc50d7a2", null ],
  [ "do_str_elseif", "apr__builtin_8cc.html#a77e5e390fad8911ca6abab32ad85e332", null ],
  [ "do_str_if", "apr__builtin_8cc.html#a77d9411d4f0c5e3321680855ccd35110", null ],
  [ "do_str_notif", "apr__builtin_8cc.html#acb6c9727d89073755abcea20e1beccd2", null ],
  [ "do_strtod", "apr__builtin_8cc.html#ad80b67f429ac3e8671c738c1c0af8864", null ],
  [ "do_switch", "apr__builtin_8cc.html#ad2e10e83c5c60161a6054089df5e8fce", null ],
  [ "do_tan", "apr__builtin_8cc.html#a7cb8552f6ee9bdd973ea038dec05d9c3", null ],
  [ "do_tand", "apr__builtin_8cc.html#ad88c54937c59d2f4d681b2850a15a075", null ],
  [ "do_tanh", "apr__builtin_8cc.html#ad7343276658a57dea2a48a8ae846a028", null ],
  [ "do_tgamma", "apr__builtin_8cc.html#aaf9fea7aacde738df8efeab80dc4197c", null ],
  [ "do_time", "apr__builtin_8cc.html#a17e37c1a676d3ace04ccfe80f9af5c9a", null ],
  [ "do_tolower", "apr__builtin_8cc.html#a6ad099176b51e40443555d9a8e3ab433", null ],
  [ "do_tostring", "apr__builtin_8cc.html#ad85eb83a2e5f4b04c966058ae5c97802", null ],
  [ "do_toupper", "apr__builtin_8cc.html#a9f2740ff3e1e7c19ea51e6012e8286e9", null ],
  [ "do_transpose", "apr__builtin_8cc.html#ae281320dbd0625fd80bcc66abed12356", null ],
  [ "do_word_count", "apr__builtin_8cc.html#a8c7c911b8d3dcf548afb75166de22de2", null ],
  [ "get_tokenized_strings", "apr__builtin_8cc.html#acf907345185955f17b5cdbf928cde507", null ],
  [ "reset_error", "apr__builtin_8cc.html#a97223934f027610caeba5b309178b354", null ],
  [ "rng", "apr__builtin_8cc.html#afba54e4ae2d3a2369ed3086a983916fa", null ],
  [ "rd", "apr__builtin_8cc.html#a210b8303fb82ba1cd00e1e45de8ce01b", null ],
  [ "tokenized_strings", "apr__builtin_8cc.html#a8af8d106c8603c278d56239b3a9ad0e1", null ]
];
|
rinceyuan/WeFe
|
board/board-service/src/main/java/com/welab/wefe/board/service/dto/kernel/JobDataSet.java
|
/**
* Copyright 2021 Tianmian Tech. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.welab.wefe.board.service.dto.kernel;
import com.welab.wefe.common.enums.ComponentType;
import com.welab.wefe.common.enums.JobMemberRole;
import java.util.List;
/**
 * Kernel-facing description of one data set consumed by a job node,
 * together with per-member details of that data set.
 *
 * @author zane.luo
 */
public class JobDataSet {
    /** Id of the flow node this data set belongs to. */
    public String nodeId;
    /** Id of the task that uses this data set. */
    public String taskId;
    /** Type of the component the node runs. */
    public ComponentType componentType;
    /** Per-member data set details (one entry per participating member). */
    public List<Member> members;

    /**
     * Data set details for a single federation member.
     */
    public static class Member {
        /** Id of the member owning this piece of the data set. */
        public String memberId;
        /** Role the member plays in the job (e.g. promoter/provider). */
        public JobMemberRole memberRole;
        /** Id of the member's data set. */
        public String dataSetId;
        /** Number of rows in the member's data set. */
        public long dataSetRows;
        /** Number of feature columns in the member's data set. */
        public long dataSetFeatures;
    }
}
|
lakshyarawal/pythonPractice
|
Arrays/largest_element.py
|
<reponame>lakshyarawal/pythonPractice<gh_stars>0
""" Largest Element in Array: Given an array find the largest element in the array """
"""Solution: """
def largest_element(a) -> int:
    """Return the largest element in *a*.

    Fixes the original implementation, which seeded its running maximum
    with 0 and therefore wrongly returned 0 for arrays containing only
    negative numbers.

    Args:
        a: An iterable of comparable numbers.

    Returns:
        The maximum value in *a*, or 0 if *a* is empty (preserving the
        original behaviour for the empty case).
    """
    # max() with default=0 keeps the old empty-input result while seeding
    # the comparison from the data itself instead of a hard-coded 0.
    return max(a, default=0)
def main():
    """Run a small demo: print the largest element of a sample array."""
    sample = [40, 100, 8, 50]
    largest = largest_element(sample)
    print(largest)
# Using the special variable
# __name__
if __name__ == "__main__":
main()
|
rmulvey/bptest
|
src/org.xtuml.bp.test/src/org/xtuml/bp/test/launcher/restore/RestoreTestLauncherDelegate.java
|
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
package org.xtuml.bp.test.launcher.restore;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
import org.eclipse.core.resources.IMarkerDelta;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.debug.core.DebugException;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.debug.core.model.IBreakpoint;
import org.eclipse.debug.core.model.IDebugTarget;
import org.eclipse.debug.core.model.ILaunchConfigurationDelegate;
import org.eclipse.debug.core.model.IMemoryBlock;
import org.eclipse.debug.core.model.IProcess;
import org.eclipse.debug.core.model.IThread;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IMethod;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
import org.eclipse.jdt.launching.JavaLaunchDelegate;
import org.xtuml.bp.test.TestPlugin;
/**
 * Launch delegate that drives the restore test suite. For every test method
 * found in the test case classes named in the configuration's environment
 * tab, it clones the referenced JUnit launch configuration and launches it
 * twice: once with -DSETUP_WORKSPACE=True to prepare the workspace, then
 * once with -DSETUP_WORKSPACE=False to run the actual restore test.
 */
public class RestoreTestLauncherDelegate extends JavaLaunchDelegate implements
        ILaunchConfigurationDelegate {
    // Name of the JUnit launch configuration to clone per test method,
    // read from this delegate's ATTR_MAIN_TYPE_NAME attribute.
    String junitConfigName;
    // The top-level launch this delegate was started with.
    ILaunch restoreTestLaunch;
    // The top-level configuration this delegate was started with.
    ILaunchConfiguration restoreTestConfig;
    // The currently running child JUnit launch.
    // NOTE(review): "juint" is a typo for "junit"; left unchanged in this
    // documentation-only pass.
    ILaunch juintLaunch;
    // The JUnit configuration resolved from junitConfigName, or null until found.
    ILaunchConfiguration junitConfig = null;
    // Set when the run completes or the user terminates it; polled by the
    // wait loops below.
    boolean isTerminated = false;
    // Wall-clock start of the whole run, reported to the result logger.
    private long startTime;

    public RestoreTestLauncherDelegate() {
        super();
    }

    /**
     * Entry point called by the debug framework: resolves the JUnit
     * configuration named in this configuration, collects the test methods,
     * and runs each one via the private 5-argument launch() overload.
     */
    public void launch(ILaunchConfiguration configuration, String mode,
            ILaunch launch, IProgressMonitor monitor) throws CoreException {
        ILaunchConfigurationWorkingCopy workingCopy = null;
        isTerminated = false;
        startTime = System.currentTimeMillis();
        // Attach a stub debug target so the user can terminate the run
        // from the Debug view.
        RestoreDebugTarget target = new RestoreDebugTarget();
        launch.addDebugTarget(target);
        restoreTestLaunch = launch;
        restoreTestConfig = configuration;
        junitConfigName = configuration.getAttribute(
                IJavaLaunchConfigurationConstants.ATTR_MAIN_TYPE_NAME, "");
        if (junitConfigName.equals(""))
            reportErrorAndExit("Junit configuration not specified.");
        // Look up the JUnit configuration by name among all known launch
        // configurations.
        ILaunchConfiguration[] configurations = DebugPlugin.getDefault()
                .getLaunchManager().getLaunchConfigurations();
        for (int i = 0; i < configurations.length; i++) {
            ILaunchConfiguration c = configurations[i];
            if (c.getName().equals(junitConfigName)) {
                junitConfig = c;
                break;
            }
        }
        if (junitConfig == null) {
            reportErrorAndExit("junit configuration : " + junitConfigName
                    + " not found.");
        }
        Vector v_methods = getTestMethods();
        boolean clearWS = true;
        // Every method is launched twice (setup + run), hence size()*2.
        RestoreTestResultLogger.start(v_methods.size()*2);
        for (int i = 0; i < v_methods.size(); i++) {
            IMethod method = (IMethod) v_methods.get(i);
            workingCopy = junitConfig.copy(method.getElementName());
            // dont clear work space after 1st run
            workingCopy.setAttribute("clearws", clearWS);
            clearWS = false;
            if (isTerminated)
                break;
            launch(workingCopy, mode, launch, monitor, method.getElementName());
        }// end for
        if (RestoreTestResultLogger.getDefault() != null) {
            RestoreTestResultLogger.getDefault().finished(
                    System.currentTimeMillis() - startTime);
        }
        isTerminated = true;
    }

    /**
     * Logs the given message as an error and aborts the launch by throwing
     * a CoreException.
     *
     * @param message the error text to log and report
     * @throws CoreException always, to terminate the launch
     */
    private void reportErrorAndExit(String message) throws CoreException {
        Status status = new Status(IStatus.ERROR, "org.xtuml.bp.test", 0, message, null);
        TestPlugin.getDefault().getLog().log(status);
        throw new CoreException(status); //exit
    }

    /**
     * Collects all test methods of the test case classes listed as
     * "testcase0", "testcase1", ... in the configuration's environment
     * variables attribute. Aborts the launch if no test case class is
     * configured or a configured class cannot be found.
     */
    private Vector getTestMethods() throws CoreException {
        Vector v_methods = new Vector();
        Map testCases = restoreTestConfig.getAttribute(
                "org.eclipse.debug.core.environmentVariables", new HashMap());
        int testCaseCount = 0;
        while (true) {
            String testCaseName = (String) testCases.get("testcase"
                    + testCaseCount);
            if (testCaseName == null) {
                if (testCaseCount == 0)
                    reportErrorAndExit("No testcase class specified in "
                            + "environment tab of configuration");
                break;
            }
            testCaseCount++;
            String projectName = junitConfig.getAttribute(
                    IJavaLaunchConfigurationConstants.ATTR_PROJECT_NAME, "");
            IJavaProject project = JavaCore.create(
                    ResourcesPlugin.getWorkspace().getRoot()).getJavaProject(
                    projectName);
            IType itype = project.findType(testCaseName);
            if (itype == null) {
                reportErrorAndExit("testcase not found: " + testCaseName);
            }
            IMethod[] methods = itype.getMethods();
            for (int i = 0; i < methods.length; i++) {
                IMethod method = methods[i];
                if (isTestMethod(method))
                    v_methods.add(method);
            }
            // TODO iterate type to get test methods from all super classes
        }
        return v_methods;
    }

    /**
     * Returns whether the given method looks like a JUnit 3 test method:
     * no parameters, void return type, name starting with "test".
     */
    protected boolean isTestMethod(IMethod method) {
        try {
            return method.getParameterNames().length == 0
                    && method.getReturnType().equals("V")
                    && method.getElementName().startsWith("test");
        } catch (JavaModelException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Launches one test method twice: a "_Setup" copy with
     * -DSETUP_WORKSPACE=True, then a "_Run" copy with
     * -DSETUP_WORKSPACE=False. Blocks (polling every 100ms) until each
     * child launch terminates or the whole run is terminated.
     */
    private void launch(ILaunchConfigurationWorkingCopy workingCopy,
            String mode, ILaunch launch, IProgressMonitor monitor,
            String testMethod) throws CoreException {
        String workingCopyName = workingCopy.getName();
        // name can't be set so we have to create a new copy
        workingCopy = workingCopy.copy(workingCopyName + "_Setup");
        String vmArg = workingCopy.getAttribute("vmargs", "");
        vmArg += " -DTestCaseName=" + testMethod;
        String newVmArg = vmArg + " -DSETUP_WORKSPACE=True";
        workingCopy.setAttribute("vmargs", newVmArg);
        if (isTerminated)
            return;
        RestoreTestResultLogger.setInSetupWorkspace(true);
        juintLaunch = workingCopy.launch(mode, monitor, true);
        // wait for the setup launch to finish
        while (!juintLaunch.isTerminated() && !isTerminated) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        workingCopy = workingCopy.copy(workingCopyName + "_Run");
        newVmArg = vmArg + " -DSETUP_WORKSPACE=False";
        workingCopy.setAttribute("clearws", false);
        workingCopy.setAttribute("vmargs", newVmArg);
        // run actual restore test
        if (isTerminated)
            return;
        RestoreTestResultLogger.setInSetupWorkspace(false);
        juintLaunch = workingCopy.launch(mode, monitor, true);
        // wait for the run launch to finish
        while (!juintLaunch.isTerminated() && !isTerminated) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Minimal IDebugTarget whose only purpose is to let the user terminate
     * the restore test run from the Debug view; all other debug-target
     * capabilities are stubbed out or report "not supported".
     */
    private class RestoreDebugTarget implements IDebugTarget {
        public RestoreDebugTarget() {
            super();
        }
        public IProcess getProcess() {
            return null;
        }
        public IThread[] getThreads() throws DebugException {
            return new IThread[0];
        }
        public boolean hasThreads() throws DebugException {
            return false;
        }
        public String getName() throws DebugException {
            return "Restore Tests";
        }
        public boolean supportsBreakpoint(IBreakpoint breakpoint) {
            return false;
        }
        public String getModelIdentifier() {
            return null;
        }
        public IDebugTarget getDebugTarget() {
            return this;
        }
        public ILaunch getLaunch() {
            return restoreTestLaunch;
        }
        public Object getAdapter(Class adapter) {
            return null;
        }
        public boolean canTerminate() {
            return !isTerminated;
        }
        public boolean isTerminated() {
            return isTerminated;
        }
        /**
         * Flags the whole run as terminated and terminates the currently
         * running child JUnit launch, if any.
         */
        public void terminate() throws DebugException {
            isTerminated = true;
            if (juintLaunch != null)
                juintLaunch.terminate();
        }
        public boolean canResume() {
            return false;
        }
        public boolean canSuspend() {
            return false;
        }
        public boolean isSuspended() {
            return false;
        }
        public void resume() throws DebugException {
        }
        public void suspend() throws DebugException {
        }
        public void breakpointAdded(IBreakpoint breakpoint) {
        }
        public void breakpointRemoved(IBreakpoint breakpoint, IMarkerDelta delta) {
        }
        public void breakpointChanged(IBreakpoint breakpoint, IMarkerDelta delta) {
        }
        public boolean canDisconnect() {
            return false;
        }
        public void disconnect() throws DebugException {
        }
        public boolean isDisconnected() {
            return false;
        }
        public boolean supportsStorageRetrieval() {
            return false;
        }
        public IMemoryBlock getMemoryBlock(long startAddress, long length)
                throws DebugException {
            return null;
        }
    }
}
|
v3n/audiality2
|
src/rchm.c
|
/*----------------------------------------------------------------------------.
rchm.c - Reference Counting Handle Manager 0.4 |
.----------------------------------------------------------------------------'
| Copyright 2012-2014 <NAME> <<EMAIL>>
|
| This software is provided 'as-is', without any express or implied warranty.
| In no event will the authors be held liable for any damages arising from the
| use of this software.
|
| Permission is granted to anyone to use this software for any purpose,
| including commercial applications, and to alter it and redistribute it
| freely, subject to the following restrictions:
|
| 1. The origin of this software must not be misrepresented; you must not
| claim that you wrote the original software. If you use this software
| in a product, an acknowledgment in the product documentation would be
| appreciated but is not required.
| 2. Altered source versions must be plainly marked as such, and must not be
| misrepresented as being the original software.
| 3. This notice may not be removed or altered from any source distribution.
'---------------------------------------------------------------------------*/
#include "rchm.h"
#include <stdlib.h>
#include <string.h>
/*
 * Register (or re-register) type 'tc' with the given name, destructor and
 * userdata. Grows the type table on demand; newly added slots are
 * zero-filled so unregistered entries stay empty.
 *
 * Returns RCHM_OOMEMORY if the table could not be grown, RCHM_OK otherwise.
 */
RCHM_errors rchm_RegisterType(RCHM_manager *m, RCHM_typecode tc,
		const char *name, RCHM_destructor_cb destroy, void *userdata)
{
	if(!m->types || (tc >= m->ntypes))
	{
		/* Grow with a little headroom so consecutive registrations
		 * don't realloc on every call. */
		int nsize = tc + 8;
		RCHM_typeinfo *nti = (RCHM_typeinfo *)realloc(m->types,
				nsize * sizeof(RCHM_typeinfo));
		if(!nti)
			return RCHM_OOMEMORY;
		/* Zero the new tail of the table */
		memset(nti + m->ntypes, 0,
				(nsize - m->ntypes) * sizeof(RCHM_typeinfo));
		m->types = nti;
		m->ntypes = nsize;
	}
	m->types[tc].destructor = destroy;
	m->types[tc].userdata = userdata;
	/* Re-registering: drop any previous name before installing the new one.
	 * NOTE(review): strdup() failure is not checked; on OOM the name would
	 * silently end up NULL - confirm whether that is acceptable. */
	free(m->types[tc].name);
	if(name)
		m->types[tc].name = strdup(name);
	else
		m->types[tc].name = NULL;
	return RCHM_OK;
}
/*
 * Allocate backing storage for handle block 'bi' of the manager.
 * On failure the slot is left NULL and RCHM_OOMEMORY is returned.
 */
RCHM_errors rchm_AddBlock(RCHM_manager *m, int bi)
{
	m->blocktab[bi] = (RCHM_handleinfo *)malloc(
			RCHM_BLOCKSIZE * sizeof(RCHM_handleinfo));
	if(m->blocktab[bi] == NULL)
		return RCHM_OOMEMORY;
	return RCHM_OK;
}
/*
 * Free all handle blocks allocated so far, the type table and any type
 * names, then zero the manager struct so it can be reinitialized.
 */
void rchm_Cleanup(RCHM_manager *m)
{
	/* nexthandle is the high-water mark, so blocks 0..bi are exactly
	 * the ones that have been allocated. */
	int i, bi = m->nexthandle >> RCHM_BLOCKSIZE_POW2;
	for(i = 0; i <= bi; ++i)
		free(m->blocktab[i]);
	for(i = 0; i < m->ntypes; ++i)
		free(m->types[i].name);
	free(m->types);
	memset(m, 0, sizeof(*m));
}
/*
 * Initialize manager 'm' and preallocate handle blocks for roughly
 * 'inithandles' handles.
 *
 * NOTE(review): the loop runs i < ii, so the block that would contain
 * handle (inithandles - 1) is not preallocated here; presumably blocks
 * are also allocated on demand as handles are created - confirm.
 *
 * Returns RCHM_OOHANDLES if inithandles exceeds the maximum capacity,
 * RCHM_OOMEMORY if a block allocation fails (the manager is cleaned up),
 * RCHM_OK otherwise.
 */
RCHM_errors rchm_Init(RCHM_manager *m, int inithandles)
{
	RCHM_errors res;
	int i, ii = (inithandles - 1) >> RCHM_BLOCKSIZE_POW2;
	if(ii >= RCHM_MAXBLOCKS)
		return RCHM_OOHANDLES;
	memset(m, 0, sizeof(*m));
	/* -1 marks an empty free-handle pool */
	m->pool = -1;
	for(i = 0; i < ii; ++i)
		if((res = rchm_AddBlock(m, i)))
		{
			rchm_Cleanup(m);
			return res;
		}
	return RCHM_OK;
}
|
1zilc/homebrew-cask
|
Casks/razorsql.rb
|
<filename>Casks/razorsql.rb<gh_stars>1-10
cask "razorsql" do
  # Apple Silicon downloads carry an "_aarch64" suffix in the file name.
  arch = Hardware::CPU.intel? ? "" : "_aarch64"

  version "10.0.3"
  # Per-architecture checksums for the dmg selected below.
  if Hardware::CPU.intel?
    sha256 "6c2fdb01b8ed53de80fdbc912ec652d56d2add1780227cc0e679a10bc1b2c9e6"
  else
    sha256 "11d21dc0e5316b80e4a539c0837ed51e9644946166bbfcb856004a2a586dcd62"
  end

  # Vendor S3 bucket; version appears with underscores in the path.
  url "https://s3.dualstack.us-east-1.amazonaws.com/downloads.razorsql.com/downloads/#{version.dots_to_underscores}/razorsql#{version.dots_to_underscores}#{arch}.dmg",
      verified: "s3.dualstack.us-east-1.amazonaws.com/"
  name "RazorSQL"
  desc "SQL query tool and SQL editor"
  homepage "https://razorsql.com/"

  # Scrape the vendor's macOS download page; convert 10_0_3 -> 10.0.3.
  livecheck do
    url "https://razorsql.com/download_mac.html"
    regex(/href=.*?razorsql[._-]?v?(\d+(?:[._]\d+)+)#{arch}\.dmg/i)
    strategy :page_match do |page, regex|
      page.scan(regex).map { |match| match[0].tr("_", ".") }
    end
  end

  depends_on macos: ">= :mojave"

  app "RazorSQL.app"

  # User settings live outside the app bundle.
  zap trash: "~/.razorsql"
end
|
CheongRyoung/everyparking
|
everyParking/EveryParkingAdmin/src/main/java/com/everyparking/admin/framework/common/controller/LoginController.java
|
package com.everyparking.admin.framework.common.controller;
import java.util.HashMap;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import com.everyparking.admin.framework.common.service.CommonService;
import com.everyparking.admin.framework.common.util.SessionUtil;
import com.everyparking.admin.framework.common.vo.MemberVo;
@Controller
@RequestMapping("/login")
public class LoginController extends BaseController {
    @Autowired
    CommonService commonService;

    /** Shows the login form page. */
    @RequestMapping("/loginPage")
    public String loginForm(){
        return "/login/loginForm";
    }

    /**
     * Authenticates the submitted credentials. On success for user type
     * "US02" the member is stored in the HTTP session and the browser is
     * redirected to the admin home page; otherwise the login-failure view
     * is shown.
     */
    @RequestMapping("/loginProcess")
    public ModelAndView loginProcess(HttpServletRequest request, MemberVo param) throws Exception {
        MemberVo sessionUser = commonService.login(param);
        ModelAndView mav = new ModelAndView();
        if(sessionUser != null) {
            mav.addObject("sessionUser", sessionUser);
            if (sessionUser.getUSER_TYPE().equals("US02")) {
                // login authentication succeeded
                SessionUtil.setSessionData(request, "sessionUser", sessionUser);
                // NOTE(review): the space after "redirect:" looks accidental;
                // Spring expects "redirect:/path" - confirm this resolves.
                mav.setViewName("redirect: /parkingManage/adminHome");
            } else {
                // login authentication failed (wrong user type)
                mav.setViewName("/login/loginFail");
            }
        } else {
            // login authentication failed (no matching member)
            mav.setViewName("/login/loginFail");
        }
        return mav;
    }

    /**
     * Invalidates the session and returns to the login page.
     * NOTE(review): method name "loggout" is a typo for "logout"; the URL
     * mapping is unaffected, so it is left unchanged here.
     */
    @RequestMapping("/logout")
    public String loggout(HttpSession session, HttpServletRequest request) {
        session.invalidate();
        return "redirect:/login/loginPage";
    }

    /** Shows the "login required" page. */
    @RequestMapping("/loginReq")
    public String loginReq() {
        return "/login/loginReq";
    }
}
|
kinarashah/rancher
|
vendor/k8s.io/kubernetes/plugin/pkg/admission/security/podsecuritypolicy/metrics.go
|
<filename>vendor/k8s.io/kubernetes/plugin/pkg/admission/security/podsecuritypolicy/metrics.go
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package podsecuritypolicy
import (
"strconv"
"github.com/prometheus/client_golang/prometheus"
"k8s.io/apiserver/pkg/admission"
)
const (
	// Prometheus namespace/subsystem under which the admission metric is
	// registered (yields an "apiserver_admission_..." metric name).
	namespace = "apiserver"
	subsystem = "admission"
)

var (
	// admitCounter counts PodSecurityPolicy admission decisions, labeled
	// by plugin name, check type, operation, resource identity and
	// whether the request was rejected.
	// NOTE(review): the metric name ends in "_latencies_seconds_count"
	// although this is a plain counter, not a latency series; presumably
	// kept for compatibility with existing dashboards - confirm before
	// renaming.
	admitCounter = prometheus.NewCounterVec(
		prometheus.CounterOpts{
			Namespace: namespace,
			Subsystem: subsystem,
			Name:      "controller_admission_latencies_seconds_count",
			Help:      "Admission controller counts, identified by name and broken out for each operation and API resource and type (validate or admit).",
		},
		[]string{"name", "type", "operation", "group", "version", "resource", "subresource", "rejected"},
	)
)
// init registers the admission counter with the default Prometheus registry.
func init() {
	prometheus.MustRegister(admitCounter)
}
// ObserveAdmit increments the admit-phase counter for one admission decision,
// labelled with the plugin name, the operation, the attribute's
// group/version/resource and subresource, and the rejection outcome.
func ObserveAdmit(rejected bool, attr admission.Attributes) {
	gvr := attr.GetResource()
	admitCounter.WithLabelValues(
		PluginName,
		"admit",
		string(attr.GetOperation()),
		gvr.Group,
		gvr.Version,
		gvr.Resource,
		attr.GetSubresource(),
		strconv.FormatBool(rejected),
	).Inc()
}
|
iphyer/LeetcodeSummary
|
DailyChallenge/LC_759.py
|
<reponame>iphyer/LeetcodeSummary
class Solution:
    def numSubarrayBoundedMax(self, nums: List[int], left: int, right: int) -> int:
        """Count contiguous subarrays whose maximum element lies in [left, right].

        Uses the standard counting identity

            answer = count(right) - count(left - 1)

        where ``count(bound)`` is the number of subarrays whose elements are
        all <= bound.  ``count`` is computed in one pass by tracking the
        length of the current run of elements <= bound: an element extending
        a run to length k contributes exactly k subarrays ending at it.

        Runs in O(n) time and O(1) extra space.  (The previous per-element
        window expansion was O(n^2) in the worst case, e.g. an all-equal
        array, because both pointers scanned to the array ends for every
        index.)
        """
        def count(bound: int) -> int:
            # Number of subarrays in which every element is <= bound.
            total = 0
            run = 0  # length of the current run of elements <= bound
            for num in nums:
                run = run + 1 if num <= bound else 0
                total += run
            return total

        return count(right) - count(left - 1)
|
MrPepperoni/Reaping2-1
|
src/core/rotate_component.cpp
|
<gh_stars>1-10
#include "core/rotate_component.h"
// Data component holding an actor's rotation state: angular speed plus an
// on/off flag.  Pure storage -- the rotation itself is presumably applied by
// a system elsewhere in the engine (not visible in this file).
RotateComponent::RotateComponent()
    : mSpeed(0.0)      // no speed until configured by the loader
    , mRotating(true)  // rotating by default once a speed is set
{
}

// Sets the angular speed (units defined by the consuming system).
void RotateComponent::SetSpeed(double speed)
{
    mSpeed=speed;
}

// Returns the configured angular speed.
double RotateComponent::GetSpeed()const
{
    return mSpeed;
}

// Enables or disables rotation without touching the stored speed.
void RotateComponent::SetRotating(bool rotating)
{
    mRotating=rotating;
}

// True while rotation is enabled.
bool RotateComponent::IsRotating()const
{
    return mRotating;
}

// Binds the "speed" and "rotating" data keys to the component setters so the
// component can be populated by the data-driven loader.
void RotateComponentLoader::BindValues()
{
    Bind( "speed", func_double( &RotateComponent::SetSpeed ) );
    Bind( "rotating", func_bool( &RotateComponent::SetRotating ) );
}

RotateComponentLoader::RotateComponentLoader()
{
}

// Registers the component with the engine's reflection/serialization system.
REAPING2_CLASS_EXPORT_IMPLEMENT( RotateComponent, RotateComponent );
|
anuraaga/zipkin-java
|
zipkin-server/src/main/java/zipkin/server/ZipkinUiConfiguration.java
|
<reponame>anuraaga/zipkin-java<filename>zipkin-server/src/main/java/zipkin/server/ZipkinUiConfiguration.java
/**
* Copyright 2015-2016 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin.server;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnResource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.filter.CharacterEncodingFilter;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
/**
* Zipkin-UI is a single-page application that reads configuration from /config.json.
*
* <p>When looking at a trace, the browser is sent to the path "/traces/{id}". For the single-page
* app to serve that route, the server needs to forward the request to "/index.html". The same
* forwarding applies to "/dependencies" and any other routes the UI controls.
*
* <p>Under the scenes the JavaScript code looks at {@code window.location} to figure out what the
* UI should do. This is handled by a route api defined in the crossroads library.
*/
@Configuration
@ConditionalOnResource(resources = "classpath:zipkin-ui") // from io.zipkin:zipkin-ui
public class ZipkinUiConfiguration extends WebMvcConfigurerAdapter {

  // Serves the packaged UI assets (js/css/html/png) from the zipkin-ui jar.
  @Override
  public void addResourceHandlers(ResourceHandlerRegistry registry) {
    registry.addResourceHandler("/**").addResourceLocations("classpath:/zipkin-ui/");
  }

  /**
   * This opts out of adding charset to png resources.
   *
   * <p>By default, {@linkplain CharacterEncodingFilter} adds a charset qualifier to all resources,
   * which helps, as javascript assets include extended character sets. However, the filter also
   * adds charset to well-known binary ones like png. This creates confusing content types, such as
   * "image/png;charset=UTF-8".
   *
   * See https://github.com/spring-projects/spring-boot/issues/5459
   */
  @Bean
  @Order(Ordered.HIGHEST_PRECEDENCE)
  public CharacterEncodingFilter characterEncodingFilter() {
    CharacterEncodingFilter filter = new CharacterEncodingFilter() {
      @Override
      protected boolean shouldNotFilter(HttpServletRequest request) {
        return request.getServletPath().endsWith(".png");
      }
    };
    filter.setEncoding("UTF-8");
    filter.setForceEncoding(true);
    return filter;
  }

  /** REST endpoints that back the single-page UI. */
  @RestController
  public static class ZipkinUi {
    @Autowired
    ZipkinServerProperties server;

    // The single-page app bootstraps itself from this JSON document.
    @RequestMapping(value = "/config.json", method = RequestMethod.GET, produces = APPLICATION_JSON_VALUE)
    public ZipkinServerProperties.Ui getUiConfig() {
      return server.getUi();
    }

    /**
     * This cherry-picks well-known routes the single-page app serves, and forwards to that as
     * opposed to returning a 404.
     */
    // TODO This approach requires maintenance when new UI routes are added. Change to the following:
    // If the path is a file w/an extension, treat normally.
    // Otherwise instead of returning 404, forward to the index.
    // See https://github.com/twitter/finatra/blob/458c6b639c3afb4e29873d123125eeeb2b02e2cd/http/src/main/scala/com/twitter/finatra/http/response/ResponseBuilder.scala#L321
    @RequestMapping(value = {"/", "/traces/{id}", "/dependency"}, method = RequestMethod.GET)
    public ModelAndView forwardUiEndpoints(ModelMap model) {
      // Note: RequestMapping "/" requires us to use ModelAndView result vs just a string.
      // When "/" is mapped, the server literally returns "forward:/index.html" vs forwarding.
      return new ModelAndView("forward:/index.html", model);
    }
  }
}
|
luc78as/Create
|
src/main/java/com/simibubi/create/modules/contraptions/components/actors/DrillMovementBehaviour.java
|
package com.simibubi.create.modules.contraptions.components.actors;
import com.simibubi.create.foundation.utility.SuperByteBuffer;
import com.simibubi.create.foundation.utility.VecHelper;
import com.simibubi.create.modules.contraptions.components.contraptions.MovementContext;
import net.minecraft.util.DamageSource;
import net.minecraft.util.math.Vec3d;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
/**
 * Movement behaviour for the mechanical drill while mounted on a moving
 * contraption.  Block-breaking itself is inherited from
 * {@link BlockBreakingMovementBehaviour}; this class only supplies the
 * drill-specific activation test, probe offset, renderer and damage source.
 */
public class DrillMovementBehaviour extends BlockBreakingMovementBehaviour {

	// The drill is inactive when the contraption moves opposite to the
	// direction the drill faces (i.e. the drill is being dragged backwards).
	@Override
	public boolean isActive(MovementContext context) {
		return !VecHelper.isVecPointingTowards(context.relativeMotion,
				context.state.get(DrillBlock.FACING).getOpposite());
	}

	// Block-breaking probe sits 0.65 blocks ahead of the drill, along FACING.
	@Override
	public Vec3d getActiveAreaOffset(MovementContext context) {
		return new Vec3d(context.state.get(DrillBlock.FACING).getDirectionVec()).scale(.65f);
	}

	// Client-only: rendering of the moving drill is delegated to its
	// tile-entity renderer.
	@Override
	@OnlyIn(value = Dist.CLIENT)
	public SuperByteBuffer renderInContraption(MovementContext context) {
		return DrillTileEntityRenderer.renderInContraption(context);
	}

	// Entities hit by the moving drill take the drill's dedicated damage type.
	@Override
	protected DamageSource getDamageSource() {
		return DrillBlock.damageSourceDrill;
	}
}
|
nkchinh/grammatica
|
src/java/net/percederberg/grammatica/output/VisualBasicConstantsFile.java
|
<filename>src/java/net/percederberg/grammatica/output/VisualBasicConstantsFile.java<gh_stars>1-10
/*
* VisualBasicConstantsFile.java
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the BSD license.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* LICENSE.txt file for more details.
*
* Copyright (c) 2004 <NAME>. All rights reserved.
* Copyright (c) 2003-2015 <NAME>. All rights reserved.
*/
package net.percederberg.grammatica.output;
import java.io.IOException;
import java.util.HashMap;
import net.percederberg.grammatica.code.visualbasic.VisualBasicComment;
import net.percederberg.grammatica.code.visualbasic.VisualBasicEnumeration;
import net.percederberg.grammatica.code.visualbasic.VisualBasicFile;
import net.percederberg.grammatica.code.visualbasic.VisualBasicNamespace;
import net.percederberg.grammatica.parser.ProductionPattern;
import net.percederberg.grammatica.parser.TokenPattern;
/**
* The Visual Basic constants file generator. This class encapsulates
* all the Visual Basic (.NET) code necessary for creating a constants
* enumeration file.
*
* @author <NAME>
* @author <NAME>
* @version 1.5
* @since 1.5
*/
class VisualBasicConstantsFile {

    /**
     * The enumeration comment.
     */
    private static final String TYPE_COMMENT =
        "<remarks>An enumeration with token and production node\n" +
        "constants.</remarks>";

    /**
     * The parser generator.
     */
    private VisualBasicParserGenerator gen;

    /**
     * The file to write.
     */
    private VisualBasicFile file;

    /**
     * The enumeration declaration.
     */
    private VisualBasicEnumeration enm;

    /**
     * The mapping from id to constant name. This map contains all
     * tokens and productions added to the file.  Typed generically so
     * lookups in getConstant() need no unchecked cast.
     */
    private HashMap<Integer, String> constantNames =
        new HashMap<Integer, String>();

    /**
     * Creates a new constants file.
     *
     * @param gen the parser generator to use
     */
    public VisualBasicConstantsFile(VisualBasicParserGenerator gen) {
        String name = gen.getBaseName() + "Constants";
        int modifiers;

        this.gen = gen;
        this.file = new VisualBasicFile(gen.getBaseDir(), name);
        if (gen.getPublicAccess()) {
            modifiers = VisualBasicEnumeration.PUBLIC;
        } else {
            modifiers = VisualBasicEnumeration.FRIEND;
        }
        this.enm = new VisualBasicEnumeration(modifiers, name);
        initializeCode();
    }

    /**
     * Initializes the source code objects.
     */
    private void initializeCode() {
        String str;
        VisualBasicNamespace n;

        // Add namespace (the enumeration goes directly in the file when
        // no namespace is configured)
        if (gen.getNamespace() == null) {
            file.addEnumeration(enm);
        } else {
            n = new VisualBasicNamespace(gen.getNamespace());
            n.addEnumeration(enm);
            file.addNamespace(n);
        }

        // Add file comment
        str = file.toString() + "\n\n" + gen.getFileComment();
        file.addComment(new VisualBasicComment(VisualBasicComment.SINGLELINE,
                                               str));

        // Add type comment
        enm.addComment(new VisualBasicComment(TYPE_COMMENT));
    }

    /**
     * Adds a token constant definition to this file.
     *
     * @param pattern the token pattern
     */
    public void addToken(TokenPattern pattern) {
        String constant;

        constant = gen.getCodeStyle().getUpperCase(pattern.getName());
        enm.addConstant(constant, String.valueOf(pattern.getId()));
        // Integer.valueOf uses the JDK boxing cache instead of the
        // deprecated new Integer(int) constructor.
        constantNames.put(Integer.valueOf(pattern.getId()), constant);
    }

    /**
     * Adds a production constant definition to this file. This method
     * checks if the production pattern has already been added.
     *
     * @param pattern the production pattern
     */
    public void addProduction(ProductionPattern pattern) {
        String constant;

        // Synthetic productions are internal to the parser and get no
        // public constant.
        if (!pattern.isSynthetic()) {
            constant = gen.getCodeStyle().getUpperCase(pattern.getName());
            enm.addConstant(constant, String.valueOf(pattern.getId()));
            constantNames.put(Integer.valueOf(pattern.getId()), constant);
        }
    }

    /**
     * Creates source code for accessing one of the constants in this
     * file.
     *
     * @param id the node type (pattern) id
     *
     * @return the constant name, or
     *         null if not found
     */
    public String getConstant(int id) {
        String name = constantNames.get(Integer.valueOf(id));

        if (name == null) {
            return null;
        } else {
            return enm.toString() + "." + name;
        }
    }

    /**
     * Writes the file source code.
     *
     * @throws IOException if the output file couldn't be created
     *             correctly
     */
    public void writeCode() throws IOException {
        file.writeCode(gen.getCodeStyle());
    }
}
|
theholyhades1/tartanHacks2015
|
site/flask/lib/python2.7/site-packages/guess_language/blocks.py
|
''' Categorize unicode characters by the code block in which they are found.
Copyright (c) 2008, <NAME>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
'''
import os, re
from bisect import bisect_left
def _loadBlocks():
    ''' Load Blocks.txt.

    Create and return two parallel lists. One has the start and end points for
    codepoint ranges, the second has the corresponding block name (each name
    appears twice, once per endpoint, so the lists stay index-aligned).
    '''
    # Expects our version of Blocks.txt to be in the same dir as this file
    blocksPath = os.path.join(os.path.dirname(__file__), 'Blocks.txt')

    endpoints = []
    names = []
    # Lines look like "0000..007F; Basic Latin".  Codepoints in the Unicode
    # Blocks.txt format are 4 to 6 hex digits; the previous pattern '....'
    # matched exactly 4 characters and would trip the `assert m` below on a
    # supplementary-plane line such as "10000..1007F; Linear B Syllabary".
    splitter = re.compile(r'^([0-9A-Fa-f]+)\.\.([0-9A-Fa-f]+); (.*)$')
    # 'with' guarantees the file handle is closed (it previously leaked).
    with open(blocksPath) as blocksFile:
        for line in blocksFile:
            if line.startswith('#'):
                continue
            line = line.strip()
            if not line:
                continue
            m = splitter.match(line)
            assert m
            start = int(m.group(1), 16)
            end = int(m.group(2), 16)
            name = m.group(3)
            endpoints.append(start)
            endpoints.append(end)
            names.append(name)
            names.append(name)

    return endpoints, names
# Parse Blocks.txt once at import time; unicodeBlock() below reads the result.
_endpoints, _names = _loadBlocks()

def unicodeBlock(c):
    ''' Returns the name of the unicode block containing c

        c must be a single character. '''
    # _endpoints is the flattened [start0, end0, start1, end1, ...] list, so
    # bisect_left lands on an index whose parallel entry in _names is the
    # block name.
    # NOTE(review): a codepoint falling in a gap between two ranges resolves
    # to the *following* block's name -- confirm callers never pass
    # unassigned codepoints or accept that behaviour.
    ix = bisect_left(_endpoints, ord(c))
    return _names[ix]
|
asheraryam/ezEngine
|
Code/EnginePlugins/ParticlePlugin/Renderer/ParticleExtractor.cpp
|
#include <ParticlePluginPCH.h>
#include <Core/World/World.h>
#include <Foundation/Threading/Lock.h>
#include <ParticlePlugin/Renderer/ParticleExtractor.h>
#include <ParticlePlugin/WorldModule/ParticleWorldModule.h>
#include <RendererCore/Pipeline/View.h>
// Reflection registration so the extractor can be instantiated by type name.
EZ_BEGIN_DYNAMIC_REFLECTED_TYPE(ezParticleExtractor, 1, ezRTTIDefaultAllocator<ezParticleExtractor>)
EZ_END_DYNAMIC_REFLECTED_TYPE;

// Declares a dependency on the visible-objects extractor so particle data is
// gathered after visibility has been determined.
ezParticleExtractor::ezParticleExtractor(const char* szName)
  : ezExtractor(szName)
{
  m_DependsOn.PushBack(ezMakeHashedString("ezVisibleObjectsExtractor"));
}

// Collects render data for all particle effects of the view's world.  The
// world read-marker lock guards the world while the particle module copies
// its data into extractedRenderData.
void ezParticleExtractor::Extract(
  const ezView& view, const ezDynamicArray<const ezGameObject*>& visibleObjects, ezExtractedRenderData& extractedRenderData)
{
  EZ_LOCK(view.GetWorld()->GetReadMarker());

  // The module may be absent when the world contains no particle effects.
  if (const ezParticleWorldModule* pModule = view.GetWorld()->GetModule<ezParticleWorldModule>())
  {
    pModule->ExtractRenderData(view, extractedRenderData);
  }
}

EZ_STATICLINK_FILE(ParticlePlugin, ParticlePlugin_Renderer_ParticleExtractor);
|
tl455047/osc2021
|
lab8/kernel/sched.c
|
<gh_stars>1-10
#include "sched.h"
#include <printf.h>
#include <string.h>
/*
 * schedule() - voluntary round-robin reschedule point.
 *
 * When the current task's resched flag is set, pop the next runnable task
 * from the run queue, push the current task to the back, and context-switch.
 * Interrupts are disabled around the queue manipulation so an interrupt
 * handler cannot observe the queue in an inconsistent state.
 */
void schedule() {
    struct task_struct* current_task, *next_task;
    current_task = get_current();
    if(current_task->resched == 1) {
        disable_interrupt();
        //task_queue_status(&run_queue);
        next_task = task_queue_pop(&run_queue);
        task_queue_push(current_task, &run_queue);
        current_task->resched = 0;
        enable_interrupt();
        /* Pop returns null when the queue was empty; in that case keep
         * running the current task (its resched flag is already cleared). */
        if(next_task != null) {
            switch_to(current_task, next_task);
        }
    }
    // else if
    // exit
}
|
co-develop-drv/ZookeeperClient
|
src/main/java/com/saaavsaaa/client/retry/RetryCallable.java
|
<gh_stars>0
package com.saaavsaaa.client.retry;
import com.saaavsaaa.client.action.IProvider;
import com.saaavsaaa.client.zookeeper.section.Connection;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Created by aaa
*/
/**
 * Template for a ZooKeeper operation that is retried according to a
 * {@code DelayRetryPolicy} when a recoverable {@link KeeperException} occurs.
 *
 * <p>Subclasses implement {@link #call()}; callers invoke {@link #exec()}.
 * Each failed attempt advances the delay policy, resets the connection when
 * the error class requires it, sleeps until the policy's next tick and tries
 * again until the policy is exhausted.  {@link InterruptedException} always
 * propagates unchanged to the caller.
 *
 * Created by aaa
 */
public abstract class RetryCallable {
    private static final Logger logger = LoggerFactory.getLogger(RetryCallable.class);

    /** Tracks retry state (next tick, remaining attempts) for this call. */
    protected final DelayPolicyExecutor delayPolicyExecutor;

    /** Provider used to talk to ZooKeeper; reset on recoverable errors. */
    protected final IProvider provider;

    public RetryCallable(final IProvider provider, final DelayRetryPolicy delayRetryPolicy) {
        this.delayPolicyExecutor = new DelayPolicyExecutor(delayRetryPolicy);
        this.provider = provider;
    }

    /**
     * The operation to attempt.  A thrown {@link KeeperException} triggers
     * the retry machinery in {@link #exec()}.
     */
    public abstract void call() throws KeeperException, InterruptedException;

    /**
     * Runs {@link #call()} once, scheduling a delayed retry on failure.
     *
     * <p>NOTE(review): when the retry policy is exhausted the final
     * {@link KeeperException} is swallowed rather than rethrown (execDelay
     * simply returns) -- confirm callers expect best-effort semantics.
     *
     * @throws KeeperException      if a retry attempt rethrows it
     * @throws InterruptedException if the calling thread is interrupted
     */
    public void exec() throws KeeperException, InterruptedException {
        try {
            logger.debug("begin exec call");
            call();
            logger.debug("end exec call");
        } catch (KeeperException e) {
            // Recoverable ZooKeeper failure: advance the delay policy, reset
            // the connection when the error class requires it, then retry.
            logger.warn("exec KeeperException:{}", e.getMessage());
            delayPolicyExecutor.next();
            if (Connection.needReset(e)) {
                provider.resetConnection();
            }
            execDelay();
        }
        // The previous "catch (InterruptedException e) { throw e; }" block
        // was redundant: the exception propagates unchanged without it.
    }

    /**
     * Sleeps until the policy's next tick, then re-runs {@link #exec()} if
     * the policy still has retries left; otherwise gives up.
     */
    protected void execDelay() throws KeeperException, InterruptedException {
        for (;;) {
            long delay = delayPolicyExecutor.getNextTick() - System.currentTimeMillis();
            if (delay > 0) {
                logger.debug("exec delay:{}", delay);
                Thread.sleep(delay);
            } else {
                if (delayPolicyExecutor.hasNext()) {
                    logger.debug("exec hasNext");
                    exec();
                }
                break;
            }
        }
    }
}
|
damjack/onixo
|
spec/onixo/methods/proprietary_id_spec.rb
|
<reponame>damjack/onixo
require 'spec_helper'
# Placeholder spec for Onixo::Methods::ProprietaryId -- no examples yet.
describe Onixo::Methods::ProprietaryId do
end
|
robertovillarejo/java-bot-broker
|
src/main/java/io/github/robertovillarejo/bot/config/DialogflowConfig.java
|
package io.github.robertovillarejo.bot.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import ai.api.AIConfiguration;
import ai.api.AIDataService;
@Configuration
@ConfigurationProperties
public class DialogflowConfig {

    /**
     * Builds the Dialogflow data service used to send queries to the agent.
     *
     * @param apiKey the Dialogflow client access token, injected from the
     *               {@code apiKey} configuration property
     * @return a configured {@link AIDataService}
     */
    @Bean
    public AIDataService aiDataService(@Value("${apiKey}") String apiKey) {
        return new AIDataService(new AIConfiguration(apiKey));
    }
}
|
ibizaman/veewee
|
lib/fission.old/response.rb
|
module Fission
  # Value object describing the outcome of an executed command.
  #
  # Attributes:
  #   code   - exit code; 0 means success (defaults to 1)
  #   output - captured output text (defaults to '')
  #   data   - arbitrary payload attached by the caller (defaults to nil)
  class Response
    attr_accessor :code, :output, :data

    # Builds a response from an options hash; every key is optional.
    def initialize(args = {})
      @code   = args.fetch(:code, 1)
      @output = args.fetch(:output, '')
      @data   = args.fetch(:data, nil)
    end

    # True only when the exit code is zero.
    def successful?
      @code == 0
    end
  end
end
|
00-01/gap_sdk
|
gvsoc/gvsoc/engine/include/vp/trace/event_dumper.hpp
|
<reponame>00-01/gap_sdk
/*
* Copyright (C) 2020 GreenWaves Technologies, SAS, ETH Zurich and
* University of Bologna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Authors: <NAME>, GreenWaves Technologies (<EMAIL>)
*/
#ifndef __VP_TRACE_EVENT_DUMPER_HPP__
#define __VP_TRACE_EVENT_DUMPER_HPP__
#include <stdio.h>
#include <unordered_map>
#include <gv/gvsoc.hpp>
namespace vp {

  class Event_dumper;

  // Abstract sink for trace output.  Concrete subclasses implement one file
  // format (VCD, LXT2, FST, raw); the defaults are no-ops so a dumper can
  // run without a backing file.
  class Event_file
  {
  public:
    // Writes one value change of trace `id` at `timestamp`.
    virtual void dump(int64_t timestamp, int id, uint8_t *event, int width, bool is_real, bool is_string, uint8_t flags, uint8_t *flag_mask) {}
    // Finalizes and closes the output file.
    virtual void close() {}
    // Declares a trace (signal) before values are dumped for it.
    virtual void add_trace(string name, int id, int width, bool is_real, bool is_string) {}

  protected:
    int64_t last_timestamp = -1;  // timestamp of the last dumped event, -1 before any
    FILE *file;                   // underlying output stream
    bool header_dumped = false;   // set once the format header has been written
  };

  // One traced signal: buffers the latest value and forwards it to the
  // owning Event_file on dump().
  class Event_trace
  {
  public:
    Event_trace(string trace_name, Event_file *file, int width, bool is_real, bool is_string);
    // Records a new value (with optional per-bit flags) for this trace.
    void reg(int64_t timestamp, uint8_t *event, int width, uint8_t flags, uint8_t *flag_mask);
    // Flushes the buffered value to the file back end.
    inline void dump(int64_t timestamp) { file->dump(timestamp, id, this->buffer, this->width, this->is_real, this->is_string, this->flags, this->flags_mask); }

    std::string trace_name;
    bool is_real = false;
    bool is_string;
    Event_trace *next;   // presumably links traces pending a dump -- confirm
    bool is_enqueued;
    int width;           // signal width in bits
    int bytes;           // size of `buffer` in bytes
    int id;              // trace identifier within the owning file
    uint8_t *buffer;     // latest registered value

    void set_vcd_user(gv::Vcd_user *user);

  private:
    Event_file *file;
    uint8_t flags;
    uint8_t *flags_mask;
  };

  // Owns the traces and output files of one component, handing out
  // Event_trace instances keyed by name.
  class Event_dumper
  {
  public:
    Event_dumper(vp::component *comp) : comp(comp) {}
    // Returns the named trace, creating it (and its file) on demand.
    Event_trace *get_trace(string trace_name, string file_name, int width, bool is_real=false, bool is_string=false);
    Event_trace *get_trace_real(string trace_name, string file_name);
    Event_trace *get_trace_string(string trace_name, string file_name);
    // Closes every opened event file.
    void close();

    void set_vcd_user(gv::Vcd_user *user);

    vp::component *comp;

  private:
    std::map<std::string, Event_trace *> event_traces;
    std::map<std::string, Event_file *> event_files;
  };

  // Back end writing the textual VCD (value change dump) format.
  class Vcd_file : public Event_file
  {
  public:
    Vcd_file(Event_dumper *dumper, string path);
    void close();
    void add_trace(string name, int id, int width, bool is_real, bool is_string);
    void dump(int64_t timestamp, int id, uint8_t *event, int width, bool is_real, bool is_string, uint8_t flags, uint8_t *flag_mask);

  private:
    string parse_path(string path, bool begin);
  };

  // Back end writing the compressed LXT2 format via the lxt2_wr library.
  class Lxt2_file : public Event_file
  {
  public:
    Lxt2_file(Event_dumper *dumper, string path);
    void close();
    void add_trace(string name, int id, int width, bool is_real, bool is_string);
    void dump(int64_t timestamp, int id, uint8_t *event, int width, bool is_real, bool is_string, uint8_t flags, uint8_t *flag_mask);

  private:
    struct lxt2_wr_trace *trace;
    std::vector<struct lxt2_wr_symbol *> symbols;
  };

  // Back end writing the FST format (GTKWave's native compressed format).
  class Fst_file : public Event_file
  {
  public:
    Fst_file(Event_dumper *dumper, string path);
    void close();
    void add_trace(string name, int id, int width, bool is_real, bool is_string);
    void dump(int64_t timestamp, int id, uint8_t *event, int width, bool is_real, bool is_string, uint8_t flags, uint8_t *flag_mask);

  private:
    string parse_path(string path, bool begin);
    void *writer;
    std::vector<uint32_t> vars;
  };

  // Back end forwarding events to an opaque raw dumper object.
  class Raw_file : public Event_file
  {
  public:
    Raw_file(Event_dumper *dumper, string path);
    void close();
    void add_trace(string name, int id, int width, bool is_real, bool is_string);
    void dump(int64_t timestamp, int id, uint8_t *event, int width, bool is_real, bool is_string, uint8_t flags, uint8_t *flag_mask);

  private:
    void *dumper;
    std::unordered_map<int, void *> traces;
  };

};
#endif
|
Aleksuo/SpeechDismantlerUIProto
|
src/tests/utils/GeneralUtils.test.js
|
/* eslint-disable */
import { millisecondsToTimeString, estimateStartTime, secondsToMilliseconds, nanosecondsToMilliseconds } from '../../utils/GeneralUtils'
describe("millisecondsToTimeString", () => {
it('Converts milliseconds to a readable string', () => {
expect(millisecondsToTimeString(0)).toEqual("00:00")
})
it('Seconds are displayed correctly', () => {
expect(millisecondsToTimeString(0)).toEqual("00:00")
expect(millisecondsToTimeString(1000)).toEqual("00:01")
expect(millisecondsToTimeString(10000)).toEqual("00:10")
expect(millisecondsToTimeString(61000)).toEqual("01:01")
})
it('Minutes are displayed correctly', () => {
expect(millisecondsToTimeString(60000)).toEqual("01:00")
expect(millisecondsToTimeString(600000)).toEqual("10:00")
expect(millisecondsToTimeString(660000)).toEqual("11:00")
})
})
describe("secondsToMilliseconds", () =>{
it('Converts seconds to milliseconds correctly', () => {
expect(secondsToMilliseconds(0)).toEqual(0)
expect(secondsToMilliseconds(50)).toEqual(50000)
})
})
describe("nanosecondsToMilliseconds", () => {
it('Converts nanoseconds to milliseconds correctly', () => {
expect(nanosecondsToMilliseconds(0)).toEqual(0)
expect(nanosecondsToMilliseconds(1000000)).toEqual(1)
expect(nanosecondsToMilliseconds(100000000)).toEqual(100)
})
})
describe("estimateStartTime", () => {
const oneWordSentence = {
endTime: 2000,
words: [
{
startTime: { seconds: "1", nanos: 0 },
endTime: { seconds: "1", nanos: 600000000 }
}
]
}
const MultipleWordSentence = {
endTime: 3000,
words: [
{
startTime: { seconds: "1", nanos: 0 },
endTime: { seconds: "1", nanos: 600000000 }
},
{
startTime: { seconds: "2", nanos: 0 },
endTime: {seconds: "2", nanos: 100000000}
}
]
}
it('Returns correct estimate for one word sentence', () => {
expect(estimateStartTime(oneWordSentence, 0)).toEqual(1400)
})
it('Returns correct estimate for multiple word sentence', () =>{
expect(estimateStartTime(MultipleWordSentence, 0)).toEqual(1900)
})
})
|
mbatc/Fractal
|
Engine/source/SceneRenderer.cpp
|
<reponame>mbatc/Fractal<filename>Engine/source/SceneRenderer.cpp
#include "Fractal/ISceneRenderer.h"
#include "Fractal/IDeviceState.h"
#include "Fractal/IUniformBuffer.h"
#include "Fractal/StructuredBuffer.h"
#include "Fractal/ISceneGraph.h"
#include "Fractal/INode.h"
#include "Fractal/IComponent.h"
#include "Fractal/Log.h"
#include "Fractal/IShaderMaterial.h"
#include "Fractal/IVertexArray.h"
#include "Fractal/IProgram.h"
#include "Fractal/IAssetManager.h"
#include "Fractal/IApplication.h"
#include "Fractal/IRenderAPI.h"
#include "Fractal/IMeshRenderer.h"
#include "Fractal/ITransform.h"
#include "Fractal/ICamera.h"
#include "Fractal/ILight.h"
#include "Fractal/ISkybox.h"
#include "Fractal/ISampler.h"
#include "ctVector.h"
#include "Fractal/MeshPrimitives.h"
namespace Fractal
{
namespace Impl
{
class SceneRenderer : public ISceneRenderer
{
FRACTAL_DEFAULT_INTERFACE;
public:
enum UniformBufferSlot
{
UBS_Camera = 0,
UBS_Model = 1,
UBS_Material = 2,
UBS_Light = 3,
UBS_Count,
};
typedef void(*ComponentHandler)(SceneRenderer* pRenderer, IComponent* pComponent);
struct RenderJob
{
Mat4D modelMat;
int64_t elementOffset = 0;
int64_t elementCount = 0;
Ref<IVertexArray> pVertexArray;
Ref<IProgram> pShader;
Ref<IShaderMaterial> pMaterial;
};
SceneRenderer()
: m_cameraBuffer(Fractal_GetGraphicsAPI()->CreateUniformBuffer(0))
, m_modelBuffer(Fractal_GetGraphicsAPI()->CreateUniformBuffer(0))
, m_lightBuffer(Fractal_GetGraphicsAPI()->CreateUniformBuffer(0))
, m_camera(m_cameraBuffer->GetBuffer())
, m_model(m_modelBuffer->GetBuffer())
, m_lights(m_lightBuffer->GetBuffer(), 0)
, m_pSampler(Fractal_GetGraphicsAPI()->CreateSampler())
{
m_pSampler->SetFilterMinMode(FilterMode_Linear, false);
m_pSampler->SetFilterMagMode(FilterMode_Nearest);
AddHandler(ComponentTypeIndex<ICamera>(), HandleCamera);
AddHandler(ComponentTypeIndex<IMeshRenderer>(), HandleMesh);
AddHandler(ComponentTypeIndex<ILight>(), HandleLight);
AddHandler(ComponentTypeIndex<ISkybox>(), HandleSkybox);
IRenderAPI* pGfx = Fractal_GetGraphicsAPI();
m_fullscreenQuad = pGfx->CreateRenderMesh(Fractal_MeshPrimitives_CreateQuadUV(
-1, -1, 0, 1, // Min Pos/UV
1, 1, 1, 0 // Max Pos/UV
));
m_skyboxShader[SkyboxType_Equirectangular] = Fractal_GetAssetManager()->Load<IProgram>("~/shader/skybox/equirect");
m_skyboxShader[SkyboxType_CubeMap] = Fractal_GetAssetManager()->Load<IProgram>("~/shader/skybox/cubemap");
}
void AddHandler(int64_t componentTypeID, ComponentHandler handler)
{
m_handler.resize(Fractal_Component_Count(), nullptr);
m_handler[componentTypeID] = handler;
m_handlerLookup.clear(); // We need to rebuild the handler lookup
}
void Handle(IComponent* pComponent)
{
ComponentHandler handler = GetHandler(pComponent->GetTypeIndex());
if (handler != nullptr)
handler(this, pComponent);
}
ComponentHandler GetHandler(flIN int64_t typeID)
{
if (m_handlerLookup.empty())
{
m_handlerLookup.resize(Fractal_Component_Count());
for (int64_t id = 0; id < Fractal_Component_Count(); ++id)
{
int64_t handlerType = id;
while (handlerType > 0 && m_handler[handlerType] == nullptr)
handlerType = Fractal_Component_GetBaseTypeIndex(handlerType);
m_handlerLookup[id] = handlerType;
}
}
return m_handler[m_handlerLookup[typeID]];
}
static void HandleMesh(SceneRenderer* pRenderer, IComponent* pComponent)
{
IMeshRenderer* pMesh = (IMeshRenderer*)pComponent;
ITransform* pTransform = pMesh->GetNode()->GetTransform();
if (pTransform)
{
RenderJob job;
job.modelMat = pTransform->GetTransform();
job.pVertexArray = pMesh->GetMesh()->GetVertexArray();
for (int64_t subMesh = 0; subMesh < pMesh->GetSubMeshCount(); ++subMesh)
{
IProgram* pShader = pMesh->GetShader(subMesh);
IShaderMaterial* pMaterial = pMesh->GetMaterial(subMesh);
IRenderMesh::SubMesh const* pSubMesh = pMesh->GetSubMesh(subMesh);
job.pShader = pShader;
job.pMaterial = pMaterial;
job.elementOffset = pSubMesh->offset;
job.elementCount = pSubMesh->count;
pRenderer->m_renderQueue.push_back(job);
}
}
}
static void HandleLight(SceneRenderer* pRenderer, IComponent* pComponent)
{
ILight* pLight = (ILight*)pComponent;
auto& lightData = pRenderer->m_lights;
LightData l;
l.type = (int32_t)pLight->GetLightType();
l.colour = Vec3F(pLight->GetDiffuse().r, pLight->GetDiffuse().g, pLight->GetDiffuse().b) * pLight->GetDiffuse().a;
l.ambient = Vec3F(pLight->GetAmbient().r, pLight->GetAmbient().g, pLight->GetAmbient().b) * pLight->GetAmbient().a;
l.position = pLight->GetTransform()->GetPosition();
l.direction = pLight->GetTransform()->GetForward();
l.strength = (float)pLight->GetStrength();
l.falloff = (float)pLight->GetFalloff();
l.innerCutoff = (float)Cos(pLight->GetInnerConeAngle());
l.outerCutoff = (float)Cos(pLight->GetOuterConeAngle());
lightData.PushBack(l);
}
static void HandleCamera(SceneRenderer* pRenderer, IComponent* pComponent)
{
ICamera* pCamera = (ICamera*)pComponent;
}
static void HandleSkybox(SceneRenderer* pRenderer, IComponent* pComponent)
{
ISkybox* pSkybox = (ISkybox*)pComponent;
pRenderer->m_skyboxType = pSkybox->GetSkyboxType();
pRenderer->m_skyboxTex = pSkybox->GetTexture();
}
void Begin(flIN ISceneGraph* pGraph)
{
if (pGraph == nullptr)
{
flWarning("Begin(...) failed: pGraph was nullptr.");
return;
}
}
void Submit(flIN INode* pNode)
{
if (pNode == nullptr)
{
flWarning("Submit(...) failed: pNode was nullptr.");
return;
}
if (!pNode->IsActive() || !pNode->IsVisible())
return; // Invisible or inactive. Don't render
for (int64_t componentIndex = 0; componentIndex < pNode->GetComponentCount(); ++componentIndex)
Submit(pNode->GetComponent(componentIndex));
for (int64_t childIndex = 0; childIndex < pNode->GetChildCount(); ++childIndex)
Submit(pNode->GetChild(childIndex));
}
void Submit(flIN IComponent* pComponent)
{
if (pComponent == nullptr)
{
flWarning("Submit(...) failed: pComponent was nullptr.");
return;
}
Handle(pComponent);
}
void End() {}
void Draw(flIN Mat4D viewMatrix, flIN Mat4D projMat)
{
m_lights.Upload();
m_lightBuffer->Bind(UBS_Light, true);
m_camera->viewMat = viewMatrix;
m_camera->camMat = viewMatrix.Inverse();
m_camera->projMat = projMat;
m_camera->invProjMat = projMat.Inverse();
m_camera.Upload();
m_cameraBuffer->Bind(UBS_Camera);
IRenderAPI* pGraphics = Fractal_GetGraphicsAPI();
// Draw the skybox
if (m_skyboxTex != nullptr)
{
m_fullscreenQuad->GetVertexArray()->Bind();
pGraphics->GetState()->SetFeatureEnabled(DeviceFeature_DepthRead, false);
pGraphics->GetState()->SetFeatureEnabled(DeviceFeature_DepthWrite, false);
m_skyboxShader[m_skyboxType]->Bind();
m_skyboxShader[m_skyboxType]->SetTexture(0, m_skyboxTex.Get());
m_skyboxShader[m_skyboxType]->SetSampler(0, m_pSampler.Get());
pGraphics->Render(DrawMode_Triangles, m_fullscreenQuad->GetVertexArray()->GetIndexBuffer() != nullptr, 0, m_fullscreenQuad->GetVertexArray()->GetIndexCount());
}
IVertexArray* pActiveVertexArray = nullptr;
IProgram* pActiveProgram = nullptr;
IShaderMaterial* pActiveMaterial = nullptr;
pGraphics->GetState()->SetFeatureEnabled(DeviceFeature_DepthRead, true);
pGraphics->GetState()->SetFeatureEnabled(DeviceFeature_DepthWrite, true);
for (auto& job : m_renderQueue)
{
m_model->modelMat = job.modelMat;
m_model->normalMat = job.modelMat.Inverse().Transpose();
m_model->mvp = (projMat * viewMatrix * job.modelMat);
m_model.Upload();
m_modelBuffer->Bind(UBS_Model);
if (job.pVertexArray != pActiveVertexArray)
{
pActiveVertexArray = job.pVertexArray.Get();
pActiveVertexArray->Bind();
}
if (job.pShader != pActiveProgram)
{
pActiveProgram = job.pShader.Get();
if (pActiveProgram->Compile())
pActiveProgram->Bind();
else
pActiveProgram = nullptr;
}
if (job.pMaterial != pActiveMaterial)
{
pActiveMaterial = job.pMaterial.Get();
pActiveMaterial->Bind(UBS_Material);
}
if (pActiveMaterial != nullptr && pActiveProgram != nullptr && pActiveVertexArray != nullptr)
pGraphics->Render(DrawMode_Triangles, pActiveVertexArray->GetIndexBuffer() != nullptr, job.elementOffset, job.elementCount);
}
}
// No-op; NOTE(review): the render queue appears to be cleared elsewhere --
// confirm whether queued jobs should be dropped here.
void Clear() {}
private:
// Per-draw constants uploaded to the model uniform buffer (UBS_Model).
struct ModelData
{
Mat4F modelMat;
Mat4F normalMat; // inverse-transpose of modelMat (see Draw)
Mat4F mvp;
};
// Per-frame camera constants uploaded to UBS_Camera.
struct CameraData
{
Mat4F viewMat;
Mat4F projMat;
Mat4F camMat; // inverse of viewMat
Mat4F invProjMat; // inverse of projMat
};
// GPU-side light description.  The trailing padding pads the struct to a
// 16-byte multiple -- presumably to satisfy the uniform-buffer layout
// rules; TODO confirm against the shader's declaration.
struct LightData
{
Vec3F colour;
int32_t type;
Vec3F ambient;
float strength;
Vec3F position;
float falloff;
Vec3F direction;
float innerCutoff;
float outerCutoff;
float padding[3];
};
Ref<IUniformBuffer> m_cameraBuffer;
Ref<IUniformBuffer> m_modelBuffer;
Ref<IUniformBuffer> m_lightBuffer;
StructuredBuffer<ModelData> m_model;
StructuredBuffer<CameraData> m_camera;
StructuredBuffer<LightData> m_lights;
ctVector<RenderJob> m_renderQueue;
ctVector<int64_t> m_handlerLookup; // Handler to use for each component type
ctVector<ComponentHandler> m_handler; // Handler functions
SkyboxType m_skyboxType;
Ref<ITexture> m_skyboxTex; // skybox drawing is skipped when null
Ref<ISampler> m_pSampler;
Ref<IProgram> m_skyboxShader[SkyboxType_Count];
Ref<IRenderMesh> m_fullscreenQuad;
};
}
}
// Exported factory for the scene renderer; ownership of the returned
// instance passes to the caller.
flEXPORT Fractal::ISceneRenderer* flCCONV Fractal_CreateSceneRenderer()
{
return flNew Fractal::Impl::SceneRenderer;
}
|
AssociationPaupiette/paupiette
|
db/migrate/20181113094109_rename_preregister_for_preregistration.rb
|
<filename>db/migrate/20181113094109_rename_preregister_for_preregistration.rb<gh_stars>0
# Renames the +preregisters+ table to +preregistrations+.  Using +change+
# keeps the migration reversible: rollback renames the table back.
class RenamePreregisterForPreregistration < ActiveRecord::Migration[5.2]
def change
rename_table :preregisters, :preregistrations
end
end
|
Snehagupta1907/CircuitVerse
|
spec/requests/api/v1/projects_controller/toggle_star_spec.rb
|
<filename>spec/requests/api/v1/projects_controller/toggle_star_spec.rb
# frozen_string_literal: true
require "rails_helper"
# Request specs for GET /api/v1/projects/:id/toggle-star, covering the
# unauthenticated, missing-project, star, and unstar paths.
RSpec.describe Api::V1::ProjectsController, "#toggle_star", type: :request do
describe "toggle starred condition for a particular project" do
let!(:user) { FactoryBot.create(:user) }
let!(:project) { FactoryBot.create(:project, author: user) }
context "when not authenticated" do
before do
get "/api/v1/projects/#{project.id}/toggle-star", as: :json
end
it "returns status :not_authorized" do
expect(response).to have_http_status(:unauthorized)
expect(response.parsed_body).to have_jsonapi_errors
end
end
context "when authenticated & stars a non existent project" do
before do
token = get_auth_token(user)
# Project id 0 is assumed never to exist in the test database.
get "/api/v1/projects/0/toggle-star",
headers: { "Authorization": "Token #{token}" }, as: :json
end
it "returns status :not_found" do
expect(response).to have_http_status(:not_found)
expect(response.parsed_body).to have_jsonapi_errors
end
end
context "when stars an unstarred project" do
before do
token = get_auth_token(user)
get "/api/v1/projects/#{project.id}/toggle-star",
headers: { "Authorization": "Token #{token}" }, as: :json
end
it "returns status :ok & starred message" do
expect(response).to have_http_status(:ok)
expect(response.parsed_body["message"]).to eq("Starred successfully!")
end
end
context "when unstars a starred project" do
before do
# Pre-create the star so the same endpoint exercises the unstar path.
FactoryBot.create(:star, project: project, user: user)
token = get_auth_token(user)
get "/api/v1/projects/#{project.id}/toggle-star",
headers: { "Authorization": "Token #{token}" }, as: :json
end
it "returns status :ok & starred message" do
expect(response).to have_http_status(:ok)
expect(response.parsed_body["message"]).to eq("Unstarred successfully!")
end
end
end
end
|
cinecove/defunctr
|
lib/browsers/safari.js
|
/* @flow */
'use strict';
import htmlElementConstructorCheck from '../checks/htmlElementConstructorCheck';
// Safari detection: delegates entirely to the HTMLElement-constructor
// quirk check imported above; returns its boolean result unchanged.
export default function () : boolean {
return htmlElementConstructorCheck();
}
|
himanshiLt/prepack
|
test/serializer/abstract/PutValue.js
|
<reponame>himanshiLt/prepack<gh_stars>1000+
// throws introspection error
// Prepack serializer fixture: assigning a property to a number primitive
// is a silent no-op at runtime, but Prepack cannot model the PutValue on
// an abstract primitive -- the directive above declares the expected error.
var i = 42;
i.someProperty = 43;
var obj = __makePartial({});
obj.someProperty = 42;
|
ishitamed19/referit3d
|
referit3d/external_tools/Scan2CAD/Network/pytorch/SaveOutput.py
|
import numpy as np
import pathlib
import Vox
import os
import sys
sys.path.append("../base")
import JSONHelper
def save_output(batch_size, rootdir, samples, outputs, is_testtime=False):
    """Write per-sample prediction artifacts for one batch.

    For each sample this creates <rootdir>/<basename_save>/ containing:
      * predict-heatmap.vox2 - CAD distance field plus the predicted heatmap
      * predict.json         - match score, predicted scale and scan point
      * input-center.vox     - symlink back to the original input voxel file
    and, unless ``is_testtime`` is true, additionally:
      * gt-heatmap.vox2      - CAD distance field plus the ground-truth heatmap

    Args:
        batch_size: number of samples in ``samples``/``outputs``.
        rootdir: root directory under which per-sample folders are created
            (the folders themselves are created; ``rootdir`` must exist,
            since mkdir is called with parents=False).
        samples: batch dict of CPU input tensors and metadata.
        outputs: batch dict of network output tensors (moved to CPU here).
        is_testtime: when true, skip the ground-truth heatmap export.
    """
    # Removed a vestigial `if True:` guard (leftover from a disabled
    # `is_match` filter) and the dead scan-side locals it wrapped.
    for i in range(batch_size):
        is_match = outputs["match"][i].item()
        df_cad = samples["df_cad"][i].numpy()
        heatmap_pred = outputs["heatmap"][i].data.cpu().numpy()
        grid2world_cad = samples["grid2world_cad"][i].numpy()
        basename_save = samples["basename_save"][i]
        voxres_cad = samples["voxres_cad"][i]
        scale = outputs["scale"][i].data.cpu().numpy().tolist()
        p_scan = samples["p_scan"][i].numpy().tolist()

        savedir = rootdir + "/" + basename_save
        pathlib.Path(savedir).mkdir(parents=False, exist_ok=True)

        dims_cad = [df_cad.shape[1], df_cad.shape[2], df_cad.shape[3]]
        vox = Vox.Vox(dims_cad, voxres_cad, grid2world_cad, df_cad, heatmap_pred)
        Vox.write_vox(savedir + "/predict-heatmap.vox2", vox)

        item = {"match": is_match, "scale": scale, "p_scan": p_scan}
        JSONHelper.write(savedir + "/predict.json", item)

        # Symlink instead of copy: the input voxel file can be large.
        force_symlink(savedir + "/input-center.vox", samples["filename_vox_center"][i])

        if is_testtime:
            continue

        heatmap_gt = outputs["heatmap_gt"][i].data.cpu().numpy()
        vox = Vox.Vox(dims_cad, voxres_cad, grid2world_cad, df_cad, heatmap_gt)
        Vox.write_vox(savedir + "/gt-heatmap.vox2", vox)
def force_symlink(linkname, target):
    """Create a symlink ``linkname`` -> ``target``, replacing any existing one.

    The original used a bare ``except:``, which swallowed *every* failure
    (permissions, bad path, ...) and then raised a confusing secondary
    error from ``os.remove``.  Only a pre-existing link/file should trigger
    the remove-and-retry path, so catch FileExistsError specifically.
    """
    try:
        os.symlink(target, linkname)
    except FileExistsError:
        os.remove(linkname)
        os.symlink(target, linkname)
|
ketancmaheshwari/swift-k
|
cogkit/modules/util/src/org/globus/cog/util/Streamer.java
|
<reponame>ketancmaheshwari/swift-k
package org.globus.cog.util;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * Thread that copies an InputStream to an OutputStream in 64 KiB chunks,
 * exposing a simple lifecycle status for callers to poll.
 */
public class Streamer extends Thread {
    InputStream istream;
    OutputStream ostream;
    int chunk = 64*1024;
    enum Status {
        UNSUBMITTED,
        ACTIVE,
        COMPLETED,
        FAILED
    }
    // volatile: written by this thread in run(), read by other threads via
    // getStatus()/isTerminal(); without it the update may never be seen.
    volatile Status status = Status.UNSUBMITTED;

    /**
     * @param istream source stream (drained until EOF)
     * @param ostream destination stream (flushed, but not closed, on success)
     */
    public Streamer(InputStream istream, OutputStream ostream) {
        this.istream = istream;
        this.ostream = ostream;
        setName("Streamer");
    }

    public void run() {
        status = Status.ACTIVE;
        byte[] buffer = new byte[chunk];
        BufferedInputStream bis = new BufferedInputStream(istream);
        BufferedOutputStream bos = new BufferedOutputStream(ostream);
        int actual = 0;
        try {
            while ((actual = bis.read(buffer, 0, chunk)) != -1)
                bos.write(buffer, 0, actual);
            bos.flush();
            // Bug fix: COMPLETED is now set only on the success path.  The
            // original assigned it unconditionally after the catch block,
            // overwriting FAILED so callers could never observe a failure.
            status = Status.COMPLETED;
        }
        catch (IOException e) {
            status = Status.FAILED;
            e.printStackTrace();
        }
    }

    /** Current lifecycle state of the copy. */
    public Status getStatus() {
        return status;
    }

    /** True once the copy has finished, successfully or not. */
    public boolean isTerminal() {
        return (status == Status.COMPLETED ||
                status == Status.FAILED);
    }
}
|
codeasashu/react-openapi-builder
|
src/Stores/oasStore.js
|
import {hasIn, escapeRegExp} from 'lodash';
import {toFSPath, resolve, isURL} from '@stoplight/path';
import Path from './oas/path';
import Service from './oas/service';
import {eventTypes, nodeOperations} from '../datasets/tree';
/**
 * OAS-specific store: wires up path/service sub-stores, resolves $ref
 * navigation events, and exposes helpers that add shared components
 * (parameters, responses, examples, request bodies) to a source node's
 * parsed OpenAPI document.
 */
class OasStore {
  constructor(e) {
    this.stores = e;
    this.path = new Path(e);
    this.service = new Service(e);
    this.eventEmitter = e.eventEmitter;
    //this.operation = new ld(e)
    //this.service = new hd(e)
  }

  activate() {
    this.path.activate();
    this.service.activate();
  }

  registerEventListeners() {
    this.eventEmitter.on(eventTypes.GoToRef, (refPath) => {
      const sourceNode = this.stores.uiStore.activeSourceNode;
      if (!sourceNode || !refPath) {
        return;
      }
      // External references open in the browser instead of the editor.
      if (isURL(refPath)) {
        this.stores.browserStore.openUrlInBrowser(refPath);
        return;
      }
      let nodeUri;
      // Resolve the ref relative to the source node's directory: the
      // node's own path is stripped from its uri before resolving.
      let resolvedRefPath = toFSPath(
        resolve(
          sourceNode.uri.replace(
            new RegExp(escapeRegExp(sourceNode.path) + '$'),
            '',
          ),
          refPath,
        ),
      );
      const hashLoc = resolvedRefPath.indexOf('#');
      if (hashLoc !== -1) {
        nodeUri =
          resolvedRefPath.length < hashLoc + 2
            ? undefined
            : resolvedRefPath.slice(hashLoc + 1);
      }
      // NOTE(review): when the ref has no '#' fragment, nodeUri is
      // undefined and interpolates as the literal string "undefined"
      // below -- confirm whether fragment-less refs can reach this point.
      const node = this.stores.graphStore.getNodeByUri(
        `/p/reference.yaml${nodeUri}`,
      );
      if (node) {
        this.stores.uiStore.setActiveNode(node);
      }
    });
  }

  /**
   * Shared implementation for the addShared* helpers below (the original
   * duplicated this patch logic four times).  Patches the source node's
   * parsed document: creates components.<componentType> when missing,
   * then adds the named item with the given value.
   */
  _addSharedComponent(sourceNodeId, componentType, name, value) {
    const sourceNode = this.stores.graphStore.getNodeById(sourceNodeId);
    const destination = ['components', componentType];
    const itemPath = [...destination, name];
    this.stores.graphStore.graph.patchSourceNodeProp(
      sourceNodeId,
      'data.parsed',
      [
        ...(hasIn(sourceNode.data.parsed, destination)
          ? []
          : [
              {
                op: nodeOperations.Add,
                path: destination,
                value: {},
              },
            ]),
        {
          op: nodeOperations.Add,
          path: itemPath,
          value,
        },
      ],
    );
  }

  addSharedParameter({sourceNodeId, name, parameterType}) {
    this._addSharedComponent(sourceNodeId, 'parameters', name, {
      name,
      in: parameterType,
      // Path parameters are always required per the OpenAPI spec.
      required: parameterType === 'path',
      schema: {
        type: 'string',
      },
    });
  }

  addSharedResponse({sourceNodeId, name}) {
    this._addSharedComponent(sourceNodeId, 'responses', name, {
      description: '',
      content: {
        'application/json': {
          schema: {
            type: 'object',
            properties: {},
          },
        },
      },
    });
  }

  addSharedExample({sourceNodeId, name}) {
    this._addSharedComponent(sourceNodeId, 'examples', name, {
      description: 'Example shared example',
      type: 'object',
      properties: {
        id: {
          type: 'string',
        },
      },
      required: ['id'],
    });
  }

  addSharedRequestBody({sourceNodeId, name}) {
    this._addSharedComponent(sourceNodeId, 'requestBodies', name, {
      description: 'Example response',
      content: {
        'application/json': {
          schema: {
            type: 'object',
            properties: {},
          },
        },
      },
    });
  }
}
export default OasStore;
|
ministryofjustice/opg-sirius-end-to-end-tests
|
cypress/integration/supervision/clients/add-event.spec.js
|
// Each test runs as a Case Manager with a freshly created client (the
// client id is exposed via the @clientId alias).
beforeEach(() => {
cy.loginAs('Case Manager');
cy.createAClient();
});
describe('Add event to a client', { tags: ['@supervision', 'client', '@smoke-journey','supervision-notes'] }, () => {
it(
'Given I\'m a Case Manager on Supervision, when I add an event, then Word formatting is cleaned',
() => {
cy.get('@clientId').then(clientId => {
cy.visit('/supervision/#/clients/' + clientId);
cy.contains('<NAME>');
cy.get('[id="create-event-button"]').click()
// Wait for the TinyMCE iframe to appear and the editor to initialise
// before driving it through the window object.
cy.get('.tox-edit-area__iframe', { timeout: 10000 }).should('be.visible').scrollIntoView()
cy.window().its('tinyMCE').its('activeEditor').its('initialized',{"timeout":2000})
cy.window().then((win) => {
// Word-style markup: inline fonts, MsoNormal class and spans should
// all be stripped by the editor's paste-cleaning configuration.
const pastedata = '<p class="MsoNormal" style="margin: 0cm 0cm 11.25pt; font-size: 12pt; font-family: Calibri, sans-serif; text-align: justify; background: white;"><span style="font-size: 10.5pt; font-family: "Open Sans", sans-serif;">Test</span><span style="font-size: 10.5pt; font-family: "Open Sans", sans-serif; color: rgb(192, 0, 0);"> this</span><span style="font-size: 10.5pt; font-family: "Open Sans", sans-serif;"><b> pasted </b>data then.<o:p></o:p></span></p>';
let editor = win.tinymce.activeEditor
editor.dom.createRng();
// Simulate a clipboard paste rather than typing, so the paste
// post-processing path is exercised.
editor.execCommand('mceInsertClipboardContent', false, {
content: pastedata
});
let content = editor.getContent();
expect(content).to.contain('<p>Test this<strong> pasted </strong>data then.</p>')
expect(content).to.not.contain('Calibri');
expect(content).to.not.contain('MsoNormal')
expect(content).to.not.contain('span')
});
});
}
);
});
|
bufferoverflow/embb
|
dataflow_cpp/include/embb/dataflow/internal/sink.h
|
/*
* Copyright (c) 2014, Siemens AG. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef EMBB_DATAFLOW_INTERNAL_SINK_H_
#define EMBB_DATAFLOW_INTERNAL_SINK_H_
#include <embb/dataflow/internal/node.h>
#include <embb/dataflow/internal/inputs.h>
#include <embb/dataflow/internal/sink_executor.h>
#include <embb/dataflow/internal/action.h>
namespace embb {
namespace dataflow {
namespace internal {
// Primary template; only the five-input specialization below is defined.
template <int Slices, class Inputs> class Sink;
// Terminal dataflow node: consumes up to five typed inputs per clock tick
// and invokes a user-supplied function on them.  Executions are forced
// into strict clock order (see Run).
template <
int Slices,
typename I1, typename I2, typename I3, typename I4, typename I5>
class Sink< Slices, Inputs<Slices, I1, I2, I3, I4, I5> >
: public Node
, public ClockListener {
public:
typedef Inputs<Slices, I1, I2, I3, I4, I5> InputsType;
typedef SinkExecutor< InputsType > ExecutorType;
typedef typename ExecutorType::FunctionType FunctionType;
// Wraps the user function; this sink listens to its own inputs' clocks.
explicit Sink(FunctionType function)
: executor_(function) {
input_clock_expected_ = 0;
inputs_.SetListener(this);
}
// Downstream listener notified after each clock has been processed.
void SetListener(ClockListener * listener) {
listener_ = listener;
}
virtual bool HasInputs() const {
return inputs_.Size() > 0;
}
virtual void Run(int clock) {
//const int idx = clock % Slices;
// force ordering
// Spin (yielding the CPU) until the previous clock has completed, so
// the user function observes ticks in strictly increasing clock order.
while (input_clock_expected_ != clock) embb::base::Thread::CurrentYield();
// Blank inputs suppress execution for this tick, but the clock is
// still propagated to the listener below.
if (inputs_.AreNoneBlank(clock)) {
executor_.Execute(clock, inputs_);
}
listener_->OnClock(clock);
input_clock_expected_ = clock + 1;
}
InputsType & GetInputs() {
return inputs_;
}
template <int Index>
typename TypeAt<typename InputsType::Types, Index>::Result & GetInput() {
return inputs_.template Get<Index>();
}
// Called by the inputs when a tick is ready; spawning is serialized by
// the spinlock because multiple input clocks may fire concurrently.
virtual void OnClock(int clock) {
lock_.Lock();
TrySpawn(clock);
lock_.Unlock();
}
private:
InputsType inputs_;
ExecutorType executor_;
embb::base::Atomic<int> input_clock_expected_; // next clock Run may process
Action action_[Slices]; // one in-flight action slot per pipeline slice
ClockListener * listener_;
SpinLock lock_; // guards TrySpawn (see OnClock)
// Schedules a Run(clock) action on the scheduler; clock % Slices picks
// the reusable action slot for this tick.
void TrySpawn(int clock) {
const int idx = clock % Slices;
if (!inputs_.AreAtClock(clock))
EMBB_THROW(embb::base::ErrorException,
"Some inputs are not at expected clock.")
action_[idx] = Action(this, clock);
sched_->Spawn(action_[idx]);
}
};
} // namespace internal
} // namespace dataflow
} // namespace embb
#endif // EMBB_DATAFLOW_INTERNAL_SINK_H_
|
zerookrash/App-Full-Consultorios
|
cliente_src/src/contenedores/Rutas/index.js
|
import React from 'react';
import { Helmet } from 'react-helmet';
import { BrowserRouter as Router, Route, Switch } from 'react-router-dom';
import BienvenidoPagina from '../BienvenidoPagina';
// import BienvenidoPagina2 from '../BienvenidoPagina2/Home';
import DashboardPagina from '../DashboardPagina';
import EstadoPagina from '../EstadoPagina';
import NoEncontradoPagina from '../NoencontradoPagina';
import TestPagina from '../Test';
const Rutas = () => {
return (
<div>
<Helmet>
<title>📲 Desarrollador Web NodeJS</title>
</Helmet>
<Router>
<Switch>
<Route component={BienvenidoPagina} exact={true} path='/' />
<Route component={DashboardPagina} exact={true} path='/dashboard' />
<Route component={EstadoPagina} exact={true} path='/es' />
<Route component={TestPagina} exact={true} path='/test' />
<Route component={NoEncontradoPagina} path='*' />
</Switch>
</Router>
</div>
);
};
export default Rutas;
|
caesardai/assignments
|
A10/buddhabrot.c
|
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <assert.h>
#include <math.h>
#include <time.h>
#include <string.h>
#include <pthread.h>
#include <sys/time.h>
#include "read_ppm.h"
pthread_mutex_t mutex;
pthread_barrier_t barrier;
// step 1
void *determine_membership(void* args);
// step 2
void *compute_visited_counts(void* args);
// step 3
void *compute_colors(void* args);
// start routine
void * thread_function(void * args);
struct thread_data {
int id;
int size;
float xmin;
float xmax;
float ymin;
float ymax;
int maxIterations;
int row_s, row_t, col_s, col_t;
struct ppm_pixel* palette;
struct ppm_pixel** graph_matrix;
int* max_count;
int** membership_log;
int** v_count;
};
/* Allocates a size x size pixel grid (row-pointer array plus rows).
 * Terminates the program on allocation failure, matching the original
 * error behavior. */
static struct ppm_pixel** alloc_pixel_grid(int size) {
  struct ppm_pixel** grid = malloc(sizeof(struct ppm_pixel*) * size);
  if (grid == NULL) {
    printf("Memory allocation failed. Exiting.\n");
    exit(1);
  }
  for (int i = 0; i < size; i++) {
    grid[i] = malloc(sizeof(struct ppm_pixel) * size);
    if (grid[i] == NULL) {
      printf("Memory allocation failed. Exiting.\n");
      exit(1);
    }
  }
  return grid;
}

/* Allocates a size x size int grid; terminates on allocation failure. */
static int** alloc_int_grid(int size) {
  int** grid = malloc(sizeof(int*) * size);
  if (grid == NULL) {
    printf("Memory allocation failed. Exiting.\n");
    exit(1);
  }
  for (int i = 0; i < size; i++) {
    grid[i] = malloc(sizeof(int) * size);
    if (grid[i] == NULL) {
      printf("Memory allocation failed. Exiting.\n");
      exit(1);
    }
  }
  return grid;
}

/* Parses options, spawns the four quadrant worker threads, and writes the
 * resulting buddhabrot image as a timestamped PPM file.
 * Fixes vs. original: the membership_log allocation was NULL-checked
 * against graph_matrix (copy-paste bug); the barrier is initialized with
 * numProcesses instead of a second literal 4; '-p' now has an explicit
 * (documented) case instead of silently falling through. */
int main(int argc, char* argv[]) {
  double timer;
  int size = 400;
  float xmin = -2.0;
  float xmax = 0.47;
  float ymin = -1.12;
  float ymax = 1.12;
  int max_count = 0;
  int numProcesses = 4;  /* quadrant decomposition below requires exactly 4 */
  int maxIterations = 1000;
  struct timeval tstart, tend;

  int opt;
  while ((opt = getopt(argc, argv, ":s:l:r:t:b:p:")) != -1) {
    switch (opt) {
      case 's': size = atoi(optarg); break;
      case 'l': xmin = atof(optarg); break;
      case 'r': xmax = atof(optarg); break;
      case 't': ymax = atof(optarg); break;
      case 'b': ymin = atof(optarg); break;
      /* -p is accepted for compatibility with the usage string, but the
       * quadrant-based work split is hard-wired to 4 threads, so the
       * value is deliberately ignored (as it was before, silently). */
      case 'p': break;
      case '?': printf("usage: %s -s <size> -l <xmin> -r <xmax> "
        "-b <ymin> -t <ymax> -p <numProcesses>\n", argv[0]); break;
    }
  }
  printf("Generating buddhabrot with size %dx%d\n", size, size);
  printf(" Num processes = %d\n", numProcesses);
  printf(" X range = [%.4f,%.4f]\n", xmin, xmax);
  printf(" Y range = [%.4f,%.4f]\n", ymin, ymax);

  /* Synchronization for the phase-ordered workers (see thread_function). */
  pthread_barrier_init(&barrier, NULL, numProcesses);
  pthread_mutex_init(&mutex, NULL);

  /* Output image, per-pixel mandelbrot membership, and orbit visit counts.
   * (Fixed: the original NULL-checked graph_matrix when allocating
   * membership_log.) */
  struct ppm_pixel** graph_matrix = alloc_pixel_grid(size);
  int** membership_log = alloc_int_grid(size);
  int** v_count = alloc_int_grid(size);

  pthread_t tid[4];
  struct thread_data data[4];
  /* Visit counts are accumulated by all threads; start them at zero. */
  for (int r = 0; r < size; r++) {
    for (int c = 0; c < size; c++) {
      v_count[r][c] = 0;
    }
  }

  gettimeofday(&tstart, NULL);
  for (int i = 0; i < numProcesses; i++) {
    int half = size / 2;
    int row_s, row_t, col_s, col_t;
    if (i == 0) {        /* first quadrant */
      row_s = 0;    row_t = half; col_s = 0;    col_t = half;
    } else if (i == 1) { /* second quadrant */
      row_s = 0;    row_t = half; col_s = half; col_t = size;
    } else if (i == 2) { /* third quadrant */
      row_s = half; row_t = size; col_s = 0;    col_t = half;
    } else {             /* fourth quadrant */
      row_s = half; row_t = size; col_s = half; col_t = size;
    }
    data[i].id = i;
    data[i].size = size;
    data[i].row_s = row_s;
    data[i].row_t = row_t;
    data[i].col_s = col_s;
    data[i].col_t = col_t;
    data[i].xmin = xmin;
    data[i].xmax = xmax;
    data[i].ymin = ymin;
    data[i].ymax = ymax;
    data[i].max_count = &max_count;  /* shared; mutex-guarded in phase 2 */
    data[i].maxIterations = maxIterations;
    data[i].graph_matrix = graph_matrix;
    data[i].membership_log = membership_log;
    data[i].v_count = v_count;
    pthread_create(&tid[i], NULL, thread_function, (void*) &data[i]);
  }
  for (int i = 0; i < numProcesses; i++) {
    pthread_join(tid[i], NULL);
  }
  gettimeofday(&tend, NULL);
  timer = tend.tv_sec - tstart.tv_sec + (tend.tv_usec - tstart.tv_usec)/1.e6;
  printf("Computed buddhabrot set (%dx%d) in %g seconds\n", size, size, timer);

  /* Timestamped name so repeated runs do not overwrite each other.
   * (sprintf already NUL-terminates; the original's manual termination
   * was dead code.) */
  char output_name[128];
  sprintf(output_name, "buddhabrot-%d-%.10ld.ppm", size, time(0));
  write_ppm(output_name, graph_matrix, size, size);

  for (int i = 0; i < size; i++) {
    free(graph_matrix[i]);
    free(membership_log[i]);
    free(v_count[i]);
  }
  free(graph_matrix);
  free(membership_log);
  free(v_count);

  pthread_mutex_destroy(&mutex);
  pthread_barrier_destroy(&barrier);
  return 0;
}
/////////////////
/////// FUNCTIONS ///////
/////////////////
// step 1
// Phase 1: classic escape-time test over this thread's quadrant.  For each
// start point c, iterate z = z^2 + c up to maxIterations; points whose
// orbit never reaches |z| >= 2 are marked as members of the mandelbrot set
// (membership_log = 1), escapees as 0.  Each thread writes only its own
// quadrant of membership_log, so no locking is needed here.
void *determine_membership(void* args) {
int iter;
float xfrac, yfrac, x0, y0, x, y, xtmp;
struct thread_data* data = (struct thread_data *) args;
for (int r = data->row_s; r < data->row_t; r++) {
for (int c = data->col_s; c < data->col_t; c++) {
// NOTE(review): rows map to the x axis and columns to y, so the image
// is effectively transposed -- confirm this matches write_ppm's layout.
xfrac = (float) r /data->size;
yfrac = (float) c /data->size;
x0 = data->xmin + xfrac * (data->xmax - data->xmin);
y0 = data->ymin + yfrac * (data->ymax - data->ymin);
x = 0;
y = 0;
iter = 0;
// |z|^2 < 4 avoids a sqrt per iteration.
while (iter < data->maxIterations && x*x + y*y < 2*2) {
xtmp = x*x - y*y + x0;
y = 2*x*y + y0;
x = xtmp;
iter++;
}
if (iter < data->maxIterations) {
data->membership_log[r][c] = 0;
} else {
data->membership_log[r][c] = 1;
}
}
}
return (void *) NULL;
}
// step 2
// Phase 2: for every *escaping* start point in this thread's quadrant
// (buddhabrot plots only orbits that leave the set), replay the orbit and
// increment the visit count of every image cell it passes through.  The
// orbit may land anywhere in the image, so v_count and the shared maximum
// are guarded by the global mutex.
void *compute_visited_counts(void* args) {
int yrow, xcol;
float xfrac, yfrac, x0, y0, x, y, xtmp;
struct thread_data* data = (struct thread_data *) args;
for (int r = data->row_s; r < data->row_t; r++) {
for (int c = data->col_s; c < data->col_t; c++) {
// Points inside the set never escape; skip them.
if (data->membership_log[r][c] == 1) {
continue;
} else {
xfrac = (float) r /data->size;
yfrac = (float) c /data->size;
x0 = data->xmin + xfrac * (data->xmax - data->xmin);
y0 = data->ymin + yfrac * (data->ymax - data->ymin);
x = 0;
y = 0;
// Phase 1 established this orbit escapes, so the loop terminates.
while (x*x + y*y < 2*2) {
xtmp = x*x - y*y + x0;
y = 2*x*y + y0;
x = xtmp;
// Map the orbit point back to image coordinates.
yrow = round(data->size * (y - data->ymin) / (data->ymax - data->ymin));
xcol = round(data->size * (x - data->xmin) / (data->xmax - data->xmin));
if (yrow < 0 || yrow >= data->size) { // out of range
continue;
}
if (xcol < 0 || xcol >= data->size) { // out of range
continue;
}
pthread_mutex_lock(&mutex);
data->v_count[yrow][xcol]++;
// update max count
if (data->v_count[yrow][xcol] > *(data->max_count)) {
*(data->max_count) = data->v_count[yrow][xcol];
}
pthread_mutex_unlock(&mutex);
}
}
}
}
return (void *) NULL;
}
// step 3
// Phase 3: map visit counts to grayscale.  Each count is normalized on a
// log scale against the global maximum and gamma-corrected before being
// written to all three channels.  Reads only; no locking required, but all
// phase-2 writes (including max_count) must be complete first.
void *compute_colors(void* args) {
float gamma = 0.681;
float factor = 1.0 / gamma;
struct thread_data* data = (struct thread_data *) args;
for (int r = data->row_s; r < data->row_t; r++) {
for (int c = data->col_s; c < data->col_t; c++) {
float value = 0;
if (data->v_count[r][c] > 0) {
// NOTE(review): if max_count is 1, log(1) == 0 and this divides by
// zero -- only reachable on degenerate inputs, but worth confirming.
value = log(data->v_count[r][c]) / log(*(data->max_count));
value = pow(value, factor);
}
data->graph_matrix[r][c].red = value * 255;
data->graph_matrix[r][c].green = value * 255;
data->graph_matrix[r][c].blue = value * 255;
}
}
return (void *) NULL;
}
void * thread_function(void * args) {
struct thread_data* data = (struct thread_data *) args;
printf("Thread %d) sub-image block: cols (%d, %d) to rows (%d, %d)\n",
data->id, data->col_s, data->col_t, data->row_s, data->row_t);
// step 1
void *determine_membership(void* args);
// step 2
void *compute_visited_counts(void* args);
// step 3
void *compute_colors(void* args);
printf("Thread %d) finished\n", data->id);
return (void *) NULL;
}
|
alpapad/raft-retry-log
|
retry-log-server/src/main/java/com/aktarma/retrylog/server/RetryLogStateMachine.java
|
<filename>retry-log-server/src/main/java/com/aktarma/retrylog/server/RetryLogStateMachine.java
package com.aktarma.retrylog.server;
import java.io.IOException;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.ratis.proto.RaftProtos;
import org.apache.ratis.protocol.Message;
import org.apache.ratis.protocol.RaftGroupId;
import org.apache.ratis.server.RaftServer;
import org.apache.ratis.server.protocol.TermIndex;
import org.apache.ratis.server.storage.RaftStorage;
import org.apache.ratis.statemachine.StateMachineStorage;
import org.apache.ratis.statemachine.TransactionContext;
import org.apache.ratis.statemachine.impl.BaseStateMachine;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
import com.aktarma.retrylog.common.wire.proto.ExceptionResponse;
import com.aktarma.retrylog.common.wire.proto.OperationResultCode;
import com.aktarma.retrylog.common.wire.proto.Request;
import com.aktarma.retrylog.common.wire.proto.Response;
import com.aktarma.retrylog.common.wire.proto.ToRetryRequest;
import com.aktarma.retrylog.common.wire.proto.ToRetryResponse;
import com.aktarma.retrylog.common.wire.proto.TransmissionBeginRequest;
import com.aktarma.retrylog.common.wire.proto.TransmissionBeginResponse;
import com.aktarma.retrylog.common.wire.proto.TransmissionCommitRequest;
import com.aktarma.retrylog.common.wire.proto.TransmissionCommitResponse;
import com.aktarma.retrylog.common.wire.proto.TransmissionConfirmedRequest;
import com.aktarma.retrylog.common.wire.proto.TransmissionConfirmedResponse;
import com.aktarma.retrylog.common.wire.proto.TransmissionEntry;
import com.aktarma.retrylog.common.wire.proto.TransmissionFailRequest;
import com.aktarma.retrylog.common.wire.proto.TransmissionFailResponse;
import com.aktarma.retrylog.common.wire.proto.TransmissionRetryEntry;
import com.aktarma.retrylog.server.db.DbBackendFactory;
import com.aktarma.retrylog.server.db.IDbBackend;
import com.aktarma.retrylog.server.db.OpResult;
import com.aktarma.retrylog.server.db.TermIndexSupplier;
public class RetryLogStateMachine extends BaseStateMachine implements TermIndexSupplier {
// Storage backend holding the replicated retry-log state.  Created
// eagerly so it exists before Ratis calls initialize()/reinitialize().
private IDbBackend backend = null;
private RaftStorage raftStorage;
public RetryLogStateMachine() {
super();
backend = DbBackendFactory.getDb(this);
}
// After a snapshot arrives from the leader, reload the backend and sync
// the applied term/index with the backend's snapshot info.
@Override
public CompletableFuture<TermIndex> notifyInstallSnapshotFromLeader(RaftProtos.RoleInfoProto roleInfoProto,
TermIndex termIndex) {
backend.reinitialize();
this.setLastAppliedTermIndex(backend.getSnapshotInfo());
return CompletableFuture.completedFuture(backend.getSnapshotInfo());
}
// Delegates lifecycle pause to the backend (resumed via reinitialize()).
@Override
public void pause() {
backend.pause();
}
// The backend doubles as the state machine's storage implementation.
@Override
public StateMachineStorage getStateMachineStorage() {
return backend;
}
/**
* @param server the current server information
* @param groupId the cluster groupId
* @param raftStorage the raft storage which is used to keep raft related stuff
* @throws IOException if any error happens during load state
*/
@Override
public void initialize(RaftServer server, RaftGroupId groupId, RaftStorage raftStorage) throws IOException {
super.initialize(server, groupId, raftStorage);
this.raftStorage = raftStorage;
backend.init(this.raftStorage);
this.setLastAppliedTermIndex(backend.getSnapshotInfo());
}
/**
* very similar to initialize method, but doesn't initialize the storage system
* because the state machine reinitialized from the PAUSE state and storage
* system initialized before.
*
* @throws IOException if any error happens during load state
*/
@Override
public void reinitialize() throws IOException {
backend.reinitialize();
this.setLastAppliedTermIndex(backend.getSnapshotInfo());
}
/**
* @return the index of the snapshot
*/
@Override
public long takeSnapshot() {
return backend.takeSnapshot();
}
// @Override
// public void notifyTermIndexUpdated(long term, long index) {
// super.notifyTermIndexUpdated(term, index);
// // CompletableFuture.supplyAsync(()-> backend.sync());
// }
// @Override
// public TransactionContext applyTransactionSerial(TransactionContext trx) {
// final Request request = RetryLogCommon.unwrap(trx.getLogEntry());
// if (request != null && request.getRequestTypeCase().equals(Request.RequestTypeCase.TRANSMISSION_BEGIN)) {
// return begin(trx, request);
// }
// return trx;
// }
//
// private TransactionContext begin(TransactionContext trx, Request request) {
// final RaftProtos.LogEntryProto entry = Objects.requireNonNull(trx.getLogEntry());
// final long index = entry.getIndex();
// final long term = entry.getTerm();
//
// TransmissionBeginRequest br = request.getTransmissionBegin();
// trx.setStateMachineContext(backend.begin(term, index, br));
// return trx;
// }
// public CompletableFuture<Void> flush(long logIndex) {
// // LOG.error("Flushing up to {}" , logIndex);
// // CompletableFuture.supplyAsync(() -> backend.sync());
// // return CompletableFuture.allOf(CompletableFuture.supplyAsync(() ->
// // backend.sync()), super.flush(logIndex));
// return super.flush(logIndex);
// }
// Applies one committed Raft log entry: records the applied term/index,
// unwraps the protobuf Request, and dispatches to the handler for its
// request type.  Unknown/unset types complete exceptionally.
@Override
public CompletableFuture<Message> applyTransaction(TransactionContext trx) {
final RaftProtos.LogEntryProto entry = Objects.requireNonNull(trx.getLogEntry());
final long index = entry.getIndex();
final long term = entry.getTerm();
updateLastAppliedTermIndex(term, index);
final Request request = RetryLogCommon.unwrap(entry);
if (request == null) {
return RetryLogCommon.completeExceptionally(index, "Could not unmashal request");
}
switch (request.getRequestTypeCase()) {
case TRANSMISSION_BEGIN:
return begin(trx, term, index, request.getTransmissionBegin());
case RETRY_REQ:
return retry(trx, request.getRetryReq());
case TRANSMISSION_COMMIT:
return commit(trx, term, index, request.getTransmissionCommit());
case TRANSMISSION_FAIL:
return fail(trx, term, index, request.getTransmissionFail());
case TRANSMISSION_CONFIRMED:
return confirm(trx, term, index, request.getTransmissionConfirmed());
case REQUESTTYPE_NOT_SET:
break;
default:
break;
}
return RetryLogCommon.completeExceptionally(index, "Message received is not handled by this state machine");
}
private CompletableFuture<Message> begin(TransactionContext trx, long term, long index,
TransmissionBeginRequest request) {
return sync(trx, builder -> {
TransmissionBeginResponse.Code code = backend.begin(term, index, request);
// TransmissionBeginResponse.Code code = TransmissionBeginResponse.Code.class.cast(trx.getStateMachineContext());
builder.setTransmissionBeginResponse(TransmissionBeginResponse.newBuilder()//
.setEntry(TransmissionEntry.newBuilder()//
.setNetworkref(request.getNetworkref())//
.setRetry(request.getRetry()))
.setCode(code));
});
}
private CompletableFuture<Message> fail(TransactionContext trx, long term, long index,
TransmissionFailRequest request) {
return sync(trx, builder -> {
OpResult result = this.backend.fail(term, index, request);
builder.setTransmissionFailResponse(TransmissionFailResponse.newBuilder()//
.setEntry(result.getEntry())//
.setCode(toCode(result)));
});
}
private CompletableFuture<Message> confirm(TransactionContext trx, long term, long index,
TransmissionConfirmedRequest request) {
return sync(trx, builder -> {
OpResult result = this.backend.confirm(term, index, request);
builder.setTransmissionConfirmedResponse(TransmissionConfirmedResponse.newBuilder()//
.setEntry(result.getEntry())//
.setCode(toCode(result)));
});
}
// Applies a TRANSMISSION_COMMIT log entry to the backend and replies with the
// affected entry and the mapped result code.
private CompletableFuture<Message> commit(TransactionContext trx, long term, long index,
        TransmissionCommitRequest request) {
    return sync(trx, builder -> {
        OpResult result = this.backend.commit(term, index, request);
        builder.setTransmissionCommitResponse(TransmissionCommitResponse.newBuilder()//
                .setEntry(result.getEntry())//
                .setCode(toCode(result)));
    });
}
// TODO(review): unimplemented stub — returns null rather than a completed
// future, so callers dispatching RETRY_REQ entries receive a null
// CompletableFuture. Confirm whether this is intentional before relying on it.
private CompletableFuture<Message> retry(TransactionContext trx, ToRetryRequest retryReq) {
    return null;
}
// Runs the response-building callback synchronously on the caller's thread and
// wraps the serialized result in an already-completed future.
private static CompletableFuture<Message> sync(TransactionContext trx, Consumer<Response.Builder> c) {
    return CompletableFuture.completedFuture(respond(trx, c));
}
// private static CompletableFuture<Message> async(TransactionContext trx, Consumer<Response.Builder> c) {
// return CompletableFuture.supplyAsync(() -> respond(trx, c));
//
// }
// private void updateLastAppliedTermIndex(TransactionContext trx) {
// final RaftProtos.LogEntryProto entry = trx.getLogEntry();
// updateLastAppliedTermIndex(entry.getTerm(), entry.getIndex());
// }
// Builds a Response via the supplied callback and serializes it into a Message.
// Any exception thrown by the callback is logged and converted into an
// ExceptionResponse payload instead of propagating to the caller.
private static Message respond(TransactionContext trx, Consumer<Response.Builder> c) {
    final Response.Builder response = Response.newBuilder();
    try {
        c.accept(response);
    } catch (Exception ex) {
        LOG.error("Error:", ex);
        return Message.valueOf(
                response.setException(toExceptionResponse(ex)).build().toByteString(),
                () -> "Message:" + ex.getMessage());
    }
    return Message.valueOf(response.build().toByteString());
}
// Translates a backend OpResult code into the wire-level OperationResultCode
// by matching on the enum constant's name.
private static OperationResultCode toCode(OpResult result) {
    final String codeName = result.getCode().name();
    return OperationResultCode.valueOf(codeName);
}
/**
 * Read-only query handler returning retryable transmission references.
 *
 * NOTE(review): this implementation is a stub — it logs the requestor and
 * then returns 10 fabricated entries ("a0".."a9", retry=1) regardless of the
 * request content.
 *
 * @param request a Message whose content is a serialized {@code ToRetryRequest}
 * @return a completed future holding a serialized {@code ToRetryResponse}
 */
@Override
public CompletableFuture<Message> query(Message request) {
    ToRetryRequest rq;
    try {
        rq = ToRetryRequest.parseFrom(request.getContent().toByteArray());
    } catch (InvalidProtocolBufferException e) {
        // Parse failure: reply with an exception response. "me" appears to be a
        // placeholder class name — TODO(review): confirm the intended value.
        ByteString buf = ToRetryResponse.newBuilder()
                .setException(ExceptionResponse.newBuilder().setExceptionClassName("me").build()).build()
                .toByteString();
        return CompletableFuture.completedFuture(Message.valueOf(buf, () -> "Message:" + e));
    }
    LOG.info("Query request from {} ", rq.getRequestor().toString());
    ToRetryResponse.Builder builder = ToRetryResponse.newBuilder();
    // Stub payload: ten synthetic retry entries.
    for (int i = 0; i < 10; i++) {
        builder.addEntry(TransmissionRetryEntry.newBuilder().setNetworkref(ByteString.copyFromUtf8("a" + i))
                .setRetry(1).build());
    }
    return CompletableFuture.completedFuture(Message.valueOf(builder.build().toByteString(), () -> "Message:"));
}
// Converts an exception into its wire representation (class name, stack
// trace, message).
// Bug fix: protobuf builder setters throw NullPointerException when passed
// null, and Exception.getMessage() may legitimately return null (e.g.
// new NullPointerException()); substitute the empty string in that case.
private static ExceptionResponse toExceptionResponse(Exception e) {
    final String message = e.getMessage() == null ? "" : e.getMessage();
    return ExceptionResponse.newBuilder()
            .setExceptionClassName(e.getClass().getName())
            .setStackTrace(ExceptionUtils.getStackTrace(e))
            .setMessage(message)
            .build();
}
// @Override
// public TermIndex getLastAppliedTermIndex() {
// return this.getLastAppliedTermIndex();
// }
}
|
kozlov-a-d/wp-theme
|
frontend/assets/blocks/page-error/page-error.js
|
<gh_stars>0
import './page-error.scss';
|
LightSun/DataIO
|
DataIO/src/test/java/com/heaven7/java/data/io/music/transfer/TransitionCutTransfer.java
|
package com.heaven7.java.data.io.music.transfer;
import com.heaven7.java.base.util.TextUtils;
import com.heaven7.java.data.io.bean.MusicItem2;
import com.heaven7.java.data.io.bean.WrappedSubItem;
import com.heaven7.java.data.io.poi.ExcelRow;
import com.heaven7.java.visitor.ResultVisitor;
import com.heaven7.java.visitor.collection.VisitServices;
import java.util.List;
/**
* @author heaven7
*/
/**
 * Transfer that parses "transition cut" timestamps from an Excel sheet and
 * attaches them to the matching music item.
 *
 * @author heaven7
 */
public class TransitionCutTransfer extends BaseAdditionalTransfer<List<Float>> {

    // Transition-cut data appears only on every PERIOD-th visited row.
    public static final int PERIOD = 3;
    // 1-based count of rows visited so far; -1 means "not started yet".
    // NOTE(review): mutable per-instance state makes parseItem non-reentrant;
    // one instance must not be shared across concurrent parses.
    private int travelIndex = -1;

    public TransitionCutTransfer(){
        this(new TransitionCutTransfer.Indexer());
    }
    public TransitionCutTransfer(TransitionCutTransfer.Indexer indexer) {
        super("transition_cut", indexer);
    }

    /**
     * Parses a comma-separated float list from the configured column, but only
     * on every PERIOD-th visited row; returns null for other rows or when the
     * cell is empty.
     */
    @Override
    protected List<Float> parseItem(ExcelRow row) {
        // Transition-cut points currently appear only once every 3 rows.
        Indexer mIndexer = (Indexer) this.indexer;
        if(travelIndex < 0){
            travelIndex = 1;
        }else {
            travelIndex++;
        }
        if(travelIndex % PERIOD == 0){
            String str = row.getColumns().get(mIndexer.index_trans_cuts).getColumnString();
            if(TextUtils.isEmpty(str)){
                return null;
            }
            // Strip spaces so " 1.0, 2.5" parses cleanly.
            str = str.replace(" ", "");
            return VisitServices.from(str.split(",")).map(new ResultVisitor<String, Float>() {
                @Override
                public Float visit(String s, Object param) {
                    return Float.parseFloat(s);
                }
            }).getAsList();
        }
        return null;
    }

    /** Copies the parsed cut list onto the matched item; warns when a non-null
     *  value would be overwritten with null. */
    @Override
    protected void applyAdditionInfo(MusicItem2 matchItem, WrappedSubItem<List<Float>> wsb) {
        if(matchItem.getTransitionCuts() != null && wsb.getSubItem() == null){
            System.err.println("TransitionCutTransfer >>> unexpect applyAdditionInfo. ");
        }
        matchItem.setTransitionCuts(wsb.getSubItem());
    }

    /** Column indexes for this transfer; index_trans_cuts selects the
     *  transition-cut column (default column 3). */
    public static class Indexer extends BaseAdditionalTransfer.Indexer{
        public int index_trans_cuts = 3;
    }
}
|
jayjanssen/praxis
|
cycle/http.go
|
<reponame>jayjanssen/praxis
package cycle
import (
"bytes"
"crypto/tls"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"sync"
)
// HTTP is a scripted HTTP test double: each expected request/response pair
// (a "cycle") is consumed in FIFO order as requests arrive.
type HTTP struct {
	Cycles []HTTPCycle
	Server *httptest.Server
	index  int // NOTE(review): never used by the methods visible in this file
	lock   sync.Mutex
}

// HTTPCycle pairs one expected request with the canned response to serve.
type HTTPCycle struct {
	Request  HTTPRequest
	Response HTTPResponse
}

// HTTPRequest describes the expected request; zero-valued fields
// (empty Method/Path, nil Body) act as wildcards in Match.
type HTTPRequest struct {
	Method string
	Path   string
	Body   []byte
}

// HTTPResponse is the canned reply; Code 0 means "use the default status".
type HTTPResponse struct {
	Code int
	Body []byte
}
// NewHTTP returns an HTTP cycle server with an empty cycle queue.
// The error result is always nil; it exists for constructor-signature symmetry.
func NewHTTP() (*HTTP, error) {
	h := &HTTP{}
	h.Cycles = []HTTPCycle{}
	return h, nil
}
// Add appends one expected request/response pair to the cycle queue.
// Safe for concurrent use.
func (s *HTTP) Add(req HTTPRequest, res HTTPResponse) {
	s.lock.Lock()
	defer s.lock.Unlock()
	cycle := HTTPCycle{Request: req, Response: res}
	s.Cycles = append(s.Cycles, cycle)
}
// Cycle is the handler func: it pops the next expected cycle, verifies the
// incoming request against it, and serves the canned response. It responds
// 500 when the queue is empty or the request does not match.
func (s *HTTP) Cycle(w http.ResponseWriter, r *http.Request) {
	s.lock.Lock()
	defer s.lock.Unlock()
	if len(s.Cycles) < 1 {
		http.Error(w, "no more cycles", 500)
		return
	}
	cycle := s.Cycles[0]
	s.Cycles = s.Cycles[1:]
	if err := cycle.Request.Match(r); err != nil {
		http.Error(w, err.Error(), 500)
		// Bug fix: previously fell through after the error and also wrote the
		// canned status/body on top of the 500 response.
		return
	}
	if cycle.Response.Code > 0 {
		w.WriteHeader(cycle.Response.Code)
	}
	w.Write(cycle.Response.Body)
}
// Listen starts the scripted server over TLS, advertising HTTP/2 ("h2") via
// ALPN, and returns its base URL.
func (s *HTTP) Listen() string {
	s.Server = httptest.NewUnstartedServer(http.HandlerFunc(s.Cycle))
	s.Server.TLS = &tls.Config{
		NextProtos: []string{"h2"},
	}
	s.Server.StartTLS()
	return s.Server.URL
}
// Match reports whether r satisfies the expected request. Empty Method/Path
// and nil Body act as wildcards; the first mismatch is returned as an error.
func (c *HTTPRequest) Match(r *http.Request) error {
	if err := compare(c.Method, r.Method, "method"); err != nil {
		return err
	}
	if err := compare(c.Path, r.URL.Path, "path"); err != nil {
		return err
	}
	body, readErr := ioutil.ReadAll(r.Body)
	if readErr != nil {
		return readErr
	}
	return compareb(c.Body, body, "body")
}
// compare returns nil when expected is empty (wildcard) or equals got;
// otherwise it returns a descriptive mismatch error for field name.
func compare(expected, got string, name string) error {
	if expected == "" || expected == got {
		return nil
	}
	return fmt.Errorf("bad cycle %s: expected:%s got:%s", name, expected, got)
}
// compareb returns nil when expected is nil (wildcard) or byte-equal to got;
// otherwise it returns a descriptive mismatch error for field name.
// Uses bytes.Equal, the idiomatic form of bytes.Compare(a, b) == 0.
func compareb(expected, got []byte, name string) error {
	if expected == nil || bytes.Equal(expected, got) {
		return nil
	}
	return fmt.Errorf("bad cycle %s: expected:%q got:%q", name, string(expected), string(got))
}
|
ym001/Manteia
|
Manteia/Task.py
|
<gh_stars>1-10
"""
This module proclaims the good word. May they
regain total freedom of artificial thought towards a new age
reminiscent.
You can install it with pip:
pip install Manteia
Example of use:
>>> from Manteia import testManteia
>>> testManteia ()
This code is licensed under MIT.
"""
__all__ = ['testManteia','testData','testClassification']
from .Preprocess import Preprocess
from .Classification import Classification
from .Statistic import Statistic
from .Visualisation import Visualisation
from .Model import Model
class Task:
    """Facade wiring a document set to an NLP task.

    Parameters
    ----------
    documents : list of str, optional
        Raw documents; when omitted no data/classification is set up.
    labels : list, optional
        Labels aligned with *documents*.
    task : str
        Task name; only ``'classification'`` is handled here.

    NOTE(review): ``Data`` is referenced below but never imported at the top
    of this module, so constructing a Task *with* documents currently raises
    ``NameError`` — confirm the missing ``from .Data import Data`` import.
    """

    def __init__(self, documents=None, labels=None, task='classification'):
        # `is not None` replaces the original `!= None`: identity comparison is
        # the Python idiom and does not invoke custom __eq__ implementations.
        if documents is not None:
            self.data = Data(documents, labels)
        if task == 'classification' and documents is not None:
            self.classification = Classification(data=self.data)

    def test(self):
        """Liveness check used by the smoke tests."""
        return "Hello, Task Mantéïa is alive."
def testManteia():
    """Smoke test: print the package liveness message."""
    greeting = "Hello, Mantéïa is alive."
    print(greeting)
def testData():
    """Smoke test for the Data wrapper: build a tiny labelled corpus and print
    its labels and dataframe.

    NOTE(review): ``Data`` is not imported by this module, so this currently
    raises ``NameError``; also ``mant.data`` assumes the Data object exposes a
    ``data`` attribute — confirm against Manteia's Data class.
    """
    documents=[' ,;:123test car','test houses']
    labels=['1','0']
    mant=Data(documents,labels)
    print(mant.data.list_labels)
    print(mant.data.get_df())
def testClassification():
    """Smoke test for Classification on a two-document corpus.

    NOTE(review): elsewhere in this module Classification is constructed as
    ``Classification(data=...)``; the positional ``(documents, labels)`` call
    here may not match its signature — verify before use.
    """
    documents=['test car','test house']
    labels=['1','0']
    mant=Classification(documents,labels)
|
YC-S/LeetCode
|
src/all_problems/P1829_MaximumXORForEachQuery.java
|
package all_problems;
/**
 * LeetCode 1829: for each query k (which removes the last k elements), find
 * the value below 2^maximumBit that maximizes the XOR of the remaining prefix.
 */
public class P1829_MaximumXORForEachQuery {
    /**
     * Returns answers[k] = (XOR of nums[0..n-1-k]) ^ (2^maximumBit - 1).
     * The all-ones mask is the XOR complement that maximizes the result.
     */
    public int[] getMaximumXor(int[] nums, int maximumBit) {
        final int n = nums.length;
        final int mask = (1 << maximumBit) - 1;
        int[] answers = new int[n];
        int prefixXor = 0;
        for (int idx = 0; idx < n; idx++) {
            prefixXor ^= nums[idx];
            // Query k drops the last k elements, so answers fill back-to-front.
            answers[n - 1 - idx] = prefixXor ^ mask;
        }
        return answers;
    }
}
|
gchinmayvarma/C-Python-Batch-OldProjects
|
C++/FPS.cpp
|
<reponame>gchinmayvarma/C-Python-Batch-OldProjects<filename>C++/FPS.cpp
#include <cmath>     // sinf, cosf
#include <iostream>
#include <string>    // std::wstring
using namespace std;
#include <windows.h>
// Console dimensions, in character cells.
const int ScreenWidth = 120 , ScreenHeight = 40;
// Player position (X, Y) in map cells and view angle A in radians.
float PlayerX = 0.0 , PlayerY = 0.0 , PlayerA = 0.0 ;
// Field of view: pi / 4 radians.
float FOV = 3.14159265358979323846264338327950/4;
// Map dimensions, in cells.
int MapHeight = 16 , MapWidth = 16 ;
int main()
{
wchar_t *screen = new wchar_t[ScreenWidth*ScreenHeight];
HANDLE Console = CreateConsoleScreenBuffer(GENERIC_READ |
GENERIC_WRITE, 0, NULL, CONSOLE_TEXTMODE_BUFFER, NULL);
SetConsoleActiveScreenBuffer(Console);
DWORD BytesWritten = 0;
wstring map;
map += L"################";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"# #";
map += L"################";
while(1)
{
for(int i = 0, i < ScreenWidth, i++)
{
float RayAngle = (Player - FOV/2.0)
+((float)i/(float)ScreenWidth)
*FOV ;
float DistanceToWall = 0;
bool HitWall = false ;
float EyeX = sinf(RayAngle);
float EyeY = cosf(RayAngle);
while(!HitWall)//11:10
}
screen[ScreenWidth*ScreenHeight - 1] = '\0';
WriteConsoleOutputCharacter(Console, screen,
ScreenWidth*ScreenHeight, {0,0}, &BytesWritten);
}
return 0;
}
|
ministryofjustice/laa-apply-for-legal-aid
|
spec/services/address_lookup_service_spec.rb
|
# Specs for AddressLookupService: wraps the Ordnance Survey postcode API and
# returns an outcome object exposing success?, errors, and a result list of
# Address records.
require 'rails_helper'

RSpec.describe AddressLookupService do
  subject(:service) { described_class.new(postcode) }

  # Query string the service is expected to send to the OS API.
  let(:query_params) do
    {
      # NOTE(review): "ORDNANACE" looks misspelled; confirm it matches the ENV
      # key the service itself reads before changing either side.
      key: ENV['ORDNANACE_SURVEY_API_KEY'],
      postcode: postcode,
      lr: 'EN'
    }
  end
  let(:api_request_uri) do
    uri = URI.parse(described_class::ORDNANCE_SURVEY_URL)
    uri.query = query_params.to_query
    uri
  end
  let(:postcode) { 'SW1H9AJ' }

  describe '#call' do
    context 'when the lookup is successful' do
      let(:stubbed_json_body) { file_fixture('address_lookups/success.json') }

      before do
        stub_request(:get, api_request_uri)
          .to_return(status: 200, body: stubbed_json_body)
      end

      # HTTP 200 but an empty result set still counts as a failed lookup.
      context 'but the response does not contain any results' do
        let(:postcode) { 'W1A1AA' }
        let(:stubbed_json_body) { file_fixture('address_lookups/no_results.json') }

        it 'outcome is unsuccessful' do
          outcome = service.call
          expect(outcome).not_to be_success
          expect(outcome.errors).to eq(lookup: [:no_results])
          expect(outcome.result).to eq([])
        end
      end

      it 'returns a list of mapped addresses' do
        outcome = service.call
        expect(outcome).to be_success
        expect(outcome.errors).to be_empty
        expect(outcome.result).to all(be_an(Address))
      end
    end

    # Connection-level failures are reported to AlertManager and surfaced as
    # :service_unavailable without raising.
    context 'when there is a problem connecting to the postcode API' do
      before do
        stub_request(:get, api_request_uri)
          .to_raise(Errno::ECONNREFUSED)
      end

      it 'outcome is unsuccessful' do
        expect(AlertManager).to receive(:capture_exception).with(message_contains('Connection refused'))
        outcome = service.call
        expect(outcome).not_to be_success
        expect(outcome.errors).to eq(lookup: [:service_unavailable])
        expect(outcome.result).to eq([])
      end
    end

    # API-level errors (non-2xx with an error payload) map to :unsuccessful.
    context 'when the lookup service is not successful' do
      let(:stubbed_body) do
        {
          error: {
            statuscode: 400,
            message: 'No postcode parameter provided.'
          }
        }
      end
      let(:postcode) { nil }

      before do
        stub_request(:get, api_request_uri)
          .to_return(status: 400, body: stubbed_body.to_json)
      end

      it 'outcome is unsuccessful' do
        expect(AlertManager).to receive(:capture_exception).with(message_contains('No postcode parameter provided'))
        outcome = service.call
        expect(outcome).not_to be_success
        expect(outcome.errors).to eq(lookup: [:unsuccessful])
        expect(outcome.result).to eq([])
      end
    end
  end

  describe '#record_error' do
    let(:state) { :service_unavailable }
    let(:error) { StandardError.new 'Service unavailable' }

    it 'captures error' do
      expect(AlertManager).to receive(:capture_exception).with(message_contains('Service unavailable'))
      service.__send__(:record_error, state, error)
    end

    # NOTE(review): typo "postocde" in the description below (runtime string in
    # the test output — left unchanged here).
    context 'postocde is in a correct format' do
      let(:state) { :unsuccessful }
      let(:error) { StandardError.new 'Resource x does not exist' }
      let(:postcode) { 'SW109LO' }

      before do
        stub_request(:get, api_request_uri)
          .to_raise(Errno::ECONNREFUSED)
      end

      it 'does not capture error' do
        expect(AlertManager).not_to receive(:capture_exception).with(message_contains('Resource x does not exist'))
        service.call
      end
    end
  end
end
|
shellyln/dust-lang
|
scripting/executor/exec_objects.go
|
package executor
import (
"errors"
"reflect"
"unsafe"
emsg "github.com/shellyln/dust-lang/scripting/errors"
mnem "github.com/shellyln/dust-lang/scripting/executor/opcode"
. "github.com/shellyln/takenoco/base"
)
//
// execObjectOp evaluates object/collection opcodes for the given AST node:
// list construction, filled (repeated-value) lists, object literals, element
// indexing, slicing, and map indexing. It returns the result node, whether
// the opcode was handled, an auxiliary value (always nil here), and an error.
//
// Throughout, the commented-out type assertions document the intended
// conversion; the rawInterface2/unsafe.Pointer casts extract the same value
// while bypassing the checked interface assertion — presumably for speed
// (TODO confirm against benchmarks). The casts are only safe because the
// opcode's ReturnType/BitLen bits pin the dynamic type stored in Value.
func execObjectOp(ctx *ExecutionContext, ast *Ast) (Ast, bool, interface{}, error) {
	switch ast.OpCode & mnem.OpCodeMask {
	// List: materialize a slice of element ASTs into a typed Go slice chosen
	// by the opcode's return-type and bit-length bits.
	case mnem.List:
		{
			orig := ast.Value.(AstSlice)
			origLen := len(orig)
			var payload interface{}
			switch ast.OpCode & mnem.ReturnTypeMask {
			case mnem.ReturnInt:
				// Element values are stored as int64; narrow per BitLen.
				switch ast.OpCode & mnem.BitLenMask {
				case mnem.Bits32:
					{
						slice := make([]int32, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = int32(orig[i].Value.(int64))
							slice[i] = int32(*(*int64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr))
						}
						payload = slice
					}
				case mnem.Bits16:
					{
						slice := make([]int16, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = int16(orig[i].Value.(int64))
							slice[i] = int16(*(*int64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr))
						}
						payload = slice
					}
				case mnem.Bits8:
					{
						slice := make([]int8, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = int8(orig[i].Value.(int64))
							slice[i] = int8(*(*int64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr))
						}
						payload = slice
					}
				default:
					{
						slice := make([]int64, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = orig[i].Value.(int64)
							slice[i] = *(*int64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr)
						}
						payload = slice
					}
				}
			case mnem.ReturnUint:
				// Element values are stored as uint64; narrow per BitLen.
				switch ast.OpCode & mnem.BitLenMask {
				case mnem.Bits32:
					{
						slice := make([]uint32, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = uint32(orig[i].Value.(uint64))
							slice[i] = uint32(*(*uint64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr))
						}
						payload = slice
					}
				case mnem.Bits16:
					{
						slice := make([]uint16, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = uint16(orig[i].Value.(uint64))
							slice[i] = uint16(*(*uint64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr))
						}
						payload = slice
					}
				case mnem.Bits8:
					{
						slice := make([]uint8, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = uint8(orig[i].Value.(uint64))
							slice[i] = uint8(*(*uint64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr))
						}
						payload = slice
					}
				default:
					{
						slice := make([]uint64, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = orig[i].Value.(uint64)
							slice[i] = *(*uint64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr)
						}
						payload = slice
					}
				}
			case mnem.ReturnFloat:
				// Element values are stored as float64; only 32/64-bit widths exist.
				switch ast.OpCode & mnem.BitLenMask {
				case mnem.Bits32:
					{
						slice := make([]float32, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = float32(orig[i].Value.(float64))
							slice[i] = float32(*(*float64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr))
						}
						payload = slice
					}
				default:
					{
						slice := make([]float64, origLen, origLen)
						for i := 0; i < origLen; i++ {
							// slice[i] = orig[i].Value.(float64)
							slice[i] = *(*float64)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr)
						}
						payload = slice
					}
				}
			case mnem.ReturnBool:
				{
					slice := make([]bool, origLen, origLen)
					for i := 0; i < origLen; i++ {
						// slice[i] = orig[i].Value.(bool)
						slice[i] = *(*bool)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr)
					}
					payload = slice
				}
			case mnem.ReturnString:
				{
					slice := make([]string, origLen, origLen)
					for i := 0; i < origLen; i++ {
						// slice[i] = orig[i].Value.(string)
						slice[i] = *(*string)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr)
					}
					payload = slice
				}
			default:
				// Heterogeneous list: keep the boxed values as-is.
				{
					slice := make([]interface{}, origLen, origLen)
					for i := 0; i < origLen; i++ {
						slice[i] = orig[i].Value
					}
					payload = slice
				}
			}
			return Ast{
				OpCode: mnem.Imm_data | mnem.Indexable,
				Type:   AstType_ListOfAny,
				Value:  payload,
			}, true, nil, nil
		}
	// FilledList: build a slice of `size` copies of one value; the cons cell
	// carries (size, fill-value).
	case mnem.FilledList:
		{
			cons := ast.Value.(AstCons)
			size := cons.Car.Value.(int64)
			var payload interface{}
			switch ast.OpCode & mnem.ReturnTypeMask {
			case mnem.ReturnInt:
				switch ast.OpCode & mnem.BitLenMask {
				case mnem.Bits32:
					{
						slice := make([]int32, size, size)
						// v := int32(cons.Cdr.Value.(int64))
						v := int32(*(*int64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr))
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				case mnem.Bits16:
					{
						slice := make([]int16, size, size)
						// v := int16(cons.Cdr.Value.(int64))
						v := int16(*(*int64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr))
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				case mnem.Bits8:
					{
						slice := make([]int8, size, size)
						// v := int8(cons.Cdr.Value.(int64))
						v := int8(*(*int64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr))
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				default:
					{
						slice := make([]int64, size, size)
						// v := cons.Cdr.Value.(int64)
						v := *(*int64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr)
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				}
			case mnem.ReturnUint:
				switch ast.OpCode & mnem.BitLenMask {
				case mnem.Bits32:
					{
						slice := make([]uint32, size, size)
						// v := uint32(cons.Cdr.Value.(uint64))
						v := uint32(*(*uint64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr))
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				case mnem.Bits16:
					{
						slice := make([]uint16, size, size)
						// v := uint16(cons.Cdr.Value.(uint64))
						v := uint16(*(*uint64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr))
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				case mnem.Bits8:
					{
						slice := make([]uint8, size, size)
						// v := uint8(cons.Cdr.Value.(uint64))
						v := uint8(*(*uint64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr))
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				default:
					{
						slice := make([]uint64, size, size)
						// v := cons.Cdr.Value.(uint64)
						v := *(*uint64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr)
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				}
			case mnem.ReturnFloat:
				switch ast.OpCode & mnem.BitLenMask {
				case mnem.Bits32:
					{
						slice := make([]float32, size, size)
						// v := float32(cons.Cdr.Value.(float64))
						v := float32(*(*float64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr))
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				default:
					{
						slice := make([]float64, size, size)
						// v := cons.Cdr.Value.(float64)
						v := *(*float64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr)
						for i := int64(0); i < size; i++ {
							slice[i] = v
						}
						payload = slice
					}
				}
			case mnem.ReturnBool:
				{
					slice := make([]bool, size, size)
					// v := cons.Cdr.Value.(bool)
					v := *(*bool)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr)
					for i := int64(0); i < size; i++ {
						slice[i] = v
					}
					payload = slice
				}
			case mnem.ReturnString:
				{
					slice := make([]string, size, size)
					// v := cons.Cdr.Value.(string)
					v := *(*string)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr)
					for i := int64(0); i < size; i++ {
						slice[i] = v
					}
					payload = slice
				}
			default:
				{
					slice := make([]interface{}, size, size)
					for i := int64(0); i < size; i++ {
						slice[i] = cons.Cdr.Value
					}
					payload = slice
				}
			}
			return Ast{
				OpCode: mnem.Imm_data | mnem.Indexable,
				Type:   AstType_ListOfAny,
				Value:  payload,
			}, true, nil, nil
		}
	// Object: flat AstSlice of alternating (string key, value) pairs -> map.
	case mnem.Object:
		{
			orig := ast.Value.(AstSlice)
			origLen := len(orig)
			dict := make(map[string]interface{}, origLen/2)
			for i := 0; i < origLen; i += 2 {
				// dict[orig[i].Value.(string)] = orig[i+1].Value
				dict[*(*string)((*rawInterface2)(unsafe.Pointer(&orig[i].Value)).Ptr)] = orig[i+1].Value
			}
			return Ast{
				OpCode: mnem.Imm_data | mnem.Indexable,
				Type:   AstType_ListOfAny,
				Value:  dict,
			}, true, nil, nil
		}
	// Index: (container . int64 index) -> element, via reflection; returns an
	// addressable box so the element can be assigned through later.
	case mnem.Index:
		{
			opcode := mnem.Imm_data
			astType := AstType_Any
			retType := ast.OpCode & mnem.ReturnTypeMask
			// Map the opcode's static return type onto an immediate opcode/type.
			switch retType {
			case mnem.ReturnInt:
				opcode = mnem.Imm_i64
				astType = AstType_Int
			case mnem.ReturnUint:
				opcode = mnem.Imm_u64
				astType = AstType_Uint
			case mnem.ReturnFloat:
				opcode = mnem.Imm_f64
				astType = AstType_Float
			case mnem.ReturnBool:
				opcode = mnem.Imm_bool
				astType = AstType_Bool
			case mnem.ReturnString:
				opcode = mnem.Imm_str
				astType = AstType_String
			}
			cons := ast.Value.(AstCons)
			rv := reflect.ValueOf(cons.Car.Value)
			// i := int(cons.Cdr.Value.(int64))
			i := int(*(*int64)((*rawInterface2)(unsafe.Pointer(&cons.Cdr.Value)).Ptr))
			if 0 <= i && i < rv.Len() {
				rvi := rv.Index(i)
				var v interface{}
				v = rvi.Interface()
				// NOTE: promote to 64bit
				switch ast.OpCode & mnem.BitLenMask {
				case mnem.Bits32:
					switch retType {
					case mnem.ReturnInt:
						// v = int64(v.(int32))
						v = int64(*(*int32)((*rawInterface2)(unsafe.Pointer(&v)).Ptr))
					case mnem.ReturnUint:
						// v = uint64(v.(uint32))
						v = uint64(*(*uint32)((*rawInterface2)(unsafe.Pointer(&v)).Ptr))
					case mnem.ReturnFloat:
						// v = float64(v.(float32))
						v = float64(*(*float32)((*rawInterface2)(unsafe.Pointer(&v)).Ptr))
					}
				case mnem.Bits16:
					switch retType {
					case mnem.ReturnInt:
						// v = int64(v.(int16))
						v = int64(*(*int16)((*rawInterface2)(unsafe.Pointer(&v)).Ptr))
					case mnem.ReturnUint:
						// v = uint64(v.(uint16))
						v = uint64(*(*uint16)((*rawInterface2)(unsafe.Pointer(&v)).Ptr))
					}
				case mnem.Bits8:
					switch retType {
					case mnem.ReturnInt:
						// v = int64(v.(int8))
						v = int64(*(*int8)((*rawInterface2)(unsafe.Pointer(&v)).Ptr))
					case mnem.ReturnUint:
						// v = uint64(v.(uint8))
						v = uint64(*(*uint8)((*rawInterface2)(unsafe.Pointer(&v)).Ptr))
					}
				}
				return Ast{
					OpCode:  opcode,
					Type:    astType,
					Value:   v,
					Address: ReflectionBox{Val: rvi},
				}, true, nil, nil
			} else {
				return *ast, false, nil, errors.New(emsg.ExecErr00017)
			}
		}
	// Slice: AstSlice of (container, start, end); missing bounds default to
	// 0 and len(container). Bounds are validated before reflect.Slice.
	case mnem.Slice:
		{
			opcode := mnem.Imm_data
			astType := AstType_Any
			retType := ast.OpCode & mnem.ReturnTypeMask
			slice := ast.Value.(AstSlice)
			rv := reflect.ValueOf(slice[0].Value)
			rvLen := rv.Len()
			var startTmp, endTmp int64
			var ok bool
			startTmp, ok = slice[1].Value.(int64)
			if !ok {
				startTmp = 0
			}
			endTmp, ok = slice[2].Value.(int64)
			if !ok {
				endTmp = int64(rvLen)
			}
			start := int(startTmp)
			end := int(endTmp)
			if 0 <= start && start <= rvLen && 0 <= end && end <= rvLen && start <= end {
				rvi := rv.Slice(start, end)
				return Ast{
					OpCode:  opcode | retType,
					Type:    astType,
					Value:   rvi.Interface(),
					Address: ReflectionBox{Val: rvi},
				}, true, nil, nil
			} else {
				return *ast, false, nil, errors.New(emsg.ExecErr00017)
			}
		}
	// Mapindex: (map . string key) -> value. A missing key yields a
	// NotInitializedMapContainerReflectionBox so assignment can create it.
	case mnem.Mapindex:
		{
			opcode := mnem.Imm_data
			astType := AstType_Any
			retType := ast.OpCode & mnem.ReturnTypeMask
			switch retType {
			case mnem.ReturnInt:
				opcode = mnem.Imm_i64
				astType = AstType_Int
			case mnem.ReturnUint:
				opcode = mnem.Imm_u64
				astType = AstType_Uint
			case mnem.ReturnFloat:
				opcode = mnem.Imm_f64
				astType = AstType_Float
			case mnem.ReturnBool:
				opcode = mnem.Imm_bool
				astType = AstType_Bool
			case mnem.ReturnString:
				opcode = mnem.Imm_str
				astType = AstType_String
			}
			cons := ast.Value.(AstCons)
			rv := reflect.ValueOf(cons.Car.Value)
			k := reflect.ValueOf(cons.Cdr.Value.(string))
			v := rv.MapIndex(k)
			if v.IsValid() {
				return Ast{
					OpCode:  opcode,
					Type:    astType,
					Value:   v.Interface(),
					Address: MapContainerReflectionBox{Container: rv, Key: k},
				}, true, nil, nil
			} else {
				return Ast{
					OpCode:  opcode,
					Type:    astType,
					Value:   nil, // TODO: Zero value per type
					Address: &NotInitializedMapContainerReflectionBox{Container: rv, Key: k},
				}, true, nil, nil
			}
		}
	}
	// Not an object opcode: signal "unhandled" with the node unchanged.
	return *ast, false, nil, nil
}
|
nyinyiz/Burpple
|
PADCBurppleApp/app/src/main/java/com/padc/nyinyi/padcburppleapp/data/models/GuideModel.java
|
package com.padc.nyinyi.padcburppleapp.data.models;
import android.content.ContentValues;
import android.content.Context;
import android.util.Log;
import com.padc.nyinyi.padcburppleapp.PADCBurppleApp;
import com.padc.nyinyi.padcburppleapp.Persistence.BurppleDBContract;
import com.padc.nyinyi.padcburppleapp.data.vos.BurpplePromotionShop;
import com.padc.nyinyi.padcburppleapp.data.vos.GuideVO;
import com.padc.nyinyi.padcburppleapp.data.vos.PromotionVO;
import com.padc.nyinyi.padcburppleapp.events.RestApiEvents;
import com.padc.nyinyi.padcburppleapp.networks.BurppleDataAgent;
import com.padc.nyinyi.padcburppleapp.networks.BurppleDataAgentImpl;
import com.padc.nyinyi.padcburppleapp.utils.AppConstants;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
/**
* Created by <NAME> on 1/17/2018.
*/
/**
 * Model object that loads Burpple "guide" entries from the network, caches
 * them in memory, and persists them via the ContentProvider.
 *
 * NOTE(review): the constructor registers on EventBus but nothing in this
 * class ever unregisters — confirm the owner's lifecycle tears this down,
 * otherwise the instance (and its Context-derived graph) leaks.
 */
public class GuideModel {
    // private static GuideModel objInstance;
    // In-memory cache of guides loaded so far.
    private List<GuideVO> mGuideVOS;
    // Pagination index for the API. NOTE(review): never incremented here —
    // repeated calls to startloadingGuide always fetch page 1; confirm intended.
    private int mmPageIndex = 1;

    @Inject
    BurppleDataAgent mDataAgent;

    public GuideModel(Context context) {
        EventBus.getDefault().register(this);
        mGuideVOS = new ArrayList<>();
        PADCBurppleApp burppleApp = (PADCBurppleApp) context.getApplicationContext();
        burppleApp.getmAppComponent().inject(this);
    }

    /*  public static GuideModel getInstance() {
          if (objInstance == null) {
              objInstance = new GuideModel();
          }
          return objInstance;
      }
    */

    /** Kicks off an async guide fetch; results arrive via onGuideDataLoaded. */
    public void startloadingGuide(Context context) {
        mDataAgent.loadGuides(context, AppConstants.ACCESS_TOKEN,
                mmPageIndex);
    }

    public List<GuideVO> getmGuideVOS() {
        return mGuideVOS;
    }

    /** EventBus callback (background thread): caches the loaded guides and
     *  bulk-inserts them into the guide ContentProvider table. */
    @Subscribe(threadMode = ThreadMode.BACKGROUND)
    public void onGuideDataLoaded(RestApiEvents.GuidedDataLoadedEvent event) {
        mGuideVOS.addAll(event.getLoadGuided());
        //TODO LOGIC TO SAVE THE DATA IN PERSISTENCE LAYER
        ContentValues[] guideCVs = new ContentValues[event.getLoadGuided().size()];
        for (int index = 0; index < guideCVs.length; index++) {
            guideCVs[index] = event.getLoadGuided().get(index).parseToContentValues();
        }
        int insertGuide = event.getContext().getContentResolver().bulkInsert(BurppleDBContract.GuideEntry.CONTENT_URI, guideCVs);
        Log.d(PADCBurppleApp.LOG_TAG, "Inserted Row : " + insertGuide);
    }
}
|
best08618/asylo
|
gcc-gcc-7_3_0-release/gcc/testsuite/gcc.dg/tree-ssa/20070302-1.c
|
<reponame>best08618/asylo
/* { dg-do link } */
/* { dg-options "-O2" } */
void link_error (void);
/* Mixed int/float struct used to exercise aliasing of field y. */
struct A
{
  int x;
  float y;
};
/* volatile so the reads/writes below cannot be optimized away. */
volatile float X, Y;
/* Writes both structs through pointers; noinline keeps the call in foo() an
   opaque clobber site for the alias analysis under test.
   NOTE(review): declared int but falls off the end without returning — this
   file is a GCC testsuite input and such quirks are deliberate; do not "fix". */
int __attribute__ ((__noinline__))
baz (struct A *z, struct A *y)
{
  z->x = (int) X;
  z->y = Y;
  y->x = (int) X;
  y->y = Y;
}
struct A B;
/* The link test passes only if the compiler proves x.y == 3.0 and deletes the
   call to link_error() at compile time: baz() is given &z and &y, never &x,
   so x.y must not be treated as clobbered by the call. */
float foo (int i)
{
  struct A *p, x, y, z;
  p = (i > 10) ? &x : &z;
  x.y = 3.0;
  p->x += baz (&z, &y);
  X = z.y;
  Y = p->y;
  /* This predicate should always evaluate to false.  The call to
     baz() is not a clobbering site for x.y.  The operand scanner was
     considering it a clobbering site for x.y because x.y is in the
     alias set of a call-clobbered memory tag.  */
  if (x.y != 3.0)
    link_error ();
}
/* Driver: passing argc keeps the predicate inside foo non-constant. */
int
main(int argc, char **argv)
{
  foo (argc);
}
|
rlsoluttionscr/roadside-app
|
client/src/components/Dashboard/MakeRequest.js
|
<gh_stars>1-10
import React, { Component, Fragment } from 'react'
import { withStyles } from '@material-ui/core/styles'
import {
Typography,
Grid,
TextField,
FormControl,
InputLabel,
Select,
Input,
Button
} from '@material-ui/core'
import { UserContext } from '../Context'
import axios from 'axios'
import MapsCurrentRequest from './MapsCurrentRequest'
import api from '../api'
// withStyles theme factory: a white column layout for the form container and
// a rule that keeps the native <select> background white while focused.
const style = theme => ({
  root: {
    background: '#fff',
    display: 'flex',
    flexDirection: 'column'
  },
  noFocus: {
    '&:focus': {
      background: 'white'
    }
  }
})
// Form for creating a roadside-assistance callout: pre-fills the address from
// browser geolocation (reverse geocoded), lets the user pick one of their
// vehicles, and POSTs the geocoded request to the backend.
class MakeRequest extends Component {
  static contextType = UserContext

  // Promise wrapper around the callback-style geolocation API.
  asyncGetCurrentPosition = options =>
    new Promise((resolve, reject) => {
      navigator.geolocation.getCurrentPosition(resolve, reject, options)
    })

  // Resolves the browser's current coordinates to a formatted street address
  // via the Google reverse-geocoding API (through a CORS proxy).
  // NOTE(review): cors-anywhere.herokuapp.com is a public demo proxy — it is
  // rate-limited/unreliable; confirm it is acceptable for production.
  // NOTE(review): `result.results` is an array, so the truthiness check below
  // passes even when it is empty and `results[0]` would be undefined.
  getCurrentLocation = async () => {
    if (navigator.geolocation) {
      const {
        coords: { latitude, longitude }
      } = await this.asyncGetCurrentPosition()
      console.log('geolocation accepted', latitude, longitude)
      const geocodeURL =
        'https://cors-anywhere.herokuapp.com/https://maps.googleapis.com/maps/api/geocode/json'
      const { data: result } = await axios.get(geocodeURL, {
        params: {
          latlng: `${latitude},${longitude}`,
          key: process.env.REACT_APP_GOOGLE_MAPS_API
        }
      })
      if (result.results) {
        const textAddress = result.results[0].formatted_address
        console.log('reverse geocode', textAddress)
        return textAddress
      } else {
        console.log('no results found')
        return ''
      }
    } else {
      // Browser doesn't support Geolocation
      console.log('browser doesnt support geolocation.')
      return ''
    }
  }

  // Builds the initial state from the logged-in user's vehicles plus the
  // reverse-geocoded current address.
  initState = async () => {
    const user = this.context
    const { vehicleList } = user.userDetails
    const address = await this.getCurrentLocation()
    return {
      vehicleList: vehicleList.map(vehicle => ({
        id: vehicle.id,
        details: `${vehicle.make} ${vehicle.carModel} • ${vehicle.carPlate}`
      })),
      address
    }
  }

  async componentDidMount() {
    const state = await this.initState()
    this.setState({
      ...state,
      suggestions: [],
      isLoading: false
    })
  }

  // isLoading stays true until componentDidMount finishes the async init.
  state = {
    isLoading: true,
    vehicle: '',
    description: '',
    vehicleId: ''
  }

  handleChange = event => {
    this.setState({ [event.target.name]: event.target.value })
  }

  // Tracks both the display string and the backing vehicle id.
  handleVehicleChange = event => {
    event.persist()
    this.setState(state => ({
      vehicle: event.target.value,
      vehicleId: state.vehicleList.find(x => x.details === event.target.value)
        .id
    }))
  }

  // Forward-geocodes the typed address, then creates the callout with a
  // GeoJSON Point ([lng, lat] order).
  // NOTE(review): same empty-`results` caveat as getCurrentLocation above.
  handleSubmit = async event => {
    event.preventDefault()
    const geocodeURL =
      'https://cors-anywhere.herokuapp.com/https://maps.googleapis.com/maps/api/geocode/json'
    const { data: geocodeResult } = await axios.get(geocodeURL, {
      params: {
        address: this.state.address,
        key: process.env.REACT_APP_GOOGLE_MAPS_API
      }
    })
    if (geocodeResult.results) {
      console.log('geocode result', geocodeResult.results[0])
      const { lat, lng } = geocodeResult.results[0].geometry.location
      const { address, vehicleId, description } = this.state
      const { data: result } = await api.post('/callout/customer/create', {
        location: {
          type: 'Point',
          coordinates: [lng, lat]
        },
        address,
        vehicleId,
        description
      })
      if (result.success) {
        console.log(result)
        const { handleInnerChange } = this.props
        handleInnerChange({ loadingResponse: true })
      } else {
        alert(result.error)
      }
    } else {
      alert('no result found!')
      return
    }
  }

  render() {
    const {
      classes: { noFocus }
    } = this.props
    // NOTE(review): this debug log is duplicated a few lines below.
    console.log(this.state)
    const {
      address,
      suggestions,
      description,
      isLoading,
      vehicleList,
      vehicle
    } = this.state
    console.log(this.state)
    if (isLoading) return <Typography variant="body2">Loading...</Typography>
    return (
      <Fragment>
        <Grid container spacing={24}>
          <Grid
            item
            style={{
              width: 600
            }}
          >
            <Typography variant="h6" color="primary" gutterBottom>
              Make a new request
            </Typography>
            <Grid container spacing={24}>
              <Grid item xs={12}>
                <form onSubmit={this.handleSubmit}>
                  <Grid container spacing={8}>
                    <Grid item xs={12}>
                      {/* Address input with autocomplete suggestions. */}
                      <MapsCurrentRequest
                        onChange={(address, suggestions) => {
                          this.setState({
                            address,
                            suggestions
                          })
                        }}
                        address={address}
                        suggestions={suggestions}
                      />
                    </Grid>
                    <Grid item xs={12}>
                      <FormControl fullWidth>
                        <InputLabel shrink htmlFor="vehicle">
                          Vehicle
                        </InputLabel>
                        <Select
                          native
                          required
                          value={vehicle}
                          onChange={this.handleVehicleChange}
                          input={<Input name="vehicle" id="vehicle" required />}
                          inputProps={{
                            name: 'vehicle',
                            id: 'vehicle'
                          }}
                          classes={{
                            select: noFocus
                          }}
                        >
                          <option value="">None</option>
                          {vehicleList.map((vehicle, idx) => (
                            <option value={vehicle.details} key={idx}>
                              {vehicle.details}
                            </option>
                          ))}
                        </Select>
                      </FormControl>
                    </Grid>
                    <Grid item xs={12}>
                      <TextField
                        required
                        id="description"
                        name="description"
                        onChange={this.handleChange}
                        value={description}
                        label="Description"
                        placeholder="Please provide your vehicle's issues"
                        multiline
                        rows="4"
                        margin="normal"
                        variant="outlined"
                        fullWidth
                        InputLabelProps={{
                          shrink: true
                        }}
                      />
                    </Grid>
                    <Grid item container justify="flex-end" xs={12}>
                      <Button color="primary" variant="contained" type="submit">
                        Submit
                      </Button>
                    </Grid>
                  </Grid>
                </form>
              </Grid>
            </Grid>
          </Grid>
        </Grid>
      </Fragment>
    )
  }
}
export default withStyles(style)(MakeRequest)
|
tristanseifert/cubeland
|
server/net/handlers/Time.h
|
#ifndef NET_HANDLER_TIME_H
#define NET_HANDLER_TIME_H

#include "net/PacketHandler.h"

#include <atomic>
#include <condition_variable>
#include <mutex>
#include <string>

#include <cpptime.h>

#include <cereal/access.hpp>

namespace net::handler {
/**
 * Packet handler that periodically notifies connected clients of the
 * current server time.
 */
class Time: public PacketHandler {
    public:
        Time(ListenerClient *_client);
        virtual ~Time();

        // Returns true for packet types this handler consumes.
        bool canHandlePacket(const PacketHeader &header) override;
        void handlePacket(const PacketHeader &header, const void *payload,
                const size_t payloadLen) override;

        void authStateChanged() override;

        // Sends the current time to this handler's client.
        void sendTime();

    private:
        /// shared timer that schedules time updates for all clients
        static CppTime::Timer timer;
        /// number of connected clients; if none are connected, time doesn't advance
        static std::atomic_uint numConnectedClients;

        /// registration id of this client's periodic time-update callback
        CppTime::timer_id updateTimer;
};
}

#endif
|
bubenheimer/androidx
|
camera/camera-camera2/src/main/java/androidx/camera/camera2/internal/compat/quirk/AeFpsRangeLegacyQuirk.java
|
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.camera.camera2.internal.compat.quirk;
import android.hardware.camera2.CameraCharacteristics;
import android.util.Range;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.camera2.internal.compat.CameraCharacteristicsCompat;
import androidx.camera.core.impl.Quirk;
/**
* Quirk required to maintain good exposure on legacy devices by specifying a proper
* {@link android.hardware.camera2.CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE}.
* <p>
* Legacy devices set the AE target FPS range to [30, 30]. This can potentially cause underexposure
* issues. {@link androidx.camera.camera2.internal.compat.workaround.AeFpsRange} contains a
* workaround that is used on legacy devices to set a AE FPS range whose upper bound is 30, which
* guarantees a smooth frame rate, and whose lower bound is as small as possible to properly
* expose frames in low light conditions. The default behavior on non legacy devices does not add
* the AE FPS range option.
*
* @see androidx.camera.camera2.internal.compat.workaround.AeFpsRange
*/
public class AeFpsRangeLegacyQuirk implements Quirk {

    /** The AE target FPS range to use, or null if no range with upper bound 30 exists. */
    @Nullable
    private final Range<Integer> mAeFpsRange;

    public AeFpsRangeLegacyQuirk(
            @NonNull final CameraCharacteristicsCompat cameraCharacteristicsCompat) {
        final Range<Integer>[] availableFpsRanges = cameraCharacteristicsCompat.get(
                CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
        mAeFpsRange = pickSuitableFpsRange(availableFpsRanges);
    }

    static boolean load(@NonNull final CameraCharacteristicsCompat cameraCharacteristicsCompat) {
        final Integer hardwareLevel = cameraCharacteristicsCompat.get(
                CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
        if (hardwareLevel == null) {
            return false;
        }
        return hardwareLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
    }

    /**
     * Returns the fps range whose upper is 30 and whose lower is the smallest, or null if no
     * range has an upper equal to 30. The rationale is:
     * (1) An upper bound of 30 guarantees a smooth frame rate.
     * (2) The smallest supported lower bound adapts best to low light conditions.
     */
    @Nullable
    public Range<Integer> getRange() {
        return mAeFpsRange;
    }

    @Nullable
    private Range<Integer> pickSuitableFpsRange(
            @Nullable final Range<Integer>[] candidates) {
        if (candidates == null || candidates.length == 0) {
            return null;
        }
        Range<Integer> best = null;
        for (final Range<Integer> raw : candidates) {
            final Range<Integer> candidate = getCorrectedFpsRange(raw);
            // Only ranges capped at 30 fps qualify; among those, keep the
            // one with the smallest lower bound.
            if (candidate.getUpper() == 30
                    && (best == null || candidate.getLower() < best.getLower())) {
                best = candidate;
            }
        }
        return best;
    }

    /**
     * On Android 5.0/5.1 {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES}
     * reports values multiplied by 1000; scale such values back down.
     */
    @NonNull
    private Range<Integer> getCorrectedFpsRange(@NonNull final Range<Integer> fpsRange) {
        final int upper =
                fpsRange.getUpper() >= 1000 ? fpsRange.getUpper() / 1000 : fpsRange.getUpper();
        final int lower =
                fpsRange.getLower() >= 1000 ? fpsRange.getLower() / 1000 : fpsRange.getLower();
        return new Range<>(lower, upper);
    }
}
|
chorifa/minirpc
|
src/main/java/com/chorifa/minirpc/utils/StreamID4Http2Util.java
|
<filename>src/main/java/com/chorifa/minirpc/utils/StreamID4Http2Util.java
package com.chorifa.minirpc.utils;
import java.util.concurrent.atomic.AtomicInteger;
public class StreamID4Http2Util {

    // Monotonically increasing counter; starts at MIN_VALUE so the full int
    // range is consumed before wrapping.
    private static final AtomicInteger count = new AtomicInteger(Integer.MIN_VALUE);
    // Number of odd integers in [3, Integer.MAX_VALUE]: 1073741823.
    private static final int ODD_NUM = ((Integer.MAX_VALUE - 3) >> 1) + 1;

    /**
     * Maps the counter onto the odd integers in [3, Integer.MAX_VALUE]
     * (valid HTTP/2 client-initiated stream ids above 1).
     *
     * @return odd integer between 3 and Integer.MAX_VALUE inclusive
     */
    public static int getCurrentID() {
        // Math.floorMod always yields a value in [0, ODD_NUM), unlike the
        // previous `%`-based expression, which went negative for negative
        // counter values and produced invalid (negative or even) ids
        // (e.g. counter -5 mapped to -3).
        final int index = Math.floorMod(count.getAndIncrement(), ODD_NUM);
        // index in [0, ODD_NUM) => result in [3, 2*ODD_NUM + 1] = [3, Integer.MAX_VALUE].
        return (index << 1) + 3;
    }
}
|
dthree/wat
|
src/vorpal/updater.js
|
<reponame>dthree/wat<filename>src/vorpal/updater.js
'use strict';
const chalk = require('chalk');
module.exports = function (vorpal, options) {
const app = options.app;
vorpal
.command('updates', 'Shows what docs are mid being updated.')
.option('-m, --max', 'Maximum history items to show.')
.action(function (args, cb) {
const queue = app.clerk.updater.queue;
const max = args.options.max || 30;
let limit = queue.length - 1 - max;
limit = (limit < 0) ? 0 : limit;
if (queue.length > 0) {
this.log(chalk.bold('\n Command'));
} else {
this.log(chalk.bold(`\n No updates in the queue.\n To do a fresh update, run the "${chalk.cyan('update')}" command.`));
}
for (let i = queue.length - 1; i > limit; i--) {
let item = String(queue[i]).split('docs/');
item = (item.length > 1) ? item[1] : item[0];
let cmd = String(item).split('/').join(' ');
cmd = String(cmd).replace('.md', '');
cmd = String(cmd).replace('.detail', chalk.gray(' (detailed)'));
cmd = String(cmd).replace('.install', chalk.gray(' (install)'));
cmd = String(cmd).replace(' index', chalk.gray(' '));
this.log(` ${cmd}`);
}
this.log(' ');
cb();
});
};
|
gkumar111/elasticsearch
|
server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Defines a suggest command based on a prefix, typically to provide "auto-complete" functionality
* for users as they type search terms. The implementation of the completion service uses FSTs that
* are created at index-time and so must be defined in the mapping with the type "completion" before
* indexing.
*/
public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSuggestionBuilder> {

    // Query contexts are buffered as raw bytes in this format until the
    // field mapping is available to interpret them (see build()).
    private static final XContentType CONTEXT_BYTES_XCONTENT_TYPE = XContentType.JSON;
    static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context");
    static final ParseField SKIP_DUPLICATES_FIELD = new ParseField("skip_duplicates");

    public static final String SUGGESTION_NAME = "completion";

    /**
     * {
     *     "field" : STRING
     *     "size" : INT
     *     "fuzzy" : BOOLEAN | FUZZY_OBJECT
     *     "contexts" : QUERY_CONTEXTS
     *     "regex" : REGEX_OBJECT
     *     "payload" : STRING_ARRAY
     * }
     */
    private static final ObjectParser<CompletionSuggestionBuilder.InnerBuilder, Void> PARSER = new ObjectParser<>(SUGGESTION_NAME, null);
    static {
        // "fuzzy" accepts either a boolean (true -> default fuzzy options,
        // false -> none) or a full fuzzy-options object.
        PARSER.declareField((parser, completionSuggestionContext, context) -> {
                if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
                    if (parser.booleanValue()) {
                        completionSuggestionContext.fuzzyOptions = new FuzzyOptions.Builder().build();
                    }
                } else {
                    completionSuggestionContext.fuzzyOptions = FuzzyOptions.parse(parser);
                }
            },
            FuzzyOptions.FUZZY_OPTIONS, ObjectParser.ValueType.OBJECT_OR_BOOLEAN);
        PARSER.declareField((parser, completionSuggestionContext, context) ->
            completionSuggestionContext.regexOptions = RegexOptions.parse(parser),
            RegexOptions.REGEX_OPTIONS, ObjectParser.ValueType.OBJECT);
        PARSER.declareString(CompletionSuggestionBuilder.InnerBuilder::field, FIELDNAME_FIELD);
        PARSER.declareString(CompletionSuggestionBuilder.InnerBuilder::analyzer, ANALYZER_FIELD);
        PARSER.declareInt(CompletionSuggestionBuilder.InnerBuilder::size, SIZE_FIELD);
        PARSER.declareInt(CompletionSuggestionBuilder.InnerBuilder::shardSize, SHARDSIZE_FIELD);
        PARSER.declareField((p, v, c) -> {
            // Copy the current structure. We will parse, once the mapping is provided
            XContentBuilder builder = XContentFactory.contentBuilder(CONTEXT_BYTES_XCONTENT_TYPE);
            builder.copyCurrentStructure(p);
            v.contextBytes = BytesReference.bytes(builder);
            p.skipChildren();
        }, CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated
        PARSER.declareBoolean(CompletionSuggestionBuilder::skipDuplicates, SKIP_DUPLICATES_FIELD);
    }

    // Optional fuzzy-matching settings for prefix queries.
    protected FuzzyOptions fuzzyOptions;
    // Optional regular-expression settings for regex queries.
    protected RegexOptions regexOptions;
    // Raw query contexts; parsed lazily in build() once the mapping is known.
    protected BytesReference contextBytes = null;
    // Whether duplicate suggestions are filtered from the response.
    protected boolean skipDuplicates = false;

    public CompletionSuggestionBuilder(String field) {
        super(field);
    }

    /**
     * internal copy constructor that copies over all class fields except for the field which is
     * set to the one provided in the first argument
     */
    private CompletionSuggestionBuilder(String fieldname, CompletionSuggestionBuilder in) {
        super(fieldname, in);
        fuzzyOptions = in.fuzzyOptions;
        regexOptions = in.regexOptions;
        contextBytes = in.contextBytes;
        skipDuplicates = in.skipDuplicates;
    }

    /**
     * Read from a stream.
     */
    public CompletionSuggestionBuilder(StreamInput in) throws IOException {
        super(in);
        fuzzyOptions = in.readOptionalWriteable(FuzzyOptions::new);
        regexOptions = in.readOptionalWriteable(RegexOptions::new);
        contextBytes = in.readOptionalBytesReference();
        // skip_duplicates was introduced in 6.1.0; older nodes don't send it.
        if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
            skipDuplicates = in.readBoolean();
        }
    }

    // Serializes this builder; must mirror the stream constructor above,
    // including the 6.1.0 version gate for skip_duplicates.
    @Override
    public void doWriteTo(StreamOutput out) throws IOException {
        out.writeOptionalWriteable(fuzzyOptions);
        out.writeOptionalWriteable(regexOptions);
        out.writeOptionalBytesReference(contextBytes);
        if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
            out.writeBoolean(skipDuplicates);
        }
    }

    /**
     * Sets the prefix to provide completions for.
     * The prefix gets analyzed by the suggest analyzer.
     */
    @Override
    public CompletionSuggestionBuilder prefix(String prefix) {
        super.prefix(prefix);
        return this;
    }

    /**
     * Same as {@link #prefix(String)} with fuzziness of <code>fuzziness</code>
     */
    public CompletionSuggestionBuilder prefix(String prefix, Fuzziness fuzziness) {
        super.prefix(prefix);
        this.fuzzyOptions = new FuzzyOptions.Builder().setFuzziness(fuzziness).build();
        return this;
    }

    /**
     * Same as {@link #prefix(String)} with full fuzzy options
     * see {@link FuzzyOptions.Builder}
     */
    public CompletionSuggestionBuilder prefix(String prefix, FuzzyOptions fuzzyOptions) {
        super.prefix(prefix);
        this.fuzzyOptions = fuzzyOptions;
        return this;
    }

    /**
     * Sets a regular expression pattern for prefixes to provide completions for.
     */
    @Override
    public CompletionSuggestionBuilder regex(String regex) {
        super.regex(regex);
        return this;
    }

    /**
     * Same as {@link #regex(String)} with full regular expression options
     * see {@link RegexOptions.Builder}
     */
    public CompletionSuggestionBuilder regex(String regex, RegexOptions regexOptions) {
        this.regex(regex);
        this.regexOptions = regexOptions;
        return this;
    }

    /**
     * Sets query contexts for completion
     * @param queryContexts named query contexts
     *        see {@link org.elasticsearch.search.suggest.completion.context.CategoryQueryContext}
     *        and {@link org.elasticsearch.search.suggest.completion.context.GeoQueryContext}
     */
    public CompletionSuggestionBuilder contexts(Map<String, List<? extends ToXContent>> queryContexts) {
        Objects.requireNonNull(queryContexts, "contexts must not be null");
        try {
            // Serialize the contexts to bytes now; they are re-parsed against
            // the field's context mappings in build().
            XContentBuilder contentBuilder = XContentFactory.contentBuilder(CONTEXT_BYTES_XCONTENT_TYPE);
            contentBuilder.startObject();
            for (Map.Entry<String, List<? extends ToXContent>> contextEntry : queryContexts.entrySet()) {
                contentBuilder.startArray(contextEntry.getKey());
                for (ToXContent queryContext : contextEntry.getValue()) {
                    queryContext.toXContent(contentBuilder, EMPTY_PARAMS);
                }
                contentBuilder.endArray();
            }
            contentBuilder.endObject();
            return contexts(contentBuilder);
        } catch (IOException e) {
            throw new IllegalArgumentException(e);
        }
    }

    private CompletionSuggestionBuilder contexts(XContentBuilder contextBuilder) {
        contextBytes = BytesReference.bytes(contextBuilder);
        return this;
    }

    /**
     * Returns whether duplicate suggestions should be filtered out.
     */
    public boolean skipDuplicates() {
        return skipDuplicates;
    }

    /**
     * Should duplicates be filtered or not. Defaults to {@code false}.
     */
    public CompletionSuggestionBuilder skipDuplicates(boolean skipDuplicates) {
        this.skipDuplicates = skipDuplicates;
        return this;
    }

    // Parsing target: the field name only becomes known while parsing, so
    // PARSER fills this mutable holder and fromXContent() copies it into a
    // properly-constructed immutable builder afterwards.
    private static class InnerBuilder extends CompletionSuggestionBuilder {
        private String field;

        InnerBuilder() {
            super("_na_");
        }

        private InnerBuilder field(String field) {
            this.field = field;
            return this;
        }
    }

    // Renders the completion-specific options (fuzzy, regex, skip_duplicates,
    // contexts) into the suggestion body.
    @Override
    protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
        if (fuzzyOptions != null) {
            fuzzyOptions.toXContent(builder, params);
        }
        if (regexOptions != null) {
            regexOptions.toXContent(builder, params);
        }
        if (skipDuplicates) {
            builder.field(SKIP_DUPLICATES_FIELD.getPreferredName(), skipDuplicates);
        }
        if (contextBytes != null) {
            try (InputStream stream = contextBytes.streamInput()) {
                builder.rawField(CONTEXTS_FIELD.getPreferredName(), stream);
            }
        }
        return builder;
    }

    // Parses a completion suggestion from XContent; fails if the mandatory
    // "field" option is absent.
    public static CompletionSuggestionBuilder fromXContent(XContentParser parser) throws IOException {
        CompletionSuggestionBuilder.InnerBuilder builder = new CompletionSuggestionBuilder.InnerBuilder();
        PARSER.parse(parser, builder, null);
        String field = builder.field;
        // now we should have field name, check and copy fields over to the suggestion builder we return
        if (field == null) {
            throw new ElasticsearchParseException(
                "the required field option [" + FIELDNAME_FIELD.getPreferredName() + "] is missing");
        }
        return new CompletionSuggestionBuilder(field, builder);
    }

    // Resolves the target field against the mapping, validates it is a
    // completion field, and (only now) parses any buffered query contexts.
    @Override
    public SuggestionContext build(QueryShardContext context) throws IOException {
        CompletionSuggestionContext suggestionContext = new CompletionSuggestionContext(context);
        // copy over common settings to each suggestion builder
        final MapperService mapperService = context.getMapperService();
        populateCommonFields(mapperService, suggestionContext);
        suggestionContext.setSkipDuplicates(skipDuplicates);
        suggestionContext.setFuzzyOptions(fuzzyOptions);
        suggestionContext.setRegexOptions(regexOptions);
        if (shardSize != null) {
            suggestionContext.setShardSize(shardSize);
        }
        MappedFieldType mappedFieldType = mapperService.fullName(suggestionContext.getField());
        if (mappedFieldType == null || mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType == false) {
            throw new IllegalArgumentException("Field [" + suggestionContext.getField() + "] is not a completion suggest field");
        }
        if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) {
            CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType;
            suggestionContext.setFieldType(type);
            if (type.hasContextMappings() && contextBytes != null) {
                Map<String, List<ContextMapping.InternalQueryContext>> queryContexts = parseContextBytes(contextBytes,
                    context.getXContentRegistry(), type.getContextMappings());
                suggestionContext.setQueryContexts(queryContexts);
            } else if (contextBytes != null) {
                // Contexts were supplied but the field declares no context mappings.
                throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context");
            }
        }
        assert suggestionContext.getFieldType() != null : "no completion field type set";
        return suggestionContext;
    }

    // Re-parses the buffered context bytes into typed query contexts, keyed
    // by context name, using the field's context mappings.
    static Map<String, List<ContextMapping.InternalQueryContext>> parseContextBytes(BytesReference contextBytes,
                NamedXContentRegistry xContentRegistry, ContextMappings contextMappings) throws IOException {
        try (XContentParser contextParser = XContentHelper.createParser(xContentRegistry,
            LoggingDeprecationHandler.INSTANCE, contextBytes, CONTEXT_BYTES_XCONTENT_TYPE)) {
            contextParser.nextToken();
            Map<String, List<ContextMapping.InternalQueryContext>> queryContexts = new HashMap<>(contextMappings.size());
            assert contextParser.currentToken() == XContentParser.Token.START_OBJECT;
            XContentParser.Token currentToken;
            String currentFieldName;
            while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (currentToken == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = contextParser.currentName();
                    final ContextMapping<?> mapping = contextMappings.get(currentFieldName);
                    queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser));
                }
            }
            return queryContexts;
        }
    }

    @Override
    public String getWriteableName() {
        return SUGGESTION_NAME;
    }

    @Override
    protected boolean doEquals(CompletionSuggestionBuilder other) {
        return skipDuplicates == other.skipDuplicates &&
            Objects.equals(fuzzyOptions, other.fuzzyOptions) &&
            Objects.equals(regexOptions, other.regexOptions) &&
            Objects.equals(contextBytes, other.contextBytes);
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(fuzzyOptions, regexOptions, contextBytes, skipDuplicates);
    }
}
|
wvanheemstra/core
|
public/resources/js/deft/Deft/promise/Chain.js
|
<reponame>wvanheemstra/core<filename>public/resources/js/deft/Deft/promise/Chain.js
// Generated by CoffeeScript 1.4.0
/*
Copyright (c) 2012 [DeftJS Framework Contributors](http://deftjs.org)
Open source under the [MIT License](http://en.wikipedia.org/wiki/MIT_License).
sequence(), parallel(), pipeline() methods adapted from:
[when.js](https://github.com/cujojs/when)
Copyright (c) <NAME> & <NAME>
Open source under the [MIT License](http://en.wikipedia.org/wiki/MIT_License).
*/
/**
* Utility class with static methods to create chains of Deft.promise.Promises objects.
*/
Ext.define('Deft.promise.Chain', {
  alternateClassName: ['Deft.Chain'],
  requires: ['Deft.promise.Promise'],

  statics: {
    /**
     * Runs an Array (or Deferred/Promise of an Array) of functions one after
     * another. Each function may return its result as a Promise.
     * Resolves to an Array holding every function's result, in call order.
     */
    sequence: function(functions, scope) {
      var collect = function(results, task) {
        return Deft.Promise.when(task.call(scope)).then(function(value) {
          results.push(value);
          return results;
        });
      };
      return Deft.Promise.reduce(functions, collect, []);
    },

    /**
     * Runs an Array (or Deferred/Promise of an Array) of functions
     * concurrently. Each function may return its result as a Promise.
     * Resolves to an Array holding every function's result, in call order.
     */
    parallel: function(functions, scope) {
      var invoke = function(task) {
        return task.call(scope);
      };
      return Deft.Promise.map(functions, invoke);
    },

    /**
     * Runs an Array (or Deferred/Promise of an Array) of functions as a
     * pipeline: each function receives the previous function's result.
     * Resolves to the final function's result.
     */
    pipeline: function(functions, scope, initialValue) {
      var step = function(accumulator, task) {
        return task.call(scope, accumulator);
      };
      return Deft.Promise.reduce(functions, step, initialValue);
    }
  }
});
|
ministryofjustice/hmpps-risk-assessment-ui
|
integration-tests/pages/predictors/predictorsPage.js
|
<filename>integration-tests/pages/predictors/predictorsPage.js
const page = require('../page')

// Page object for the offender-scores (predictors) screen.
function predictorsPage() {
  return page("Offender's scores", {
    submit: function () {
      return cy.get('.govuk-button').contains('Submit scores to OASys')
    },
  })
}

// Lightweight accessors for the intermediate "needs" questionnaire page.
function needsPage() {
  return {
    questions: function () {
      return cy.get('.govuk-form-group')
    },
    save: function () {
      return cy.get('button').contains('Save and continue')
    },
  }
}

export default {
  verifyOnPage: predictorsPage,
  visit() {
    cy.visit(`/fb6b7c33-07fc-4c4c-a009-8d60f66952c4/questiongroup/RSR/0/2`)
    const needs = needsPage()
    needs
      .questions()
      .contains('Have you completed an interview with the individual?')
      .parent()
      .find('input') // Have you completed an interview with the individual? (No)
      .check('NO')
    needsPage()
      .save()
      .click()
    return predictorsPage
  },
}
|
nutiteq/advancedlayers
|
src/main/java/com/nutiteq/utils/UtfGridHelper.java
|
<filename>src/main/java/com/nutiteq/utils/UtfGridHelper.java
package com.nutiteq.utils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.InflaterInputStream;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
public class UtfGridHelper {

    /** UTFGrid code point meaning "no feature at this cell". */
    private static final int UTFGRID_NODATA = 32;

    public static final String TEMPLATED_FULL_KEY = "templated_full";
    public static final String TEMPLATED_TEASER_KEY = "templated_teaser";
    public static final String TEMPLATED_LOCATION_KEY = "templated_location";

    // First two bytes of a zlib stream with default compression (RFC 1950).
    static byte[] ZLIB_HEADER = {(byte) 0x78, (byte) 0x9C};

    /** Decoded UTFGrid tile: rows of grid code points, key table, optional per-key data. */
    public static class MBTileUTFGrid {
        public String[] grid = null;
        public String[] keys = null;
        public JSONObject data = null;
    }

    /**
     * Decodes a UTFGrid tile from raw bytes. Handles zlib-compressed input,
     * plain JSON, and JSONP ("callback(...)") wrapping.
     *
     * @param gridBytes raw tile bytes (compressed or not)
     * @return the decoded grid
     * @throws IOException if inflation fails
     * @throws JSONException if the JSON payload is malformed
     */
    public static MBTileUTFGrid decodeUtfGrid(byte[] gridBytes) throws IOException, JSONException {
        String gridJSON;
        if (gridBytes[0] == ZLIB_HEADER[0] && gridBytes[1] == ZLIB_HEADER[1]) {
            // seems to be compressed with ZLIB
            InflaterInputStream in = new InflaterInputStream(
                new ByteArrayInputStream(gridBytes));
            try {
                ByteArrayOutputStream inflatedOut = new ByteArrayOutputStream();
                int readLength;
                byte[] block = new byte[1024];
                while ((readLength = in.read(block)) != -1) {
                    inflatedOut.write(block, 0, readLength);
                }
                inflatedOut.flush();
                // NOTE(review): decodes with the platform default charset; the
                // UTFGrid spec mandates UTF-8 — confirm before relying on this
                // for non-ASCII keys on platforms with a different default.
                gridJSON = new String(inflatedOut.toByteArray());
            } finally {
                // Previously never closed: release the Inflater's native
                // memory promptly instead of waiting for finalization.
                in.close();
            }
        } else {
            // uncompressed
            gridJSON = new String(gridBytes);
            // remove JSONP callback wrapper, e.g. "grid({...})"
            if (!gridJSON.startsWith("{")) {
                gridJSON = gridJSON.substring(gridJSON.indexOf("(") + 1, gridJSON.lastIndexOf(")"));
            }
        }
        MBTileUTFGrid grid = new MBTileUTFGrid();
        JSONObject root = new JSONObject(gridJSON);
        JSONArray gridA = root.getJSONArray("grid");
        JSONArray keysA = root.getJSONArray("keys");
        grid.grid = new String[gridA.length()];
        for (int i = 0; i < gridA.length(); i++) {
            grid.grid[i] = gridA.getString(i);
        }
        grid.keys = new String[keysA.length()];
        for (int i = 0; i < keysA.length(); i++) {
            grid.keys[i] = keysA.getString(i);
        }
        grid.data = root.optJSONObject("data");
        return grid;
    }

    /**
     * get clicked UTFgrid code within the tile.
     * from https://github.com/mapbox/mbtiles-spec/blob/master/1.1/utfgrid.md "Mapping an ID to a key"
     * @param tileSize usually 256
     * @param clickedX click x within the tile, in pixels
     * @param clickedY click y within the tile, in pixels
     * @param grid decoded grid for this tile
     * @param utfgridRadius search radius in pixels when the exact cell has no data (0 disables)
     * @return decoded id (index into {@link MBTileUTFGrid#keys}), or negative if none found
     */
    public static int utfGridCode(int tileSize, int clickedX, int clickedY,
            MBTileUTFGrid grid, int utfgridRadius) {
        // Cast before dividing: the original integer division truncated the
        // scale factor whenever tileSize was not an exact multiple of the
        // grid resolution (e.g. 256 / 100 gave 2.0 instead of 2.56),
        // skewing every pixel-to-cell lookup.
        double factor = (double) tileSize / grid.grid.length;
        // quick lookup with exact location
        int row = (int) Math.round(clickedY / factor);
        int col = (int) Math.round(clickedX / factor);
        int id = grid.grid[row].codePointAt(col);

        // search nearby if not found
        // TODO: approximate search works within one tile only, so if click is near tile border, and data is in neighbor tile, then it will not be found
        if (utfgridRadius > 0 && id == UTFGRID_NODATA) {
            // search with pixel tolerance, limit to range [0...grid.grid.length[
            int rowMin = Math.max((int) Math.round((clickedY - utfgridRadius) / factor), 0);
            int rowMax = Math.min((int) Math.round((clickedY + utfgridRadius) / factor), grid.grid.length - 1);
            int colMin = Math.max((int) Math.round((clickedX - utfgridRadius) / factor), 0);
            int colMax = Math.min((int) Math.round((clickedX + utfgridRadius) / factor), grid.grid.length - 1);

            // find first match, may be not the closest one really.
            for (row = rowMin; row <= rowMax && id == UTFGRID_NODATA; row++) {
                for (col = colMin; col <= colMax && id == UTFGRID_NODATA; col++) {
                    id = grid.grid[row].codePointAt(col);
                }
            }
        }

        // Reverse the UTFGrid encoding: code points >= 93 and >= 35 were
        // shifted up by one during encoding to skip '\' and '"'.
        if (id >= 93) --id;
        if (id >= 35) --id;
        id -= UTFGRID_NODATA;
        return id;
    }
}
|
jong6989/pcsds_app
|
public/app/pages/application/controller.js
|
<reponame>jong6989/pcsds_app<gh_stars>0
'use strict';
myAppModule.controller('application_controller', function ($scope,$filter, $http, $location, $utils, $mdDialog, $interval, Upload, $localStorage) {
$scope.selectedIndex = 0;
$scope.mun = [];
$scope.places_of_transport = [];
$scope.shippers_name = [$scope.user.data.full_name];
$scope.shippers_address = [$scope.user.data.current_address];
$scope.uploading_file = false;
$scope.is_loading = false;
$scope.is_uploading = false;
$scope.photo_uploading_rate = 0;
$scope.picFile = null;
$scope.is_using_camera = false;
$scope.attachment_select_index = -1;
if($localStorage.brain_online_application == undefined) $localStorage.brain_online_application = {};
$scope.chainsaw_brand_list = [];
$scope.ao12_specimen_list = [];
$scope.clear_cropping_image = ()=>{
$scope.picFile = null;
}
$scope.toggle_using_camera = ()=>{
$scope.is_using_camera = !$scope.is_using_camera;
}
$scope.is_croping_image = ()=>{
return ($scope.picFile == null) ? false : true;
};
$scope.upload_process = ()=>{
return Upload.isUploadInProgress();
}
$scope.change_attachment_index = (i)=>{
$scope.attachment_select_index = i;
}
$scope.upload_photo = function(dataUrl, name){
$scope.is_using_camera = false;
//check auth
let profileId = localData.get('profileId');
if(profileId){
$scope.is_uploading = true;
let dateStamp = Date.now();
let uploadImage = storageRef.child(`uploads/${profileId}/profile_pictures/${dateStamp}-${name}`);
uploadImage.putString(dataUrl, 'data_url').then(function(snapshot) {
snapshot.ref.getDownloadURL().then(function(downloadURL) {
$scope.new_application.applicant_photo = downloadURL;
$scope.picFile = null;
$scope.is_uploading =false;
$scope.$apply();
});
}).catch(()=>{
Swal.fire({
type: 'error',
title: 'Oops...',
text: 'Upload Failed',
footer: 'Please try again'
});
$scope.is_uploading = false;
$scope.$apply();
});
}else {
location.reload();
}
// Upload.upload({
// url: api_address,
// data: {
// action:"applicant/account/upload_photo",
// user_id : $scope.user.id,
// file: Upload.dataUrltoBlob(dataUrl, name)
// }
// }).then(function (data) {
// if(data.data.status == 1){
// $scope.new_application.applicant_photo = data.data.data;
// }
// }, null, function (evt) {
// $scope.photo_uploading_rate = parseInt(100.0 * evt.loaded / evt.total);
// });
};
//initialize data
$http.get( "json/permitting/specimen_classification.json").then(function(data){
$scope.specimen_quality_list = data.data.data;
});
$http.get( "json/permitting/organizations.json").then(function(data){
$scope.organization_list = data.data.data;
});
$http.get( "json/permitting/rff_specimen.json").then(function(data){
$scope.rff_specimen_list = data.data.data;
});
$http.get( "json/permitting/ao12_specimen.json").then(function(data){
$scope.ao12_specimen_list = data.data.data;
});
$http.get( "json/permitting/permit_types.json").then(function(data){
$scope.permit_types = data.data.data;
});
$http.get( "json/permitting/gp_specimen.json").then(function(data){
$scope.gp_specimen_list = data.data.data;
});
$scope.change_current_findex = (n)=>{
$scope.selectedIndex = n;
};
$http.get( "json/profile/municipality.json").then(function(data){
$scope.municipalities = data.data.data;
});
$http.get( "json/profile/nationalities.json").then(function(data){
$scope.nationalities = data.data.data;
});
$http.get( "json/profile/purpose_of_transport.json").then(function(data){
$scope.other_purpose = data.data.data;
});
$http.get( "json/profile/place_of_transport.json").then(function(data){
angular.forEach(data.data.data, function(value, key) {
$scope.places_of_transport.push(value.name);
});
});
$scope.initData = function(n){
if($localStorage.brain_online_application[n] == undefined){
$scope.new_application = "";
$scope.new_application = {};
$scope.new_application.applicant = $scope.user.data.full_name;
$scope.new_application.contact = $scope.user.data.current_phone;
$scope.new_application.attachments = [];
$scope.new_application.tin_no = $scope.user.data.tin_no;
$interval(()=>{
$localStorage.brain_online_application[n] = $scope.new_application;
},10000);
}else {
$scope.new_application = $localStorage.brain_online_application[n];
}
};
$scope.myDate = new Date();
$scope.minDate = new Date(
$scope.myDate.getFullYear() - 90,
$scope.myDate.getMonth(),
$scope.myDate.getDate()
);
$scope.maxDate = new Date(
$scope.myDate.getFullYear() - 18,
$scope.myDate.getMonth(),
$scope.myDate.getDate()
);
$scope.set_permit_type = (x)=>{$scope.new_application.required_permit_type = x;};
$scope.set_municipality = function(mun){
$scope.mun = mun;
$scope.new_application.place_of_origin.barangay = null;
};
// Validate and submit a permit application, recording it as a pending ("0")
// transaction. `application` is the form model, `key` is the application
// name stored on the transaction, `type` selects the QR confirmation page.
// Fix: corrected the user-facing "Apllicant" typo and removed dead
// commented-out code.
$scope.submit_application = function(application,key,type){
  $scope.to_date = function(d){
    return $filter('date')(d, "yyyy-MM-dd");
  };
  // A 2 x 2 photo is required before submission.
  if($scope.new_application.applicant_photo == 'images/user.png'){
    $scope.toast("Please upload a photo!");
    return null;
  }
  // Require at least first, middle and last name (3 space-separated parts).
  let ap_name = application.applicant.split(' ');
  if(ap_name.length < 3){
    $scope.toast("Applicant Name is invalid! Check your middle name.");
    return null;
  }
  $scope.is_loading = true;
  const tId = Date.now();
  fire.db.transactions.query.add(
    {
      "data" : { "application": application },
      "date" : tId,
      "status" : "0",
      "user" : $scope.user.data,
      "name" : key
    }
  ).then(ref=>{
    // Store the generated document id back on the record for later lookup.
    fire.db.transactions.update(ref.id,{"id":ref.id});
    $scope.is_loading = false;
    $scope.toast(" Processing Started, Please Wait for a response within 3 days... Your Transaction ID is : " + tId);
    // NOTE(review): initData is declared as initData(n); calling it with no
    // argument resets the draft stored under key `undefined` — confirm the
    // intended key (possibly `key`).
    $scope.initData();
    $scope.$apply();
    $scope.openQrPage(type,ref.id,tId);
  });
};
// Upload the selected files `fs` one at a time to Firebase Storage under the
// current profile, append each download URL to the profile document's
// `uploads` array, and attach it to the in-progress application.
$scope.upload_attachments = (fs)=>{
  var upload_file = (idx)=>{
    $scope.uploading_file = true;
    let profileId = localData.get('profileId');
    if(profileId){
      // Timestamp prefix keeps repeated uploads of the same filename distinct.
      let dateStamp = Date.now();
      let uploadRef = storageRef.child(`uploads/${profileId}/attachments/${dateStamp}-${fs[idx].name}`);
      uploadRef.put(fs[idx]).then(function(snapshot) {
        snapshot.ref.getDownloadURL().then(function(downloadURL) {
          db.collection('profile').doc(profileId).update({"uploads":
            firebase.firestore.FieldValue.arrayUnion({
              name : fs[idx].name,
              url : downloadURL
            })
          });
          $scope.uploading_file = false;
          // Recurse to upload the next file, if any remain.
          if(fs.length !== (idx + 1) ){
            upload_file(idx + 1);
          }
          // -1 means "append as a new attachment"; otherwise replace the URL
          // of the attachment slot currently being edited.
          if($scope.attachment_select_index==-1){
            $scope.new_application.attachments.push({
              name: fs[idx].name,
              url : downloadURL
            });
          }else {
            $scope.add_attachment(downloadURL);
          }
          $scope.$apply();
        });
      }).catch(()=>{
        Swal.fire({
          type: 'error',
          title: 'Oops...',
          text: 'Upload Failed',
          footer: 'Please try again'
        });
        $scope.uploading_file = false;
        $scope.$apply();
      });
    }else {
      // No profile id in local storage — session is stale; force a reload.
      location.reload();
    }
    // Upload.upload({
    //   url: api_address,
    //   data: {
    //     action:"applicant/account/upload_attachments",
    //     file: fs[idx],
    //     user_id : $scope.user.id
    //   }
    // }).then(function (data) {
    //   $scope.uploading_file = false;
    //   if(fs.length == (idx + 1) ){
    //     $scope.user = $localStorage.brain_app_user = data.data.data.user;
    //     if($scope.attachment_select_index==-1){
    //       $scope.new_application.attachments.push({
    //         name: data.data.data.file_name,
    //         url : data.data.data.url
    //       })
    //     }else {
    //       $scope.add_attachment(data.data.data.url);
    //     }
    //   }else {
    //     upload_file(idx + 1);
    //   }
    // });
  };
  if(fs.length > 0 ) upload_file(0);
};
// Report whether `item` already appears in `list`.
// Loose equality (==) deliberately preserved from the original behaviour.
$scope.is_on_file_list = (item, list) => {
  for (const entry of list) {
    if (entry == item) return true;
  }
  return false;
};
// Replace the URL of the attachment slot the user is currently editing
// (attachment_select_index), then dismiss the picker dialog.
// Fix: removed leftover console.log debug statements.
$scope.add_attachment = (x)=>{
  $scope.new_application.attachments[$scope.attachment_select_index].url = x;
  $scope.close_dialog();
};
});
|
JonFerraiolo/eyevocalize
|
client/main.js
|
<reponame>JonFerraiolo/eyevocalize
import { startupChecks } from './startupChecks.js';
import { helpShowing, toggleHelp, showHelp } from './help.js';
import { popupShowing } from './popup.js';
import { updateTextEntryRow, TextEntryRowSetFocus, TextEntryRowGetText, TextEntryRowSetText, getLastTextSelection } from './TextEntryRow.js';
import { initializeSettings, editSettings, mainAppPercentWhenSmall, getAppFontSize, getSyncMyData, SettingsGetPending, SettingsSync } from './Settings.js';
import { updatePhrases } from './Phrases.js';
import { initializeNotes, NotesGetPending, NotesSync, AddTextToNotes, editNotes } from './Notes.js';
import { initializeHistory, HistoryGetPending, HistorySync, playLastHistoryItem } from './History.js';
import { initializeFavorites, FavoritesGetPending, FavoritesSync, editFavorites } from './MyPhrases.js';
import { fromRight, fromLeft } from './animSlide.js';
import { speak } from './vocalize.js';
import { html, render } from './lib/lit-html/lit-html.js';
import { styleMap } from './lib/lit-html/directives/style-map.js';
// Inject the application stylesheet via a dynamically created <style> element.
let css = `@import 'app.css';`;
let styleElement = document.createElement('style');
styleElement.appendChild(document.createTextNode(css));
document.head.appendChild(styleElement);
// Localization strings; populated from window.EvcLocalization in main().
export let localization;
// True while the main (left) screen is frontmost; second-level screens clear it.
let mainShowing = true;
// Chrome mishandles user changes to voice pitch; callers use this check to
// hide the pitch option when running under Chrome.
export function isChrome() {
  const chrome = window.chrome;
  return Boolean(chrome) && (Boolean(chrome.webstore) || Boolean(chrome.runtime));
};
// Filter the main lists (Notes/History/Favorites) by `text`; when no string
// is supplied, the current text-entry value is used instead.
export function search(text) {
  const query = (typeof text === 'string') ? text : TextEntryRowGetText();
  updateMain(query);
};
let appMinOrMax = 'Min'; // either 'Min' or 'Max', controls whether bottom of screen is blocked off for onscreen keyboard
// Accessor for the current Min/Max layout state.
export function getAppMinOrMax() {
  return appMinOrMax;
}
// Switch between 'Min' (a blank strip is reserved for an onscreen keyboard)
// and 'Max' (full-height UI), re-render, and announce the layout change.
export function setAppMinOrMax(minOrMax) {
  appMinOrMax = minOrMax;
  const blankArea = document.querySelector('.appinitiallyblank');
  const isMin = minOrMax === 'Min';
  blankArea.style.height = isMin ? (100 - mainAppPercentWhenSmall()) + '%' : '0%';
  blankArea.style.display = isMin ? 'flex' : 'none';
  updateMain();
  setTimeout(() => {
    window.dispatchEvent(new CustomEvent("AppLayoutChanged", { detail: null } ));
  }, 0);
}
/*
 * Build the lit-html title row for a screen that slides in from the right.
 * @param {string} title Title shown at the top of the screen
 * @param {function} [returnFunc] Optional handler invoked when the user
 *   clicks the back arrow at top/left; when omitted, no arrow is rendered.
 * @returns {lit-html html`` return object} renderable title row
 */
export function buildSlideRightTitle(title, returnFunc) {
  const handleBack = e => {
    e.preventDefault();
    returnFunc();
  };
  return html`<div class=SlideRightTitle>
    ${returnFunc ? html`<a href="" @click=${handleBack} class=SlideRightBackArrow></a>` : '' }
    <span class=SlideRightTitleText>${title}</span>
  </div>`;
}
// Render the incoming screen into `rightRenderDiv` via params.renderFunc,
// then animate the right pane sliding in over the left pane.
function slideInScreenShow(leftContentDiv, rightContentDiv, rightRenderDiv, params) {
  const { renderFunc, renderFuncParams } = params;
  renderFunc(rightRenderDiv, renderFuncParams);
  fromRight({
    leftContentDiv,
    rightContentDiv,
    animClassName: 'slideFromRightAnim',
    endAnimClassName: 'endFromRightAnim',
  });
}
// Reverse the slide-in animation, restoring the left pane to the front.
function slideInScreenHide(leftContentDiv, rightContentDiv) {
  fromLeft({
    leftContentDiv,
    rightContentDiv,
    origAnimClassName: 'endFromRightAnim',
    undoAnimClassName: 'undoSlideFromRightAnim'
  });
}
// Slide a second-level screen in over the main screen; marks main hidden so
// focus handling skips the text-entry row.
export function secondLevelScreenShow(params) {
  mainShowing = false;
  slideInScreenShow(document.querySelector('.mainleft'), document.querySelector('.mainright'),
    document.querySelector('.secondlevelleft'), params);
}
// Dismiss the second-level screen, restore the main screen, and redraw it.
export function secondLevelScreenHide() {
  slideInScreenHide(document.querySelector('.mainleft'), document.querySelector('.mainright'));
  mainShowing = true;
  updateMain();
}
// Slide a third-level screen in over the second-level screen.
// NOTE(review): '.secondlevelright' is passed as both the sliding pane and
// the render target — confirm this is intentional (secondLevelScreenShow
// renders into a different div than it slides).
export function thirdLevelScreenShow(params) {
  slideInScreenShow(document.querySelector('.secondlevelleft'), document.querySelector('.secondlevelright'),
    document.querySelector('.secondlevelright'), params);
}
// Dismiss the third-level screen, restoring the second-level screen.
export function thirdLevelScreenHide() {
  slideInScreenHide(document.querySelector('.secondlevelleft'), document.querySelector('.secondlevelright'));
}
/**
 * updateMain is the redraw everything function
 * @param {string} [searchString] optional filter string to filter content shown in Notes, History, Favorites
 * @param {object} [updateWhat] what sections to redraw, the following booleans. Default is redraw all.
 * @param {boolean} [updateWhat.TextEntryRow]
 * @param {boolean} [updateWhat.Notes]
 * @param {boolean} [updateWhat.History]
 * @param {boolean} [updateWhat.Favorites]
 **/
let updateMainInProcess = false;
export function updateMain(searchString, updateWhat) {
  // Re-entrancy guard: setAppMinOrMax (called below) calls back into updateMain.
  if (updateMainInProcess) return;
  updateMainInProcess = true;
  updateWhat = updateWhat || { TextEntryRow: true, Notes: true, History: true, Favorites: true };
  let appFontSize = getAppFontSize();
  let TextEntryRowProps = getLastTextSelection();
  let PhrasesProps = { searchString, updateWhat };
  let onMinOrMax = e => {
    e.preventDefault();
    setAppMinOrMax('Max');
  };
  // Trial banner shown until the user is logged in.
  let trial = window.eyevocalizeUserEmail ? '' :
    html`<div class=TrialVersion>You are using the Trial Version.
    To remove this message, sign up and log in. (The app is free) </div>`;
  render(html`
    <div class=appfullheight style=${styleMap({fontSize: (appFontSize*1.1)+'%'})}>
      ${trial}
      <div class=appmaincontent>
        <div class=main>
          <div class=mainleft>
            <div class=mainleftcontent>
              <div id=TextEntryRowContainer></div>
              <div id=PhrasesContainer></div>
            </div>
          </div>
          <div class=mainright>
            <div class=secondlevelleft></div>
            <div class=secondlevelright></div>
          </div>
          <div class=Help style=${styleMap({display: helpShowing() ? 'block' : 'none'})}></div>
        </div>
      </div>
      <div class=appinitiallyblank>
        <p>This area is intentionally blank to provide room for an onscreen keyboard.</p>
        <p>To use this area instead for the application's user interface, press the
        <span @click=${onMinOrMax} class=icon></span> toggle icon at the top-right of the application.</p>
      </div>
      <div class=footer>
        <a href="/">Home</a>
        <a href="/About">About</a>
        <a href="/TermsOfUse">Terms of Use</a>
        <a href="/PrivacyPolicy">Privacy Policy</a>
        <a href="/Cookies">Cookies</a>
        <a href="/Contact">Contact</a>
      </div>
    </div>
  `, document.body);
  // Re-apply the Min/Max layout (protected from recursion by the guard above).
  setAppMinOrMax(appMinOrMax);
  if (updateWhat.TextEntryRow) {
    updateTextEntryRow(document.getElementById('TextEntryRowContainer'), TextEntryRowProps);
  }
  updatePhrases(document.getElementById('PhrasesContainer'), PhrasesProps);
  if (mainShowing) TextEntryRowSetFocus();
  updateMainInProcess = false;
};
// socket.io connection; created after a successful (auto)login and used
// only for the sync feature.
let socket;
/**
 * main entry point for application after startupChecks, only called once
 **/
function main() {
  localization = window.EvcLocalization;
  // Every browser gets a persistent clientId and lastSync timestamp in
  // localStorage; both are created on first run.
  window.eyevocalizeClientId = localStorage.getItem('clientId');
  if (window.eyevocalizeClientId) {
    window.eyevocalizeLastSync = parseInt(localStorage.getItem('lastSync'));
    if (isNaN(window.eyevocalizeLastSync)) {
      window.eyevocalizeLastSync = 0;
      localStorage.setItem('lastSync', window.eyevocalizeLastSync.toString());
    }
  } else {
    window.eyevocalizeClientId = Date.now().toString();
    localStorage.setItem('clientId', window.eyevocalizeClientId);
    window.eyevocalizeLastSync = 0;
    localStorage.setItem('lastSync', window.eyevocalizeLastSync.toString());
  }
  let currentVersion = 9;
  let initializationProps = { currentVersion };
  initializeSettings(initializationProps);
  initializeNotes(initializationProps);
  initializeHistory(initializationProps);
  initializeFavorites(initializationProps);
  updateMain();
  // Resolves once the user is logged in (either via server-provided globals
  // or via a stored email/checksum pair); rejects on any login failure.
  let autoLoginPromise = new Promise((resolve, reject) => {
    if (window.eyevocalizeUserEmail && window.eyevocalizeUserChecksum) {
      localStorage.setItem('userEmail', window.eyevocalizeUserEmail);
      localStorage.setItem('userChecksum', window.eyevocalizeUserChecksum);
      resolve();
    } else {
      let lsEmail = localStorage.getItem('userEmail');
      let lsChecksum = localStorage.getItem('userChecksum');
      if (lsEmail && lsChecksum) {
        let fetchPostOptions = {
          method: 'POST',
          mode: 'same-origin',
          headers: { "Content-type": "application/json" },
          credentials: 'include',
        };
        let credentials = {
          email: lsEmail,
          checksum: lsChecksum,
        };
        fetchPostOptions.body = JSON.stringify(credentials);
        fetch('/api/autologin', fetchPostOptions).then(resp => {
          if (resp.status === 200) {
            resp.json().then(data => {
              window.eyevocalizeUserEmail = lsEmail;
              window.eyevocalizeUserChecksum = lsChecksum;
              localStorage.setItem('userEmail', window.eyevocalizeUserEmail);
              localStorage.setItem('userChecksum', window.eyevocalizeUserChecksum);
              updateMain();
              resolve();
            });
          } else if (resp.status === 401) {
            resp.json().then(data => {
              let errorMessage;
              if (data.error === 'EMAIL_NOT_REGISTERED') {
                errorMessage = `*** Error: '${lsEmail}' not registered ***`;
              } else if (data.error === 'EMAIL_NOT_VERIFIED') {
                errorMessage = `*** Error: '${lsEmail}' not verified ***`;
              } else if (data.error === 'INCORRECT_PASSWORD') {
                errorMessage = `*** Error: incorrect password for '${lsEmail}' ***`;
              } else {
                errorMessage = `Very sorry. Something unexpected went wrong(autologin 401-1). `;
              }
              console.error(errorMessage);
              reject();
            }).catch(e => {
              console.error(`Very sorry. Something unexpected went wrong (autologin 401-2). `);
              reject();
            });
          } else {
            console.error('autologin fetch bad status='+resp.status);
            reject();
          }
        }).catch(e => {
          console.error('autologin fetch error e='+e);
          reject();
        });
      }
      // NOTE(review): when no stored credentials exist, the promise never
      // settles — the socket.io setup below then never runs; confirm intended.
    }
  });
  // ==============================
  // after completing autologin process, initialize socket.io for messaging between this browser window and server
  // socket.io is only used to implement the sync feature
  // ==============================
  autoLoginPromise.then(() => {
    try {
      socket = io();
      socket.on('disconnect', msg => {
        console.log ('socket.io disconnect. msg='+msg);
      });
      socket.on('reconnect', msg => {
        console.log ('socket.io reconnect. msg='+msg);
        // when reconnecting with server, do an immediate ClientInitiatedSync.
        // If server restarted, it will respond with a ServerInitiatedRefresh,
        // which tells this browser window to reload, which will result in a new ClientInitiatedSync
        sync();
      });
      socket.on('ServerInitiatedRefresh', (serverRefreshDataJson, fn) => {
        try {
          let o = JSON.parse(serverRefreshDataJson);
          let { serverInstance } = o;
          // after every server restart, pull down the latest client code, but only once
          if (window.EvcServerInstance != serverInstance) {
            window.location.reload();
          }
        } catch(e) {
          console.error('error in handling ServerInitiatedRefresh');
        }
      });
      socket.on('ServerInitiatedSync', (serverSyncDataJson, fn) => {
        console.log('ServerInitiatedSync serverSyncDataJson='+serverSyncDataJson);
        try {
          let serverSyncData = JSON.parse(serverSyncDataJson);
          let { updates } = serverSyncData;
          let thisComputerTime = Date.now(); // use this computer's clock because comparisons will use this computer's clock too
          NotesSync(thisComputerTime, updates && updates.Notes);
          HistorySync(thisComputerTime, updates && updates.History);
          FavoritesSync(thisComputerTime, updates && updates.Favorites);
          SettingsSync(thisComputerTime, updates && updates.Settings);
          window.eyevocalizeLastSync = thisComputerTime;
          localStorage.setItem('lastSync', window.eyevocalizeLastSync.toString());
          // fn is socket.io's ack callback; report success back to the server.
          if (typeof fn === 'function') {
            fn(JSON.stringify({ success: true }));
          }
          updateMain(null, { Notes:true, History: true, Favorites: true });
        } catch(e) {
          console.error('sync exception, possibly bad JSON. e=');
          console.dir(e);
          if (typeof fn === 'function') {
            fn(JSON.stringify({ success: false, error: 'client side exception' }));
          }
        }
      });
      sync();
    } catch(e) {
      console.error('socket.io initialization failed. ');
    }
  }, () => {
    console.error('autoLoginPromise reject.');
    window.eyevocalizeUserEmail = null;
    window.eyevocalizeUserChecksum = null;
  }).catch(e => {
    console.error('autoLoginPromise error'+e);
    window.eyevocalizeUserEmail = null;
    window.eyevocalizeUserChecksum = null;
  }).finally(() => {
    // First-run (or logged-out) users get the starting Help screen.
    if (window.eyevocalizeUserEmail === '' || !localStorage.getItem('LoginHelpClosed')) {
      setTimeout(() => {
        showHelp('Starting', 'tall-wide');
      }, 0);
    }
  });
  // Global keyboard shortcuts: Enter=speak, Shift+Enter=newline,
  // Ctrl/Meta+Enter=add to Notes, Ctrl+S=search, Ctrl+.=replay last
  // History item, Ctrl+H=toggle Help.
  document.addEventListener('keydown', e => {
    let shift = e.getModifierState("Shift");
    let control = e.getModifierState("Control");
    let meta = e.getModifierState("Meta");
    if (e.key === 'Enter') {
      if (shift && !control && !meta) {
        // just pass through to default processing, which will add a newline
      } else if (!shift && (control || meta)) {
        e.preventDefault();
        AddTextToNotes();
      } else {
        e.preventDefault();
        speak();
      }
    } else if (e.key === 's' && !shift && (control || meta)) {
      e.preventDefault();
      search();
    } else if (e.key === '.' && !shift && (control || meta)) {
      // Control+period speaks the most recent entry in the History
      e.preventDefault();
      playLastHistoryItem();
    } else if (e.key === 'h' && !shift && (control || meta)) {
      // Control+h toggles visibility of the Help popup
      e.preventDefault();
      toggleHelp();
    } else {
      // just pass through to default processing, which will add the character
    }
  }, false);
  // Keep keyboard focus on the text-entry row whenever focus would be lost.
  document.addEventListener('focusout', e => {
    if (mainShowing && !popupShowing() && !e.relatedTarget) {
      // NOTE(review): 'event' here is the implicit global window.event; the
      // handler parameter is 'e' — e.preventDefault() was likely intended.
      event.preventDefault();
      TextEntryRowSetFocus();
    }
  }, false);
  // On tab show/hide: restore focus and notify the server, syncing on show.
  document.addEventListener('visibilitychange', e => {
    if (!document.hidden) {
      if (mainShowing && !popupShowing()) {
        TextEntryRowSetFocus();
      }
    }
    if (socket /*&& socket.connected*/ && window.eyevocalizeUserEmail && getSyncMyData()) {
      let lastSync = window.eyevocalizeLastSync;
      let clientData = {
        email: window.eyevocalizeUserEmail,
        clientId: window.eyevocalizeClientId,
        lastSync,
      };
      socket.emit(document.hidden ? 'ClientHidden' : 'ClientVisible', JSON.stringify(clientData), msg => {
      });
      if (!document.hidden) {
        sync();
      }
    }
  }, false);
  // Debounce-free relay of window resizes to app components.
  window.addEventListener('resize', e => {
    setTimeout(() => {
      let event = new CustomEvent("AppLayoutChanged", { detail: null } );
      window.dispatchEvent(event);
    }, 0);
  }, false);
};
// Push locally pending changes to the server over socket.io
// (ClientInitiatedSync). No-op unless a socket exists, the user is logged
// in, and the "sync my data" setting is enabled.
export function sync() {
  if (socket /*&& socket.connected*/ && window.eyevocalizeUserEmail && getSyncMyData()) {
    let lastSync = window.eyevocalizeLastSync;
    let syncData = {
      email: window.eyevocalizeUserEmail,
      clientId: window.eyevocalizeClientId,
      lastSync,
      thisSyncClientTimestamp: Date.now(),
      thisSyncServerInstance: window.EvcServerInstance,
      // Only items changed since lastSync are sent.
      updates: {
        Notes: NotesGetPending(lastSync),
        History: HistoryGetPending(lastSync),
        Favorites: FavoritesGetPending(lastSync),
        Settings: SettingsGetPending(lastSync),
      }
    };
    console.log('sync entered. syncData=');
    console.dir(syncData);
    socket.emit('ClientInitiatedSync', JSON.stringify(syncData), msg => {
    });
  }
}
// Entry point: run environment/startup checks, then boot the app.
startupChecks(() => {
  main();
}, () => {});
|
graphisoft-python/TextEngine
|
Support/Modules/VectorImage/ProfileVectorImageBuilder.hpp
|
#ifndef PROFILEVECTORIMAGEBUILDER_HPP
#define PROFILEVECTORIMAGEBUILDER_HPP
#pragma once
// from GSRoot
#include "GSRoot.hpp"
// from Pattern
#include "PolygonDrawingDirection.hpp"
// from VectorImage
#include "VectorImageTypedefs.hpp"
#include "IVectorImageBuilder.hpp"
#include "AssociatedEdgeId.hpp"
#include "HatchVertexId.hpp"
// Forward declarations for the symbol/hatch types used by the builder
// interface below.
// Fix: removed the stray double semicolon after the Sy_LinType declaration.
class Sy_HatchType;
class HatchObject;
class Sy_LinType;
class ProfileVectorImage;

namespace VI {
namespace HatchPriority {
	class HatchPriorityComparer;
}
}

namespace PVI {
	class ProfileVectorImageParameter;
	class EdgeOverrideData;
	class Anchor;
	class DimensionControlTool;
	enum class DimProjectionType;
}
namespace VI {

// Builder that assembles a ProfileVectorImage from 2D symbol primitives
// (arcs, hatches, lines, splines, text, pixmaps, ...) plus profile-specific
// metadata such as anchors, dimension control tools and parameter setups.
class VI_DLL_CLASS_EXPORT ProfileVectorImageBuilder : public IVectorImageBuilder
{
public:
	ProfileVectorImageBuilder (ProfileVectorImage& inImage);
	virtual ~ProfileVectorImageBuilder ();

	// IVectorImageBuilder functions
	virtual GSErrCode AddArc (const Sy_ArcType& arc) override;
	virtual GSErrCode AddFont (const Sy_FontType& font, const GS::UniString& fontName) override;
	virtual GSErrCode AddHatch (Sy_HatchType& hatchRef, HatchObject& hatch, const Geometry::Vector2d* offsCo) override;
	virtual GSErrCode AddHatchFromPolygon (Sy_HatchType& hatchRef,
	HatchObject& syHatch,
	const Geometry::IrregularPolygon2D& polygon,
	const IPolygonDrawingDirection* drawingDirection,
	const Geometry::Vector2d* offsCo,
	const ProfileItem* profileItem,
	const GS::HashTable<Geometry::PolyId, ProfileEdgeData>* profileEdgeData) override;
	virtual GSErrCode AddHotspot (const Sy_HotType& hotspot) override;
	virtual GSErrCode AddLine (const Sy_LinType& line) override;
	virtual GSErrCode AddPixMap (const Sy_PixMapType& pixHeader, GSConstHandle pixMemoHdl, const char* mimeType) override;
	virtual GSErrCode AddPolyLine (const Sy_PolyLinType& syPolyLine, const Geometry::Polyline2D& polyline) override;
	virtual GSErrCode AddRichText (const Sy_RichTextType* richtext) override;
	virtual GSErrCode AddSpline (const Sy_SplineType& spline, const char* pCoordPtr, const char* pSDirsPtr, const Geometry::Vector2d* offsCo) override;
	virtual void SetDrawQueueUsing () override;
	virtual void SetScale (Int32 defScaleIn) override;

	// Member functions (profile-specific extensions)
	GSErrCode AddAnchor (const PVI::ProfileAnchorId& id, const PVI::Anchor& a);
	GSErrCode AddDimControlTool (const PVI::ProfileDimControlToolId& dimToolID, const PVI::DimensionControlTool& tool);
	GSErrCode AddProfileParameter (const PVI::ProfileParameterId& parID);
	GSErrCode AddOffsetParameterSetup (const PVI::ProfileParameterId& paramID,
	const GS::Array<PVI::AssociatedEdge>& offsetEdgeDefs,
	const PVI::Anchor& begAnchor,
	const PVI::Anchor& endAnchor,
	const PVI::DimProjectionType projType,
	const double projAngleRad,
	PVI::ProfileDimControlToolId* dimId = nullptr);
	void AddEdgeOverrideData (const PVI::EdgeOverrideData& edgeOverrideData);
	// Finish building: hatches are ordered with the supplied comparer.
	void Finalize (const VI::HatchPriority::HatchPriorityComparer& comparer);

private:
	ProfileVectorImage& image;
	// First stretch-zone limits; unset until the corresponding zone is added.
	GS::Optional<double> verticalStretchLineFirstLimit;
	GS::Optional<double> horizontalStretchLineFirstLimit;
	bool verticalStretchZoneSet;
	bool horizontalStretchZoneSet;
};

}
#endif
|
lasersonlab/zappy
|
tests/test_array.py
|
import concurrent.futures
import logging
import pytest
import sys
import numpy as np
import zappy.executor
import zappy.direct
import zappy.spark
import zarr
from numpy.testing import assert_allclose
from pyspark.sql import SparkSession
# add/change to "pywren_ndarray" to run the tests using Pywren (requires Pywren to be installed)
# Backend/input-format combinations that parametrize the fixtures below.
TESTS = [
    "direct_ndarray",
    "direct_zarr",
    "executor_ndarray",
    "executor_zarr",
    "spark_ndarray",
    "spark_zarr",
]
# only run Beam tests on Python 2, and don't run executor tests
# (the apache_beam Python SDK used here is Python-2-only, so on py2 the Beam
# backends replace the concurrent.futures executor backends).
if sys.version_info[0] == 2:
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions
    import zappy.beam
    TESTS = [
        "direct_ndarray",
        "direct_zarr",
        "spark_ndarray",
        "spark_zarr",
        "beam_ndarray",
        "beam_zarr",
    ]
class TestZappyArray:
    """End-to-end tests for zappy distributed arrays.

    Each test is parametrized (via the ``xd`` / ``xd_and_temp_store``
    fixtures) over execution backends (direct, concurrent.futures executor,
    Spark, and -- on Python 2 -- Beam) and over ndarray vs. zarr-backed
    input, and compares the backend result against plain numpy on ``x``.
    """
    @pytest.fixture()
    def x(self):
        # Reference dense array; all backend results are compared against it.
        return np.array(
            [
                [0.0, 1.0, 0.0, 3.0, 0.0],
                [2.0, 0.0, 3.0, 4.0, 5.0],
                [4.0, 0.0, 0.0, 6.0, 7.0],
            ]
        )
    @pytest.fixture()
    def chunks(self):
        # Chunking used for all distributed representations of x.
        return (2, 5)
    @pytest.fixture()
    def xz(self, x, chunks, tmpdir):
        # Write x to a local zarr store and return the store's path.
        input_file_zarr = str(tmpdir.join("x.zarr"))
        z = zarr.open(
            input_file_zarr, mode="w", shape=x.shape, dtype=x.dtype, chunks=chunks
        )
        z[:] = x.copy()  # write as zarr locally
        return input_file_zarr
    @pytest.fixture(scope="module")
    def sc(self):
        # Shared local SparkContext for the whole module; py4j noise silenced.
        logger = logging.getLogger("py4j")
        logger.setLevel(logging.WARN)
        spark = (
            SparkSession.builder.master("local[2]")
            .appName("my-local-testing-pyspark-context")
            .getOrCreate()
        )
        yield spark.sparkContext
        spark.stop()
    @pytest.fixture(params=TESTS)
    def xd(self, sc, x, xz, chunks, request):
        # The distributed array under test, one per backend/input combination.
        if request.param == "direct_ndarray":
            yield zappy.direct.from_ndarray(x.copy(), chunks)
        elif request.param == "direct_zarr":
            yield zappy.direct.from_zarr(xz)
        elif request.param == "executor_ndarray":
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                yield zappy.executor.from_ndarray(executor, x.copy(), chunks)
        elif request.param == "executor_zarr":
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                yield zappy.executor.from_zarr(executor, xz)
        elif request.param == "spark_ndarray":
            yield zappy.spark.from_ndarray(sc, x.copy(), chunks)
        elif request.param == "spark_zarr":
            yield zappy.spark.from_zarr(sc, xz)
        elif request.param == "beam_ndarray":
            pipeline_options = PipelineOptions()
            pipeline = beam.Pipeline(options=pipeline_options)
            yield zappy.beam.from_ndarray(pipeline, x.copy(), chunks)
        elif request.param == "beam_zarr":
            pipeline_options = PipelineOptions()
            pipeline = beam.Pipeline(options=pipeline_options)
            yield zappy.beam.from_zarr(pipeline, xz)
        elif request.param == "pywren_ndarray":
            executor = zappy.executor.PywrenExecutor()
            yield zappy.executor.from_ndarray(executor, x.copy(), chunks)
    @pytest.fixture(params=TESTS)
    def xd_and_temp_store(self, sc, x, xz, chunks, request):
        # Like xd, but paired with a writable store for the to_zarr tests
        # (an S3 store for pywren, a local zarr TempStore otherwise).
        if request.param == "direct_ndarray":
            yield zappy.direct.from_ndarray(x.copy(), chunks), zarr.TempStore()
        elif request.param == "direct_zarr":
            yield zappy.direct.from_zarr(xz), zarr.TempStore()
        elif request.param == "executor_ndarray":
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                yield zappy.executor.from_ndarray(
                    executor, x.copy(), chunks
                ), zarr.TempStore()
        elif request.param == "executor_zarr":
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                yield zappy.executor.from_zarr(executor, xz), zarr.TempStore()
        elif request.param == "spark_ndarray":
            yield zappy.spark.from_ndarray(sc, x.copy(), chunks), zarr.TempStore()
        elif request.param == "spark_zarr":
            yield zappy.spark.from_zarr(sc, xz), zarr.TempStore()
        elif request.param == "beam_ndarray":
            pipeline_options = PipelineOptions()
            pipeline = beam.Pipeline(options=pipeline_options)
            yield zappy.beam.from_ndarray(pipeline, x.copy(), chunks), zarr.TempStore()
        elif request.param == "beam_zarr":
            pipeline_options = PipelineOptions()
            pipeline = beam.Pipeline(options=pipeline_options)
            yield zappy.beam.from_zarr(pipeline, xz), zarr.TempStore()
        elif request.param == "pywren_ndarray":
            import s3fs.mapping
            def create_unique_bucket_name(prefix):
                import uuid
                return "%s-%s" % (prefix, str(uuid.uuid4()).replace("-", ""))
            # Pywren workers need a shared store, so use a throwaway S3 bucket.
            s3 = s3fs.S3FileSystem()
            bucket = create_unique_bucket_name("zappy-test")
            s3.mkdir(bucket)
            path = "%s/%s" % (bucket, "test.zarr")
            s3store = s3fs.mapping.S3Map(path, s3=s3)
            executor = zappy.executor.PywrenExecutor()
            yield zappy.executor.from_ndarray(executor, x.copy(), chunks), s3store
            s3.rm(bucket, recursive=True)
    @pytest.fixture(params=["direct", "executor", "spark"])  # TODO: beam
    def zeros(self, sc, request):
        if request.param == "direct":
            yield zappy.direct.zeros((3, 5), chunks=(2, 5), dtype=int)
        elif request.param == "executor":
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                yield zappy.executor.zeros(executor, (3, 5), chunks=(2, 5), dtype=int)
        elif request.param == "spark":
            yield zappy.spark.zeros(sc, (3, 5), chunks=(2, 5), dtype=int)
    @pytest.fixture(params=["direct", "executor", "spark"])  # TODO: beam
    def ones(self, sc, request):
        if request.param == "direct":
            yield zappy.direct.ones((3, 5), chunks=(2, 5), dtype=int)
        elif request.param == "executor":
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                yield zappy.executor.ones(executor, (3, 5), chunks=(2, 5), dtype=int)
        elif request.param == "spark":
            yield zappy.spark.ones(sc, (3, 5), chunks=(2, 5), dtype=int)
    def test_identity(self, x, xd):
        assert_allclose(np.asarray(xd), x)
    def test_astype(self, x, xd):
        xd = xd.astype(int)
        x = x.astype(int)
        assert xd.dtype == x.dtype
        assert_allclose(np.asarray(xd), x)
    def test_astype_inplace(self, x, xd):
        # copy=False must return the same object, mirroring numpy semantics.
        original_id = id(xd)
        xd = xd.astype(int, copy=False)
        assert original_id == id(xd)
        x = x.astype(int, copy=False)
        assert xd.dtype == x.dtype
        assert_allclose(np.asarray(xd), x)
    def test_asarray(self, x, xd):
        assert_allclose(np.asarray(xd), x)
    def test_scalar_arithmetic(self, x, xd):
        xd = (((xd + 1) * 2) - 4) / 1.1
        x = (((x + 1) * 2) - 4) / 1.1
        assert_allclose(np.asarray(xd), x)
    def test_arithmetic(self, x, xd):
        xd = xd * 2 + xd
        x = x * 2 + x
        assert_allclose(np.asarray(xd), x)
    def test_broadcast_row(self, x, xd):
        a = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
        xd = xd + a
        x = x + a
        assert_allclose(np.asarray(xd), x)
    def test_broadcast_col(self, x, xd):
        if sys.version_info[0] == 2 and isinstance(
            xd, zappy.beam.array.BeamZappyArray
        ):  # TODO: fix this
            return
        a = np.array([[1.0], [2.0], [3.0]])
        xd = xd + a
        x = x + a
        assert_allclose(np.asarray(xd), x)
    def test_eq(self, x, xd):
        xd = xd == 0.0
        x = x == 0.0
        assert xd.dtype == x.dtype
        assert_allclose(np.asarray(xd), x)
    def test_ne(self, x, xd):
        xd = xd != 0.0
        x = x != 0.0
        assert_allclose(np.asarray(xd), x)
    def test_invert(self, x, xd):
        xd = ~(xd == 0.0)
        x = ~(x == 0.0)
        assert_allclose(np.asarray(xd), x)
    def test_inplace(self, x, xd):
        # In-place += must mutate the existing object, not rebind a new one.
        original_id = id(xd)
        xd += 1
        assert original_id == id(xd)
        x += 1
        assert_allclose(np.asarray(xd), x)
    def test_simple_index(self, x, xd):
        xd = xd[0]
        x = x[0]
        assert_allclose(xd, x)
    def test_boolean_index(self, x, xd):
        xd = np.sum(xd, axis=1)  # sum rows
        xd = xd[xd > 5]
        x = np.sum(x, axis=1)  # sum rows
        x = x[x > 5]
        assert_allclose(np.asarray(xd), x)
    def test_slice_cols(self, x, xd):
        xd = xd[:, 1:3]
        x = x[:, 1:3]
        assert xd.shape == x.shape
        assert_allclose(np.asarray(xd), x)
    def test_slice_rows(self, x, xd):
        xd = xd[1:3, :]
        x = x[1:3, :]
        assert xd.shape == x.shape
        assert_allclose(np.asarray(xd), x)
    def test_slice_rows_shrink_partitions(self, x, xd):
        if sys.version_info[0] == 2 and isinstance(
            xd, zappy.beam.array.BeamZappyArray
        ):  # TODO: fix this
            return
        # This slice fits within the first chunk, dropping a partition.
        xd = xd[0:2, :]
        x = x[0:2, :]
        assert xd.shape == x.shape
        assert_allclose(np.asarray(xd), x)
    def test_subset_cols_boolean(self, x, xd):
        subset = np.array([True, False, True, False, True])
        xd = xd[:, subset]
        x = x[:, subset]
        assert xd.shape == x.shape
        assert_allclose(np.asarray(xd), x)
    def test_subset_rows_boolean(self, x, xd):
        subset = np.array([True, False, True])
        xd = xd[subset, :]
        x = x[subset, :]
        assert xd.shape == x.shape
        assert_allclose(np.asarray(xd), x)
    def test_subset_cols_int(self, x, xd):
        subset = np.array([1, 3])
        xd = xd[:, subset]
        x = x[:, subset]
        assert xd.shape == x.shape
        assert_allclose(np.asarray(xd), x)
    def test_subset_rows_int(self, x, xd):
        subset = np.array([1, 2])
        xd = xd[subset, :]
        x = x[subset, :]
        assert xd.shape == x.shape
        assert_allclose(np.asarray(xd), x)
    def test_newaxis(self, x, xd):
        xd = np.sum(xd, axis=1)[:, np.newaxis]
        x = np.sum(x, axis=1)[:, np.newaxis]
        assert_allclose(np.asarray(xd), x)
    def test_log1p(self, x, xd):
        log1pnps = np.asarray(np.log1p(xd))
        log1pnp = np.log1p(x)
        assert_allclose(log1pnps, log1pnp)
    def test_sum(self, x, xd):
        if sys.version_info[0] == 2 and isinstance(
            xd, zappy.beam.array.BeamZappyArray
        ):  # TODO: fix this
            return
        totald = np.sum(xd)
        total = np.sum(x)
        assert totald == pytest.approx(total)
    def test_sum_cols(self, x, xd):
        xd = np.sum(xd, axis=0)
        x = np.sum(x, axis=0)
        assert_allclose(np.asarray(xd), x)
    def test_sum_rows(self, x, xd):
        xd = np.sum(xd, axis=1)
        x = np.sum(x, axis=1)
        assert_allclose(np.asarray(xd), x)
    def test_mean(self, x, xd):
        if sys.version_info[0] == 2 and isinstance(
            xd, zappy.beam.array.BeamZappyArray
        ):  # TODO: fix this
            return
        meand = np.mean(xd)
        mean = np.mean(x)
        assert meand == pytest.approx(mean)
    def test_mean_cols(self, x, xd):
        xd = np.mean(xd, axis=0)
        x = np.mean(x, axis=0)
        assert_allclose(np.asarray(xd), x)
    def test_mean_rows(self, x, xd):
        xd = np.mean(xd, axis=1)
        x = np.mean(x, axis=1)
        assert_allclose(np.asarray(xd), x)
    def test_var(self, x, xd):
        # Column variance via E[x^2] - E[x]^2, applied identically to both.
        def var(x):
            mean = x.mean(axis=0)
            mean_sq = np.multiply(x, x).mean(axis=0)
            return mean_sq - mean ** 2
        varnps = np.asarray(var(xd))
        varnp = var(x)
        assert_allclose(varnps, varnp)
    def test_median(self, x, xd):
        mediand = np.median(xd)  # implicitly converts to np.array
        median = np.median(x)
        assert mediand == pytest.approx(median)
    def test_write_zarr(self, x, xd_and_temp_store):
        xd, temp_store = xd_and_temp_store
        xd.to_zarr(temp_store, xd.chunks)
        # read back as zarr directly and check it is the same as x
        z = zarr.open(temp_store, mode="r", shape=x.shape, dtype=x.dtype, chunks=(2, 5))
        arr = z[:]
        assert_allclose(arr, x)
    def test_write_zarr_ncopies(self, x, xd_and_temp_store):
        xd, temp_store = xd_and_temp_store
        if sys.version_info[0] == 2 and isinstance(
            xd, zappy.beam.array.BeamZappyArray
        ):  # TODO: fix this
            return
        xd = xd._repartition_chunks((3, 5))
        ncopies = 3
        xd.to_zarr(temp_store, xd.chunks, ncopies=ncopies)
        # read back as zarr directly and check it is the same as x
        z = zarr.open(
            temp_store,
            mode="r",
            shape=(x.shape[0] * ncopies, x.shape[1]),
            dtype=x.dtype,
            chunks=(1, 5),
        )
        arr = z[:]
        x_ncopies = np.vstack((x,) * ncopies)
        assert_allclose(arr, x_ncopies)
    def test_zeros(self, zeros):
        totals = np.sum(zeros, axis=0)
        x = np.array([0, 0, 0, 0, 0])
        assert_allclose(np.asarray(totals), x)
    def test_ones(self, ones):
        totals = np.sum(ones, axis=0)
        x = np.array([3, 3, 3, 3, 3])
        assert_allclose(np.asarray(totals), x)
    def test_asndarrays(self, x, xd):
        # asndarrays is an executor-backend-only API.
        if not isinstance(xd, zappy.executor.array.ExecutorZappyArray):
            return
        xd1, xd2 = zappy.executor.asndarrays((xd + 1, xd + 2))
        assert_allclose(xd1, x + 1)
        assert_allclose(xd2, x + 2)
|
AYCH-Inc/aych.bitlight.network
|
daemon/peer.c
|
#include "bitcoind.h"
#include "close_tx.h"
#include "commit_tx.h"
#include "controlled_time.h"
#include "cryptopkt.h"
#include "dns.h"
#include "find_p2sh_out.h"
#include "jsonrpc.h"
#include "lightningd.h"
#include "log.h"
#include "names.h"
#include "peer.h"
#include "pseudorand.h"
#include "secrets.h"
#include "state.h"
#include "timeout.h"
#include <bitcoin/base58.h>
#include <bitcoin/script.h>
#include <bitcoin/tx.h>
#include <ccan/array_size/array_size.h>
#include <ccan/io/io.h>
#include <ccan/list/list.h>
#include <ccan/noerr/noerr.h>
#include <ccan/ptrint/ptrint.h>
#include <ccan/str/hex/hex.h>
#include <ccan/structeq/structeq.h>
#include <ccan/tal/str/str.h>
#include <ccan/tal/tal.h>
#include <errno.h>
#include <inttypes.h>
#include <netinet/in.h>
#include <stdlib.h>
#include <sys/socket.h>
#include <sys/types.h>
/* Log an "unimplemented" message against this peer's daemon log and abort.
 * Used as the placeholder body of functions not written yet. */
#define FIXME_STUB(peer) do { log_broken((peer)->dstate->base_log, "%s:%u: Implement %s!", __FILE__, __LINE__, __func__); abort(); } while(0)
/* Context for an in-progress JSON "connect" command. */
struct json_connecting {
    /* This owns us, so we're freed after command_fail or command_success */
    struct command *cmd;
    /* Remote host and port, as given in the JSON request. */
    const char *name, *port;
    /* Anchor amount we offered. */
    u64 satoshis;
};
/* One queued command waiting for the peer to be able to accept it. */
struct pending_cmd {
    struct list_node list;
    /* Called (with arg) when the command is dequeued. */
    void (*dequeue)(struct peer *, void *arg);
    void *arg;
};
/* Look up an established peer by the hex-encoded pubkey in a JSON token.
 * Returns NULL on a malformed pubkey or if no matching (non-INIT) peer
 * exists. */
static struct peer *find_peer(struct lightningd_state *dstate,
                              const char *buffer,
                              jsmntok_t *peeridtok)
{
    struct pubkey peerid;
    struct peer *peer;

    if (!pubkey_from_hexstr(dstate->secpctx,
                            buffer + peeridtok->start,
                            peeridtok->end - peeridtok->start, &peerid))
        return NULL;

    /* Peers still in STATE_INIT don't have an id yet; skip them. */
    list_for_each(&dstate->peers, peer, list) {
        if (peer->state != STATE_INIT && pubkey_eq(&peer->id, &peerid))
            return peer;
    }
    return NULL;
}
/* Build the canonical empty JSON object result ("{}"), allocated off ctx. */
static struct json_result *null_response(const tal_t *ctx)
{
    struct json_result *empty = new_json_result(ctx);

    json_object_start(empty, NULL);
    json_object_end(empty);
    return empty;
}
/* Finish the peer's current command: answer its JSON command (if any)
 * with success or a generic failure, then clear the current-command slot. */
static void peer_cmd_complete(struct peer *peer, enum command_status status)
{
    assert(peer->curr_cmd.cmd != INPUT_NONE);

    /* If it's a json command, complete that now. */
    if (peer->curr_cmd.jsoncmd) {
        if (status == CMD_FAIL)
            /* FIXME: y'know, details. */
            command_fail(peer->curr_cmd.jsoncmd, "Failed");
        else {
            assert(status == CMD_SUCCESS);
            command_success(peer->curr_cmd.jsoncmd,
                            null_response(peer->curr_cmd.jsoncmd));
        }
    }
    peer->curr_cmd.cmd = INPUT_NONE;
}
/* Install `input` (plus optional packet data and originating JSON command)
 * as the peer's single in-flight command.  Exactly one command may be
 * current at a time. */
static void set_current_command(struct peer *peer,
                                const enum state_input input,
                                void *idata,
                                struct command *jsoncmd)
{
    assert(input != INPUT_NONE);
    assert(peer->curr_cmd.cmd == INPUT_NONE);

    peer->curr_cmd.jsoncmd = jsoncmd;
    /* cmddata is a union, so assigning any one member sets it. */
    peer->curr_cmd.cmddata.pkt = idata;
    peer->curr_cmd.cmd = input;
}
/* Feed one input into the peer state machine and handle all of its
 * effects: command completion, logging of newly-queued output packets,
 * broadcasting any transaction it produced, waking output on close, and
 * the error/closed terminal states. */
static void state_single(struct peer *peer,
                         const enum state_input input,
                         const union input *idata)
{
    enum command_status status;
    const struct bitcoin_tx *broadcast;
    size_t old_outpkts = tal_count(peer->outpkt);

    status = state(peer, input, idata, &broadcast);
    log_debug(peer->log, "%s => %s",
              input_name(input), state_name(peer->state));
    switch (status) {
    case CMD_NONE:
        break;
    case CMD_SUCCESS:
        log_add(peer->log, " (command success)");
        peer_cmd_complete(peer, CMD_SUCCESS);
        break;
    case CMD_FAIL:
        log_add(peer->log, " (command FAIL)");
        peer_cmd_complete(peer, CMD_FAIL);
        break;
    case CMD_REQUEUE:
        log_add(peer->log, " (Command requeue)");
        break;
    }

    /* Log (only) the first packet this transition queued. */
    if (tal_count(peer->outpkt) > old_outpkts) {
        Pkt *outpkt = peer->outpkt[old_outpkts].pkt;
        log_add(peer->log, " (out %s)", input_name(outpkt->pkt_case));
    }
    if (broadcast) {
        struct sha256_double txid;

        bitcoin_txid(broadcast, &txid);
        /* FIXME: log_struct */
        log_add(peer->log, " (tx %02x%02x%02x%02x...)",
                txid.sha.u.u8[0], txid.sha.u.u8[1],
                txid.sha.u.u8[2], txid.sha.u.u8[3]);
        bitcoind_send_tx(peer->dstate, broadcast);
    }

    /* Start output if not running already; it will close conn. */
    if (peer->cond == PEER_CLOSED)
        io_wake(peer);

    /* FIXME: Some of these should just result in this peer being killed? */
    if (state_is_error(peer->state)) {
        log_broken(peer->log, "Entered error state %s",
                   state_name(peer->state));
        fatal("Peer entered error state");
    }

    /* Break out and free this peer if it's completely done. */
    if (peer->state == STATE_CLOSED)
        io_break(peer);
}
/* Drain the pending-command queue while the peer is able to accept
 * commands, running each dequeued command through the state machine. */
static void try_command(struct peer *peer)
{
    /* If we can accept a command, and we have one queued, run it. */
    while (peer->cond == PEER_CMD_OK
           && !list_empty(&peer->pending_cmd)) {
        struct pending_cmd *pend = list_pop(&peer->pending_cmd,
                                            struct pending_cmd, list);

        assert(peer->curr_cmd.cmd == INPUT_NONE);

        /* This can fail to enqueue a command! */
        pend->dequeue(peer, pend->arg);
        tal_free(pend);

        /* Only drive the state machine if dequeue actually set one. */
        if (peer->curr_cmd.cmd != INPUT_NONE) {
            state_single(peer, peer->curr_cmd.cmd,
                         &peer->curr_cmd.cmddata);
        }
    }
}
/* Type-safe wrapper around queue_cmd_: checks at compile time that cb's
 * second argument matches arg's type. */
#define queue_cmd(peer, cb, arg)                                \
    queue_cmd_((peer),                                          \
               typesafe_cb_preargs(void, void *,                \
                                   (cb), (arg),                 \
                                   struct peer *),              \
               (arg))
/* Append a command callback to the peer's pending queue, then try to run
 * queued commands immediately if the peer can accept one.
 * (Fix: dropped the stray ';' after the function body, which is not
 * valid at file scope in strict ISO C.) */
static void queue_cmd_(struct peer *peer,
                       void (*dequeue)(struct peer *peer, void *arg),
                       void *arg)
{
    struct pending_cmd *pend = tal(peer, struct pending_cmd);

    pend->dequeue = dequeue;
    pend->arg = arg;
    list_add_tail(&peer->pending_cmd, &pend->list);
    try_command(peer);
}
/* All unrevoked commit txs must have no HTLCs in them. */
bool committed_to_htlcs(const struct peer *peer)
{
const struct commit_info *i;
/* Before anchor exchange, we don't even have cstate. */
if (!peer->us.commit || !peer->us.commit->cstate)
return false;
i = peer->us.commit;
while (i && !i->revocation_preimage) {
if (tal_count(i->cstate->a.htlcs))
return true;
if (tal_count(i->cstate->b.htlcs))
return true;
i = i->prev;
}
i = peer->them.commit;
while (i && !i->revocation_preimage) {
if (tal_count(i->cstate->a.htlcs))
return true;
if (tal_count(i->cstate->b.htlcs))
return true;
i = i->prev;
}
return false;
}
/* Top-level event entry: run the state machine, fire the deferred
 * "all HTLCs cleared" input if one is armed and now satisfied, then run
 * any queued commands. */
static void state_event(struct peer *peer,
                        const enum state_input input,
                        const union input *idata)
{
    state_single(peer, input, idata);

    /* peer->cleared is armed by peer_watch_htlcs_cleared(). */
    if (peer->cleared != INPUT_NONE && !committed_to_htlcs(peer)) {
        enum state_input all_done = peer->cleared;
        peer->cleared = INPUT_NONE;
        state_single(peer, all_done, NULL);
    }
    try_command(peer);
}
/* Output side of the peer connection: pop the next queued packet and
 * write it; sleep when the queue is empty, or close once everything is
 * flushed and the peer is closed. */
static struct io_plan *pkt_out(struct io_conn *conn, struct peer *peer)
{
    struct out_pkt out;
    size_t n = tal_count(peer->outpkt);

    if (n == 0) {
        /* We close the connection once we've sent everything. */
        if (peer->cond == PEER_CLOSED)
            return io_close(conn);
        return io_out_wait(conn, peer, pkt_out, peer);
    }

    /* Pop the head of the queue (shift the rest down). */
    out = peer->outpkt[0];
    memmove(peer->outpkt, peer->outpkt + 1, (sizeof(*peer->outpkt)*(n-1)));
    tal_resize(&peer->outpkt, n-1);
    return peer_write_packet(conn, peer, out.pkt, out.ack_cb, out.ack_arg,
                             pkt_out);
}
/* Input side of the peer connection: hand the just-read packet to the
 * state machine (unless the peer is closed), then read the next one. */
static struct io_plan *pkt_in(struct io_conn *conn, struct peer *peer)
{
    union input idata;
    /* Temporary context so the packet is freed unless the state machine
     * steals it. */
    const tal_t *ctx = tal(peer, char);

    idata.pkt = tal_steal(ctx, peer->inpkt);

    /* We ignore packets if they tell us to. */
    if (peer->cond != PEER_CLOSED)
        state_event(peer, peer->inpkt->pkt_case, &idata);

    /* Free peer->inpkt unless stolen above. */
    tal_free(ctx);

    return peer_read_packet(conn, peer, pkt_in);
}
/* Queued-command shim: kick off the open by making our anchor offer
 * (CMD_OPEN_WITH_ANCHOR or CMD_OPEN_WITHOUT_ANCHOR) the current command. */
static void do_anchor_offer(struct peer *peer, void *unused)
{
    set_current_command(peer, peer->us.offer_anchor, NULL, NULL);
}
/* Crypto is on, we are live: set up secrets, queue the anchor offer, and
 * start full-duplex packet I/O on the connection. */
static struct io_plan *peer_crypto_on(struct io_conn *conn, struct peer *peer)
{
    peer_secrets_init(peer);
    peer_get_revocation_hash(peer, 0, &peer->us.next_revocation_hash);

    assert(peer->state == STATE_INIT);

    /* Using queue_cmd is overkill here, but it works. */
    queue_cmd(peer, do_anchor_offer, NULL);

    return io_duplex(conn,
                     peer_read_packet(conn, peer, pkt_in),
                     pkt_out(conn, peer));
}
/* tal destructor: close the connection (if still open) and unlink the
 * peer from the daemon's peer list. */
static void destroy_peer(struct peer *peer)
{
    if (peer->conn)
        io_close(peer->conn);
    list_del_from(&peer->dstate->peers, &peer->list);
}
/* io_set_finish handler: the connection dropped.  Free not-yet-set-up
 * peers outright; otherwise feed INPUT_CONNECTION_LOST into the state
 * machine and broadcast any transaction it demands. */
static void peer_disconnect(struct io_conn *conn, struct peer *peer)
{
    const struct bitcoin_tx *broadcast;

    log_info(peer->log, "Disconnected");

    /* No longer connected. */
    peer->conn = NULL;

    /* Not even set up yet?  Simply free.*/
    if (peer->state == STATE_INIT) {
        tal_free(peer);
        return;
    }

    /* FIXME: Try to reconnect. */
    if (peer->cond == PEER_CLOSED)
        return;

    state(peer, INPUT_CONNECTION_LOST, NULL, &broadcast);

    if (broadcast) {
        struct sha256_double txid;

        bitcoin_txid(broadcast, &txid);
        /* FIXME: log_struct */
        log_debug(peer->log, "INPUT_CONN_LOST: tx %02x%02x%02x%02x...",
                  txid.sha.u.u8[0], txid.sha.u.u8[1],
                  txid.sha.u.u8[2], txid.sha.u.u8[3]);
        bitcoind_send_tx(peer->dstate, broadcast);
    }
}
/* Allocate and initialize a peer for a fresh connection (in- or out-bound).
 * `offer_anchor` says which side funds the channel; `in_or_out` is only a
 * log-prefix tag.  Returns NULL (freeing the peer) if we cannot determine
 * the remote address. */
static struct peer *new_peer(struct lightningd_state *dstate,
                             struct io_conn *conn,
                             int addr_type, int addr_protocol,
                             enum state_input offer_anchor,
                             const char *in_or_out)
{
    struct peer *peer = tal(dstate, struct peer);

    assert(offer_anchor == CMD_OPEN_WITH_ANCHOR
           || offer_anchor == CMD_OPEN_WITHOUT_ANCHOR);

    /* FIXME: Stop listening if too many peers? */
    list_add(&dstate->peers, &peer->list);

    peer->state = STATE_INIT;
    peer->cond = PEER_CMD_OK;
    peer->dstate = dstate;
    peer->addr.type = addr_type;
    peer->addr.protocol = addr_protocol;
    peer->io_data = NULL;
    peer->secrets = NULL;
    list_head_init(&peer->watches);
    peer->outpkt = tal_arr(peer, struct out_pkt, 0);
    peer->curr_cmd.cmd = INPUT_NONE;
    list_head_init(&peer->pending_cmd);
    peer->commit_tx_counter = 0;
    peer->close_watch_timeout = NULL;
    peer->anchor.watches = NULL;
    peer->cur_commit.watch = NULL;
    peer->closing.their_sig = NULL;
    peer->cleared = INPUT_NONE;

    /* Make it different from other node (to catch bugs!), but a
     * round number for simple eyeballing. */
    peer->htlc_id_counter = pseudorand(1ULL << 32) * 1000;

    /* If we free peer, conn should be closed, but can't be freed
     * immediately so don't make peer a parent. */
    peer->conn = conn;
    io_set_finish(conn, peer_disconnect, peer);

    peer->us.offer_anchor = offer_anchor;
    if (!seconds_to_rel_locktime(dstate->config.rel_locktime,
                                 &peer->us.locktime))
        fatal("Invalid locktime configuration %u",
              dstate->config.rel_locktime);
    peer->us.mindepth = dstate->config.anchor_confirms;
    peer->us.commit_fee_rate = dstate->config.commitment_fee_rate;

    peer->us.commit = peer->them.commit = NULL;
    peer->us.staging_cstate = peer->them.staging_cstate = NULL;

    /* FIXME: Attach IO logging for this peer. */
    tal_add_destructor(peer, destroy_peer);

    peer->addr.addrlen = sizeof(peer->addr.saddr);
    if (getpeername(io_conn_fd(conn), &peer->addr.saddr.s,
                    &peer->addr.addrlen) != 0) {
        log_unusual(dstate->base_log,
                    "Could not get address for peer: %s",
                    strerror(errno));
        return tal_free(peer);
    }

    peer->log = new_log(peer, dstate->log_record, "%s%s:%s:",
                        log_prefix(dstate->base_log), in_or_out,
                        netaddr_name(peer, &peer->addr));
    return peer;
}
/* We connected out (from the "connect" JSON command).  The initiator
 * currently funds the channel, so offer the anchor; answer the JSON
 * command and hand off to the crypto handshake. */
static struct io_plan *peer_connected_out(struct io_conn *conn,
                                          struct lightningd_state *dstate,
                                          struct json_connecting *connect)
{
    /* Initiator currently funds channel */
    struct peer *peer = new_peer(dstate, conn, SOCK_STREAM, IPPROTO_TCP,
                                 CMD_OPEN_WITH_ANCHOR, "out");
    if (!peer) {
        command_fail(connect->cmd, "Failed to make peer for %s:%s",
                     connect->name, connect->port);
        return io_close(conn);
    }
    log_info(peer->log, "Connected out to %s:%s",
             connect->name, connect->port);

    peer->anchor.satoshis = connect->satoshis;
    command_success(connect->cmd, null_response(connect));
    return peer_crypto_setup(conn, peer, peer_crypto_on);
}
/* Listener accept callback: an inbound peer.  They fund the channel
 * (we open without anchor); hand off to the crypto handshake. */
static struct io_plan *peer_connected_in(struct io_conn *conn,
                                         struct lightningd_state *dstate)
{
    struct peer *peer = new_peer(dstate, conn, SOCK_STREAM, IPPROTO_TCP,
                                 CMD_OPEN_WITHOUT_ANCHOR, "in");
    if (!peer)
        return io_close(conn);

    log_info(peer->log, "Peer connected in");
    return peer_crypto_setup(conn, peer, peer_crypto_on);
}
/* Create a listening TCP socket for the given address family, optionally
 * binding it (addr may be NULL for an OS-chosen port).  Returns the fd,
 * or -1 on failure (logged). */
static int make_listen_fd(struct lightningd_state *dstate,
                          int domain, void *addr, socklen_t len)
{
    int fd = socket(domain, SOCK_STREAM, 0);
    if (fd < 0) {
        log_debug(dstate->base_log, "Failed to create %u socket: %s",
                  domain, strerror(errno));
        return -1;
    }

    if (!addr || bind(fd, addr, len) == 0) {
        if (listen(fd, 5) == 0)
            return fd;
        log_unusual(dstate->base_log,
                    "Failed to listen on %u socket: %s",
                    domain, strerror(errno));
    } else
        log_debug(dstate->base_log, "Failed to bind on %u socket: %s",
                  domain, strerror(errno));

    close_noerr(fd);
    return -1;
}
/* Open the daemon's listening sockets on `portnum` (0 = OS-chosen).
 * Tries IPv6 first (which on Linux usually also accepts IPv4), then IPv4
 * on the same port; fatal only if neither family could be bound. */
void setup_listeners(struct lightningd_state *dstate, unsigned int portnum)
{
    struct sockaddr_in addr;
    struct sockaddr_in6 addr6;
    socklen_t len;
    int fd1, fd2;
    u16 listen_port;

    addr.sin_family = AF_INET;
    addr.sin_addr.s_addr = INADDR_ANY;
    addr.sin_port = htons(portnum);

    addr6.sin6_family = AF_INET6;
    addr6.sin6_addr = in6addr_any;
    addr6.sin6_port = htons(portnum);

    /* IPv6, since on Linux that (usually) binds to IPv4 too. */
    fd1 = make_listen_fd(dstate, AF_INET6, portnum ? &addr6 : NULL,
                         sizeof(addr6));
    if (fd1 >= 0) {
        struct sockaddr_in6 in6;

        len = sizeof(in6);
        if (getsockname(fd1, (void *)&in6, &len) != 0) {
            log_unusual(dstate->base_log,
                        "Failed get IPv6 sockname: %s",
                        strerror(errno));
            close_noerr(fd1);
        } else {
            /* Remember the (possibly OS-chosen) port for the
             * IPv4 attempt below. */
            addr.sin_port = in6.sin6_port;

            listen_port = ntohs(addr.sin_port);
            log_info(dstate->base_log,
                     "Creating IPv6 listener on port %u",
                     listen_port);
            io_new_listener(dstate, fd1, peer_connected_in, dstate);
        }
    }

    /* Just in case, aim for the same port... */
    fd2 = make_listen_fd(dstate, AF_INET,
                         addr.sin_port ? &addr : NULL, sizeof(addr));
    if (fd2 >= 0) {
        len = sizeof(addr);
        if (getsockname(fd2, (void *)&addr, &len) != 0) {
            log_unusual(dstate->base_log,
                        "Failed get IPv4 sockname: %s",
                        strerror(errno));
            close_noerr(fd2);
        } else {
            listen_port = ntohs(addr.sin_port);
            log_info(dstate->base_log,
                     "Creating IPv4 listener on port %u",
                     listen_port);
            io_new_listener(dstate, fd2, peer_connected_in, dstate);
        }
    }

    if (fd1 < 0 && fd2 < 0)
        fatal("Could not bind to a network address");
}
/* DNS-resolve/connect failure callback for the "connect" JSON command. */
static void peer_failed(struct lightningd_state *dstate,
                        struct json_connecting *connect)
{
    /* FIXME: Better diagnostics! */
    command_fail(connect->cmd, "Failed to connect to peer %s:%s",
                 connect->name, connect->port);
}
/* JSON-RPC handler for "connect": parse host/port/satoshis and kick off
 * an outbound connection.  (Fix: previously, an invalid satoshis value
 * failed the command but then FELL THROUGH into
 * dns_resolve_and_connect() — after command_fail() has completed the
 * command that owns `connect`.  Now we return immediately.) */
static void json_connect(struct command *cmd,
                         const char *buffer, const jsmntok_t *params)
{
    struct json_connecting *connect;
    jsmntok_t *host, *port, *satoshis;

    if (!json_get_params(buffer, params,
                         "host", &host,
                         "port", &port,
                         "satoshis", &satoshis,
                         NULL)) {
        command_fail(cmd, "Need host, port and satoshis");
        return;
    }

    connect = tal(cmd, struct json_connecting);
    connect->cmd = cmd;
    connect->name = tal_strndup(connect, buffer + host->start,
                                host->end - host->start);
    connect->port = tal_strndup(connect, buffer + port->start,
                                port->end - port->start);

    if (!json_tok_u64(buffer, satoshis, &connect->satoshis)) {
        command_fail(cmd, "'%.*s' is not a valid number",
                     (int)(satoshis->end - satoshis->start),
                     buffer + satoshis->start);
        return;
    }

    if (!dns_resolve_and_connect(cmd->dstate, connect->name, connect->port,
                                 peer_connected_out, peer_failed, connect)) {
        command_fail(cmd, "DNS failed");
        return;
    }
}
/* JSON-RPC "connect" command: open a channel to host:port, offering an
 * anchor of the given size. */
const struct json_command connect_command = {
    "connect",
    json_connect,
    "Connect to a {host} at {port} offering anchor of {satoshis}",
    "Returns an empty result on success"
};
/* State for watching the anchor tx: which state-machine inputs to fire
 * on the various things that can happen to it. */
struct anchor_watch {
    struct peer *peer;

    /* Fired when the anchor reaches our required depth. */
    enum state_input depthok;
    /* Fired if the anchor doesn't appear in time. */
    enum state_input timeout;
    /* Fired if the anchor falls back out of the chain. */
    enum state_input unspent;
    /* Fired when their commitment tx spends the anchor. */
    enum state_input theyspent;
    /* Fired when anything else spends the anchor. */
    enum state_input otherspent;

    /* If timeout != INPUT_NONE, this is the timer. */
    struct oneshot *timer;
};
/* Depth-change callback for the anchor tx: fire `depthok` once it is
 * deep enough, or `unspent` if it falls out of the chain after we
 * stopped waiting for depth. */
static void anchor_depthchange(struct peer *peer, int depth,
                               const struct sha256_double *blkhash,
                               struct anchor_watch *w)
{
    /* Still waiting for it to reach depth? */
    if (w->depthok != INPUT_NONE) {
        /* Beware sign! */
        if (depth >= (int)peer->us.mindepth) {
            enum state_input in = w->depthok;
            w->depthok = INPUT_NONE;
            /* We don't need the timeout timer any more. */
            w->timer = tal_free(w->timer);
            state_event(peer, in, NULL);
        }
    } else {
        /* depth < 0 means it fell out of the chain entirely. */
        if (depth < 0 && w->unspent != INPUT_NONE) {
            enum state_input in = w->unspent;
            w->unspent = INPUT_NONE;
            state_event(peer, in, NULL);
        }
    }
}
/* We don't compare scriptSigs: we don't know them anyway!
 *
 * Returns true if the two transactions are identical in version,
 * locktime, inputs (txid/index/sequence) and outputs (amount/script).
 *
 * Fix: the output-script comparison previously read
 *     memcmp(a, b, script_length != 0)
 * i.e. the "!= 0" was inside the size argument, so at most ONE byte of
 * the scripts was ever compared.  The "!= 0" belongs on the memcmp
 * result, with the full script_length as the size. */
static bool txmatch(const struct bitcoin_tx *txa, const struct bitcoin_tx *txb)
{
    size_t i;

    if (txa->version != txb->version
        || txa->input_count != txb->input_count
        || txa->output_count != txb->output_count
        || txa->lock_time != txb->lock_time)
        return false;

    for (i = 0; i < txa->input_count; i++) {
        if (!structeq(&txa->input[i].txid, &txb->input[i].txid)
            || txa->input[i].index != txb->input[i].index
            || txa->input[i].sequence_number != txb->input[i].sequence_number)
            return false;
    }

    for (i = 0; i < txa->output_count; i++) {
        if (txa->output[i].amount != txb->output[i].amount
            || txa->output[i].script_length != txb->output[i].script_length
            || memcmp(txa->output[i].script, txb->output[i].script,
                      txa->output[i].script_length) != 0)
            return false;
    }

    return true;
}
/* We may have two possible "current" commits; this walks the chain and
 * checks every commit we hold no revocation preimage for against tx. */
static bool is_unrevoked_commit(const struct commit_info *ci,
                                const struct bitcoin_tx *tx)
{
    for (; ci && !ci->revocation_preimage; ci = ci->prev) {
        if (txmatch(ci->tx, tx))
            return true;
    }
    return false;
}
/* A mutual close is a simple 2 output p2sh to the final addresses, but
 * without knowing fee we can't determine order, so examine each output.
 * Returns true if tx pays exactly our-final and their-final p2sh
 * addresses (in either order). */
static bool is_mutual_close(const struct peer *peer,
                            const struct bitcoin_tx *tx)
{
    const u8 *ctx, *our_p2sh, *their_p2sh;
    bool matches;

    if (tx->output_count != 2)
        return false;

    if (!is_p2sh(tx->output[0].script, tx->output[0].script_length)
        || !is_p2sh(tx->output[1].script, tx->output[1].script_length))
        return false;

    /* FIXME: Cache these! */
    ctx = tal(NULL, u8);

    our_p2sh = scriptpubkey_p2sh(ctx,
                                 bitcoin_redeem_single(tx, &peer->us.finalkey));
    their_p2sh = scriptpubkey_p2sh(ctx,
                                   bitcoin_redeem_single(tx, &peer->them.finalkey));

    /* Either output order is acceptable. */
    matches =
        (memcmp(tx->output[0].script, our_p2sh, tal_count(our_p2sh)) == 0
         && memcmp(tx->output[1].script, their_p2sh, tal_count(their_p2sh)) == 0)
        || (memcmp(tx->output[0].script, their_p2sh, tal_count(their_p2sh)) == 0
            && memcmp(tx->output[1].script, our_p2sh, tal_count(our_p2sh)) == 0);

    tal_free(ctx);
    return matches;
}
/* Close-tx depth callback: once buried deeply enough to be considered
 * final, tell the state machine the close is done. */
static void close_depth_cb(struct peer *peer, int depth)
{
    if (depth >= peer->dstate->config.forever_confirms) {
        state_event(peer, BITCOIN_CLOSE_DONE, NULL);
    }
}
/* We assume the tx is valid!  Don't do a blockchain.info and feed this
 * invalid transactions!
 *
 * Something spent the anchor: classify it as their commit tx, a mutual
 * close (then just watch its depth), or an unknown/cheating spend. */
static void anchor_spent(struct peer *peer,
                         const struct bitcoin_tx *tx,
                         struct anchor_watch *w)
{
    union input idata;

    /* FIXME: change type in idata? */
    idata.btc = (struct bitcoin_event *)tx;
    if (is_unrevoked_commit(peer->them.commit, tx))
        state_event(peer, w->theyspent, &idata);
    else if (is_mutual_close(peer, tx))
        add_close_tx_watch(peer, peer, tx, close_depth_cb);
    else
        state_event(peer, w->otherspent, &idata);
}
/* Anchor never appeared in time: fire the timeout input and tear down
 * all the anchor watches. */
static void anchor_timeout(struct anchor_watch *w)
{
    assert(w == w->peer->anchor.watches);
    state_event(w->peer, w->timeout, NULL);

    /* Freeing this gets rid of the other watches, and timer, too. */
    w->peer->anchor.watches = tal_free(w);
}
/* Install watches on the anchor output, mapping each possible on-chain
 * event to a state-machine input.  Any input may be INPUT_NONE to ignore
 * that event; a non-NONE `timeout` also arms a give-up timer. */
void peer_watch_anchor(struct peer *peer,
                       enum state_input depthok,
                       enum state_input timeout,
                       enum state_input unspent,
                       enum state_input theyspent,
                       enum state_input otherspent)
{
    struct anchor_watch *w;

    w = peer->anchor.watches = tal(peer, struct anchor_watch);

    w->peer = peer;
    w->depthok = depthok;
    w->timeout = timeout;
    w->unspent = unspent;
    w->theyspent = theyspent;
    w->otherspent = otherspent;

    add_anchor_watch(w, peer, &peer->anchor.txid, peer->anchor.index,
                     anchor_depthchange,
                     anchor_spent,
                     w);

    /* For anchor timeout, expect 20 minutes per block, +2 hours.
     *
     * Probability(no block in time N) = e^(-N/600).
     * Thus for 1 block, P = e^(-(7200+1*1200)/600) = 0.83 in a million.
     *
     * <NAME> says, if we want to know how many 10-minute intervals for
     * a 1 in a million chance of spurious failure for N blocks, put
     * this into http://www.wolframalpha.com:
     *
     *   e^(-x) * sum x^i / fact(i), i=0 to N < 1/1000000
     *
     * N=20: 51
     * N=10: 35
     * N=8:  31
     * N=6:  28
     * N=4:  24
     * N=3:  22
     * N=2:  20
     *
     * So, our formula of 12 + N*2 holds for N <= 20 at least.
     */
    if (w->timeout != INPUT_NONE) {
        w->timer = oneshot_timeout(peer->dstate, w,
                                   7200 + 20*peer->us.mindepth,
                                   anchor_timeout, w);
    } else
        w->timer = NULL;
}
/* Stop watching the anchor: freeing the watch struct removes all its
 * child watches and timer.  (The depthok/timeout arguments are unused
 * here; they exist to match the state-machine adaptor signature.) */
void peer_unwatch_anchor_depth(struct peer *peer,
                               enum state_input depthok,
                               enum state_input timeout)
{
    assert(peer->anchor.watches);
    peer->anchor.watches = tal_free(peer->anchor.watches);
}
/* Depth callback for our commit tx: track which block it is in and that
 * block's median time; once our relative locktime (in seconds) has
 * elapsed since then, fire the `canspend` input (packed in a ptrint). */
static void commit_tx_depth(struct peer *peer, int depth,
                            const struct sha256_double *blkhash,
                            ptrint_t *canspend)
{
    log_debug(peer->log, "Commit tx reached depth %i", depth);
    /* FIXME: Handle locktime in blocks, as well as seconds! */

    /* Fell out of a block? */
    if (depth < 0) {
        /* Forget any old block. */
        peer->cur_commit.start_time = 0;
        memset(&peer->cur_commit.blockid, 0xFF,
               sizeof(peer->cur_commit.blockid));
        return;
    }

    /* In a new block? */
    if (!structeq(blkhash, &peer->cur_commit.blockid)) {
        peer->cur_commit.start_time = 0;
        peer->cur_commit.blockid = *blkhash;
        /* Asynchronously fills in start_time; we'll be called again. */
        bitcoind_get_mediantime(peer->dstate, blkhash,
                                &peer->cur_commit.start_time);
        return;
    }

    /* Don't yet know the median start time? */
    if (!peer->cur_commit.start_time)
        return;

    /* FIXME: We should really use bitcoin time here. */
    if (controlled_time().ts.tv_sec > peer->cur_commit.start_time
        + rel_locktime_to_seconds(&peer->them.locktime)) {
        /* Free this watch; we're done */
        peer->cur_commit.watch = tal_free(peer->cur_commit.watch);
        state_event(peer, ptr2int(canspend), NULL);
    }
}
/* FIXME: We tell bitcoind to watch all the outputs, which is overkill.
 * Registers each (p2sh) output's redeem hash with bitcoind so spends of
 * them show up in our wallet view. */
static void watch_tx_outputs(struct peer *peer, const struct bitcoin_tx *tx)
{
    varint_t i;

    for (i = 0; i < tx->output_count; i++) {
        struct ripemd160 redeemhash;
        if (!is_p2sh(tx->output[i].script, tx->output[i].script_length))
            fatal("Unexpected non-p2sh output");
        /* p2sh script is OP_HASH160 <push> <20-byte hash> ...; the
         * hash starts at offset 2. */
        memcpy(&redeemhash, tx->output[i].script+2, sizeof(redeemhash));
        bitcoind_watch_addr(peer->dstate, &redeemhash);
    }
}
/* Watch the commit tx until our side is spendable (our to-self output
 * is behind their relative locktime); `canspend` is fired then. */
void peer_watch_delayed(struct peer *peer,
                        const struct bitcoin_tx *tx,
                        enum state_input canspend)
{
    struct sha256_double txid;

    /* We only ever spend the last one. */
    assert(tx == peer->us.commit->tx);
    bitcoin_txid(tx, &txid);
    /* 0xFF-fill marks "no known containing block yet". */
    memset(&peer->cur_commit.blockid, 0xFF,
           sizeof(peer->cur_commit.blockid));
    peer->cur_commit.watch
        = add_commit_tx_watch(tx, peer, &txid, commit_tx_depth,
                              int2ptr(canspend));

    watch_tx_outputs(peer, tx);
}
/* Depth callback for a spend tx: once buried forever-deep, fire the
 * `done` input (packed in a ptrint). */
static void spend_tx_done(struct peer *peer, int depth,
                          const struct sha256_double *blkhash,
                          ptrint_t *done)
{
    log_debug(peer->log, "tx reached depth %i", depth);
    if (depth >= (int)peer->dstate->config.forever_confirms)
        state_event(peer, ptr2int(done), NULL);
}
/* Fee of a commit tx: the anchor amount minus everything its outputs pay. */
uint64_t commit_tx_fee(const struct bitcoin_tx *commit, uint64_t anchor_satoshis)
{
    uint64_t n, paid_out = 0;

    for (n = 0; n < commit->output_count; n++)
        paid_out += commit->output[n].amount;

    assert(anchor_satoshis >= paid_out);
    return anchor_satoshis - paid_out;
}
/* Watch this tx until it's buried enough to be forgotten; fires `done`
 * at forever_confirms depth (via spend_tx_done). */
void peer_watch_tx(struct peer *peer,
                   const struct bitcoin_tx *tx,
                   enum state_input done)
{
    struct sha256_double txid;

    bitcoin_txid(tx, &txid);
    log_debug(peer->log, "Watching tx %02x%02x%02x%02x...",
              txid.sha.u.u8[0],
              txid.sha.u.u8[1],
              txid.sha.u.u8[2],
              txid.sha.u.u8[3]);

    add_commit_tx_watch(tx, peer, &txid, spend_tx_done, int2ptr(done));
}
/* Build the mutual-close tx paying both final keys, with `fee` deducted
 * per the staged channel state.  Returns NULL if the fee can't be
 * afforded. */
struct bitcoin_tx *peer_create_close_tx(struct peer *peer, u64 fee)
{
    struct channel_state cstate;

    /* We don't need a deep copy here, just fee levels. */
    cstate = *peer->us.staging_cstate;
    if (!force_fee(&cstate, fee)) {
        log_unusual(peer->log,
                    "peer_create_close_tx: can't afford fee %"PRIu64,
                    fee);
        return NULL;
    }

    log_debug(peer->log,
              "creating close-tx with fee %"PRIu64": to %02x%02x%02x%02x/%02x%02x%02x%02x, amounts %u/%u",
              fee,
              peer->us.finalkey.der[0], peer->us.finalkey.der[1],
              peer->us.finalkey.der[2], peer->us.finalkey.der[3],
              peer->them.finalkey.der[0], peer->them.finalkey.der[1],
              peer->them.finalkey.der[2], peer->them.finalkey.der[3],
              cstate.a.pay_msat / 1000,
              cstate.b.pay_msat / 1000);

    return create_close_tx(peer->dstate->secpctx, peer,
                           &peer->us.finalkey,
                           &peer->them.finalkey,
                           &peer->anchor.txid,
                           peer->anchor.index,
                           peer->anchor.satoshis,
                           cstate.a.pay_msat / 1000,
                           cstate.b.pay_msat / 1000);
}
/* Compute our proposed mutual-close fee: rate-based on the worst-case
 * close-tx size, clamped (per BOLT #2) to the commit-tx fee and forced
 * even. */
void peer_calculate_close_fee(struct peer *peer)
{
    /* Use actual worst-case length of close tx: based on BOLT#02's
     * commitment tx numbers, but only 1 byte for output count */
    const uint64_t txsize = 41 + 221 + 10 + 32 + 32;
    uint64_t maxfee;

    /* FIXME: Dynamic fee */
    peer->closing.our_fee
        = fee_by_feerate(txsize, peer->dstate->config.closing_fee_rate);

    /* BOLT #2:
     * The sender MUST set `close_fee` lower than or equal to the
     * fee of the final commitment transaction and MUST set
     * `close_fee` to an even number of satoshis.
     */
    maxfee = commit_tx_fee(peer->us.commit->tx, peer->anchor.satoshis);
    if (peer->closing.our_fee > maxfee) {
        /* This shouldn't happen: we never accept a commit fee
         * less than the min_rate, which is greater than the
         * closing_fee_rate.  Also, our txsize estimate for
         * the closing tx is 2 bytes smaller than the commitment tx. */
        log_unusual(peer->log,
                    "Closing fee %"PRIu64" exceeded commit fee %"PRIu64", reducing.",
                    peer->closing.our_fee, maxfee);
        peer->closing.our_fee = maxfee;

        /* This can happen if actual commit txfee is odd. */
        if (peer->closing.our_fee & 1)
            peer->closing.our_fee--;
    }
    assert(!(peer->closing.our_fee & 1));
}
/* Have they sent us their signature for the mutual close tx yet? */
bool peer_has_close_sig(const struct peer *peer)
{
    return peer->closing.their_sig != NULL;
}
/* Timer callback: they took too long to complete the close handshake. */
static void send_close_timeout(struct peer *peer)
{
    /* FIXME: Remove any close_tx watches! */
    state_event(peer, INPUT_CLOSE_COMPLETE_TIMEOUT, NULL);
}
/* Watch for the mutual close completing.  If the connection is already
 * gone we cannot send CLOSE, so we time out immediately; otherwise we
 * arm a grace timer.  The actual close tx is recognized by anchor_spent
 * matching it as a mutual close. */
void peer_watch_close(struct peer *peer,
                      enum state_input done, enum state_input timedout)
{
    /* We save some work by assuming these. */
    assert(done == BITCOIN_CLOSE_DONE);

    /* FIXME: We can't send CLOSE, so timeout immediately */
    if (!peer->conn) {
        assert(timedout == INPUT_CLOSE_COMPLETE_TIMEOUT);
        oneshot_timeout(peer->dstate, peer, 0,
                        send_close_timeout, peer);
        return;
    }

    /* Give them a reasonable time to respond. */
    /* FIXME: config? */
    if (timedout != INPUT_NONE) {
        assert(timedout == INPUT_CLOSE_COMPLETE_TIMEOUT);
        peer->close_watch_timeout
            = oneshot_timeout(peer->dstate, peer, 120,
                              send_close_timeout, peer);
    }

    /* anchor_spent will get called, we match against close_tx there. */
}
/* Cancel the close-handshake grace timer (it must be armed). */
void peer_unwatch_close_timeout(struct peer *peer, enum state_input timedout)
{
    assert(peer->close_watch_timeout);
    peer->close_watch_timeout = tal_free(peer->close_watch_timeout);
}
/* Watch the HTLC outputs on our own commitment tx.  Only implemented
 * for the no-HTLCs case (returns false); aborts otherwise. */
bool peer_watch_our_htlc_outputs(struct peer *peer,
                                 const struct bitcoin_tx *tx,
                                 enum state_input tous_timeout,
                                 enum state_input tothem_spent,
                                 enum state_input tothem_timeout)
{
    if (committed_to_htlcs(peer))
        FIXME_STUB(peer);
    return false;
}
/* Watch the HTLC outputs on their on-chain commitment tx.  Unimplemented. */
bool peer_watch_their_htlc_outputs(struct peer *peer,
                                   const struct bitcoin_event *tx,
                                   enum state_input tous_timeout,
                                   enum state_input tothem_spent,
                                   enum state_input tothem_timeout)
{
    FIXME_STUB(peer);
}
/* Stop watching one HTLC output.  Unimplemented. */
void peer_unwatch_htlc_output(struct peer *peer,
                              const struct htlc *htlc,
                              enum state_input all_done)
{
    FIXME_STUB(peer);
}
/* Stop watching every HTLC output.  Unimplemented. */
void peer_unwatch_all_htlc_outputs(struct peer *peer)
{
    FIXME_STUB(peer);
}
/* Watch for a spend of a specific HTLC.  Deliberately a no-op for now. */
void peer_watch_htlc_spend(struct peer *peer,
                           const struct bitcoin_tx *tx,
                           const struct htlc *htlc,
                           enum state_input done)
{
    /* FIXME! */
}
/* Stop watching for a spend of a specific HTLC.  Unimplemented. */
void peer_unwatch_htlc_spend(struct peer *peer,
                             const struct htlc *htlc,
                             enum state_input all_done)
{
    FIXME_STUB(peer);
}
/* Handle a protocol packet that the state machine didn't expect.
 * Unimplemented. */
void peer_unexpected_pkt(struct peer *peer, const Pkt *pkt)
{
    FIXME_STUB(peer);
}
/* An on-chain transaction revealed an R value (an HTLC preimage).
 * Unimplemented. */
const struct htlc *peer_tx_revealed_r_value(struct peer *peer,
                                            const struct bitcoin_event *btc)
{
    FIXME_STUB(peer);
}
/* Arm the deferred "all HTLCs cleared" input; state_event() fires it
 * once committed_to_htlcs() becomes false. */
void peer_watch_htlcs_cleared(struct peer *peer,
                              enum state_input all_done)
{
    assert(peer->cleared == INPUT_NONE);
    assert(all_done != INPUT_NONE);
    peer->cleared = all_done;
}
/* Create a bitcoin close tx, using last signature they sent: build the
 * close tx at their agreed fee, sign our half, and fill in the 2-of-2
 * scriptSig. */
const struct bitcoin_tx *bitcoin_close(struct peer *peer)
{
    struct bitcoin_tx *close_tx;
    struct bitcoin_signature our_close_sig;

    close_tx = peer_create_close_tx(peer, peer->closing.their_fee);

    our_close_sig.stype = SIGHASH_ALL;
    peer_sign_mutual_close(peer, close_tx, &our_close_sig.sig);

    /* Complete the close_tx, using signatures. */
    close_tx->input[0].script
        = scriptsig_p2sh_2of2(close_tx,
                              peer->closing.their_sig,
                              &our_close_sig,
                              &peer->them.commitkey,
                              &peer->us.commitkey);
    close_tx->input[0].script_length
        = tal_count(close_tx->input[0].script);

    return close_tx;
}
/* Create a bitcoin spend tx (to spend our commit's outputs): a 1-in/1-out
 * tx from our to-self commit output (behind their relative locktime) to a
 * fresh p2sh on our final key.  Signed twice: once to measure the size
 * for the fee, then again with the fee-adjusted amount. */
const struct bitcoin_tx *bitcoin_spend_ours(struct peer *peer)
{
    u8 *redeemscript, *linear;
    const struct bitcoin_tx *commit = peer->us.commit->tx;
    struct bitcoin_signature sig;
    struct bitcoin_tx *tx;
    unsigned int p2sh_out;

    /* The redeemscript for a commit tx is fairly complex. */
    redeemscript = bitcoin_redeem_secret_or_delay(peer,
                                                  &peer->us.finalkey,
                                                  &peer->them.locktime,
                                                  &peer->them.finalkey,
                                                  &peer->us.commit->revocation_hash);

    /* Now, create transaction to spend it. */
    tx = bitcoin_tx(peer, 1, 1);
    bitcoin_txid(commit, &tx->input[0].txid);
    p2sh_out = find_p2sh_out(commit, redeemscript);
    tx->input[0].index = p2sh_out;
    tx->input[0].input_amount = commit->output[p2sh_out].amount;
    /* Their locktime must have expired before this is valid. */
    tx->input[0].sequence_number = bitcoin_nsequence(&peer->them.locktime);

    tx->output[0].amount = commit->output[p2sh_out].amount;
    tx->output[0].script = scriptpubkey_p2sh(tx,
                                             bitcoin_redeem_single(tx, &peer->us.finalkey));
    tx->output[0].script_length = tal_count(tx->output[0].script);

    /* Use signature, until we have fee. */
    sig.stype = SIGHASH_ALL;
    peer_sign_spend(peer, tx, redeemscript, &sig.sig);

    tx->input[0].script = scriptsig_p2sh_secret(tx, NULL, 0, &sig,
                                                redeemscript,
                                                tal_count(redeemscript));
    tx->input[0].script_length = tal_count(tx->input[0].script);

    /* FIXME: Figure out length first, then calc fee! */

    /* Now, calculate the fee, given length. */
    /* FIXME: Dynamic fees! */
    linear = linearize_tx(peer, tx);
    tx->fee = fee_by_feerate(tal_count(linear),
                             peer->dstate->config.closing_fee_rate);
    tal_free(linear);

    /* FIXME: Fail gracefully in these cases (not worth collecting) */
    if (tx->fee > tx->output[0].amount
        || is_dust_amount(tx->output[0].amount - tx->fee))
        fatal("Amount of %"PRIu64" won't cover fee %"PRIu64,
              tx->output[0].amount, tx->fee);

    /* Re-sign with the real values. */
    tx->input[0].script_length = 0;
    tx->output[0].amount -= tx->fee;
    peer_sign_spend(peer, tx, redeemscript, &sig.sig);

    tx->input[0].script = scriptsig_p2sh_secret(tx, NULL, 0, &sig,
                                                redeemscript,
                                                tal_count(redeemscript));
    tx->input[0].script_length = tal_count(tx->input[0].script);

    return tx;
}
/* Create a bitcoin spend tx (to spend their commit's outputs).
 * Unimplemented. */
const struct bitcoin_tx *bitcoin_spend_theirs(const struct peer *peer,
                                              const struct bitcoin_event *btc)
{
    FIXME_STUB(peer);
}
/* Create a bitcoin steal tx (to steal all their commit's outputs after
 * they broadcast a revoked commit).  Unimplemented. */
const struct bitcoin_tx *bitcoin_steal(const struct peer *peer,
                                       struct bitcoin_event *btc)
{
    FIXME_STUB(peer);
}
/* Sign and return our commit tx: add our signature to theirs (already
 * stored in us.commit->sig) to complete the 2-of-2 scriptSig. */
const struct bitcoin_tx *bitcoin_commit(struct peer *peer)
{
    struct bitcoin_signature sig;

    /* Can't be signed already! */
    assert(peer->us.commit->tx->input[0].script_length == 0);

    sig.stype = SIGHASH_ALL;
    peer_sign_ourcommit(peer, peer->us.commit->tx, &sig.sig);

    peer->us.commit->tx->input[0].script
        = scriptsig_p2sh_2of2(peer->us.commit->tx,
                              peer->us.commit->sig,
                              &sig,
                              &peer->them.commitkey,
                              &peer->us.commitkey);
    peer->us.commit->tx->input[0].script_length
        = tal_count(peer->us.commit->tx->input[0].script);

    return peer->us.commit->tx;
}
/* Create a HTLC refund collection (reclaim a timed-out HTLC).
 * Unimplemented. */
const struct bitcoin_tx *bitcoin_htlc_timeout(const struct peer *peer,
                                              const struct htlc *htlc)
{
    FIXME_STUB(peer);
}
/* Create a HTLC collection (claim an HTLC with its preimage).
 * Unimplemented. */
const struct bitcoin_tx *bitcoin_htlc_spend(const struct peer *peer,
                                            const struct htlc *htlc)
{
    FIXME_STUB(peer);
}
/* bitcoind created our anchor tx: record its txid and output index,
 * keep the tx for later broadcast, and notify the state machine. */
static void created_anchor(struct lightningd_state *dstate,
                           const struct bitcoin_tx *tx,
                           struct peer *peer)
{
    bitcoin_txid(tx, &peer->anchor.txid);
    peer->anchor.index = find_p2sh_out(tx, peer->anchor.redeemscript);
    assert(peer->anchor.satoshis == tx->output[peer->anchor.index].amount);

    /* We'll need this later, when we're told to broadcast it. */
    peer->anchor.tx = tal_steal(peer, tx);

    state_event(peer, BITCOIN_ANCHOR_CREATED, NULL);
}
/* Start creation of the bitcoin anchor tx: derive the p2sh address from
 * the anchor redeemscript and ask bitcoind to fund a payment to it
 * (created_anchor() is called when done). */
void bitcoin_create_anchor(struct peer *peer, enum state_input done)
{
    struct sha256 h;
    struct ripemd160 redeemhash;
    char *p2shaddr;

    /* We must be offering anchor for us to try creating it */
    assert(peer->us.offer_anchor);

    /* p2sh address = base58(ripemd160(sha256(redeemscript))). */
    sha256(&h, peer->anchor.redeemscript,
           tal_count(peer->anchor.redeemscript));
    ripemd160(&redeemhash, h.u.u8, sizeof(h));

    p2shaddr = p2sh_to_base58(peer, peer->dstate->config.testnet,
                              &redeemhash);

    assert(done == BITCOIN_ANCHOR_CREATED);

    bitcoind_create_payment(peer->dstate, p2shaddr, peer->anchor.satoshis,
                            created_anchor, peer);
}
/* We didn't end up broadcasting the anchor: release the utxos.
 * If done != INPUT_NONE, remove existing create_anchor too. */
void bitcoin_release_anchor(struct peer *peer, enum state_input done)
{
	/* FIXME: stop bitcoind command */
	/* NOTE: @done is currently ignored; see the FIXME above. */
	log_unusual(peer->log, "Anchor not spent, please -zapwallettxs");
}
/* Get the bitcoin anchor tx (stashed earlier by created_anchor()). */
const struct bitcoin_tx *bitcoin_anchor(struct peer *peer)
{
	return peer->anchor.tx;
}
/* Sets up the initial cstate and commit tx for both nodes: false if
 * insufficient funds. */
bool setup_first_commit(struct peer *peer)
{
	assert(!peer->us.commit->tx);
	assert(!peer->them.commit->tx);

	/* Revocation hashes already filled in, from pkt_open */

	/* Each side gets its own view of the initial channel state, at its
	 * own commit fee rate; funder is whoever offered the anchor. */
	peer->us.commit->cstate = initial_funding(peer,
						  peer->us.offer_anchor
						  == CMD_OPEN_WITH_ANCHOR,
						  peer->anchor.satoshis,
						  peer->us.commit_fee_rate);
	if (!peer->us.commit->cstate)
		return false;

	peer->them.commit->cstate = initial_funding(peer,
						    peer->them.offer_anchor
						    == CMD_OPEN_WITH_ANCHOR,
						    peer->anchor.satoshis,
						    peer->them.commit_fee_rate);
	if (!peer->them.commit->cstate)
		return false;

	/* Both commit txs spend the same anchor output; key/locktime
	 * arguments are mirrored for the two sides. */
	peer->us.commit->tx = create_commit_tx(peer->us.commit,
					       &peer->us.finalkey,
					       &peer->them.finalkey,
					       &peer->them.locktime,
					       &peer->anchor.txid,
					       peer->anchor.index,
					       peer->anchor.satoshis,
					       &peer->us.commit->revocation_hash,
					       peer->us.commit->cstate);
	peer->them.commit->tx = create_commit_tx(peer->them.commit,
						 &peer->them.finalkey,
						 &peer->us.finalkey,
						 &peer->us.locktime,
						 &peer->anchor.txid,
						 peer->anchor.index,
						 peer->anchor.satoshis,
						 &peer->them.commit->revocation_hash,
						 peer->them.commit->cstate);

	/* Staging states start as copies of the committed states. */
	peer->us.staging_cstate = copy_funding(peer, peer->us.commit->cstate);
	peer->them.staging_cstate = copy_funding(peer, peer->them.commit->cstate);
	return true;
}
/* Emit an absolute locktime as a JSON object: either {"second": N} or
 * {"block": N}, depending on which form the locktime uses. */
static void json_add_abstime(struct json_result *response,
			     const char *id,
			     const struct abs_locktime *t)
{
	json_object_start(response, id);
	if (!abs_locktime_is_seconds(t))
		json_add_num(response, "block", abs_locktime_to_blocks(t));
	else
		json_add_num(response, "second", abs_locktime_to_seconds(t));
	json_object_end(response);
}
/* Emit one side's HTLCs as a JSON array of
 * {msatoshis, expiry, rhash} objects under key @id. */
static void json_add_htlcs(struct json_result *response,
			   const char *id,
			   const struct channel_oneside *side)
{
	size_t i;

	json_array_start(response, id);
	for (i = 0; i < tal_count(side->htlcs); i++) {
		json_object_start(response, NULL);
		json_add_u64(response, "msatoshis", side->htlcs[i].msatoshis);
		json_add_abstime(response, "expiry", &side->htlcs[i].expiry);
		json_add_hex(response, "rhash",
			     &side->htlcs[i].rhash,
			     sizeof(side->htlcs[i].rhash));
		json_object_end(response);
	}
	json_array_end(response);
}
/* This is money we can count on: walk back through the commit chain to the
 * most recent commit that carries a signature, and return its state. */
static const struct channel_state *last_signed_state(const struct commit_info *i)
{
	for (; i != NULL; i = i->prev) {
		if (i->sig != NULL)
			return i->cstate;
	}
	return NULL;
}
/* FIXME: add history command which shows all prior and current commit txs */
/* FIXME: Somehow we should show running DNS lookups! */
/* FIXME: Show status of peers! */
/* JSON-RPC handler for "getpeers": report name, state, current command,
 * last-signed balances and HTLCs for every known peer. */
static void json_getpeers(struct command *cmd,
			  const char *buffer, const jsmntok_t *params)
{
	struct peer *p;
	struct json_result *response = new_json_result(cmd);

	json_object_start(response, NULL);
	json_array_start(response, "peers");
	list_for_each(&cmd->dstate->peers, p, list) {
		const struct channel_state *last;

		json_object_start(response, NULL);
		json_add_string(response, "name", log_prefix(p->log));
		json_add_string(response, "state", state_name(p->state));
		json_add_string(response, "cmd", input_name(p->curr_cmd.cmd));

		/* This is only valid after crypto setup. */
		if (p->state != STATE_INIT)
			json_add_hex(response, "peerid",
				     p->id.der, sizeof(p->id.der));

		/* FIXME: Report anchor. */

		/* Balances come from the last *signed* commit; without one
		 * there is nothing reliable to report. */
		last = last_signed_state(p->us.commit);
		if (!last) {
			json_object_end(response);
			continue;
		}
		json_add_num(response, "our_amount", last->a.pay_msat);
		json_add_num(response, "our_fee", last->a.fee_msat);
		json_add_num(response, "their_amount", last->b.pay_msat);
		json_add_num(response, "their_fee", last->b.fee_msat);
		json_add_htlcs(response, "our_htlcs", &last->a);
		json_add_htlcs(response, "their_htlcs", &last->b);

		/* Any changes since then? */
		if (p->us.staging_cstate->changes != last->changes)
			json_add_num(response, "staged_changes",
				     p->us.staging_cstate->changes
				     - last->changes);
		json_object_end(response);
	}
	json_array_end(response);
	json_object_end(response);
	command_success(cmd, response);
}
/* JSON-RPC command table entry: "getpeers" -> json_getpeers(). */
const struct json_command getpeers_command = {
	"getpeers",
	json_getpeers,
	"List the current peers",
	"Returns a 'peers' array"
};
/* Install an HTLC staging change as the peer's current command.
 * @jsoncmd may be NULL for internally-generated changes (e.g. expiry). */
static void set_htlc_command(struct peer *peer,
			     struct command *jsoncmd,
			     enum state_input cmd,
			     const union htlc_staging *stage)
{
	/* FIXME: memleak! */
	/* FIXME: Get rid of struct htlc_progress */
	struct htlc_progress *progress = tal(peer, struct htlc_progress);

	progress->stage = *stage;
	set_current_command(peer, cmd, progress, jsoncmd);
}
/* FIXME: Keep a timeout for each peer, in case they're unresponsive. */
/* FIXME: Make sure no HTLCs in any unrevoked commit tx are live. */
/* Queued command: scan the HTLCs they offered us and fail the first one
 * whose seconds-based expiry is at least 30s past; at most one failure is
 * staged per invocation (we return immediately after staging it). */
static void check_htlc_expiry(struct peer *peer, void *unused)
{
	size_t i;
	union htlc_staging stage;

	stage.fail.fail = HTLC_FAIL;

	/* Check their currently still-existing htlcs for expiry:
	 * We eliminate them from staging as we go. */
	for (i = 0; i < tal_count(peer->them.staging_cstate->a.htlcs); i++) {
		struct channel_htlc *htlc = &peer->them.staging_cstate->a.htlcs[i];

		/* Not a seconds-based expiry? */
		if (!abs_locktime_is_seconds(&htlc->expiry))
			continue;

		/* Not well-expired?  (30s slack so both peers agree.) */
		if (controlled_time().ts.tv_sec - 30
		    < abs_locktime_to_seconds(&htlc->expiry))
			continue;

		stage.fail.id = htlc->id;
		set_htlc_command(peer, NULL, CMD_SEND_HTLC_FAIL, &stage);
		return;
	}
}
/* Oneshot timer callback: queue an HTLC expiry scan for this peer. */
static void htlc_expiry_timeout(struct peer *peer)
{
	log_debug(peer->log, "Expiry timedout!");
	queue_cmd(peer, check_htlc_expiry, NULL);
}
/* Schedule an expiry scan (htlc_expiry_timeout) for shortly after @expiry
 * elapses. */
void peer_add_htlc_expiry(struct peer *peer,
			  const struct abs_locktime *expiry)
{
	time_t when;

	/* Add 30 seconds to be sure peers agree on timeout. */
	when = abs_locktime_to_seconds(expiry) - controlled_time().ts.tv_sec;
	when += 30;
	oneshot_timeout(peer->dstate, peer, when, htlc_expiry_timeout, peer);
}
/* Argument bundle for a queued "newhtlc" command. */
struct newhtlc {
	/* The HTLC to offer (id assigned later, in do_newhtlc()). */
	struct channel_htlc htlc;
	/* JSON command to respond to when done. */
	struct command *jsoncmd;
};
/* We do final checks just before we start command, as things may have
 * changed.  Assigns the HTLC id, verifies the 1500-HTLC limit and that
 * both sides can afford the amount, then stages CMD_SEND_HTLC_ADD. */
static void do_newhtlc(struct peer *peer, struct newhtlc *newhtlc)
{
	struct channel_state *cstate;
	union htlc_staging stage;

	/* Now we can assign counter and guarantee uniqueness. */
	newhtlc->htlc.id = peer->htlc_id_counter;
	stage.add.add = HTLC_ADD;
	stage.add.htlc = newhtlc->htlc;

	/* BOLT #2:
	 *
	 * A node MUST NOT add a HTLC if it would result in it
	 * offering more than 1500 HTLCs in either commitment transaction.
	 */
	if (tal_count(peer->us.staging_cstate->a.htlcs) == 1500
	    || tal_count(peer->them.staging_cstate->b.htlcs) == 1500) {
		command_fail(newhtlc->jsoncmd, "Too many HTLCs");
		/* BUGFIX: previously fell through here and staged the
		 * HTLC anyway after reporting the failure. */
		return;
	}

	/* BOLT #2:
	 *
	 * A node MUST NOT offer `amount_msat` it cannot pay for in
	 * both commitment transactions at the current `fee_rate`
	 */
	/* Trial-add on a scratch copy of their staging state. */
	cstate = copy_funding(newhtlc, peer->them.staging_cstate);
	if (!funding_b_add_htlc(cstate, newhtlc->htlc.msatoshis,
				&newhtlc->htlc.expiry, &newhtlc->htlc.rhash,
				newhtlc->htlc.id)) {
		command_fail(newhtlc->jsoncmd,
			     "Cannot afford %"PRIu64
			     " milli-satoshis in their commit tx",
			     newhtlc->htlc.msatoshis);
		return;
	}

	/* ...and on a scratch copy of ours. */
	cstate = copy_funding(newhtlc, peer->us.staging_cstate);
	if (!funding_a_add_htlc(cstate, newhtlc->htlc.msatoshis,
				&newhtlc->htlc.expiry, &newhtlc->htlc.rhash,
				newhtlc->htlc.id)) {
		command_fail(newhtlc->jsoncmd,
			     "Cannot afford %"PRIu64
			     " milli-satoshis in our commit tx",
			     newhtlc->htlc.msatoshis);
		return;
	}

	/* Make sure we never offer the same one twice. */
	peer->htlc_id_counter++;

	/* FIXME: Never propose duplicate rvalues? */
	set_htlc_command(peer, newhtlc->jsoncmd, CMD_SEND_HTLC_ADD, &stage);
}
/* JSON-RPC handler for "newhtlc": parse and validate peerid, msatoshis,
 * expiry and rhash, then queue do_newhtlc(). */
static void json_newhtlc(struct command *cmd,
			 const char *buffer, const jsmntok_t *params)
{
	struct peer *peer;
	jsmntok_t *peeridtok, *msatoshistok, *expirytok, *rhashtok;
	unsigned int expiry;
	struct newhtlc *newhtlc;

	if (!json_get_params(buffer, params,
			     "peerid", &peeridtok,
			     "msatoshis", &msatoshistok,
			     "expiry", &expirytok,
			     "rhash", &rhashtok,
			     NULL)) {
		command_fail(cmd, "Need peerid, msatoshis, expiry and rhash");
		return;
	}

	peer = find_peer(cmd->dstate, buffer, peeridtok);
	if (!peer) {
		command_fail(cmd, "Could not find peer with that peerid");
		return;
	}

	if (!peer->them.commit || !peer->them.commit->cstate) {
		command_fail(cmd, "peer not fully established");
		return;
	}

	/* Attach to cmd until it's complete. */
	newhtlc = tal(cmd, struct newhtlc);
	newhtlc->jsoncmd = cmd;

	if (!json_tok_u64(buffer, msatoshistok, &newhtlc->htlc.msatoshis)) {
		command_fail(cmd, "'%.*s' is not a valid number",
			     (int)(msatoshistok->end - msatoshistok->start),
			     buffer + msatoshistok->start);
		return;
	}
	if (!json_tok_number(buffer, expirytok, &expiry)) {
		command_fail(cmd, "'%.*s' is not a valid number",
			     (int)(expirytok->end - expirytok->start),
			     buffer + expirytok->start);
		return;
	}

	/* NOTE(review): this failure reuses the "not a valid number"
	 * wording even though the number parsed but fell outside the
	 * locktime range -- a clearer message would help. */
	if (!seconds_to_abs_locktime(expiry, &newhtlc->htlc.expiry)) {
		command_fail(cmd, "'%.*s' is not a valid number",
			     (int)(expirytok->end - expirytok->start),
			     buffer + expirytok->start);
		return;
	}

	/* Expiry must lie within [min_expiry, max_expiry] from now. */
	if (abs_locktime_to_seconds(&newhtlc->htlc.expiry) <
	    controlled_time().ts.tv_sec + peer->dstate->config.min_expiry) {
		command_fail(cmd, "HTLC expiry too soon!");
		return;
	}

	if (abs_locktime_to_seconds(&newhtlc->htlc.expiry) >
	    controlled_time().ts.tv_sec + peer->dstate->config.max_expiry) {
		command_fail(cmd, "HTLC expiry too far!");
		return;
	}

	if (!hex_decode(buffer + rhashtok->start,
			rhashtok->end - rhashtok->start,
			&newhtlc->htlc.rhash,
			sizeof(newhtlc->htlc.rhash))) {
		command_fail(cmd, "'%.*s' is not a valid sha256 hash",
			     (int)(rhashtok->end - rhashtok->start),
			     buffer + rhashtok->start);
		return;
	}

	queue_cmd(peer, do_newhtlc, newhtlc);
}
/* FIXME: Use HTLC ids, not r values! */
/* JSON-RPC command table entry: "newhtlc" -> json_newhtlc(). */
const struct json_command newhtlc_command = {
	"newhtlc",
	json_newhtlc,
	"Offer {peerid} an HTLC worth {msatoshis} in {expiry} (in seconds since Jan 1 1970) with {rhash}",
	"Returns an empty result on success"
};
/* Argument bundle for a queued "fulfillhtlc" command. */
struct fulfillhtlc {
	/* JSON command to respond to when done. */
	struct command *jsoncmd;
	/* The HTLC preimage. */
	struct sha256 r;
};
/* Queued command: fulfill an HTLC they offered us, given its preimage r.
 * (Function name carries a historical typo: "fullfill".) */
static void do_fullfill(struct peer *peer,
			struct fulfillhtlc *fulfillhtlc)
{
	struct sha256 rhash;
	size_t i;
	union htlc_staging stage;

	stage.fulfill.fulfill = HTLC_FULFILL;
	stage.fulfill.r = fulfillhtlc->r;

	/* The HTLC is located by the hash of the preimage. */
	sha256(&rhash, &fulfillhtlc->r, sizeof(fulfillhtlc->r));

	i = funding_find_htlc(&peer->them.staging_cstate->a, &rhash);
	if (i == tal_count(peer->them.staging_cstate->a.htlcs)) {
		command_fail(fulfillhtlc->jsoncmd, "preimage htlc not found");
		return;
	}
	stage.fulfill.id = peer->them.staging_cstate->a.htlcs[i].id;
	set_htlc_command(peer, fulfillhtlc->jsoncmd,
			 CMD_SEND_HTLC_FULFILL, &stage);
}
/* JSON-RPC handler for "fulfillhtlc": parse peerid and preimage r, then
 * queue do_fullfill(). */
static void json_fulfillhtlc(struct command *cmd,
			     const char *buffer, const jsmntok_t *params)
{
	struct peer *peer;
	jsmntok_t *peeridtok, *rtok;
	struct fulfillhtlc *fulfillhtlc;

	if (!json_get_params(buffer, params,
			     "peerid", &peeridtok,
			     "r", &rtok,
			     NULL)) {
		command_fail(cmd, "Need peerid and r");
		return;
	}

	peer = find_peer(cmd->dstate, buffer, peeridtok);
	if (!peer) {
		command_fail(cmd, "Could not find peer with that peerid");
		return;
	}

	if (!peer->them.commit || !peer->them.commit->cstate) {
		command_fail(cmd, "peer not fully established");
		return;
	}

	/* Attached to cmd, so it is freed when the command completes. */
	fulfillhtlc = tal(cmd, struct fulfillhtlc);
	fulfillhtlc->jsoncmd = cmd;

	if (!hex_decode(buffer + rtok->start,
			rtok->end - rtok->start,
			&fulfillhtlc->r, sizeof(fulfillhtlc->r))) {
		command_fail(cmd, "'%.*s' is not a valid sha256 preimage",
			     (int)(rtok->end - rtok->start),
			     buffer + rtok->start);
		return;
	}

	queue_cmd(peer, do_fullfill, fulfillhtlc);
}
/* JSON-RPC command table entry: "fulfillhtlc" -> json_fulfillhtlc(). */
const struct json_command fulfillhtlc_command = {
	"fulfillhtlc",
	json_fulfillhtlc,
	"Redeem htlc proposed by {peerid} using {r}",
	"Returns an empty result on success"
};
/* Argument bundle for a queued "failhtlc" command. */
struct failhtlc {
	/* JSON command to respond to when done. */
	struct command *jsoncmd;
	/* Hash identifying the HTLC to fail. */
	struct sha256 rhash;
};
/* Queued command: fail an HTLC they offered us, identified by its rhash. */
static void do_failhtlc(struct peer *peer,
			struct failhtlc *failhtlc)
{
	size_t i;
	union htlc_staging stage;

	stage.fail.fail = HTLC_FAIL;

	/* Look in peer->them.staging_cstate->a, as that's where we'll
	 * immediately remove it from: avoids double-handling. */
	/* FIXME: Make sure it's also committed in previous commit tx! */
	i = funding_find_htlc(&peer->them.staging_cstate->a, &failhtlc->rhash);
	if (i == tal_count(peer->them.staging_cstate->a.htlcs)) {
		command_fail(failhtlc->jsoncmd, "htlc not found");
		return;
	}
	stage.fail.id = peer->them.staging_cstate->a.htlcs[i].id;
	set_htlc_command(peer, failhtlc->jsoncmd, CMD_SEND_HTLC_FAIL, &stage);
}
/* JSON-RPC handler for "failhtlc": parse peerid and rhash, then queue
 * do_failhtlc() to remove the matching HTLC. */
static void json_failhtlc(struct command *cmd,
			  const char *buffer, const jsmntok_t *params)
{
	struct peer *peer;
	jsmntok_t *peeridtok, *rhashtok;
	struct failhtlc *failhtlc;

	if (!json_get_params(buffer, params,
			     "peerid", &peeridtok,
			     "rhash", &rhashtok,
			     NULL)) {
		command_fail(cmd, "Need peerid and rhash");
		return;
	}

	peer = find_peer(cmd->dstate, buffer, peeridtok);
	if (!peer) {
		command_fail(cmd, "Could not find peer with that peerid");
		return;
	}

	if (!peer->them.commit || !peer->them.commit->cstate) {
		command_fail(cmd, "peer not fully established");
		return;
	}

	/* Attached to cmd, so it is freed when the command completes. */
	failhtlc = tal(cmd, struct failhtlc);
	failhtlc->jsoncmd = cmd;

	if (!hex_decode(buffer + rhashtok->start,
			rhashtok->end - rhashtok->start,
			&failhtlc->rhash, sizeof(failhtlc->rhash))) {
		/* BUGFIX: message previously said "preimage" (copy-paste
		 * from fulfillhtlc); the rhash parameter is a hash. */
		command_fail(cmd, "'%.*s' is not a valid sha256 hash",
			     (int)(rhashtok->end - rhashtok->start),
			     buffer + rhashtok->start);
		return;
	}

	queue_cmd(peer, do_failhtlc, failhtlc);
}
/* JSON-RPC command table entry: "failhtlc" -> json_failhtlc(). */
const struct json_command failhtlc_command = {
	"failhtlc",
	json_failhtlc,
	"Fail htlc proposed by {peerid} which has redeem hash {rhash}",
	"Returns an empty result on success"
};
/* Queued command: send a commit covering all currently staged changes;
 * fails if nothing has changed since their last committed state. */
static void do_commit(struct peer *peer, struct command *jsoncmd)
{
	/* We can have changes we suggested, or changes they suggested. */
	if (peer->them.staging_cstate->changes == peer->them.commit->cstate->changes) {
		command_fail(jsoncmd, "no changes to commit");
		return;
	}

	set_current_command(peer, CMD_SEND_COMMIT, NULL, jsoncmd);
}
/* JSON-RPC handler for "commit": parse peerid and queue do_commit(). */
static void json_commit(struct command *cmd,
			const char *buffer, const jsmntok_t *params)
{
	struct peer *peer;
	jsmntok_t *peeridtok;

	if (!json_get_params(buffer, params,
			     "peerid", &peeridtok,
			     NULL)) {
		command_fail(cmd, "Need peerid");
		return;
	}

	peer = find_peer(cmd->dstate, buffer, peeridtok);
	if (!peer) {
		command_fail(cmd, "Could not find peer with that peerid");
		return;
	}

	if (!peer->them.commit || !peer->them.commit->cstate) {
		command_fail(cmd, "peer not fully established");
		return;
	}

	queue_cmd(peer, do_commit, cmd);
}
/* JSON-RPC command table entry: "commit" -> json_commit(). */
const struct json_command commit_command = {
	"commit",
	json_commit,
	"Commit all staged HTLC changes with {peerid}",
	"Returns an empty result on success"
};
/* JSON-RPC handler for "close": begin shutting down the channel with the
 * given peer (idempotence guarded: fails if already closing). */
static void json_close(struct command *cmd,
		       const char *buffer, const jsmntok_t *params)
{
	struct peer *peer;
	jsmntok_t *peeridtok;

	if (!json_get_params(buffer, params,
			     "peerid", &peeridtok,
			     NULL)) {
		command_fail(cmd, "Need peerid");
		return;
	}

	peer = find_peer(cmd->dstate, buffer, peeridtok);
	if (!peer) {
		command_fail(cmd, "Could not find peer with that peerid");
		return;
	}
	if (peer->cond == PEER_CLOSING) {
		command_fail(cmd, "Peer is already closing");
		return;
	}

	/* Unlike other things, CMD_CLOSE is always valid. */
	log_debug(peer->log, "Sending CMD_CLOSE");
	state_event(peer, CMD_CLOSE, NULL);
	command_success(cmd, null_response(cmd));
}
/* JSON-RPC command table entry: "close" -> json_close(). */
const struct json_command close_command = {
	"close",
	json_close,
	"Close the channel with peer {peerid}",
	"Returns an empty result on success"
};
|
G3G4X5X6/openrasp-iast
|
openrasp_iast/test/modules/preprocessor/conftest.py
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
Copyright 2017-2020 Baidu Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import time
import pytest
import signal
import psutil
import multiprocessing
import helper
from core import modules
from core.components.logger import Logger
from core.components.config import Config
from core.components.communicator import Communicator
@pytest.fixture(scope="module")
def preprocessor_fixture():
    """Run the Preprocessor module in a child process for this test module.

    Yields the running ``modules.Process``; on teardown, terminates the
    whole process tree and resets the database and Communicator state.
    """
    helper.reset_db()
    # Communicator/Logger must exist before the module process forks.
    Communicator()
    Logger()
    module_proc = modules.Process(modules.Preprocessor)
    module_proc.start()
    yield module_proc
    # Snapshot descendants *before* killing the root; once the parent is
    # gone they can no longer be enumerated through it.
    root_proc = psutil.Process(module_proc.pid)
    procs = root_proc.children(recursive=True)
    try:
        root_proc.terminate()
        root_proc.wait(10)
        module_proc.join(5)
        for p in procs:
            try:
                p.terminate()
                p.wait(10)
            except psutil.NoSuchProcess:
                # BUGFIX: children often exit together with their parent;
                # an already-dead child must not fail the teardown.
                pass
    except psutil.TimeoutExpired:
        raise Exception("Module process may not be killed success!")
    helper.reset_db()
    Communicator.reset()
|
LambentClient/Lambent
|
src/minecraft/net/minecraft/client/gui/GuiSlider.java
|
<filename>src/minecraft/net/minecraft/client/gui/GuiSlider.java
package net.minecraft.client.gui;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.resources.I18n;
/**
 * Slider widget (decompiled/obfuscated Minecraft source).  The position is
 * stored normalized in [0,1] (field_175227_p) and converted to the real
 * range [field_175225_r, field_175224_s] by func_175220_c().  Value changes
 * are reported to the responder (field_175223_t) via func_175320_a(id, value).
 */
public class GuiSlider extends GuiButton
{
    /** Normalized slider position in [0, 1]. */
    private float field_175227_p = 1.0F;
    /** True while the slider knob is being dragged. */
    public boolean field_175228_o;
    /** Unlocalized label key, resolved through I18n.format(). */
    private String field_175226_q;
    /** Minimum real value. */
    private final float field_175225_r;
    /** Maximum real value. */
    private final float field_175224_s;
    /** Callback notified with (id, value) whenever the value changes. */
    private final GuiPageButtonList.GuiResponder field_175223_t;
    /** Optional display-string formatter; null means default "label: value". */
    private GuiSlider.FormatHelper field_175222_u;
    private static final String __OBFID = "CL_00001954";

    /**
     * @param p_i45541_6_ minimum value
     * @param p_i45541_7_ maximum value
     * @param p_i45541_8_ initial real value (normalized internally)
     */
    public GuiSlider(GuiPageButtonList.GuiResponder p_i45541_1_, int p_i45541_2_, int p_i45541_3_, int p_i45541_4_, String p_i45541_5_, float p_i45541_6_, float p_i45541_7_, float p_i45541_8_, GuiSlider.FormatHelper p_i45541_9_)
    {
        super(p_i45541_2_, p_i45541_3_, p_i45541_4_, 150, 20, "");
        this.field_175226_q = p_i45541_5_;
        this.field_175225_r = p_i45541_6_;
        this.field_175224_s = p_i45541_7_;
        this.field_175227_p = (p_i45541_8_ - p_i45541_6_) / (p_i45541_7_ - p_i45541_6_);
        this.field_175222_u = p_i45541_9_;
        this.field_175223_t = p_i45541_1_;
        this.displayString = this.func_175221_e();
    }

    /** Returns the current real (denormalized) value. */
    public float func_175220_c()
    {
        return this.field_175225_r + (this.field_175224_s - this.field_175225_r) * this.field_175227_p;
    }

    /**
     * Sets the real value; notifies the responder only when
     * p_175218_2_ is true.
     */
    public void func_175218_a(float p_175218_1_, boolean p_175218_2_)
    {
        this.field_175227_p = (p_175218_1_ - this.field_175225_r) / (this.field_175224_s - this.field_175225_r);
        this.displayString = this.func_175221_e();

        if (p_175218_2_)
        {
            this.field_175223_t.func_175320_a(this.id, this.func_175220_c());
        }
    }

    /** Returns the normalized position in [0, 1]. */
    public float func_175217_d()
    {
        return this.field_175227_p;
    }

    /** Builds the display string, via the FormatHelper when one is set. */
    private String func_175221_e()
    {
        return this.field_175222_u == null ? I18n.format(this.field_175226_q, new Object[0]) + ": " + this.func_175220_c() : this.field_175222_u.func_175318_a(this.id, I18n.format(this.field_175226_q, new Object[0]), this.func_175220_c());
    }

    /**
     * Returns 0 if the button is disabled, 1 if the mouse is NOT hovering over this button and 2 if it IS hovering over
     * this button.
     */
    protected int getHoverState(boolean mouseOver)
    {
        return 0;
    }

    /**
     * Fired when the mouse button is dragged. Equivalent of MouseListener.mouseDragged(MouseEvent e).
     */
    protected void mouseDragged(Minecraft mc, int mouseX, int mouseY)
    {
        if (this.visible)
        {
            if (this.field_175228_o)
            {
                // Map mouse x over the 8px-inset track to [0,1], clamped.
                this.field_175227_p = (float)(mouseX - (this.xPosition + 4)) / (float)(this.width - 8);

                if (this.field_175227_p < 0.0F)
                {
                    this.field_175227_p = 0.0F;
                }

                if (this.field_175227_p > 1.0F)
                {
                    this.field_175227_p = 1.0F;
                }

                this.displayString = this.func_175221_e();
                this.field_175223_t.func_175320_a(this.id, this.func_175220_c());
            }

            // Draw the two 4px-wide knob halves at the current position.
            GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
            this.drawTexturedModalRect(this.xPosition + (int)(this.field_175227_p * (float)(this.width - 8)), this.yPosition, 0, 66, 4, 20);
            this.drawTexturedModalRect(this.xPosition + (int)(this.field_175227_p * (float)(this.width - 8)) + 4, this.yPosition, 196, 66, 4, 20);
        }
    }

    /** Sets the normalized position directly and notifies the responder. */
    public void func_175219_a(float p_175219_1_)
    {
        this.field_175227_p = p_175219_1_;
        this.displayString = this.func_175221_e();
        this.field_175223_t.func_175320_a(this.id, this.func_175220_c());
    }

    /**
     * Returns true if the mouse has been pressed on this control. Equivalent of MouseListener.mousePressed(MouseEvent
     * e).
     */
    public boolean mousePressed(Minecraft mc, int mouseX, int mouseY)
    {
        if (super.mousePressed(mc, mouseX, mouseY))
        {
            // Jump the knob to the click position, then begin dragging.
            this.field_175227_p = (float)(mouseX - (this.xPosition + 4)) / (float)(this.width - 8);

            if (this.field_175227_p < 0.0F)
            {
                this.field_175227_p = 0.0F;
            }

            if (this.field_175227_p > 1.0F)
            {
                this.field_175227_p = 1.0F;
            }

            this.displayString = this.func_175221_e();
            this.field_175223_t.func_175320_a(this.id, this.func_175220_c());
            this.field_175228_o = true;
            return true;
        }
        else
        {
            return false;
        }
    }

    /**
     * Fired when the mouse button is released. Equivalent of MouseListener.mouseReleased(MouseEvent e).
     */
    public void mouseReleased(int mouseX, int mouseY)
    {
        this.field_175228_o = false;
    }

    /** Formats (id, label, value) into the slider's display string. */
    public interface FormatHelper
    {
        String func_175318_a(int var1, String var2, float var3);
    }
}
|
VoterLin/Image-Dispose-Spring-Boot-Web-Project
|
src/main/java/com/felink/service/dispose/transitions/transitions/RectangleTransitions.java
|
<gh_stars>0
package com.felink.service.dispose.transitions.transitions;
/**
 * Transition effect: reveals endImage through an axis-aligned rectangle
 * that grows from the centre of baseImage to full size over {@code frame}
 * steps.
 */
public class RectangleTransitions extends AbstractTransitions {

    // Bounds of the centred reveal rectangle; makeImage() copies
    // [minX, maxX) x [minY, maxY) from endImage.
    private int minX, minY, maxX, maxY;

    // Single-image variant: second input is unused.
    RectangleTransitions(String inputFile, String outputPath, int index) {
        super(inputFile, null, outputPath, index);
    }

    public RectangleTransitions(String inputFile1, String inputFile2, String outputPath, int index) {
        super(inputFile1, inputFile2, outputPath, index);
    }

    /**
     * Generates {@code frame} intermediate images, enlarging the centred
     * rectangle by (stepX, stepY) each iteration, and returns the next
     * free frame index.
     */
    @Override
    public int doTransform() {
        int stepX = baseImage.getWidth() / frame;
        int stepY = baseImage.getHeight() / frame;
        int lenX = 0, lenY = 0;
        for(int i = index; i < index + frame; i++) {
            lenX += stepX;
            lenY += stepY;
            // Centre the lenX-by-lenY rectangle inside the image.
            minX = (baseImage.getWidth() - lenX) / 2;
            maxX = baseImage.getWidth() - minX;
            minY = (baseImage.getHeight() - lenY) / 2;
            maxY = baseImage.getHeight() - minY;
            makeImage();
            saveImage(i);
        }
        return index + frame;
    }

    // Copies endImage pixels into the working image inside the rectangle.
    @Override
    void makeImage() {
        for(int x = minX; x < maxX; x++) {
            for(int y = minY; y < maxY; y++) {
                baseImage.getOperateImage().setRGB(x, y, endImage.getRGB(x, y));
            }
        }
    }

    public static void main(String[] args) {
    }
}
|
wix/petri
|
petri-server/src/main/java/com/wixpress/guineapig/spi/GuineaPigSpringConfigAddition.scala
|
<filename>petri-server/src/main/java/com/wixpress/guineapig/spi/GuineaPigSpringConfigAddition.scala<gh_stars>100-1000
package com.wixpress.guineapig.spi
import com.wixpress.guineapig.dto.SpecExposureIdViewDto
import com.wixpress.guineapig.entities.ui.UiSpecForScope
import com.wixpress.guineapig.entities.ui.UiSpecForScopeBuilder.anUiSpec
import com.wixpress.guineapig.topology.GuineapigDBTopology
import com.wixpress.petri.Main
import com.wixpress.petri.experiments.domain.ScopeDefinition
import com.wixpress.petri.petri.FullPetriClient
import org.springframework.context.annotation.{Bean, Configuration}
import scala.collection.JavaConversions._
/**
 * Extra Spring wiring for the Guinea Pig server: a petri RPC client plus
 * stub SPI implementations (scopes, spec exposure ids, supported
 * languages, global groups) and the DB topology copied from Main.
 */
@Configuration
class GuineaPigSpringConfigAddition {

  /** RPC client taken from the running server in Main. */
  @Bean
  def petriClient: FullPetriClient = Main.rpcServer

  // SPI BEANS

  /** Hard-coded scopes: only "publicUrl" is defined. */
  @Bean
  private[guineapig] def hardCodedSpecsProvider: HardCodedScopesProvider = new HardCodedScopesProvider {
    override def getHardCodedScopesList: java.util.List[ScopeDefinition] = List(new ScopeDefinition("publicUrl", false)).toList
    override def getHardCodedScopes: Map[String, List[UiSpecForScope]] =
      Map("publicUrl" -> List( anUiSpec().withScope("publicUrl").withForRegisteredUsers(false).build())) //TODO!!! must match above or consolidate...
  }

  /** Stub: no spec exposure ids are tracked, always an empty list. */
  @Bean
  private[guineapig] def specExposureIdRetriever: SpecExposureIdRetriever = new SpecExposureIdRetriever {
    override def getAll: java.util.List[SpecExposureIdViewDto] = seqAsJavaList(Seq())
  }

  /** Fixed set of supported language codes. */
  @Bean
  private[guineapig] def languageResolver: SupportedLanguagesProvider = new SupportedLanguagesProvider() {
    def getSupportedLanguages: Set[String] = Set("en", "de", "es", "il")
  }

  /** Stub: no global groups are defined. */
  @Bean
  private[guineapig] def globalGroupsManagementService: GlobalGroupsManagementService =
    new GlobalGroupsManagementService() {
      def allGlobalGroups: Seq[String] = Seq.empty
    }

  /** DB topology populated from the server's own DB config. */
  @Bean
  private[guineapig] def dataSourceTopology: GuineapigDBTopology = {
    val gpDBTopology: GuineapigDBTopology = new GuineapigDBTopology
    val ourServerDBConfig = Main.dbConfig()
    gpDBTopology.url = ourServerDBConfig.url
    gpDBTopology.username = ourServerDBConfig.username
    gpDBTopology.password = ourServerDBConfig.password
    gpDBTopology
  }
}
|
DaanVanYperen/tox
|
core/src/net/mostlyoriginal/tox/system/PassiveSystem.java
|
<gh_stars>1-10
package net.mostlyoriginal.tox.system;
import com.artemis.Aspect;
import com.artemis.Entity;
import com.artemis.EntitySystem;
import com.artemis.utils.ImmutableBag;
/**
 * An artemis EntitySystem that never processes entities:
 * checkProcessing() always returns false, so this class only serves as a
 * base for passive (service-style) systems.
 *
 * @author <NAME>
 */
public class PassiveSystem extends EntitySystem {

    public PassiveSystem() {
        // Empty aspect: this system matches no entities.
        super(Aspect.getEmpty());
    }

    @Override
    protected void processEntities(ImmutableBag<Entity> entities) {
        // Intentionally empty: passive systems do no per-tick work.
    }

    @Override
    protected boolean checkProcessing() {
        return false;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.