text stringlengths 1 1.05M |
|---|
<filename>open-sphere-base/control-panels/src/main/java/io/opensphere/controlpanels/layers/availabledata/detail/TextDetailPane.java
package io.opensphere.controlpanels.layers.availabledata.detail;
import java.util.Collection;
import javafx.beans.property.Property;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.ScrollPane.ScrollBarPolicy;
import javafx.scene.control.TextArea;
import io.opensphere.controlpanels.DetailPane;
import io.opensphere.core.Toolbox;
import io.opensphere.core.util.collections.StreamUtilities;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.impl.GroupCategorizationUtilities;
/**
 * A standard detail pane, in which text is displayed. The text is rendered in
 * a wrapped, scrollable {@link TextArea} placed in the center of the pane.
 */
public class TextDetailPane extends DetailPane
{
    /** The text area in which the preview will be rendered. */
    private final TextArea myTextArea;

    /** The scroll pane wrapping the {@link #myTextArea}. */
    private final ScrollPane myScrollPane;

    /**
     * Creates a new detail pane.
     *
     * @param pToolbox The toolbox through which system interactions occur.
     */
    public TextDetailPane(Toolbox pToolbox)
    {
        super(pToolbox);
        myTextArea = new TextArea();
        myTextArea.autosize();
        myTextArea.setWrapText(true);
        myScrollPane = new ScrollPane(myTextArea);
        // Wrapping is enabled, so horizontal scrolling is never needed.
        myScrollPane.setVbarPolicy(ScrollBarPolicy.AS_NEEDED);
        myScrollPane.setHbarPolicy(ScrollBarPolicy.NEVER);
        myScrollPane.setFitToHeight(true);
        myScrollPane.setFitToWidth(true);
        setCenter(myScrollPane);
    }

    /**
     * Creates a new detail pane bound to the supplied data provider.
     *
     * @param pToolbox The toolbox through which system interactions occur.
     * @param dataSource the text data source to which the text area will be
     *            bound.
     */
    public TextDetailPane(Toolbox pToolbox, Property<String> dataSource)
    {
        this(pToolbox);
        myTextArea.textProperty().bind(dataSource);
    }

    /**
     * Gets the property into which changes can be injected to reflect data
     * updates.
     *
     * @return the property with which data changes can be bound.
     */
    public Property<String> textProperty()
    {
        return myTextArea.textProperty();
    }

    /**
     * {@inheritDoc}
     *
     * @see io.opensphere.controlpanels.DetailPane#populate(io.opensphere.mantle.data.DataGroupInfo)
     */
    @Override
    public void populate(DataGroupInfo pDataGroup)
    {
        String provider = pDataGroup.getTopParentDisplayName();
        // Singularize each category label (e.g. "Layers" -> "Layer").
        Collection<String> categories = StreamUtilities.map(GroupCategorizationUtilities.getGroupCategories(pDataGroup, false),
                input -> StringUtilities.trim(input, 's'));
        String type = StringUtilities.join(", ", categories);
        String summary = pDataGroup.getSummaryDescription();
        String value = StringUtilities.concat("Provider: ", provider, "\n", "Type: ", type, "\n\n", summary, "\n");
        // NOTE(review): if this instance was built with the data-source
        // constructor, textProperty() is bound and setValue() will throw a
        // RuntimeException; confirm populate() is only invoked on unbound
        // instances.
        myTextArea.textProperty().setValue(value);
    }
}
|
#!/bin/bash
#
# Run the example Flask app in development mode.
#
# You can optionally specify the host by providing
# the IP address as an argument.
#
# E.g. ./rundev.sh 0.0.0.0
export FLASK_APP=graphene_arango/examples/example.py
export FLASK_ENV=development
if [ -z "$1" ]
then
    flask run
else
    # Quote the argument so word splitting/globbing cannot mangle the host.
    flask run --host "$1"
fi
|
<gh_stars>1-10
from pypy.rpython.lltypesystem import lltype, llmemory, rffi, rclass, rstr
from pypy.rpython.lltypesystem.lloperation import llop
from pypy.rpython.llinterp import LLInterpreter
from pypy.rpython.annlowlevel import llhelper
from pypy.rlib.objectmodel import we_are_translated, specialize
from pypy.jit.metainterp import history
from pypy.jit.codewriter import heaptracker, longlong
from pypy.jit.backend.model import AbstractCPU
from pypy.jit.backend.llsupport import symbolic
from pypy.jit.backend.llsupport.symbolic import WORD, unroll_basic_sizes
from pypy.jit.backend.llsupport.descr import (
get_size_descr, get_field_descr, get_array_descr,
get_call_descr, get_interiorfield_descr, get_dynamic_interiorfield_descr,
FieldDescr, ArrayDescr, CallDescr, InteriorFieldDescr, get_dynamic_field_descr)
from pypy.jit.backend.llsupport.asmmemmgr import AsmMemoryManager
class AbstractLLCPU(AbstractCPU):
    """Common base for JIT backend CPUs built on the lltype type system.

    Holds the GC description, prebuilt OverflowError/ZeroDivisionError
    instances, exception-handling glue (different when translated vs. running
    on top of the llinterpreter), descr caches, and the ``bh_*`` blackhole
    implementations that read/write raw memory directly.
    NOTE(review): documentation added during review; code unchanged.
    """

    from pypy.jit.metainterp.typesystem import llhelper as ts

    def __init__(self, rtyper, stats, opts, translate_support_code=False,
                 gcdescr=None):
        assert type(opts) is not bool
        self.opts = opts
        from pypy.jit.backend.llsupport.gc import get_ll_description
        AbstractCPU.__init__(self)
        self.rtyper = rtyper
        self.stats = stats
        self.translate_support_code = translate_support_code
        if translate_support_code:
            translator = rtyper.annotator.translator
        else:
            translator = None
        self.gc_ll_descr = get_ll_description(gcdescr, translator, rtyper)
        # With gcremovetypeptr there is no typeptr field to patch in new
        # instances; bh_new_with_vtable checks for None.
        if translator and translator.config.translation.gcremovetypeptr:
            self.vtable_offset = None
        else:
            self.vtable_offset, _ = symbolic.get_field_token(rclass.OBJECT,
                                                             'typeptr',
                                                             translate_support_code)
        self._setup_prebuilt_error('ovf', OverflowError)
        self._setup_prebuilt_error('zer', ZeroDivisionError)
        if translate_support_code:
            self._setup_exception_handling_translated()
        else:
            self._setup_exception_handling_untranslated()
        self.saved_exc_value = lltype.nullptr(llmemory.GCREF.TO)
        self.asmmemmgr = AsmMemoryManager()
        self.setup()
        if translate_support_code:
            self._setup_on_leave_jitted_translated()
        else:
            self._setup_on_leave_jitted_untranslated()

    def setup(self):
        # Hook for subclasses; called at the end of __init__.
        pass

    def _setup_prebuilt_error(self, prefix, Class):
        # Prebuild an exception instance and stash its vtable address and
        # instance on self as _<prefix>_error_vtable / _<prefix>_error_inst.
        if self.rtyper is not None:   # normal case
            bk = self.rtyper.annotator.bookkeeper
            clsdef = bk.getuniqueclassdef(Class)
            ll_inst = self.rtyper.exceptiondata.get_standard_ll_exc_instance(
                self.rtyper, clsdef)
        else:
            # for tests, a random emulated ll_inst will do
            ll_inst = lltype.malloc(rclass.OBJECT)
            ll_inst.typeptr = lltype.malloc(rclass.OBJECT_VTABLE,
                                            immortal=True)
        setattr(self, '_%s_error_vtable' % prefix,
                llmemory.cast_ptr_to_adr(ll_inst.typeptr))
        setattr(self, '_%s_error_inst' % prefix, ll_inst)

    def _setup_exception_handling_untranslated(self):
        # for running un-translated only, all exceptions occurring in the
        # llinterpreter are stored in '_exception_emulator', which is then
        # read back by the machine code reading at the address given by
        # pos_exception() and pos_exc_value().
        _exception_emulator = lltype.malloc(rffi.CArray(lltype.Signed), 2,
                                            zero=True, flavor='raw',
                                            immortal=True)
        self._exception_emulator = _exception_emulator

        def _store_exception(lle):
            self._last_exception = lle   # keepalive
            tp_i = rffi.cast(lltype.Signed, lle.args[0])
            v_i = rffi.cast(lltype.Signed, lle.args[1])
            _exception_emulator[0] = tp_i
            _exception_emulator[1] = v_i

        self.debug_ll_interpreter = LLInterpreter(self.rtyper)
        self.debug_ll_interpreter._store_exception = _store_exception

        def pos_exception():
            return rffi.cast(lltype.Signed, _exception_emulator)

        def pos_exc_value():
            return (rffi.cast(lltype.Signed, _exception_emulator) +
                    rffi.sizeof(lltype.Signed))

        def save_exception():
            # copy from _exception_emulator to the real attributes on self
            v_i = _exception_emulator[1]
            _exception_emulator[0] = 0
            _exception_emulator[1] = 0
            self.saved_exc_value = rffi.cast(llmemory.GCREF, v_i)

        def save_exception_memoryerr():
            save_exception()
            if not self.saved_exc_value:
                self.saved_exc_value = "memoryerror!"   # for tests

        self.pos_exception = pos_exception
        self.pos_exc_value = pos_exc_value
        self.save_exception = save_exception
        self.save_exception_memoryerr = save_exception_memoryerr
        self.insert_stack_check = lambda: (0, 0, 0)

    def _setup_exception_handling_translated(self):
        # Translated case: read/clear the thread's real RPython exception
        # state via llops.

        def pos_exception():
            addr = llop.get_exception_addr(llmemory.Address)
            return heaptracker.adr2int(addr)

        def pos_exc_value():
            addr = llop.get_exc_value_addr(llmemory.Address)
            return heaptracker.adr2int(addr)

        def save_exception():
            addr = llop.get_exception_addr(llmemory.Address)
            addr.address[0] = llmemory.NULL
            addr = llop.get_exc_value_addr(llmemory.Address)
            exc_value = rffi.cast(llmemory.GCREF, addr.address[0])
            addr.address[0] = llmemory.NULL
            # from now on, the state is again consistent -- no more RPython
            # exception is set.  The following code produces a write barrier
            # in the assignment to self.saved_exc_value, as needed.
            self.saved_exc_value = exc_value

        def save_exception_memoryerr():
            from pypy.rpython.annlowlevel import cast_instance_to_base_ptr
            save_exception()
            if not self.saved_exc_value:
                exc = MemoryError()
                exc = cast_instance_to_base_ptr(exc)
                exc = lltype.cast_opaque_ptr(llmemory.GCREF, exc)
                self.saved_exc_value = exc

        from pypy.rlib import rstack

        STACK_CHECK_SLOWPATH = lltype.Ptr(lltype.FuncType([lltype.Signed],
                                                          lltype.Void))

        def insert_stack_check():
            endaddr = rstack._stack_get_end_adr()
            lengthaddr = rstack._stack_get_length_adr()
            f = llhelper(STACK_CHECK_SLOWPATH, rstack.stack_check_slowpath)
            slowpathaddr = rffi.cast(lltype.Signed, f)
            return endaddr, lengthaddr, slowpathaddr

        self.pos_exception = pos_exception
        self.pos_exc_value = pos_exc_value
        self.save_exception = save_exception
        self.save_exception_memoryerr = save_exception_memoryerr
        self.insert_stack_check = insert_stack_check

    def _setup_on_leave_jitted_untranslated(self):
        # assume we don't need a backend leave in this case
        self.on_leave_jitted_save_exc = self.save_exception
        self.on_leave_jitted_memoryerr = self.save_exception_memoryerr
        self.on_leave_jitted_noexc = lambda : None

    def _setup_on_leave_jitted_translated(self):
        on_leave_jitted_hook = self.get_on_leave_jitted_hook()
        save_exception = self.save_exception
        save_exception_memoryerr = self.save_exception_memoryerr

        def on_leave_jitted_noexc():
            on_leave_jitted_hook()

        def on_leave_jitted_save_exc():
            save_exception()
            on_leave_jitted_hook()

        def on_leave_jitted_memoryerr():
            save_exception_memoryerr()
            on_leave_jitted_hook()

        self.on_leave_jitted_noexc = on_leave_jitted_noexc
        self.on_leave_jitted_save_exc = on_leave_jitted_save_exc
        self.on_leave_jitted_memoryerr = on_leave_jitted_memoryerr

    def get_on_leave_jitted_hook(self):
        # Overridable hook invoked whenever jitted code is left (translated).
        return lambda : None

    _ON_JIT_LEAVE_FUNC = lltype.Ptr(lltype.FuncType([], lltype.Void))

    def get_on_leave_jitted_int(self, save_exception,
                                default_to_memoryerror=False):
        # Return the address (as a Signed) of the appropriate leave-jitted
        # callback, for embedding into generated assembler.
        if default_to_memoryerror:
            f = llhelper(self._ON_JIT_LEAVE_FUNC, self.on_leave_jitted_memoryerr)
        elif save_exception:
            f = llhelper(self._ON_JIT_LEAVE_FUNC, self.on_leave_jitted_save_exc)
        else:
            f = llhelper(self._ON_JIT_LEAVE_FUNC, self.on_leave_jitted_noexc)
        return rffi.cast(lltype.Signed, f)

    def grab_exc_value(self):
        # Pop (return and clear) the currently-saved exception value.
        exc = self.saved_exc_value
        self.saved_exc_value = lltype.nullptr(llmemory.GCREF.TO)
        return exc

    def free_loop_and_bridges(self, compiled_loop_token):
        # Release the assembler memory blocks owned by a compiled loop.
        AbstractCPU.free_loop_and_bridges(self, compiled_loop_token)
        blocks = compiled_loop_token.asmmemmgr_blocks
        if blocks is not None:
            compiled_loop_token.asmmemmgr_blocks = None
            for rawstart, rawstop in blocks:
                self.gc_ll_descr.freeing_block(rawstart, rawstop)
                self.asmmemmgr.free(rawstart, rawstop)

    # ------------------- helpers and descriptions --------------------

    @staticmethod
    def _cast_int_to_gcref(x):
        # dangerous!  only use if you are sure no collection could occur
        # between reading the integer and casting it to a pointer
        return rffi.cast(llmemory.GCREF, x)

    @staticmethod
    def cast_gcref_to_int(x):
        return rffi.cast(lltype.Signed, x)

    @staticmethod
    def cast_int_to_adr(x):
        return rffi.cast(llmemory.Address, x)

    @staticmethod
    def cast_adr_to_int(x):
        return rffi.cast(lltype.Signed, x)

    def sizeof(self, S):
        return get_size_descr(self.gc_ll_descr, S)

    def fielddescrof(self, STRUCT, fieldname):
        return get_field_descr(self.gc_ll_descr, STRUCT, fieldname)

    def fielddescrof_dynamic(self, offset, fieldsize, is_pointer, is_float, is_signed):
        return get_dynamic_field_descr(offset, fieldsize, is_pointer, is_float, is_signed)

    def unpack_fielddescr(self, fielddescr):
        # Returns only the field offset.
        assert isinstance(fielddescr, FieldDescr)
        return fielddescr.offset
    unpack_fielddescr._always_inline_ = True

    def unpack_fielddescr_size(self, fielddescr):
        # Returns (offset, size-in-bytes, is-signed).
        assert isinstance(fielddescr, FieldDescr)
        ofs = fielddescr.offset
        size = fielddescr.field_size
        sign = fielddescr.is_field_signed()
        return ofs, size, sign
    unpack_fielddescr_size._always_inline_ = True

    def arraydescrof(self, A):
        return get_array_descr(self.gc_ll_descr, A)

    def interiorfielddescrof(self, A, fieldname):
        return get_interiorfield_descr(self.gc_ll_descr, A, fieldname)

    def interiorfielddescrof_dynamic(self, offset, width, fieldsize,
                                     is_pointer, is_float, is_signed):
        return get_dynamic_interiorfield_descr(self.gc_ll_descr,
                                               offset, width, fieldsize,
                                               is_pointer, is_float, is_signed)

    def unpack_arraydescr(self, arraydescr):
        # Returns only the base offset of the array items.
        assert isinstance(arraydescr, ArrayDescr)
        return arraydescr.basesize
    unpack_arraydescr._always_inline_ = True

    def unpack_arraydescr_size(self, arraydescr):
        # Returns (base offset, item size, is-item-signed).
        assert isinstance(arraydescr, ArrayDescr)
        ofs = arraydescr.basesize
        size = arraydescr.itemsize
        sign = arraydescr.is_item_signed()
        return ofs, size, sign
    unpack_arraydescr_size._always_inline_ = True

    def calldescrof(self, FUNC, ARGS, RESULT, extrainfo):
        return get_call_descr(self.gc_ll_descr, ARGS, RESULT, extrainfo)

    def calldescrof_dynamic(self, ffi_args, ffi_result, extrainfo, ffi_flags):
        from pypy.jit.backend.llsupport import ffisupport
        return ffisupport.get_call_descr_dynamic(self, ffi_args, ffi_result,
                                                 extrainfo, ffi_flags)

    def get_overflow_error(self):
        # (vtable-as-int, instance-as-GCREF) of the prebuilt OverflowError.
        ovf_vtable = self.cast_adr_to_int(self._ovf_error_vtable)
        ovf_inst = lltype.cast_opaque_ptr(llmemory.GCREF,
                                          self._ovf_error_inst)
        return ovf_vtable, ovf_inst

    def get_zero_division_error(self):
        # (vtable-as-int, instance-as-GCREF) of the prebuilt ZeroDivisionError.
        zer_vtable = self.cast_adr_to_int(self._zer_error_vtable)
        zer_inst = lltype.cast_opaque_ptr(llmemory.GCREF,
                                          self._zer_error_inst)
        return zer_vtable, zer_inst

    # ____________________________________________________________
    # Blackhole operations: direct raw-memory implementations of the
    # resume/blackhole interpreter's primitives.

    def bh_arraylen_gc(self, arraydescr, array):
        assert isinstance(arraydescr, ArrayDescr)
        ofs = arraydescr.lendescr.offset
        return rffi.cast(rffi.CArrayPtr(lltype.Signed), array)[ofs/WORD]

    @specialize.argtype(2)
    def bh_getarrayitem_gc_i(self, arraydescr, gcref, itemindex):
        ofs, size, sign = self.unpack_arraydescr_size(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        for STYPE, UTYPE, itemsize in unroll_basic_sizes:
            if size == itemsize:
                if sign:
                    items = rffi.cast(rffi.CArrayPtr(STYPE), items)
                    val = items[itemindex]
                    val = rffi.cast(lltype.Signed, val)
                else:
                    items = rffi.cast(rffi.CArrayPtr(UTYPE), items)
                    val = items[itemindex]
                    val = rffi.cast(lltype.Signed, val)
                # --- end of GC unsafe code ---
                return val
        else:
            raise NotImplementedError("size = %d" % size)

    def bh_getarrayitem_gc_r(self, arraydescr, gcref, itemindex):
        ofs = self.unpack_arraydescr(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        items = rffi.cast(rffi.CArrayPtr(lltype.Signed), items)
        pval = self._cast_int_to_gcref(items[itemindex])
        # --- end of GC unsafe code ---
        return pval

    @specialize.argtype(2)
    def bh_getarrayitem_gc_f(self, arraydescr, gcref, itemindex):
        ofs = self.unpack_arraydescr(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        items = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), items)
        fval = items[itemindex]
        # --- end of GC unsafe code ---
        return fval

    @specialize.argtype(2)
    def bh_setarrayitem_gc_i(self, arraydescr, gcref, itemindex, newvalue):
        ofs, size, sign = self.unpack_arraydescr_size(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        for TYPE, _, itemsize in unroll_basic_sizes:
            if size == itemsize:
                items = rffi.cast(rffi.CArrayPtr(TYPE), items)
                items[itemindex] = rffi.cast(TYPE, newvalue)
                # --- end of GC unsafe code ---
                return
        else:
            raise NotImplementedError("size = %d" % size)

    def bh_setarrayitem_gc_r(self, arraydescr, gcref, itemindex, newvalue):
        ofs = self.unpack_arraydescr(arraydescr)
        # Write barrier must happen before the raw store below.
        self.gc_ll_descr.do_write_barrier(gcref, newvalue)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        items = rffi.cast(rffi.CArrayPtr(lltype.Signed), items)
        items[itemindex] = self.cast_gcref_to_int(newvalue)
        # --- end of GC unsafe code ---

    @specialize.argtype(2)
    def bh_setarrayitem_gc_f(self, arraydescr, gcref, itemindex, newvalue):
        ofs = self.unpack_arraydescr(arraydescr)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        items = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), items)
        items[itemindex] = newvalue
        # --- end of GC unsafe code ---

    # Raw arrays share the GC-array implementations above.
    bh_setarrayitem_raw_i = bh_setarrayitem_gc_i
    bh_setarrayitem_raw_f = bh_setarrayitem_gc_f
    bh_getarrayitem_raw_i = bh_getarrayitem_gc_i
    bh_getarrayitem_raw_f = bh_getarrayitem_gc_f

    def bh_getinteriorfield_gc_i(self, gcref, itemindex, descr):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        fieldsize = descr.fielddescr.field_size
        sign = descr.fielddescr.is_field_signed()
        fullofs = itemindex * size + ofs
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), fullofs)
        for STYPE, UTYPE, itemsize in unroll_basic_sizes:
            if fieldsize == itemsize:
                if sign:
                    item = rffi.cast(rffi.CArrayPtr(STYPE), items)
                    val = item[0]
                    val = rffi.cast(lltype.Signed, val)
                else:
                    item = rffi.cast(rffi.CArrayPtr(UTYPE), items)
                    val = item[0]
                    val = rffi.cast(lltype.Signed, val)
                # --- end of GC unsafe code ---
                return val
        else:
            raise NotImplementedError("size = %d" % fieldsize)

    def bh_getinteriorfield_gc_r(self, gcref, itemindex, descr):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs +
                            size * itemindex)
        items = rffi.cast(rffi.CArrayPtr(lltype.Signed), items)
        pval = self._cast_int_to_gcref(items[0])
        # --- end of GC unsafe code ---
        return pval

    def bh_getinteriorfield_gc_f(self, gcref, itemindex, descr):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs +
                            size * itemindex)
        items = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), items)
        fval = items[0]
        # --- end of GC unsafe code ---
        return fval

    def bh_setinteriorfield_gc_i(self, gcref, itemindex, descr, value):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        fieldsize = descr.fielddescr.field_size
        ofs = itemindex * size + ofs
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref), ofs)
        for TYPE, _, itemsize in unroll_basic_sizes:
            if fieldsize == itemsize:
                items = rffi.cast(rffi.CArrayPtr(TYPE), items)
                items[0] = rffi.cast(TYPE, value)
                # --- end of GC unsafe code ---
                return
        else:
            raise NotImplementedError("size = %d" % fieldsize)

    def bh_setinteriorfield_gc_r(self, gcref, itemindex, descr, newvalue):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        # Write barrier must happen before the raw store below.
        self.gc_ll_descr.do_write_barrier(gcref, newvalue)
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref),
                            ofs + size * itemindex)
        items = rffi.cast(rffi.CArrayPtr(lltype.Signed), items)
        items[0] = self.cast_gcref_to_int(newvalue)
        # --- end of GC unsafe code ---

    def bh_setinteriorfield_gc_f(self, gcref, itemindex, descr, newvalue):
        assert isinstance(descr, InteriorFieldDescr)
        arraydescr = descr.arraydescr
        ofs, size, _ = self.unpack_arraydescr_size(arraydescr)
        ofs += descr.fielddescr.offset
        # --- start of GC unsafe code (no GC operation!) ---
        items = rffi.ptradd(rffi.cast(rffi.CCHARP, gcref),
                            ofs + size * itemindex)
        items = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), items)
        items[0] = newvalue
        # --- end of GC unsafe code ---

    def bh_strlen(self, string):
        s = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), string)
        return len(s.chars)

    def bh_unicodelen(self, string):
        u = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), string)
        return len(u.chars)

    def bh_strgetitem(self, string, index):
        s = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), string)
        return ord(s.chars[index])

    def bh_unicodegetitem(self, string, index):
        u = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), string)
        return ord(u.chars[index])

    @specialize.argtype(1)
    def _base_do_getfield_i(self, struct, fielddescr):
        ofs, size, sign = self.unpack_fielddescr_size(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        for STYPE, UTYPE, itemsize in unroll_basic_sizes:
            if size == itemsize:
                # Note that in the common case where size==sizeof(Signed),
                # both cases of what follows are doing the same thing.
                # But gcc is clever enough to figure this out :-)
                if sign:
                    val = rffi.cast(rffi.CArrayPtr(STYPE), fieldptr)[0]
                    val = rffi.cast(lltype.Signed, val)
                else:
                    val = rffi.cast(rffi.CArrayPtr(UTYPE), fieldptr)[0]
                    val = rffi.cast(lltype.Signed, val)
                # --- end of GC unsafe code ---
                return val
        else:
            raise NotImplementedError("size = %d" % size)

    @specialize.argtype(1)
    def _base_do_getfield_r(self, struct, fielddescr):
        ofs = self.unpack_fielddescr(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        pval = rffi.cast(rffi.CArrayPtr(lltype.Signed), fieldptr)[0]
        pval = self._cast_int_to_gcref(pval)
        # --- end of GC unsafe code ---
        return pval

    @specialize.argtype(1)
    def _base_do_getfield_f(self, struct, fielddescr):
        ofs = self.unpack_fielddescr(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        fval = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), fieldptr)[0]
        # --- end of GC unsafe code ---
        return fval

    # GC and raw getfield share the same implementation.
    bh_getfield_gc_i = _base_do_getfield_i
    bh_getfield_gc_r = _base_do_getfield_r
    bh_getfield_gc_f = _base_do_getfield_f
    bh_getfield_raw_i = _base_do_getfield_i
    bh_getfield_raw_r = _base_do_getfield_r
    bh_getfield_raw_f = _base_do_getfield_f

    @specialize.argtype(1)
    def _base_do_setfield_i(self, struct, fielddescr, newvalue):
        ofs, size, sign = self.unpack_fielddescr_size(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        for TYPE, _, itemsize in unroll_basic_sizes:
            if size == itemsize:
                fieldptr = rffi.cast(rffi.CArrayPtr(TYPE), fieldptr)
                fieldptr[0] = rffi.cast(TYPE, newvalue)
                # --- end of GC unsafe code ---
                return
        else:
            raise NotImplementedError("size = %d" % size)

    @specialize.argtype(1)
    def _base_do_setfield_r(self, struct, fielddescr, newvalue):
        ofs = self.unpack_fielddescr(fielddescr)
        assert lltype.typeOf(struct) is not lltype.Signed, (
            "can't handle write barriers for setfield_raw")
        self.gc_ll_descr.do_write_barrier(struct, newvalue)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        fieldptr = rffi.cast(rffi.CArrayPtr(lltype.Signed), fieldptr)
        fieldptr[0] = self.cast_gcref_to_int(newvalue)
        # --- end of GC unsafe code ---

    @specialize.argtype(1)
    def _base_do_setfield_f(self, struct, fielddescr, newvalue):
        ofs = self.unpack_fielddescr(fielddescr)
        # --- start of GC unsafe code (no GC operation!) ---
        fieldptr = rffi.ptradd(rffi.cast(rffi.CCHARP, struct), ofs)
        fieldptr = rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), fieldptr)
        fieldptr[0] = newvalue
        # --- end of GC unsafe code ---

    bh_setfield_gc_i = _base_do_setfield_i
    bh_setfield_gc_r = _base_do_setfield_r
    bh_setfield_gc_f = _base_do_setfield_f
    bh_setfield_raw_i = _base_do_setfield_i
    bh_setfield_raw_r = _base_do_setfield_r
    bh_setfield_raw_f = _base_do_setfield_f

    def bh_new(self, sizedescr):
        return self.gc_ll_descr.gc_malloc(sizedescr)

    def bh_new_with_vtable(self, sizedescr, vtable):
        res = self.gc_ll_descr.gc_malloc(sizedescr)
        # vtable_offset is None with gcremovetypeptr (see __init__).
        if self.vtable_offset is not None:
            as_array = rffi.cast(rffi.CArrayPtr(lltype.Signed), res)
            as_array[self.vtable_offset/WORD] = vtable
        return res

    def bh_classof(self, struct):
        struct = lltype.cast_opaque_ptr(rclass.OBJECTPTR, struct)
        result_adr = llmemory.cast_ptr_to_adr(struct.typeptr)
        return heaptracker.adr2int(result_adr)

    def bh_new_array(self, arraydescr, length):
        return self.gc_ll_descr.gc_malloc_array(arraydescr, length)

    def bh_newstr(self, length):
        return self.gc_ll_descr.gc_malloc_str(length)

    def bh_newunicode(self, length):
        return self.gc_ll_descr.gc_malloc_unicode(length)

    def bh_strsetitem(self, string, index, newvalue):
        s = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), string)
        s.chars[index] = chr(newvalue)

    def bh_unicodesetitem(self, string, index, newvalue):
        u = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), string)
        u.chars[index] = unichr(newvalue)

    def bh_copystrcontent(self, src, dst, srcstart, dststart, length):
        src = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), src)
        dst = lltype.cast_opaque_ptr(lltype.Ptr(rstr.STR), dst)
        rstr.copy_string_contents(src, dst, srcstart, dststart, length)

    def bh_copyunicodecontent(self, src, dst, srcstart, dststart, length):
        src = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), src)
        dst = lltype.cast_opaque_ptr(lltype.Ptr(rstr.UNICODE), dst)
        rstr.copy_unicode_contents(src, dst, srcstart, dststart, length)

    def bh_call_i(self, func, calldescr, args_i, args_r, args_f):
        assert isinstance(calldescr, CallDescr)
        if not we_are_translated():
            calldescr.verify_types(args_i, args_r, args_f, history.INT + 'S')
        return calldescr.call_stub_i(func, args_i, args_r, args_f)

    def bh_call_r(self, func, calldescr, args_i, args_r, args_f):
        assert isinstance(calldescr, CallDescr)
        if not we_are_translated():
            calldescr.verify_types(args_i, args_r, args_f, history.REF)
        return calldescr.call_stub_r(func, args_i, args_r, args_f)

    def bh_call_f(self, func, calldescr, args_i, args_r, args_f):
        assert isinstance(calldescr, CallDescr)
        if not we_are_translated():
            calldescr.verify_types(args_i, args_r, args_f, history.FLOAT + 'L')
        return calldescr.call_stub_f(func, args_i, args_r, args_f)

    def bh_call_v(self, func, calldescr, args_i, args_r, args_f):
        assert isinstance(calldescr, CallDescr)
        if not we_are_translated():
            calldescr.verify_types(args_i, args_r, args_f, history.VOID)
        # the 'i' return value is ignored (and nonsense anyway)
        calldescr.call_stub_i(func, args_i, args_r, args_f)
|
def search(array, number):
    """Return the index of the first occurrence of number in array, or -1."""
    for position, element in enumerate(array):
        if element == number:
            return position
    return -1
# Demo: locate a value in a small sample list and report the result.
array = [22, 54, 71, 12, 7, 37]
number = 12
number_index = search(array, number)
if number_index == -1:
    print("Number not found")
else:
    print("Found at index: " + str(number_index))
<reponame>csosto-pk/liboqs
/*
 * Public API for the CRYSTALS-Dilithium signature schemes in liboqs.
 * Each parameter set is compiled in only when the corresponding
 * OQS_ENABLE_SIG_DILITHIUM_* macro is defined.  Key/signature lengths
 * are in bytes.
 */
#ifndef __OQS_SIG_DILITHIUM_H
#define __OQS_SIG_DILITHIUM_H
#include <oqs/oqs.h>
/* --- Dilithium-2 parameter set --- */
#ifdef OQS_ENABLE_SIG_DILITHIUM_2
#define OQS_SIG_DILITHIUM_2_length_public_key 1184
#define OQS_SIG_DILITHIUM_2_length_secret_key 2800
#define OQS_SIG_DILITHIUM_2_length_signature 2044
OQS_SIG *OQS_SIG_DILITHIUM_2_new();
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_2_keypair(uint8_t *public_key, uint8_t *secret_key);
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_2_sign(uint8_t *signature, size_t *signature_len, const uint8_t *message, size_t message_len, const uint8_t *secret_key);
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_2_verify(const uint8_t *message, size_t message_len, const uint8_t *signature, size_t signature_len, const uint8_t *public_key);
#endif
/* --- Dilithium-3 parameter set --- */
#ifdef OQS_ENABLE_SIG_DILITHIUM_3
#define OQS_SIG_DILITHIUM_3_length_public_key 1472
#define OQS_SIG_DILITHIUM_3_length_secret_key 3504
#define OQS_SIG_DILITHIUM_3_length_signature 2701
OQS_SIG *OQS_SIG_DILITHIUM_3_new();
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_3_keypair(uint8_t *public_key, uint8_t *secret_key);
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_3_sign(uint8_t *signature, size_t *signature_len, const uint8_t *message, size_t message_len, const uint8_t *secret_key);
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_3_verify(const uint8_t *message, size_t message_len, const uint8_t *signature, size_t signature_len, const uint8_t *public_key);
#endif
/* --- Dilithium-4 parameter set --- */
#ifdef OQS_ENABLE_SIG_DILITHIUM_4
#define OQS_SIG_DILITHIUM_4_length_public_key 1760
#define OQS_SIG_DILITHIUM_4_length_secret_key 3856
#define OQS_SIG_DILITHIUM_4_length_signature 3366
OQS_SIG *OQS_SIG_DILITHIUM_4_new();
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_4_keypair(uint8_t *public_key, uint8_t *secret_key);
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_4_sign(uint8_t *signature, size_t *signature_len, const uint8_t *message, size_t message_len, const uint8_t *secret_key);
OQS_API OQS_STATUS OQS_SIG_DILITHIUM_4_verify(const uint8_t *message, size_t message_len, const uint8_t *signature, size_t signature_len, const uint8_t *public_key);
#endif
#endif
/* vim: set sw=4 sts=4 et foldmethod=syntax : */
#ifndef GLASGOW_CONSTRAINT_SOLVER_GUARD_GCS_PROOF_HH
#define GLASGOW_CONSTRAINT_SOLVER_GUARD_GCS_PROOF_HH 1
#include <gcs/justification.hh>
#include <gcs/linear.hh>
#include <gcs/literal.hh>
#include <gcs/state-fwd.hh>
#include <gcs/variable_id.hh>
#include <exception>
#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <utility>
#include <vector>
namespace gcs
{
    // Exception thrown when proof logging fails (e.g. the OPB or proof
    // file cannot be written).
    class ProofError :
        public std::exception
    {
    private:
        std::string _wat;   // stored message returned by what()

    public:
        explicit ProofError(const std::string &);

        virtual auto what() const noexcept -> const char * override;
    };
    // Writes a VeriPB-style proof log alongside an OPB model file.
    // Move-only (copying is deleted); implementation details live in the
    // pimpl struct Imp.
    class Proof
    {
    private:
        struct Imp;
        std::unique_ptr<Imp> _imp;

        // Maps a name onto a proof-variable identifier.
        [[ nodiscard ]] auto xify(std::string &&) -> std::string;
        auto need_gevar(SimpleIntegerVariableID id, Integer v) -> void;

    public:
        explicit Proof(const std::string & opb_file, const std::string & proof_file, bool use_friendly_names = true);
        ~Proof();

        auto operator= (const Proof &) -> Proof & = delete;
        Proof(const Proof &) = delete;

        Proof(Proof &&);
        auto operator= (Proof &&) -> Proof &;

        // OPB-related output
        auto posting(const std::string &) -> void;
        auto create_integer_variable(SimpleIntegerVariableID, Integer, Integer, const std::optional<std::string> &,
            bool direct_encoding) -> void;
        [[ nodiscard ]] auto cnf(const Literals &) -> ProofLine;
        [[ nodiscard ]] auto at_most_one(const Literals &) -> ProofLine;
        [[ nodiscard ]] auto pseudoboolean_ge(const WeightedLiterals &, Integer) -> ProofLine;
        auto integer_linear_le(const State &, const Linear & coeff_vars, Integer value, bool equality) -> ProofLine;
        auto minimise(IntegerVariableID) -> void;
        auto need_proof_variable(const Literal &) -> void;
        auto need_direct_encoding_for(SimpleIntegerVariableID, Integer) -> void;

        // Proof-related output
        auto start_proof(State & initial_state) -> void;
        auto solution(const State &) -> void;
        auto backtrack(const State &) -> void;
        auto assert_contradiction() -> void;
        auto infer(const State & state, const Literal & lit, Justification why) -> void;
        auto enter_proof_level(int depth) -> void;
        auto forget_proof_level(int depth) -> void;

        // Writing proof steps from constraints
        auto emit_proof_line(const std::string &) -> ProofLine;
        auto emit_proof_comment(const std::string &) -> void;
        [[ nodiscard ]] auto trail_variables(const State &, Integer coeff) -> std::string;
        [[ nodiscard ]] auto need_constraint_saying_variable_takes_at_least_one_value(IntegerVariableID) -> ProofLine;
        auto for_each_bit_defining_var(IntegerVariableID var, const std::function<auto (Integer, const std::string &) -> void> &) -> void;
        auto create_pseudovariable(SimpleIntegerVariableID, Integer, Integer, const std::optional<std::string> &) -> void;
        auto proof_variable(const Literal &) const -> const std::string &;
    };
}
#endif
|
<html>
<body>
<h1>Search List</h1>
<input type="text" id="search" placeholder="Search..." />
<ul id="list">
  <li>Apple</li>
  <li>Banana</li>
  <li>Orange</li>
</ul>
<script>
  const searchInput = document.getElementById('search');
  const list = document.getElementById('list');
  // Snapshot the item labels from the markup once, so the data lives in a
  // single place (previously it was duplicated in a hard-coded lowercase
  // array, which also re-rendered items with mangled casing).
  const allItems = Array.from(list.children, (li) => li.textContent);
  searchInput.addEventListener('input', (e) => {
    const searchQuery = e.target.value.toLowerCase();
    // Rebuild the visible list from the filtered snapshot.
    list.innerHTML = '';
    allItems
      .filter((item) => item.toLowerCase().includes(searchQuery))
      .forEach((item) => {
        const listItem = document.createElement('li');
        listItem.textContent = item;
        list.append(listItem);
      });
  });
</script>
</body>
</html>
def convolve_1d_with_padding(input_array, kernel_array, padding_type):
    """Slide a 1-D kernel over a sequence and return the windowed weighted sums.

    The kernel is applied without flipping (cross-correlation, the common ML
    convention), matching the original implementation's intent.

    The original version was broken two ways: it multiplied two plain lists
    (``list * list`` raises TypeError), and for "same" padding it compared the
    loop index against the *unpadded* length, truncating the output.

    Args:
        input_array: sequence of numbers.
        kernel_array: sequence of numbers; the sliding kernel.
        padding_type: "valid" (no padding; output length is n - k + 1) or
            "same" (zero-padded so the output length equals the input length).

    Returns:
        list of numbers, one per position where the kernel fully overlaps the
        (possibly padded) input.

    Raises:
        ValueError: if padding_type is not "valid" or "same".
    """
    # Copy to plain lists so the list-concatenation padding below works for
    # any sequence type.
    signal = list(input_array)
    kernel = list(kernel_array)
    kernel_size = len(kernel)
    if padding_type == "same":
        # Split the k - 1 pad cells so the output stays centered on the input.
        pad_left = (kernel_size - 1) // 2
        pad_right = kernel_size - 1 - pad_left
        signal = [0] * pad_left + signal + [0] * pad_right
    elif padding_type != "valid":
        raise ValueError('padding_type must be "valid" or "same"')
    result = []
    for i in range(len(signal) - kernel_size + 1):
        window = signal[i:i + kernel_size]
        result.append(sum(x * k for x, k in zip(window, kernel)))
    return result
import React from 'react'
import { connect } from 'react-redux'
import { updateNewGroupForm } from '../actions/newGroupForm.js'
import { createGroup } from '../actions/myGroups.js'
// Presentational form for creating a group.  The controlled inputs mirror the
// `newGroupForm` slice of Redux state; submitting dispatches createGroup with
// the form data plus the adminid/code supplied by mapStateToProps.
const NewGroupForm = ({ formData, adminid, code, history, updateNewGroupForm, createGroup}) => {
  // Forward every keystroke to the Redux form slice (input `name` attributes
  // match the slice's keys).
  const handleChange = event => {
    const {name, value} = event.target
    updateNewGroupForm(name, value)
  }
  // Dispatch the create action; `history` lets the action redirect on success.
  const handleSubmit = event => {
    event.preventDefault()
    createGroup({
      ...formData,
      adminid,
      code
    }, history)
  }
  return (
    <><h2 className="page-title">Create A New Group</h2>
    <form onSubmit={handleSubmit}>
      <input placeholder="group name" type="text" name="name" value={formData.name} onChange={handleChange} className="input"/>
      <input placeholder="weekly price" type="number" min="0" name="price" value={formData.price} onChange={handleChange} className="input"/>
      <input type="submit" value="Create Group" className="input"/>
    </form></>
  )
}
// Derives props from the store: the current form state, the logged-in user's
// id (empty string when logged out), and a random hex invite code.
// NOTE(review): 16777215637 looks like a typo for 16777215 (0xFFFFFF, which
// would yield an even 6-hex-digit code) -- confirm the intended code format
// before changing it, since existing groups may already use longer codes.
const mapStateToProps = state => {
  const adminid = state.currentUser ? state.currentUser.id : ""
  const code = Math.floor(Math.random()*16777215637).toString(16)
  return {
    formData: state.newGroupForm,
    adminid,
    code
  }
}
export default connect(mapStateToProps, { updateNewGroupForm, createGroup })(NewGroupForm)
|
<filename>tapestry-ioc-junit/src/main/java/org/apache/tapestry5/ioc/junit/TapestryIOCJUnit4ClassRunner.java
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.ioc.junit;
import org.apache.tapestry5.ioc.annotations.Inject;
import org.apache.tapestry5.ioc.def.ModuleDef;
import org.junit.runner.Description;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.junit.runner.notification.RunNotifier;
import org.junit.runner.notification.StoppedByUserException;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
/**
*
* A JUnit4ClassRunner to help with Tapestry IOC integration tests. The test
* runner requires a registry configuration to be defined in a {@link Registry}
* annotation. A {@link RegistryShutdownType} can be specified to configure the
 * lifecycle of the test registry and its services
*
*
*
* {@link org.apache.tapestry5.ioc.junit.ModuleDef}s can be added to the
* {@link org.apache.tapestry5.ioc.Registry} by annotating a factory method(s)
* with {@link ModuleDef}. These {@link ModuleDef} factory methods must be
* <ul>
* <li>public</li>
* <li>static</li>
* <li>take zero arguments</li>
* <li>return a subclass of {@link org.apache.tapestry5.ioc.junit.ModuleDef}</li>
* </ul>
*
*
*
* Any services defined in the registry can be {@link Inject}ed into the test
* class to be used during testing.
*
*/
public class TapestryIOCJUnit4ClassRunner extends BlockJUnit4ClassRunner {
    /** Creates, caches, and shuts down the per-class test registry. */
    private final TestRegistryManager registryManager;

    /**
     * Creates the runner for the given test class.
     *
     * @param type the test class; its annotations configure the registry
     * @throws InitializationError if JUnit cannot initialize the class
     */
    public TapestryIOCJUnit4ClassRunner(Class<?> type) throws InitializationError {
        super(type);
        this.registryManager = new TestRegistryManager(type);
    }

    /**
     * Runs the suite with a wrapped notifier so the registry manager gets a
     * callback after each test method finishes (see
     * {@link RegistryManagerRunNotifier#fireTestFinished(Description)}).
     */
    @Override
    public void run(RunNotifier notifier) {
        RunNotifier wrapper = new RegistryManagerRunNotifier(registryManager, notifier);
        super.run(wrapper);
    }

    /**
     * Appends registry cleanup to the standard after-class statement so the
     * registry manager runs once the whole class has finished.
     */
    @Override
    protected Statement withAfterClasses(Statement statement) {
        final Statement superStatement = super.withAfterClasses(statement);
        return new Statement() {
            @Override
            public void evaluate() throws Throwable {
                // Run JUnit's @AfterClass handling first, then dispose.
                superStatement.evaluate();
                registryManager.afterTestClass();
            }
        };
    }

    /**
     * Builds the test instance through the registry so injected services are
     * satisfied by {@code autobuild}.
     */
    @Override
    protected Object createTest() throws Exception {
        org.apache.tapestry5.ioc.Registry registry = registryManager.getOrCreateRegistry();
        return registry.autobuild(getTestClass().getJavaClass());
    }

    /**
     * Pass-through {@link RunNotifier}.  Every event is forwarded verbatim to
     * the delegate; the only behavioral addition is in
     * {@link #fireTestFinished(Description)}, which first notifies the
     * registry manager so it can clean up after each test method.
     */
    public static class RegistryManagerRunNotifier extends RunNotifier {
        /** The real notifier all events are forwarded to. */
        private final RunNotifier delegate;

        /** Receives the per-method lifecycle callback. */
        private final TestRegistryManager registryManager;

        public RegistryManagerRunNotifier(TestRegistryManager registryManager, RunNotifier delegate) {
            super();
            this.delegate = delegate;
            this.registryManager = registryManager;
        }

        // The methods below are pure delegation unless commented otherwise.

        @Override
        public void addListener(RunListener listener) {
            delegate.addListener(listener);
        }

        @Override
        public void removeListener(RunListener listener) {
            delegate.removeListener(listener);
        }

        @Override
        public void fireTestRunStarted(Description description) {
            delegate.fireTestRunStarted(description);
        }

        @Override
        public void fireTestRunFinished(Result result) {
            delegate.fireTestRunFinished(result);
        }

        @Override
        public void fireTestStarted(Description description) throws StoppedByUserException {
            delegate.fireTestStarted(description);
        }

        @Override
        public void fireTestFailure(Failure failure) {
            delegate.fireTestFailure(failure);
        }

        @Override
        public void fireTestAssumptionFailed(Failure failure) {
            delegate.fireTestAssumptionFailed(failure);
        }

        @Override
        public void fireTestIgnored(Description description) {
            delegate.fireTestIgnored(description);
        }

        @Override
        public void fireTestFinished(Description description) {
            // Let the registry manager clean up (e.g. per-method registry
            // shutdown) before listeners observe the finished test.
            registryManager.afterTestMethod();
            delegate.fireTestFinished(description);
        }

        @Override
        public void pleaseStop() {
            delegate.pleaseStop();
        }

        @Override
        public void addFirstListener(RunListener listener) {
            delegate.addFirstListener(listener);
        }
    }
}
#!/usr/bin/env bash
# Sets up a local PostgreSQL instance for the "election" app: installs
# postgres if missing, initializes and starts a data folder under ../tmp,
# records DATABASE_URL in ../.env, creates the database, and runs migrations.
set -e
# set -x # Uncomment to debug

DB_NAME=election

# Install PostgreSQL via whichever package manager exists; both lines are
# no-ops when psql is already on the PATH.
(psql --version &> /dev/null) || ( (brew --version &> /dev/null) && echo "Installing PostgreSQL" && (brew install postgresql > /dev/null) )
(psql --version &> /dev/null) || ( (apt-get --version &> /dev/null) && echo "Installing PostgreSQL" && (apt-get install -y postgresql-9.4 > /dev/null) )

# Absolute directory of this script; all paths are relative to it.
MY_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
TMP_FOLDER="${MY_DIR}/../tmp"
# FIX: all expansions below are now quoted so the script survives a checkout
# path containing spaces (previously every unquoted use would word-split).
POSTGRES_FOLDER="${TMP_FOLDER}/postgres"

if [[ ! -d "${POSTGRES_FOLDER}" ]]; then
	echo "Setting up postgres data folder"
	(mkdir -p "${POSTGRES_FOLDER}" &> /dev/null) && (initdb "${POSTGRES_FOLDER}" &> /dev/null) || (echo "DB folder already set up" &> /dev/null)
fi

# Start postgres only if no server process is already running.
if [[ -z $(ps xau | grep bin/postgres | grep -v grep | grep -v java | grep -v psql) ]]; then
	echo "Starting postgres..."
	pg_ctl -D "${POSTGRES_FOLDER}" -l "${POSTGRES_FOLDER}/server.log" start
fi
sleep 1

DATABASE_URL="postgresql://127.0.0.1:5432/${DB_NAME}"
# Append DATABASE_URL to .env once (grep exits non-zero when absent, which is
# fine inside the [[ ]] condition despite set -e).
if [[ -z $(grep DATABASE_URL "${MY_DIR}/../.env") ]]; then
	printf "\nDATABASE_URL=%s\n" "${DATABASE_URL}" >> "${MY_DIR}/../.env"
fi

(createdb "${DB_NAME}" &> /dev/null && echo "Created DB ${DB_NAME}") || (echo "Database ${DB_NAME} already exists" &> /dev/null)

echo "Running migrations"
DATABASE_URL="${DATABASE_URL}" "${MY_DIR}/../bin/lein" migrate
|
#!/bin/sh
# Build the test shared library with debug symbols; -Wl,-init,init registers
# the symbol `init` as the library's ELF initialization routine.
g++ -g -ggdb -shared -fPIC -fpic -Wl,-init,init -o test.so test.so.cpp
# Build the test driver; -ldl for dlopen and -rdynamic so the driver's
# symbols are visible to code loaded at runtime.
g++ -g -ggdb test.cpp -ldl -rdynamic
<reponame>smagill/opensphere-desktop
package io.opensphere.xyztile.transformer;
import static org.junit.Assert.assertEquals;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.data.DataRegistry;
import io.opensphere.core.geometry.AbstractTileGeometry;
import io.opensphere.core.geometry.ImageManager;
import io.opensphere.core.geometry.ImageManager.RequestObserver;
import io.opensphere.core.geometry.TileGeometry;
import io.opensphere.core.geometry.renderproperties.TileRenderProperties;
import io.opensphere.core.model.GeographicBoundingBox;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.model.ZYXImageKey;
import io.opensphere.core.util.collections.New;
import io.opensphere.xyztile.model.Projection;
import io.opensphere.xyztile.model.XYZServerInfo;
import io.opensphere.xyztile.model.XYZTileLayerInfo;
/**
* Unit test for the {@link XYZ4326Divider} class.
*/
public class XYZ4326DividerTest
{
    /** Tile server URL shared by both test scenarios. */
    private static final String SERVER_URL = "http://osm.geointservices.io/osm_tiles_pc";

    /**
     * Tests dividing tiles with standard XYZ row numbering.
     */
    @Test
    public void test()
    {
        runDivideTest(false);
    }

    /**
     * Tests dividing tms tiles.
     */
    @Test
    public void testTMS()
    {
        runDivideTest(true);
    }

    /**
     * Divides a level-8 tile covering (-10,-10)..(10,10) and verifies the four
     * level-9 children carry the expected bounds and z/y/x image keys.  The
     * two public tests previously duplicated this entire body, differing only
     * in the TMS flag and the expected row indices.
     *
     * @param tms whether the layer uses TMS (flipped) row numbering.
     */
    private void runDivideTest(boolean tms)
    {
        EasyMockSupport support = new EasyMockSupport();
        TileRenderProperties props = createProps(support);
        GeographicBoundingBox bounds = new GeographicBoundingBox(LatLonAlt.createFromDegrees(-10, -10),
                LatLonAlt.createFromDegrees(10, 10));
        XYZTileLayerInfo layer = new XYZTileLayerInfo("osm", "Open Street Map", Projection.EPSG_4326, 2, tms, 0,
                new XYZServerInfo("OSM", SERVER_URL));
        DataRegistry registry = createRegistry(support);
        RequestObserver observer = support.createMock(RequestObserver.class);
        support.replayAll();

        TileGeometry.Builder<GeographicPosition> builder = new TileGeometry.Builder<GeographicPosition>();
        builder.setBounds(bounds);
        ZYXImageKey parentImageKey = new ZYXImageKey(8, 0, 0, bounds);
        builder.setImageManager(new ImageManager(parentImageKey, new XYZImageProvider(registry, layer)));
        TileGeometry tileToDivide = new TileGeometry(builder, props, null);

        XYZ4326Divider divider = new XYZ4326Divider(layer, observer);
        Collection<AbstractTileGeometry<?>> subTiles = divider.divide(tileToDivide);

        assertEquals(4, subTiles.size());
        // Collect the children keyed by their image-key string.
        Map<String, GeographicBoundingBox> actualBounds = New.map();
        for (AbstractTileGeometry<?> subTile : subTiles)
        {
            assertEquals(divider, subTile.getSplitJoinRequestProvider());
            String imageKey = subTile.getImageManager().getImageKey().toString();
            actualBounds.put(imageKey, (GeographicBoundingBox)subTile.getBounds());
        }

        // Quadrant bounds relative to the parent tile: NW, NE, SE, SW.
        GeographicBoundingBox expectedBounds1 = new GeographicBoundingBox(LatLonAlt.createFromDegrees(0d, -10d),
                LatLonAlt.createFromDegrees(10d, 0d));
        GeographicBoundingBox expectedBounds2 = new GeographicBoundingBox(LatLonAlt.createFromDegrees(0d, 0d),
                LatLonAlt.createFromDegrees(10d, 10d));
        GeographicBoundingBox expectedBounds3 = new GeographicBoundingBox(LatLonAlt.createFromDegrees(-10d, 0d),
                LatLonAlt.createFromDegrees(0, 10d));
        GeographicBoundingBox expectedBounds4 = new GeographicBoundingBox(LatLonAlt.createFromDegrees(-10d, -10d),
                LatLonAlt.createFromDegrees(0d, 0d));

        // TMS flips the row (y) index relative to standard XYZ numbering.
        List<ZYXImageKey> expecteds = tms
                ? New.list(new ZYXImageKey(9, 1, 0, expectedBounds1), new ZYXImageKey(9, 1, 1, expectedBounds2),
                        new ZYXImageKey(9, 0, 1, expectedBounds3), new ZYXImageKey(9, 0, 0, expectedBounds4))
                : New.list(new ZYXImageKey(9, 0, 0, expectedBounds1), new ZYXImageKey(9, 0, 1, expectedBounds2),
                        new ZYXImageKey(9, 1, 1, expectedBounds3), new ZYXImageKey(9, 1, 0, expectedBounds4));
        for (ZYXImageKey expected : expecteds)
        {
            GeographicBoundingBox actual = actualBounds.get(expected.toString());
            assertEquals(expected.getBounds(), actual);
        }
        support.verifyAll();
    }

    /**
     * Creates an easy mocked {@link TileRenderProperties}.
     *
     * @param support Used to create the mock.
     * @return The mocked properties.
     */
    private TileRenderProperties createProps(EasyMockSupport support)
    {
        TileRenderProperties props = support.createMock(TileRenderProperties.class);
        EasyMock.expect(Boolean.valueOf(props.isDrawable())).andReturn(Boolean.TRUE).anyTimes();
        return props;
    }

    /**
     * Creates an easy mocked data registry.
     *
     * @param support Used to create the mock.
     * @return The mocked {@link DataRegistry}.
     */
    private DataRegistry createRegistry(EasyMockSupport support)
    {
        DataRegistry registry = support.createMock(DataRegistry.class);
        return registry;
    }
}
|
#!/bin/bash
# Runs the `play` binary with a fixed rack and board, optionally under a
# wrapper command (e.g. a debugger or profiler) supplied after the first arg.
set -eo pipefail
# Absolute directory containing this script (tiles.txt lives next to it).
SCRIPT_DIR=$(cd "$(dirname "$0")"; pwd)
# First argument: project root containing boards/; remaining arguments form
# the wrapper command that the play invocation is passed to.
PROJECT_DIR=$1
shift
"$@" ./src/play/play \
AONALPZ \
"${SCRIPT_DIR}/tiles.txt" \
"${PROJECT_DIR}/boards/wwf_challenge.txt"
|
#!/bin/bash
# Wrap ansible-playbook, setting up some test of the test environment.
# Used by delegate_to.yml to ensure "sudo -E" preserves environment.
# Marker variable the playbook checks to confirm env preservation.
export I_WAS_PRESERVED=1
export MITOGEN_MAX_INTERPRETERS=3
# Tell the playbook (via extra-vars) whether the mitogen strategy is active,
# based on the ANSIBLE_STRATEGY prefix.
if [ "${ANSIBLE_STRATEGY:0:7}" = "mitogen" ]
then
    EXTRA='{"is_mitogen": true}'
else
    EXTRA='{"is_mitogen": false}'
fi
# NOTE(review): hard-coded developer-machine venv paths -- this wrapper only
# works on the machine it was written for.
exec ~/src/cpython/venv/bin/ansible-playbook -e "$EXTRA" -e ansible_python_interpreter=/Users/dmw/src/cpython/venv/bin/python2.7 "$@"
|
# Regenerate protobuf sources, then build and publish the base docker image.
./scripts/build_protos.sh
docker build -t unconst/bittensor:latest -f Dockerfile.base .
docker push unconst/bittensor:latest
|
//
// BREthereumBase.h
// BRCore
//
// Created by <NAME> on 11/19/18.
// Copyright © 2018 Breadwinner AG. All rights reserved.
//
#ifndef BREthereumBase_h
#define BREthereumBase_h
#include "ethereum/base/BREthereumBase.h"
/**
 * An Ethereum Account holds the public key data associated with a User's 'BIP-39 Paper Key'.
 *
 * The account provides the ethereum address (in both string and byte form) and the nonce. (The
 * account nonce represents the number of ethereum transactions originated by the account; it is
 * monotonically increasing and is used to guard against double spends.) As per the Ethereum
 * specification and unlike bitcoin, the account has a single address.
 */
typedef struct BREthereumAccountRecord *BREthereumAccount;
/**
 * An Ethereum Transfer represents an exchange of an asset between a source and a target address.
 *
 * The ethereum specification does not define a 'transfer'; however, the specification does define
 * `transaction` and `log` and depending on the 'Smart Contract' a `log` can represent an
 * exchange (such as for ERC20). An Ethereum Transfer is thus an abstraction over a 'transaction'
 * and a 'log' (for such an exchange).
 *
 * An Ethereum Transfer has both a source and a target address, at least one of which will be the
 * address for the User's account. A transfer includes an amount. (The amount is a value in a
 * specific currency - ETH, ERC20 token). A transfer includes a `feeBasis` and a `gasEstimate`,
 * which are Ethereum-specific concepts used to estimate/compute the fee for the transfer.
 *
 * If the Ethereum Transfer is created by the User, then it has an `originatingTransaction`. This
 * transaction is the submission to the Ethereum P2P network needed to accomplish the transfer.
 *
 * Once the transfer has been submitted it will have a 'Transfer Basis'. The basis represents
 * the outcome of the submission - it will be either a transaction or a log with an appropriate
 * status (such as 'included' or 'errored').
 *
 * A transfer includes a status.
 */
typedef struct BREthereumTransferRecord *BREthereumTransfer;
/**
* An Ethereum Wallet holds a balance and the transfers applicable to a specific currency in a
* User's account. A User likely has multiple wallets - such as for ETH and BRD - all of which are
* associated with a single account.
*
* A wallet references the User's account and the account's primary address.
*
* A wallet has an optional Ethereum Token'. If present, the wallet holds transfers for an ERC20
* (and perhaps other Smart Contract) tokens. If not present, the wallet holds ETH transfers.
*
* A wallet is associated with a network. Thus, a single account might have different wallets for
* different networks - such as 'foundation', 'ropsten' and 'rinkeby' (which is, in fact, only a
* testing feature).
*
* A wallet holds a default gas price and a default gas limit. These are used when creating a
* transfer. Different assets (ETH, BRC, etc) require different amounts of gas to execute on the
* Ethereum P2P network. Different assets might demand a different gas price (essentially the
* speed at which the P2P network includes the transfer). These defaults can be set based on the
* needs of the wallet's asset.
*
* A wallet holds all the transfers - both with User being the source and/or the target. Depending
* on the asset the transfers could be based on a transaction or a log. It should be the case that
* the balance is the sum total of the transfers; however, there is an important caveat - if the
* wallet represents a token asset, then the fees for a transfer apply to the ETH wallet.
*/
typedef struct BREthereumWalletRecord *BREthereumWallet;
/**
 * An Ethereum Wallet Manager (EWM) manages all the wallets associated with a single account on a
 * specific Ethereum P2P network. An EWM is the sole/primary interface between IOS and Android
 * applications and Ethereum wallets.
 *
 * An EWM defines a 'client' with a set of callback interfaces that must be implemented by the
 * IOS and Android applications. These callbacks provide functionality that is architecturally
 * inappropriate to include in Core code - such as HTTP queries which use libraries that are not
 * and will not be part of Core code.
 *
 * An EWM is associated with a particular account and a network. There may be multiple EWMs, with
 * one per network - such as one EWM for 'foundation', 'ropsten' and 'rinkeby' (essentially a
 * testing feature). All EWMs are expected to share a single account - although the interface
 * allows a per-EWM account.
 *
 * An EWM holds multiple wallets, with one ETH wallet and zero or more ERC20 (or other smart
 * contract) token wallets. Generally wallets for tokens are created as needed - specifically while
 * scanning the block chain for logs applicable to the account; any log representing an ERC20
 * transfer for a known token will produce a new wallet. The client interface includes a callback
 * to announce new wallets.
 *
 * An EWM stores persistent data, such as for blocks, peers, transactions and logs, using a
 * BRFileService.
 *
 * An EWM runs in its own pthread. Interactions with the EWM are either asynchronous or blocking
 * and protected by a mutex in the EWM. Generally blocking functions are those that query EWM state
 * whereas asynchronous functions are those computing or updating something. An asynchronous
 * function always leads to a client callback of some kind to announce the result of the
 * computation.
 *
 * An EWM has a specific mode. The mode determines how the EWM scans the block chain. If the EWM
 * uses the P2P network (as opposed to a fully 'BRD Service Assisted' mode), then the EWM references
 * a 'BCS' (block chain slice) object through which all P2P interactions proceed.
 */
typedef struct BREthereumEWMRecord *BREthereumEWM;
/**
 * Discriminates how a fee basis is expressed.
 */
typedef enum {
    FEE_BASIS_NONE,
    FEE_BASIS_GAS
} BREthereumFeeBasisType;

/**
 * A fee basis: for FEE_BASIS_GAS, the (gas limit, gas price) pair used to
 * compute a transfer's fee; for FEE_BASIS_NONE the union is unused.
 */
typedef struct {
    BREthereumFeeBasisType type;
    union {
        struct {
            BREthereumGas limit;
            BREthereumGasPrice price;
        } gas;
    } u;
} BREthereumFeeBasis;

/**
 * Creates a gas-based fee basis from a gas limit and gas price.
 */
extern BREthereumFeeBasis
feeBasisCreate (BREthereumGas limit,
                BREthereumGasPrice price);
//
// Errors - Right Up Front - 'The Emperor Has No Clothes' ??
//
/**
 * Status/result codes for EWM operations, grouped by the entity involved.
 */
typedef enum {
    SUCCESS,

    // Reference access
    ERROR_UNKNOWN_NODE,
    ERROR_UNKNOWN_TRANSACTION,
    ERROR_UNKNOWN_ACCOUNT,
    ERROR_UNKNOWN_WALLET,
    ERROR_UNKNOWN_BLOCK,
    ERROR_UNKNOWN_LISTENER,

    // Node
    ERROR_NODE_NOT_CONNECTED,

    // Transfer
    ERROR_TRANSACTION_HASH_MISMATCH,
    ERROR_TRANSACTION_SUBMISSION,

    // Account
    // Wallet
    // Block
    // Listener

    // Numeric
    ERROR_NUMERIC_PARSE,
} BREthereumStatus;
#endif /* BREthereumBase_h */
|
<reponame>gabrox999/simple-pattern-recognition
package it.madlabs.patternrec.web.rest.controllers;
import io.swagger.annotations.ApiParam;
import it.madlabs.patternrec.web.rest.api.PointApi;
import it.madlabs.patternrec.web.rest.api.SpaceApi;
import it.madlabs.patternrec.web.rest.controllers.common.AbstractRestController;
import it.madlabs.patternrec.web.rest.controllers.common.BadRequestException;
import it.madlabs.patternrec.web.rest.controllers.common.ServerErrorException;
import it.madlabs.patternrec.web.rest.model.Point;
import it.madlabs.patternrec.web.rest.service.PointService;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import javax.validation.Valid;
import java.util.List;
@javax.annotation.Generated(value = "io.swagger.codegen.languages.SpringCodegen", date = "2018-01-30T20:32:37.777Z")
@RestController
public class SpaceApiController extends AbstractRestController implements SpaceApi {

    /** Service providing access to the stored points. */
    private final PointService pointService;

    /**
     * Creates the controller.
     *
     * @param pointService the backing point service.
     */
    public SpaceApiController(PointService pointService) {
        this.pointService = pointService;
    }

    /**
     * Returns every stored point with HTTP 200; any failure is surfaced as a
     * {@link ServerErrorException} carrying the cause's message.
     */
    public ResponseEntity<List<Point>> allPoints() throws ServerErrorException {
        try {
            return new ResponseEntity<List<Point>>(pointService.allPoints(), HttpStatus.OK);
        } catch (Exception e) {
            throw new ServerErrorException(e.getMessage());
        }
    }

    /**
     * Deletes every stored point, answering HTTP 200 on success; failures are
     * surfaced as a {@link ServerErrorException} carrying the cause's message.
     */
    public ResponseEntity<Void> deleteAllPoints() {
        try {
            pointService.deleteAllPoints();
            return new ResponseEntity<Void>(HttpStatus.OK);
        } catch (Exception e) {
            throw new ServerErrorException(e.getMessage());
        }
    }
}
|
#include "player.h"
#include "Network.h"
// Default constructor: assigns a unique sequential name ("player0",
// "player1", ...) and the matching avatar from a function-local static
// counter, so construction order is significant and this is not thread-safe.
// NOTE(review): m_isAllIn is set here but not in the other constructors, and
// neither m_holeCards nor m_totalRoundBet is initialized in any constructor
// -- confirm the class declaration gives them in-class initializers
// (setHoleCards() relies on the slots starting as nullptr).
Player::Player()
{
    m_hasFolded = false;
    m_isAllIn = false;
    static int p_index = 0;
    m_name = "player" + std::to_string(p_index);
    m_cash = GAME_START_CASH;
    // NOTE(review): casts the raw counter to AVATAR -- presumably valid only
    // while fewer players than avatar values exist; verify the enum's range.
    m_avatar = static_cast<Player::AVATAR>(p_index);
    p_index++;
}
// Named constructor: uses the caller-supplied name and the default avatar.
// NOTE(review): unlike the default constructor, m_isAllIn is never
// initialized here (or below) -- confirm an in-class initializer exists.
Player::Player(std::string name)
{
    m_hasFolded = false;
    m_name = name;
    m_cash = GAME_START_CASH;
    m_avatar = PLAYER0;
}

// Named constructor with an explicit avatar choice.
// NOTE(review): same missing m_isAllIn initialization as above.
Player::Player(std::string name, Player::AVATAR avatar)
{
    m_hasFolded = false;
    m_name = name;
    m_cash = GAME_START_CASH;
    m_avatar = avatar;
}
// Stores the dealt card in the first empty hole-card slot; any card dealt
// after every slot is filled is silently ignored (matching the original
// behavior).  Replaces the original's hard-coded slot indices 0 and 1 with a
// loop over HOLECARDSSIZE, the same bound getHoleCards(int[], int[]) already
// uses, so the two stay consistent if the slot count ever changes.
void Player::setHoleCards(Card* holeCard)
{
    for (int i = 0; i < HOLECARDSSIZE; i++)
    {
        if (m_holeCards[i] == nullptr)
        {
            m_holeCards[i] = holeCard;
            return;
        }
    }
}
// TODO remove
// Returns the two hole cards via raw-pointer out-parameters.
void Player::getHoleCards(Card*& card1, Card*& card2)
{
    card1 = m_holeCards[0];
    card2 = m_holeCards[1];
}

// Copies each hole card's suit and value into the caller-supplied arrays
// (each must hold at least HOLECARDSSIZE entries).
// NOTE(review): dereferences m_holeCards[i] unconditionally -- calling this
// before both hole cards are dealt is undefined behavior.
void Player::getHoleCards(int suit[], int value[])
{
    for (int i = 0; i <HOLECARDSSIZE; i++){
        suit[i] = m_holeCards[i]->getSuit();
        value[i] = m_holeCards[i]->getValue();
    }
}
// Returns the player's current chip stack.
MONEY Player::getCash()
{
    return m_cash;
}

// Adds winnings (or refunds) to the player's stack.
void Player::addCash(MONEY inCash)
{
    m_cash += inCash;
}
// Places a wager: deducts it from the stack and records it against the
// running total for this betting round.
//
// FIX: accumulate with += rather than overwrite m_totalRoundBet, matching
// call() and raise() -- the original assignment discarded any blind this
// player had already posted in the same round, corrupting call() accounting.
//
// NOTE(review): there is still no all-in handling; the original had an empty
// `if (m_cash <= betAmount)` branch and a commented-out allIn() call, so
// clamping was clearly intended but never implemented.
// TODO: clamp to m_cash and set m_isAllIn once the all-in rules are defined.
MONEY Player::bet(MONEY betAmount)
{
    m_cash -= betAmount;
    m_totalRoundBet += betAmount;
    return betAmount;
}
// Checking costs nothing and changes no state.
void Player::check()
{
    return;
}

// Matches the table's current bet.  callMoney arrives as the table total;
// only the difference from what this player already has in the round is
// deducted and returned.
// NOTE(review): no clamp against m_cash -- the stack can go negative.
MONEY Player::call(MONEY callMoney)
{
    callMoney -= m_totalRoundBet;
    m_cash -= callMoney;
    m_totalRoundBet += callMoney;
    return callMoney;
}

// Deducts a raise and adds it to the round total.  Unlike call(), the
// argument is treated as an absolute additional amount.
// NOTE(review): again no all-in/stack check.
MONEY Player::raise(MONEY raiseAmount)// = currentbet)
{
    m_cash -= raiseAmount;
    m_totalRoundBet += raiseAmount;
    return raiseAmount;
}

/*void Player::allIn()
{
}*/

// Posts the small blind as a forced bet and reports the amount posted.
MONEY Player::getSmallBlind(MONEY gameSmallBlind)
{
    bet(gameSmallBlind);
    return gameSmallBlind;
}

// Posts the big blind as a forced bet and reports the amount posted.
MONEY Player::getBigBlind(MONEY gameBigBlind)
{
    bet(gameBigBlind);
    return gameBigBlind;
}
// Marks the player as out of the current hand.
void Player::fold()
{
    m_hasFolded = true;
}

// Prompts on stdin for the player's next action and maps the entered number
// onto the ACTION enum (0=Call 1=Raise 2=Fold 3=Check 4=Bet).
// NOTE(review): input is not validated -- non-numeric or out-of-range input
// yields a value outside the enum's range.
Player::ACTION Player::getAction()
{
    int action;
    std::cout << "--------" << m_name <<"---------" << std::endl;
    std::cout << "0.Call 1.Raise 2.Fold 3.Check 4.Bet :";
    std::cin >> action;
    return static_cast<ACTION>(action);
}
// Reads a raise amount from Network::value1, requiring at least double the
// current bet (a minimum-raise rule).
// NOTE(review): the loop re-reads the same Network::value1 without waiting
// for new input, so a too-small value spins forever; stdin reads are
// commented out in favor of the network value.
MONEY Player::getRaise(MONEY current_bet)
{
    MONEY raiseAmount;
    do
    {
        std::cout << "Raise Amount: $"<<Network::value1;
        //std::cin >> raiseAmount;
        raiseAmount=(MONEY) Network::value1;
    } while (raiseAmount < 2 * current_bet);
    return raiseAmount;
}

// Reads a bet amount (at least the big blind) from Network::value1.
// NOTE(review): same busy-loop caveat as getRaise().  Also, a default
// argument on an out-of-class definition is only legal when the in-class
// declaration does not also supply one -- confirm against the header.
MONEY Player::getBet(MONEY current_bet = 0)
{
    MONEY betAmount;
    do
    {
        std::cout << "Bet Amount: $"<<Network::value1;
        //std::cin >> betAmount;
        betAmount = (MONEY) Network::value1;
    } while (betAmount < GAME_START_BBLIND);
    return betAmount;
}
// True once the player has folded this hand.
bool Player::hasFolded()
{
    return m_hasFolded;
}

// Builds the seven-card hand (hole + community cards) and evaluates its
// rank.  The set* calls preserve the original's fixed order; each stage
// appears to feed the next (suits/values -> counts -> rank).
void Player::setRank(Card* community_cards[])
{
    m_hand.setSevenCardHand(m_holeCards, community_cards);
    m_hand.setSevenSuitsAndValues();
    m_hand.setSuitCount();
    m_hand.setValueCountAndSequenceTracker();
    m_hand.setRank();
}
// Returns the rank computed by setRank().
Hand::RANK Player::getRank()
{
    return m_hand.getRank();
}

// Returns the card at `index` within the best five-card hand.
Card * Player::getFiveCardHand(int index)
{
    return m_hand.getFiveCardHand(index);
}

// Clears the per-round bet accumulator; intended between betting rounds.
void Player::resetTotalRoundBet()
{
    m_totalRoundBet = 0;
}

// Returns the player's display name.
std::string Player::getName()
{
    return m_name;
}

// Prints both hole cards (value, suit) to stdout for debugging.
// NOTE(review): dereferences the hole cards unconditionally -- see the
// caveat on getHoleCards(int[], int[]).
void Player::display()
{
    for (int i = 0; i < 2; i++)
    {
        std::cout << m_holeCards[i]->getValue() << ", " << m_holeCards[i]->getSuit() << std::endl;
    }
}

// Prints the evaluated five-card hand via the Hand helper.
void Player::displayFiveCardHand()
{
    m_hand.displayFiveCardHand();
}
|
// Initiating a call example. Note: we use bogus sdp, so no real rtp session will be established.
// https://gist.github.com/mheadd/906216
var sip = require('sip');
var util = require('util');
var os = require('os');
var callmap= require('./call_map.js')
module.exports = function(initialIpAddress){
//node make_call.js 'sut <sip:[service]@[remote_ip]:[remote_port]>'
var ipAddress = initialIpAddress;
var stations = [];
var busyNumber = '5556666';
function rstring() { return Math.floor(Math.random()*1e6).toString(); }
function getId(rq){ return rq.headers['call-id'];}// [rq.headers['call-id'], rq.headers.from.params.tag].join(':'); }
// Central SIP request handler: in-dialog requests (to-tag present) go to the
// dialog's registered callback; out-of-dialog requests are answered by the
// switch below.
function handleSipRequest(rq){
	try{
		console.log('got message ' + JSON.stringify(rq));
		var id = getId(rq);
		console.log("message id " + id)
		if(rq.headers.to.params.tag) { // check if it's an in dialog request
			if(callmap.hasCall(id)){
				callmap.getCallback(id)(rq);
				// FIX: the dialog callback fully answers the request; return so
				// the switch below does not process it a second time (previously
				// an in-dialog BYE received two 200 OK responses).
				return;
			}
			else{
				console.log(rq.method);
				console.log("call doesn't exist");
				console.log(callmap.printIds());
				console.log(id);
				sip.send(sip.makeResponse(rq, 481, "Call doesn't exist"));
				// FIX: the 481 already answers the request; stop here.
				return;
			}
		}
		// Out-of-dialog requests.
		switch (rq.method) {
			case 'INVITE': {
				// var ringing = sip.makeResponse(rq, 180, 'Ringing');
				var user = sip.parseUri(rq.headers.to.uri).user;
				console.log("INVITE for " + user);
				if(user.indexOf(busyNumber) > -1){
					// Calls to the magic busy number are rejected 486 after a delay.
					setTimeout(function () {
						var busy = sip.makeResponse(rq, 486, 'Busy Here');
						sip.send(busy);
					}, 1300);
				}else{
					//INVITE for a station or a normal call
					var trying = sip.makeResponse(rq, 100, 'Trying');
					var to = trying.headers.to;
					trying.headers.contact = [{uri: to.uri}]
					sip.send(trying);
					// Answer immediately with a 200 OK carrying a static (bogus) SDP
					// body -- no real RTP session is established.
					var ok = sip.makeResponse(rq, 200, "OK");
					ok.headers.contact = [{uri: to.uri}];
					ok.headers.supported = "100rel, replaces";
					ok.headers.allow= ["INVITE", "ACK", "BYE", "CANCEL", "OPTIONS", "INFO", "MESSAGE", "SUBSCRIBE", "NOTIFY", "PRACK", "UPDATE", "REFER"];
					ok.headers['Accept-Language'] = 'en';
					ok.headers['Content-Type'] = "application/sdp";
					ok.content=
						'v=0\r\n'+
						'o=- 13374 13374 IN IP4 '+ ipAddress +'\r\n'+
						's=Polycom IP Phone\r\n'+
						'c=IN IP4 '+ ipAddress +'\r\n'+
						't=0 0\r\n'+
						'm=audio 16424 RTP/AVP 0 8 101\r\n'+
						'a=rtpmap:0 PCMU/8000\r\n'+
						'a=rtpmap:8 PCMA/8000\r\n'+
						'a=rtpmap:101 telephone-event/8000\r\n'+
						'a=fmtp:101 0-15\r\n'+
						'a=ptime:30\r\n'+
						'a=sendrecv\r\n';
					sip.send(ok);
				}
				break;
			}
			case 'BYE' :{
				console.log('call received bye');
				console.log("ending call " + id);
				callmap.removeCall(id);
				sip.send(sip.makeResponse(rq, 200, 'Ok'));
				// FIX: missing break -- previously fell through into the ACK case
				// (harmless today since ACK only breaks, but fragile).
				break;
			}
			case 'ACK': {
				// ACK needs no response.
				break;
			}
			default: {
				sip.send(sip.makeResponse(rq, 405, 'Method not allowed'));
			};
		}
	}catch(err){
		console.log("ERROR " + JSON.stringify(err));
		console.log(new Error().stack);
	}
}
//starting stack
sip.start({}, handleSipRequest);
// Handles requests arriving on an established dialog: BYE tears the call
// down with a 200 OK; every other method is rejected with 405.
function handleInCallMethods(rq){
	var callId = getId(rq);
	console.log('in call method ' + JSON.stringify(rq));
	if(rq.method !== 'BYE') {
		sip.send(sip.makeResponse(rq, 405, 'Method not allowed'));
		return;
	}
	console.log('call received bye');
	callmap.removeCall(callId);
	sip.send(sip.makeResponse(rq, 200, 'Ok'));
}
// Sends the ACK that completes the INVITE transaction, reusing the dialog's
// to/from headers, call-id, and the INVITE's CSeq number.
function sendAck(uri, to, from, callid, seq){
	// sending ACK
	sip.send({
		method: 'ACK',
		uri: uri,
		headers: {
			to: to,
			from: from,
			'call-id': callid,
			'Max-Forwards': 70,
			cseq: {method: 'ACK', seq: seq},
			via: []
		}
	});
}
// Sends a REGISTER for every station; stations that receive a 200 are
// appended to the module-level `stations` list.
function registerPhonesImpl(stationList, serverIp){
	for(var index in stationList){
		// FIX: `station` and `to` were declared with `var` (function-scoped),
		// so the async response callback below logged and stored the loop's
		// *last* station for every request.  `let` gives each iteration its
		// own binding.
		let station = stationList[index];
		console.log("registering phone " + station);
		let to = station;
		var port = 5060;
		var register = {
			method : 'REGISTER',
			uri : 'sip:' + to + '@' + serverIp + ':' + port,
			headers: {
				to : { uri : 'sip:' + to + '@' + serverIp},
				from: {uri: 'sip:' + to + '@' + serverIp , params: {tag: rstring()}},
				'call-id': rstring(),
				cseq: { method : 'REGISTER', seq: Math.floor(Math.random() * 1e5) },
				contact : [ { uri: 'sip:' + to + '@' + ipAddress } ],
				'Max-Forwards': 70,
				'Expires' : 3600
			}
		};
		console.log("registering phone " + register.uri);
		sip.send(register, function(rs) {
			console.log("registration of " + to + " " + rs.status);
			if(rs.status === 200){
				stations.push(station);
			}
		});
	}
}
// Making the call
// Sends an INVITE with a static (bogus) SDP body; on a 2xx answer, registers
// the dialog's request handler and completes the handshake with an ACK.
function makeCallImpl(to, remoteName, remoteNumber){
	console.log("placing call to " + to + ", from " + remoteName + "@" + remoteNumber);
	sip.send({
		method: 'INVITE',
		uri: to,
		headers: {
			to: {uri: to},
			from: {uri: remoteNumber, params: {tag: rstring()}, name: remoteName},
			'call-id': rstring(),
			cseq: {method: 'INVITE', seq: Math.floor(Math.random() * 1e5)},
			'content-type': 'application/sdp',
			'Max-Forwards': 70,
			contact: [{uri: "sip:101@" + ipAddress,}]
		},
		content:
			'v=0\r\n'+
			'o=- 13374 13374 IN IP4 172.16.2.2\r\n'+
			's=-\r\n'+
			'c=IN IP4 172.16.2.2\r\n'+
			't=0 0\r\n'+
			'm=audio 16424 RTP/AVP 0 8 101\r\n'+
			'a=rtpmap:0 PCMU/8000\r\n'+
			'a=rtpmap:8 PCMA/8000\r\n'+
			'a=rtpmap:101 telephone-event/8000\r\n'+
			'a=fmtp:101 0-15\r\n'+
			'a=ptime:30\r\n'+
			'a=sendrecv\r\n'
	},
	// Response callback: invoked for provisional, success, and failure
	// responses to the INVITE.
	function(rs) {
		try{
			if(rs.status >= 300) {
				console.log('call failed with status ' + rs.status);
			}
			else if(rs.status < 200) {
				console.log('call progress status ' + rs.status);
			}
			else {
				// yes we can get multiple 2xx response with different tags
				console.log('call answered with tag ' + rs.headers.to.params.tag);
				var id = getId(rs);
				if(!callmap.hasCall(id)){
					console.log('adding call to dialog list ' + id);
					callmap.addCall(id,handleInCallMethods);// 'true';// handleInCallMethods;
				}else{
					console.log("callmap already has call")
				}
				sendAck(rs.headers.contact[0].uri, rs.headers.to, rs.headers.from, rs.headers['call-id'], rs.headers.cseq.seq);
				// registring our 'dialog' which is just function to process in-dialog requests
			}
		}catch(err){
			console.log("ERROR in makeCallImpl callback" + JSON.stringify(err));
			console.log(new Error().stack);
		}
	});
}
// Public API of the module: thin wrappers over the *Impl helpers plus
// accessors for the registration/call state captured in this closure.
return {
// Place an outbound call (see makeCallImpl).
makeCall: function(to, remoteName, remoteNumber) {
return makeCallImpl(to, remoteName, remoteNumber);
},
// REGISTER each station in the list against the server (see registerPhonesImpl).
registerPhones:function(stationList, serverIp){
return registerPhonesImpl(stationList, serverIp);
},
// Stations whose registration was answered with 200 OK.
getRegisteredStations: function(){
return stations;
},
// Number of dialogs currently tracked in callmap.
getCurrentOutboundCallCount:function(){
return callmap.callCount();
},
// Set the local IP address used when building Contact headers.
setIp: function(ip){
ipAddress = ip;
}
};
}
|
package hu.unideb.inf.dejavu.controller;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import hu.unideb.inf.dejavu.controller.Game;
import hu.unideb.inf.dejavu.objects.Achievement;
import hu.unideb.inf.dejavu.objects.Card;
import hu.unideb.inf.dejavu.objects.HighScoreRecord;
import hu.unideb.inf.dejavu.objects.HighScoreTable;
import hu.unideb.inf.dejavu.objects.Pack;
import hu.unideb.inf.dejavu.objects.User;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for {@code Game}: board accessors, dimension handling,
 * end-of-game detection, name validation and achievement calculation.
 */
public class GameTest {

/** getCard must delegate to the card stored in the pack of mainStatus. */
@Test
public void testGetCard() {
Game game = new Game();
game.mainStatus.setDimension(4);
game.mainStatus.setPack(new Pack(new Card[4][4], 4));
assertEquals(game.mainStatus.getPack().getCard(0, 0), game.getCard(0, 0));
}

/** setDim must store the new board dimension. */
@Test
public void testSetDim() {
Game game = new Game();
assertEquals(0, game.getDim());
game.setDim(4);
assertEquals(4, game.getDim());
}

/** getDim must report 0 before setDim and the stored value after. */
@Test
public void testGetDim() {
Game game = new Game();
assertEquals(0, game.getDim());
game.setDim(4);
assertEquals(4, game.getDim());
}

/** isSetDim flips to true once a dimension has been chosen. */
@Test
public void testIsSetDim() {
Game game = new Game();
assertFalse(game.isSetDim());
game.setDim(4);
assertTrue(game.isSetDim());
}

/**
 * Every candidate board size returned for 17 cards must be even and
 * smaller than sqrt(17).
 */
@Test
public void testMatrixSize() {
Game game = new Game();
List<Integer> matrix = game.matrixSize(17);
for (int i : matrix) {
assertTrue(i < Math.sqrt(17) && i % 2 == 0);
}
}

/** The game ends exactly when every card has been cleared (un-clicked). */
@Test
public void testIsEnd() {
Game game = new Game();
game.mainStatus.getPack().setCards(new Card[4][4]); // stray empty ';' statement removed
game.setDim(4);
for (int i = 0; i < game.getDim(); i++)
for (int j = 0; j < game.getDim(); j++)
game.mainStatus.getPack().setCard(new Card(), i, j);
assertFalse(game.isEnd());
for (int i = 0; i < game.getDim(); i++)
for (int j = 0; j < game.getDim(); j++)
game.mainStatus.getPack().getCard(i, j).setClicked(false);
assertTrue(game.isEnd());
}

/** Names containing special characters must be rejected. */
@Test
public void testIsPerfectName() {
Game game = new Game();
assertFalse(game.isPerfectName("iam346?"));
assertTrue(game.isPerfectName("iam346"));
}

/**
 * Achievements ("Idő" = time, "Kattintás" = clicks) must reflect user "a"'s
 * rank in the time- and click-sorted high-score tables for each board size.
 * Note: assertEquals arguments are (expected, actual) per JUnit convention
 * so that failure messages read correctly.
 */
@Test
public void getAchievementTest() {
Game game = new Game();
List<Achievement> result = new ArrayList<Achievement>();
User a = new User("a", "a");
List<HighScoreRecord> hsr = new ArrayList<HighScoreRecord>();
hsr.addAll(Arrays.asList(new HighScoreRecord("a", "00:04", 6, 2),
new HighScoreRecord("b", "00:04", 6, 4),
new HighScoreRecord("a", "00:02", 8, 4),
new HighScoreRecord("c", "00:12", 12, 2),
new HighScoreRecord("d", "00:07", 12, 4),
new HighScoreRecord("a", "00:16", 2, 2)));
HighScoreTable hstTime = new HighScoreTable(hsr);
HighScoreTable hstClick = new HighScoreTable(hsr);
hstTime.sortByTime("2x2");
hstClick.sortByClick("2x2");
result = Arrays.asList(new Achievement("2Idő", 1), new Achievement("2Kattintás", 1));
assertEquals(result, game.getAchievement("2", hstTime, hstClick, a));
hstTime = new HighScoreTable(hsr);
hstClick = new HighScoreTable(hsr);
hstTime.sortByTime("4x4");
hstClick.sortByClick("4x4");
result = Arrays.asList(new Achievement("4Idő", 1), new Achievement("4Kattintás", 2));
assertEquals(result, game.getAchievement("4", hstTime, hstClick, a));
}
}
|
namespace App\Repositories;
use App\Models\User;
/**
 * Repository wrapping the User-model queries the controllers need.
 */
class UserRepository{
/**
 * All regular (non-admin) users, i.e. rows whose role column is 0.
 */
public function index(){
return User::where('role', 0)->get();
}
/**
 * Set the blocked flag of the given user.
 *
 * @param User  $user  the user to update
 * @param mixed $block truthy to block, falsy to unblock
 */
public function block(User $user, $block){
$user->update([
'blocked' => $block,
]);
}
}
/**
 * Immutable value object holding a student's name and marks in three subjects.
 * All state is set once in the constructor; fields are {@code final} so
 * instances are safe to share.
 */
class Student {

private final String name;
private final int marksInSubject1;
private final int marksInSubject2;
private final int marksInSubject3;

/**
 * Creates a student record.
 *
 * @param name the student's name
 * @param marksInSubject1 marks obtained in the first subject
 * @param marksInSubject2 marks obtained in the second subject
 * @param marksInSubject3 marks obtained in the third subject
 */
public Student(String name, int marksInSubject1, int marksInSubject2, int marksInSubject3) {
this.name = name;
this.marksInSubject1 = marksInSubject1;
this.marksInSubject2 = marksInSubject2;
this.marksInSubject3 = marksInSubject3;
}

/** @return the student's name */
public String getName() {
return name;
}

/** @return marks obtained in the first subject */
public int getMarksInSubject1() {
return marksInSubject1;
}

/** @return marks obtained in the second subject */
public int getMarksInSubject2() {
return marksInSubject2;
}

/** @return marks obtained in the third subject */
public int getMarksInSubject3() {
return marksInSubject3;
}
}
<gh_stars>100-1000
package com.sun.javafx.scene.control.behavior;
import javafx.scene.control.TextInputControl;
import java.util.List;
/**
* Abstract base class for text input behaviors.
*
* (empty as we rely on the target toolkit for now)
*/
public abstract class TextInputControlBehavior<T extends TextInputControl> extends BehaviorBase<T> {
/**************************************************************************
* Constructors *
*************************************************************************/
/**
* Create a new TextInputControlBehavior.
*
* @param textInputControl the control this behavior is attached to; cannot be null
* @param bindings the key bindings handled by this behavior, forwarded to the superclass
*/
public TextInputControlBehavior(T textInputControl, List<KeyBinding> bindings) {
super(textInputControl, bindings);
}
}
"""Quaternion helper package.

Re-exports the individual quaternion/rotation conversion utilities so they
can be imported directly from the package root.
"""
from .norm_quat import norm_quat
from .compose_quat import compose_quat
from .quat_to_rot import quat_to_rot
from .re_quat import re_quat
from .rot_to_quat import rot_to_quat
from .so3_relative_angle import so3_relative_angle

# Public API of the package (used by ``from <package> import *``).
__all__ = (
"norm_quat",
"compose_quat",
"quat_to_rot",
"rot_to_quat",
"re_quat",
"so3_relative_angle",
)
|
#!/usr/bin/env bash
# Reset password_hash for every row of `user` in the weather_dev database.
#
# BUG FIX: the hash was in double quotes, so bash expanded "$tbE4wepE" and
# the positional parameter "$9" to empty strings and a corrupted hash was
# written. The dollar signs must reach mysql literally, hence the escapes.
#
# NOTE(review): the DB password is passed on the command line and is visible
# in `ps` output and shell history - consider a ~/.my.cnf option file instead.
CMD="UPDATE user SET password_hash = 'pbkdf2:sha256:50000\$tbE4wepE\$994a071f3fb66cfea1e7cdeda8e0b8d5cf1e99828f84bbd11bda7a415dcb5650';"
echo "${CMD}" | mysql -v -h localhost --user=weather --password=96ir2WeHIuG9jkq6 weather_dev
|
// Page bootstrap: initialise Materialize widgets, attach the CSRF token to
// every jQuery AJAX call, and wire up the AJAX form/select handlers.
$(function (){
$('.modal').modal();
$('.sidenav').sidenav();
// Send the CSRF token from the page's <meta> tag with every $.ajax request.
$.ajaxSetup({
headers: {
'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content')
}
});
// Forms with class "submit" are posted via AJAX instead of a page reload.
$("form.submit").on('submit', function (e){
e.preventDefault();
request($(this))
})
// When a city is chosen, fetch its towns and repopulate the town dropdown.
$("#cities_select").on('change',function (e){
e.preventDefault();
let id = $(this).val();
// data-page holds a URL template ending in "-1"; strip that and append the id.
let page = $(this).attr("data-page");
$.ajax({
url: page.replace("-1","") + id,
method : 'GET',
dataType: 'JSON',
success : function (response){
$("#town_select").empty();
// Placeholder option ("Select a district..." in Turkish).
$("#town_select").append(new Option("İlçe seçiniz...", "-1"));
for (let i = 0; i < response.towns.length; i++)
{
let item = response.towns[i];
$("#town_select").append(new Option(item.name, item.id));
}
}
})
})
});
// Open the shared "loading" modal overlay.
function showLoadingModal() {
var $modal = $("#loadingModal");
$modal.modal('open');
}
// Close the shared "loading" modal overlay.
function hideLoadingModal() {
var $modal = $("#loadingModal");
$modal.modal('close');
}
// Show a Materialize toast; the message may contain HTML markup.
function toast(message) {
var options = {html: message};
M.toast(options);
}
// Perform an AJAX POST, showing the loading modal while the request is in
// flight. `parent` is normally the submitting <form>; pass null and supply
// {address, data} in postData to call an arbitrary endpoint instead.
// `callback`, when given, receives the parsed JSON response on success.
function request(parent, postData = {},callback = null) {
$("#loadingModal").modal('open');
// Fall back to the form's action / serialised fields unless an explicit
// address and data were provided together with parent === null.
const address = (parent === null && postData.hasOwnProperty("address")) ? postData.address : parent.attr("action");
const data = (parent === null && postData.hasOwnProperty("data")) ? postData.data : parent.serializeArray();
$.ajax({
url: address,
type: "POST",
data: data,
dataType: 'JSON',
success: function (xhr) {
$("#loadingModal").modal('close');
// Server-supplied flash message, if any.
if (xhr.hasOwnProperty("message"))
{
M.toast({html: xhr.message});
}
// Server-driven redirect, applied to the top-level window.
if (xhr.redirect) {
window.parent.location.href = xhr.redirect
}
if (callback != null)
{
callback(xhr);
}
}, error: function (XMLHttpRequest, textStatus, errorThrown) {
$("#loadingModal").modal('close');
if (XMLHttpRequest.hasOwnProperty("responseJSON")) {
// Validation errors: toast each field's first message and flash the
// offending input so the user can spot it.
if (XMLHttpRequest.responseJSON.hasOwnProperty("errors"))
{
for (const [key, value] of Object.entries(XMLHttpRequest.responseJSON.errors)) {
M.toast({html: value[0]});
$("[name="+key+"]").fadeOut();
$("[name="+key+"]").fadeIn();
}
}else{
if (XMLHttpRequest.responseJSON.hasOwnProperty("message")) {
M.toast({html: XMLHttpRequest.responseJSON.message});
}
}
// Redirects are honoured on error responses too (e.g. session expiry).
if (XMLHttpRequest.responseJSON.hasOwnProperty("redirect")) {
window.parent.location.href = XMLHttpRequest.responseJSON.redirect;
}
}
}
});
}
|
package command
import (
"errors"
"strings"
crx3 "github.com/mediabuyerbot/go-crx3"
"github.com/spf13/cobra"
)
// newKeygenCmd builds the `keygen [file]` subcommand: it generates a new
// private key via crx3 and saves it to the given file, appending a ".pem"
// suffix when the name does not already carry one.
func newKeygenCmd() *cobra.Command {
run := func(cmd *cobra.Command, args []string) error {
name := args[0]
if !strings.HasSuffix(name, ".pem") {
name += ".pem"
}
key, err := crx3.NewPrivateKey()
if err != nil {
return err
}
return crx3.SavePrivateKey(name, key)
}
return &cobra.Command{
Use:   "keygen [file]",
Short: "Create a new private key",
// Require exactly the output-file argument.
Args: func(cmd *cobra.Command, args []string) error {
if len(args) < 1 {
return errors.New("infile is required")
}
return nil
},
RunE: run,
}
}
|
/**
 * Static wrapper around basic git operations. All methods are asynchronous
 * and resolve once the underlying operation completes (implementation lives
 * elsewhere; this file only declares the ambient types).
 */
declare class Git {
/** Initialise a repository. */
static init(): Promise<void>;
/** Configure the author identity (name and email) used for commits. */
static config(name: string, email: string): Promise<void>;
/** Stage the given file pattern. */
static add(files: string): Promise<void>;
/** Record a commit with the given message. */
static commit(message: string): Promise<void>;
}
export default Git;
|
#!/bin/bash
# Tail the logs of the kafka container created by docker-compose, after
# verifying that the container actually exists.
CONTAINER_NAME=0compose_kafka_1
# NOTE(review): COLUMNS/LINES were set but never exported or used below;
# kept in case an external tool reads them - confirm and drop otherwise.
COLUMNS=129
LINES=35
# Check the container exists. Fixes vs. the original:
#  - use docker's --format instead of awk so only names are compared,
#  - grep -qx matches the exact name (the old substring grep also matched
#    e.g. "0compose_kafka_1_backup"),
#  - test the pipeline directly instead of inspecting $?.
if ! docker ps -a --format '{{.Names}}' | grep -qx "${CONTAINER_NAME}"; then
echo "You need to create docker container first"
echo "Using: docker-compose up -d"
else
docker logs -f --tail 30 "${CONTAINER_NAME}"
fi
|
#!/bin/sh
######################################################################################
# If not stated otherwise in this file or this component's LICENSE file the
# following copyright and licenses apply:
# Copyright 2018 RDK Management
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################################
#This script is used to set the L&F pass phrase into the wifi config
# Fetch the service URL payload (index 5) into a temp file, transform it with
# configparammod, and use the result as the security passphrase for APs 6 and 7.
GetServiceUrl 5 /tmp/tmp5
configparammod /tmp/tmp5
lpf=$(cat /tmp/tmp5.mod)
rm -f /tmp/tmp5
rm -f /tmp/tmp5.mod
# BUG FIX: $lpf must be quoted - an unquoted expansion would word-split a
# passphrase containing spaces and pass a truncated value to wifi_api.
/usr/bin/wifi_api wifi_setApSecurityKeyPassphrase 6 "$lpf"
/usr/bin/wifi_api wifi_setApSecurityKeyPassphrase 7 "$lpf"
|
<gh_stars>1-10
import React, { ReactNode } from 'react'
import Link from 'next/link'
import Head from 'next/head'
import { withRouter, NextRouter } from 'next/router'
type Props = {
children?: ReactNode
title?: string
router: NextRouter
}
const PATHS = [
['Readme', '/'],
['Image Example', '/image-example'],
]
// Page chrome shared by all pages: <Head> metadata, a top navigation bar
// built from PATHS (current route italicised), the page content, and a
// footer pinned to the bottom of the viewport.
// FIX: removed a stray debug `{console.log(router)}` that ran on every render.
const Layout = ({ router, children, title = 'This is the default title' }: Props) => (
<div className="relative min-h-screen">
<Head>
<title>{title}</title>
<meta charSet="utf-8" />
<meta name="viewport" content="initial-scale=1.0, width=device-width" />
</Head>
<header className="w-full">
<nav className="flex flex-row p-4">
{PATHS.map(([title, path], k) => (
<Link key={k} href={path}>
<a className={`mr-4 ${router.pathname === path ? 'italic' : ''}`}>{title}</a>
</Link>
))}
</nav>
</header>
{children}
<div className="h-12" />
<footer className="w-full absolute bottom-0">
<hr />
<div className="p-4">
Made by <a href="https://daytime.studio">daytime.studio</a>
</div>
</footer>
</div>
)

export default withRouter(Layout)
|
<reponame>mjlong2/google-homepage
import { render } from 'ejs';
import React, {Component} from 'react';
import Result from "./Result";
import './App.css';
class App extends Component {
constructor() {
super();
this.state = {
searchtext: "",
result: ""
};
}
onInputChange = e => {
this.setState({searchtext: e.target.value})
}
search = () => {
this.setState({result: this.state.searchtext, searchtext: ""})
};
render() {
return (
<div className="App">
<div id="top-bar">
<div id="top-left-links">
<a class="text-link">About</a><a class="text-link">Store</a>
</div>
<div id="top-right-links">
<a class="text-link">Gmail</a><a class="text-link">Images</a><a class="text-link">...</a><a class="text-link">🤔</a>
</div>
</div>
<div id="content">
<img src="https://www.google.com/logos/doodles/2021/dr-kamal-ranadives-104th-birthday-6753651837109127.2-l.webp"></img>
<input id="searchbox" type="text" value={this.state.searchtext} onChange={this.onInputChange}></input>
<div>
<button onClick = {this.search}>Google Search</button><button>I'm Feeling Lucky</button>
<Result searchresult={this.state.result}></Result>
</div>
</div>
<div id="bottom-bar">
<div id = "bottom-left-links">
<a class="text-link">Advertising</a>
<a class="text-link">Business</a>
<a class="text-link">How Search Works</a>
</div>
<p id ="carbon-msg">Carbon neutral since 2007</p>
<div id = "bottom-right-links">
<a class="text-link">Privacy</a>
<a class="text-link">Terms</a>
<a class="text-link">Settings</a>
</div>
</div>
</div>
);
}
}
export default App;
|
<filename>components/Exercise.js<gh_stars>0
import React, { Component } from 'react';
import dynamic from 'next/dynamic'
const CodeEditor = dynamic(import('./codeEditor'), {ssr: false})
import PropTypes from 'prop-types';
import classNames from 'classnames';
import Button from '@material-ui/core/Button'
import SendIcon from '@material-ui/icons/Send';
import SaveIcon from '@material-ui/icons/Save';
import { withStyles } from '@material-ui/core/styles';
import TextField from '@material-ui/core/TextField';
// Material-UI JSS rules injected into the component via withStyles;
// theme.spacing.unit keeps margins on the theme's spacing grid.
const styles = theme => ({
button: {
margin: theme.spacing.unit,
},
leftIcon: {
marginRight: theme.spacing.unit,
},
rightIcon: {
marginLeft: theme.spacing.unit,
},
iconSmall: {
fontSize: 20,
},
textField: {
marginLeft: theme.spacing.unit,
marginRight: theme.spacing.unit,
},
});
// Exercise view: a multiline problem statement ("Enunciado"), the code
// editor, and two action buttons (execute / submit). `classes` is the JSS
// class map injected by the withStyles HOC wrapping this component.
function Exercise(props) {
const { classes } = props;
return (
<div>
<TextField
id="outlined-multiline-static"
label="Enunciado"
multiline
rows="4"
defaultValue="Lorem ipsum dolor sit amet,
consectetur adipiscing elit, sed do eiusmod tempor
incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat."
className={classes.textField}
margin="normal"
variant="outlined"
style ={{width: '100%'}}
/>
{/* CodeEditor is loaded client-side only (next/dynamic with ssr: false). */}
<CodeEditor/>
<Button variant="contained" color="primary" className={classes.button}>
Execute code inside editor
<SendIcon className={classes.rightIcon} />
</Button>
<Button variant="contained" size="small" className={classes.button}>
Submit my answer
<SaveIcon className={classNames(classes.leftIcon, classes.iconSmall)} />
</Button>
</div>
);
}
// Runtime prop validation: `classes` is supplied by the withStyles HOC below.
Exercise.propTypes = {
classes: PropTypes.object.isRequired,
};
export default withStyles(styles)(Exercise);
|
#--------------------------------------------------------------------#
# Start #
#--------------------------------------------------------------------#
# Start the autosuggestion widgets
_zsh_autosuggest_start() {
# One-shot: remove ourselves from precmd so this initial setup runs only once.
add-zsh-hook -d precmd _zsh_autosuggest_start
_zsh_autosuggest_bind_widgets
# Re-bind widgets on every precmd to ensure we wrap other wrappers.
# Specifically, highlighting breaks if our widgets are wrapped by
# zsh-syntax-highlighting widgets. This also allows modifications
# to the widget list variables to take effect on the next precmd.
add-zsh-hook precmd _zsh_autosuggest_bind_widgets
}
# Start the autosuggestion widgets on the next precmd
# (-U skips alias expansion, -z forces zsh-style autoload of add-zsh-hook).
autoload -Uz add-zsh-hook
add-zsh-hook precmd _zsh_autosuggest_start
|
One data structure that can be used to store and quickly retrieve large amounts of data is a hash table. A hash table works by using a hashing function to map each item's key to an index in an underlying array, and the item is stored at that index. To retrieve an item, the table simply re-hashes its key and jumps directly to the corresponding slot. This allows the data structure to retrieve data items quickly without searching through the entire structure.
#!/bin/bash
# Render (but do not apply: --dry-run=client) the MetalLB memberlist secret
# manifest into secret.yaml, using a random 128-byte base64-encoded key.
kubectl create secret generic -n metallb-system memberlist --from-literal=secretkey=$(openssl rand 128 | openssl enc -base64 -A) --dry-run=client -o yaml > secret.yaml
|
<reponame>tliron/knap<filename>knap/commands/client.go
package commands
import (
contextpkg "context"
netpkg "github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/clientset/versioned"
knappkg "github.com/tliron/knap/apis/clientset/versioned"
"github.com/tliron/knap/client"
"github.com/tliron/knap/controller"
kubernetesutil "github.com/tliron/kutil/kubernetes"
"github.com/tliron/kutil/util"
apiextensionspkg "k8s.io/apiextensions-apiserver/pkg/client/clientset/clientset"
kubernetespkg "k8s.io/client-go/kubernetes"
)
// NewClient assembles a client.Client carrying every clientset the commands
// need (core Kubernetes, apiextensions, network-attachment-definitions, knap).
// It reads package-level variables (masterUrl, kubeconfigPath, context,
// namespace, cluster, log - presumably bound to CLI flags elsewhere in this
// package) and terminates the process via util.FailOnError on any error.
func NewClient() *client.Client {
config, err := kubernetesutil.NewConfigFromFlags(masterUrl, kubeconfigPath, context, log)
util.FailOnError(err)
kubernetes, err := kubernetespkg.NewForConfig(config)
util.FailOnError(err)
apiExtensions, err := apiextensionspkg.NewForConfig(config)
util.FailOnError(err)
net, err := netpkg.NewForConfig(config)
util.FailOnError(err)
knap, err := knappkg.NewForConfig(config)
util.FailOnError(err)
// Resolve the working namespace: cluster-wide mode uses the empty
// namespace; otherwise fall back to the namespace configured in
// kubeconfig, and fail if none can be determined.
namespace_ := namespace
if cluster {
namespace_ = ""
} else if namespace_ == "" {
if namespace__, ok := kubernetesutil.GetConfiguredNamespace(kubeconfigPath, context); ok {
namespace_ = namespace__
}
if namespace_ == "" {
util.Fail("could not discover namespace and \"--namespace\" not provided")
}
}
return &client.Client{
Config: config,
Kubernetes: kubernetes,
APIExtensions: apiExtensions,
Net: net,
Knap: knap,
Cluster: cluster,
Namespace: namespace_,
NamePrefix: controller.NamePrefix,
PartOf: controller.PartOf,
ManagedBy: controller.ManagedBy,
OperatorImageName: controller.OperatorImageName,
Context: contextpkg.TODO(),
Log: log,
}
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Common environment that is initialized by both Breeze and CI scripts
# Initialise the environment shared by Breeze and the CI scripts: Python
# version, docker run flags, cache/build directories, registry settings,
# forwarded ports, image defaults and Kubernetes/Helm tool versions.
# NOTE(review): relies on MY_DIR, DEFAULT_BRANCH, print_info and
# convert_local_mounts_to_docker_params being defined by the caller /
# sourced files before this function runs - confirm when reusing.
function initialize_common_environment {
# default python Major/Minor version
PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:="3.6"}
# extra flags passed to docker run for CI image
# shellcheck disable=SC2034
EXTRA_DOCKER_FLAGS=()
# extra flags passed to docker run for PROD image
# shellcheck disable=SC2034
EXTRA_DOCKER_PROD_BUILD_FLAGS=()
# files that should be cleaned up when the script exits
# shellcheck disable=SC2034
FILES_TO_CLEANUP_ON_EXIT=()
# Sets to where airflow sources are located
AIRFLOW_SOURCES=${AIRFLOW_SOURCES:=$(cd "${MY_DIR}/../../" && pwd)}
export AIRFLOW_SOURCES
# Sets to the build cache directory - status of build and convenience scripts are stored there
BUILD_CACHE_DIR="${AIRFLOW_SOURCES}/.build"
export BUILD_CACHE_DIR
# File to keep the last forced answer. This is useful for pre-commits where you need to
# only answer once if the image should be rebuilt or not and your answer is used for
# All the subsequent questions
export LAST_FORCE_ANSWER_FILE="${BUILD_CACHE_DIR}/last_force_answer.sh"
# This folder is mounted to inside the container in /files folder. This is the way how
# We can exchange DAGs, scripts, packages etc with the container environment
export FILES_DIR="${AIRFLOW_SOURCES}/files"
# Temporary dir used well ... temporarily
export TMP_DIR="${AIRFLOW_SOURCES}/tmp"
# Create useful directories if not yet created
mkdir -p "${TMP_DIR}"
mkdir -p "${FILES_DIR}"
mkdir -p "${AIRFLOW_SOURCES}/.mypy_cache"
mkdir -p "${AIRFLOW_SOURCES}/logs"
mkdir -p "${AIRFLOW_SOURCES}/dist"
# Read common values used across Breeze and CI scripts
# shellcheck source=common/_common_values.sh
. "${AIRFLOW_SOURCES}/common/_common_values.sh"
# Read image-specific values used across Breeze and CI scripts
# shellcheck source=common/_image_variables.sh
. "${AIRFLOW_SOURCES}/common/_image_variables.sh"
# Read information about files that are checked if image should be rebuilt
# shellcheck source=common/_files_for_rebuild_check.sh
. "${AIRFLOW_SOURCES}/common/_files_for_rebuild_check.sh"
# Default branch name for triggered builds is the one configured in default branch
export BRANCH_NAME=${BRANCH_NAME:=${DEFAULT_BRANCH}}
export GITHUB_ORGANISATION=${GITHUB_ORGANISATION:="apache"}
export GITHUB_REPO=${GITHUB_REPO:="airflow"}
export CACHE_REGISTRY=${CACHE_REGISTRY:="docker.pkg.github.com"}
export USE_GITHUB_REGISTRY=${USE_GITHUB_REGISTRY:="false"}
# Default port numbers for forwarded ports
export WEBSERVER_HOST_PORT=${WEBSERVER_HOST_PORT:="28080"}
export POSTGRES_HOST_PORT=${POSTGRES_HOST_PORT:="25433"}
export MYSQL_HOST_PORT=${MYSQL_HOST_PORT:="23306"}
# Default MySQL/Postgres versions
export POSTGRES_VERSION=${POSTGRES_VERSION:="9.6"}
export MYSQL_VERSION=${MYSQL_VERSION:="5.7"}
# Whether base python images should be pulled from cache
export PULL_PYTHON_BASE_IMAGES_FROM_CACHE=${PULL_PYTHON_BASE_IMAGES_FROM_CACHE:="true"}
# Disable writing .pyc files - slightly slower imports but not messing around when switching
# Python version and avoids problems with root-owned .pyc files in host
export PYTHONDONTWRITEBYTECODE=${PYTHONDONTWRITEBYTECODE:="true"}
# By default we assume the kubernetes cluster is not being started
export ENABLE_KIND_CLUSTER=${ENABLE_KIND_CLUSTER:="false"}
#
# Sets mounting of host volumes to container for static checks
# unless MOUNT_HOST_AIRFLOW_VOLUME is not true
#
MOUNT_HOST_AIRFLOW_VOLUME=${MOUNT_HOST_AIRFLOW_VOLUME:="true"}
export MOUNT_HOST_AIRFLOW_VOLUME
# If this variable is set, we mount the whole sources directory to the host rather than
# selected volumes. This is needed to check ALL source files during licence check
# for example
MOUNT_SOURCE_DIR_FOR_STATIC_CHECKS=${MOUNT_SOURCE_DIR_FOR_STATIC_CHECKS="false"}
export MOUNT_SOURCE_DIR_FOR_STATIC_CHECKS
# Set host user id to current user. This is used to set the ownership properly when exiting
# The container on Linux - all files created inside docker are created with root user
# but they should be restored back to the host user
HOST_USER_ID="$(id -ur)"
export HOST_USER_ID
# Set host group id to current group This is used to set the ownership properly when exiting
# The container on Linux - all files created inside docker are created with root user
# but they should be restored back to the host user
HOST_GROUP_ID="$(id -gr)"
export HOST_GROUP_ID
# Set host OS. This is used to set the ownership properly when exiting
# The container on Linux - all files created inside docker are created with root user
# but they should be restored back to the host user
HOST_OS="$(uname -s)"
export HOST_OS
# Add the right volume mount for sources, depending which mount strategy is used
if [[ ${MOUNT_SOURCE_DIR_FOR_STATIC_CHECKS} == "true" ]]; then
print_info
print_info "Mount whole airflow source directory for static checks"
print_info
EXTRA_DOCKER_FLAGS=( \
"-v" "${AIRFLOW_SOURCES}:/opt/airflow" \
"--env" "PYTHONDONTWRITEBYTECODE" \
)
elif [[ ${MOUNT_HOST_AIRFLOW_VOLUME} == "true" ]]; then
print_info
print_info "Mounting necessary host volumes to Docker"
print_info
read -r -a EXTRA_DOCKER_FLAGS <<< "$(convert_local_mounts_to_docker_params)"
else
print_info
print_info "Skip mounting host volumes to Docker"
print_info
EXTRA_DOCKER_FLAGS=( \
"--env" "PYTHONDONTWRITEBYTECODE" \
)
fi
# In case of the CI build get environment variables from codecov.io and
# set it as the extra docker flags. As described in https://docs.codecov.io/docs/testing-with-docker
if [[ ${CI:=} == "true" ]]; then
CI_CODECOV_ENV="$(bash <(curl -s https://codecov.io/env))"
for ENV_PARAM in ${CI_CODECOV_ENV}
do
EXTRA_DOCKER_FLAGS+=("${ENV_PARAM}")
done
fi
# By default we are not upgrading to latest requirements when building Docker CI image
# This will only be done in cron jobs
export UPGRADE_TO_LATEST_REQUIREMENTS=${UPGRADE_TO_LATEST_REQUIREMENTS:="false"}
# In case of MacOS we need to use gstat - gnu version of the stats
export STAT_BIN=stat
if [[ "${OSTYPE}" == "darwin"* ]]; then
export STAT_BIN=gstat
fi
# Read airflow version from the version.py
AIRFLOW_VERSION=$(grep version "airflow/version.py" | awk '{print $3}' | sed "s/['+]//g")
export AIRFLOW_VERSION
# default version of python used to tag the "master" and "latest" images in DockerHub
export DEFAULT_PYTHON_MAJOR_MINOR_VERSION=3.6
# In case we are not in CI - we assume we run locally. There are subtle changes if you run
# CI scripts locally - for example requirements are eagerly updated if you do local run
# in generate requirements
if [[ ${CI:="false"} == "true" ]]; then
export LOCAL_RUN="false"
else
export LOCAL_RUN="true"
fi
# eager upgrade while generating requirements should only happen in locally run
# pre-commits or in cron job
if [[ ${LOCAL_RUN} == "true" ]]; then
export UPGRADE_WHILE_GENERATING_REQUIREMENTS="true"
else
export UPGRADE_WHILE_GENERATING_REQUIREMENTS=${UPGRADE_WHILE_GENERATING_REQUIREMENTS:="false"}
fi
# Default extras used for building CI image
export DEFAULT_CI_EXTRAS="devel_ci"
# Default extras used for building Production image. The master of this information is in the Dockerfile
DEFAULT_PROD_EXTRAS=$(grep "ARG AIRFLOW_EXTRAS=" "${AIRFLOW_SOURCES}/Dockerfile"|
awk 'BEGIN { FS="=" } { print $2 }' | tr -d '"')
export DEFAULT_PROD_EXTRAS
# By default we build CI images but when we specify `--production-image` we switch to production image
export PRODUCTION_IMAGE="false"
# The SQLlite URL used for sqlite runs
export SQLITE_URL="sqlite:////root/airflow/airflow.db"
# Determines if airflow should be installed from a specified reference in GitHub
export INSTALL_AIRFLOW_REFERENCE=""
# Version suffix for PyPI packaging
export VERSION_SUFFIX_FOR_PYPI=""
# Artifact name suffix for SVN packaging
export VERSION_SUFFIX_FOR_SVN=""
# Default Kubernetes version
export DEFAULT_KUBERNETES_VERSION="v1.18.2"
# Default KinD version
export DEFAULT_KIND_VERSION="v0.8.0"
# Default Helm version
export DEFAULT_HELM_VERSION="v3.2.4"
# Version of Kubernetes to run
export KUBERNETES_VERSION="${KUBERNETES_VERSION:=${DEFAULT_KUBERNETES_VERSION}}"
# folder with DAGs to embed into production image
export EMBEDDED_DAGS=${EMBEDDED_DAGS:="empty"}
# Namespace where airflow is installed via helm
export HELM_AIRFLOW_NAMESPACE="airflow"
}
# Retrieves CI environment variables needed - depending on the CI system we run it in.
# We try to be CI - agnostic and our scripts should run the same way on different CI systems
# (This makes it easy to move between different CI systems)
# This function maps CI-specific variables into a generic ones (prefixed with CI_) that
# we used in other scripts
function get_environment_for_builds_on_ci() {
# Generic defaults - used verbatim for manual/local (non-CI) runs.
export CI_EVENT_TYPE="manual"
export CI_TARGET_REPO="apache/airflow"
export CI_TARGET_BRANCH="master"
export CI_SOURCE_REPO="apache/airflow"
export CI_SOURCE_BRANCH="master"
export CI_BUILD_ID="default-build-id"
export CI_JOB_ID="default-job-id"
if [[ ${CI:=} != "true" ]]; then
echo
echo "This is not a CI environment!. Staying with the defaults"
echo
else
# Map Travis CI variables onto the generic CI_* names.
if [[ ${TRAVIS:=} == "true" ]]; then
export CI_TARGET_REPO="${TRAVIS_REPO_SLUG}"
export CI_TARGET_BRANCH="${TRAVIS_BRANCH}"
export CI_BUILD_ID="${TRAVIS_BUILD_ID}"
export CI_JOB_ID="${TRAVIS_JOB_ID}"
if [[ "${TRAVIS_PULL_REQUEST:=}" == "true" ]]; then
export CI_EVENT_TYPE="pull_request"
export CI_SOURCE_REPO="${TRAVIS_PULL_REQUEST_SLUG}"
export CI_SOURCE_BRANCH="${TRAVIS_PULL_REQUEST_BRANCH}"
elif [[ "${TRAVIS_EVENT_TYPE:=}" == "cron" ]]; then
export CI_EVENT_TYPE="schedule"
else
export CI_EVENT_TYPE="push"
fi
# Map GitHub Actions variables onto the generic CI_* names.
elif [[ ${GITHUB_ACTIONS:=} == "true" ]]; then
export CI_TARGET_REPO="${GITHUB_REPOSITORY}"
export CI_TARGET_BRANCH="${GITHUB_BASE_REF}"
export CI_BUILD_ID="${GITHUB_RUN_ID}"
export CI_JOB_ID="${GITHUB_JOB}"
if [[ ${GITHUB_EVENT_NAME:=} == "pull_request" ]]; then
export CI_EVENT_TYPE="pull_request"
# default name of the source repo (assuming it's forked without rename)
export SOURCE_AIRFLOW_REPO=${SOURCE_AIRFLOW_REPO:="airflow"}
# For Pull Requests it's ambiguous to find the PR and we need to
# assume that name of repo is airflow but it could be overridden in case it's not
export CI_SOURCE_REPO="${GITHUB_ACTOR}/${SOURCE_AIRFLOW_REPO}"
export CI_SOURCE_BRANCH="${GITHUB_HEAD_REF}"
# Probe the guessed fork remotely; `|| true` keeps a network/auth failure
# from aborting the script so the fallback below can kick in.
BRANCH_EXISTS=$(git ls-remote --heads \
"https://github.com/${CI_SOURCE_REPO}.git" "${CI_SOURCE_BRANCH}" || true)
if [[ ${BRANCH_EXISTS} == "" ]]; then
echo
echo "https://github.com/${CI_SOURCE_REPO}.git Branch ${CI_SOURCE_BRANCH} does not exist"
echo
echo
echo "Fallback to https://github.com/${CI_TARGET_REPO}.git Branch ${CI_TARGET_BRANCH}"
echo
# Fallback to the target repository if the repo does not exist
export CI_SOURCE_REPO="${CI_TARGET_REPO}"
export CI_SOURCE_BRANCH="${CI_TARGET_BRANCH}"
fi
elif [[ ${GITHUB_EVENT_TYPE:=} == "schedule" ]]; then
export CI_EVENT_TYPE="schedule"
else
export CI_EVENT_TYPE="push"
fi
else
echo
echo "ERROR! Unknown CI environment. Exiting"
exit 1
fi
fi
# Summarise the resolved values so CI logs show exactly what was detected.
echo
echo "Detected CI build environment"
echo
echo "CI_EVENT_TYPE=${CI_EVENT_TYPE}"
echo "CI_TARGET_REPO=${CI_TARGET_REPO}"
echo "CI_TARGET_BRANCH=${CI_TARGET_BRANCH}"
echo "CI_SOURCE_REPO=${CI_SOURCE_REPO}"
echo "CI_SOURCE_BRANCH=${CI_SOURCE_BRANCH}"
echo "CI_BUILD_ID=${CI_BUILD_ID}"
echo "CI_JOB_ID=${CI_JOB_ID}"
echo
}
|
#!/usr/bin/env bats
# Hardening regression tests: each case asserts that the named kernel module
# is blacklisted, i.e. a dry-run `modprobe -n -v <module>` resolves to
# `install /bin/true` so the module can never actually be loaded.
load test_helper
@test "Verify that kernel module bluetooth is disabled" {
run bash -c "modprobe -n -v bluetooth | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module bnep is disabled" {
run bash -c "modprobe -n -v bnep | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module btusb is disabled" {
run bash -c "modprobe -n -v btusb | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module cpia2 is disabled" {
run bash -c "modprobe -n -v cpia2 | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module firewire-core is disabled" {
run bash -c "modprobe -n -v firewire-core | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module floppy is disabled" {
run bash -c "modprobe -n -v floppy | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module n_hdlc is disabled" {
run bash -c "modprobe -n -v n_hdlc | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module net-pf-31 is disabled" {
run bash -c "modprobe -n -v net-pf-31 | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module pcspkr is disabled" {
run bash -c "modprobe -n -v pcspkr | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module soundcore is disabled" {
run bash -c "modprobe -n -v soundcore | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module thunderbolt is disabled" {
run bash -c "modprobe -n -v thunderbolt | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module usb-midi is disabled" {
run bash -c "modprobe -n -v usb-midi | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module usb-storage is disabled" {
run bash -c "modprobe -n -v usb-storage | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module uvcvideo is disabled" {
run bash -c "modprobe -n -v uvcvideo | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
@test "Verify that kernel module v4l2_common is disabled" {
run bash -c "modprobe -n -v v4l2_common | grep 'install /bin/true'"
[ "$status" -eq 0 ]
}
|
<filename>src/components/Form/index.tsx
import { Form as FormikForm } from "formik";
import Field from "./Field";
import styles from "./styles.module.css";
/**
 * Formik-aware form wrapper: renders a styled container around Formik's
 * <Form> element and exposes Field as the Form.Field compound component.
 * Must be rendered inside a <Formik> provider.
 *
 * Usage:
 *
 * import { Formik } from "formik";
 * import Form from "~/Form";
 *
 * <Formik initialValues={} onSubmit={} validationSchema={}>
 *   <Form>
 *     <Form.Field name="firstName" type="text" label="First Name" />
 *     <Form.Field name="email" type="email" label="Email" />
 *   </Form>
 * </Formik>
 */
// NOTE(review): React is referenced here as a type only (React.FC) but no
// React import is visible in this file — confirm a global React type or an
// automatic JSX runtime provides it.
export const Form: React.FC & {
  Field: typeof Field;
} = ({ children }) => (
  <div className={styles.form}>
    <FormikForm>{children}</FormikForm>
  </div>
);

// Attach the sub-component so callers can write <Form.Field ... />.
Form.Field = Field;
|
<gh_stars>1-10
const _ = require('lodash');
const Promise = require('bluebird');
const Compiler = require('./compiler');
const ObjectId = require('../models/objectId');
const config = require('../../config/index');
const log = config.log;
const compiler = new Compiler();
/**
 * Filter store backed by a Redis hash per namespace. Filter sources are
 * compiled JavaScript modules exposing a `type` string (one of Filters.TYPES)
 * and a `predicate()` function.
 *
 * @param redisClient a connected redis client exposing hkeys/hset/hdel/hget/hexists
 */
const Filters = function (redisClient) {
  const self = this;
  const redis = redisClient;

  // Redis hash key for a namespace, e.g. "myTask.filters".
  const getNamespacedKey = (namespace) => `${namespace}.${Filters.NAME_KEY}`;

  /**
   * Compiles the filter source and checks the compiled module's contract;
   * throws on any violation (non-string source, bad type, missing predicate).
   *
   * @param filterSrc the raw filter source code as a string
   */
  const validate = (filterSrc) => {
    if (!_.isString(filterSrc)) {
      throw new Error('filterSrc must be string');
    }

    const filter = compiler.compile(filterSrc);
    if (!_.isString(filter.type)) {
      throw new Error('Filter type string not provided');
    } else if (!_.includes(Filters.TYPES, filter.type)) {
      throw new Error(`Filter type '${filter.type}' not one of: [${_.join(Filters.TYPES, ',')}]`);
    }

    if (!_.isFunction(filter.predicate)) {
      throw new Error('Filter predicate() not provided');
    }
  };

  /**
   * Get all filter ids for a given namespace
   *
   * @param namespace
   * @returns {*|Promise.<TResult>} resolves to the array of filter ids
   */
  self.getIds = (namespace) => redis.hkeys(getNamespacedKey(namespace));

  /**
   * Add a filter; rejects if a filter with the same id already exists
   * in the namespace, or if the source fails validate().
   *
   * @param objectId
   * @param filterSrc
   * @returns {*|Promise.<TResult>}
   */
  self.add = (objectId, filterSrc) =>
    ObjectId.coerce(objectId).validate()
      .then(() => validate(filterSrc))
      .then(() => self.exists(objectId))
      .then((exists) => {
        if (exists) {
          throw new Error(`Filter '${objectId.namespace}/${objectId.id}' exists, delete first.`);
        }

        return redis.hset(getNamespacedKey(objectId.namespace), objectId.id, filterSrc);
      });

  /**
   * Remove a filter by objectId
   * @param objectId
   * @returns {Promise.<TResult>}
   */
  self.remove = (objectId) =>
    ObjectId.coerce(objectId).validate()
      .then(() => redis.hdel(getNamespacedKey(objectId.namespace), objectId.id));

  /**
   * Remove all filters by namespace
   * @param objectId
   * @returns {Promise.<TResult>}
   */
  // NOTE(review): the final .then resolves to an ARRAY OF PROMISES (the hdel
  // calls are mapped but not awaited via Promise.all) — callers cannot rely
  // on all deletions having completed when this resolves; confirm intent.
  self.removeAllNamespacedBy = (objectId) =>
    ObjectId.coerce(objectId).validate()
      .then(() => self.getIds(objectId.namespace))
      .then((ids) => _.map(ids, (id) => redis.hdel(getNamespacedKey(objectId.namespace), id)));

  /**
   * Return TRUE if a filter exists in the system based on it's objectId
   * @param objectId
   * @returns {*|{arity, flags, keyStart, keyStop, step}}
   */
  self.exists = (objectId) =>
    ObjectId.coerce(objectId).validate()
      .then((id) => redis.hexists(getNamespacedKey(id.namespace), id.id));

  /**
   * Load the filters referenced by `filters.actions`: fetch each source from
   * redis, compile it, and group the compiled modules by their `type`.
   * Resolves to a map of type -> array of compiled filter modules
   * ({} when `filters` has no actions).
   *
   * @param taskName default namespace for actions that do not specify one
   * @param filters  {actions: [{id, namespace?, arguments?}], arguments?}
   */
  self.load = (taskName, filters) =>
    !_.isObject(filters) || !_.isArray(filters.actions) ?
      Promise.resolve({}) :
      Promise.mapSeries(filters.actions, (action) => {
        const id = new ObjectId({namespace: _.isString(action.namespace) ? action.namespace : taskName, id: action.id});
        id.arguments = action.arguments || filters.arguments;
        return id;
      })
        .then((objectIds) => Promise.mapSeries(objectIds, (objectId) =>
          objectId.validate()
            .then(() => redis.hget(getNamespacedKey(objectId.namespace), objectId.id))
            .then((src) => _.assign(compiler.compile(src), objectId))))
        .then((modules) => Promise.reduce(modules, (loadedModules, module) => {
          if (!_.isArray(loadedModules[module.type])) {
            loadedModules[module.type] = [];
          }

          log.info(`adding filter [${module.namespace}:${module.id}] [type ${module.type}]`);
          loadedModules[module.type].push(module);
          return loadedModules;
        }, {}));

  /**
   * Rejects with an error if any filter referenced by `filters.actions`
   * does not have source stored in redis; resolves otherwise.
   *
   * @param taskName default namespace for actions that do not specify one
   * @param filters  same shape as accepted by load()
   */
  self.ensureFiltersExist = (taskName, filters) => {
    if (!filters) {
      return Promise.resolve();
    }

    return !_.isObject(filters) || !_.isArray(filters.actions) ?
      Promise.resolve({}) :
      Promise.mapSeries(filters.actions, (action) => {
        const id = new ObjectId({namespace: _.isString(action.namespace) ? action.namespace : taskName, id: action.id});
        id.arguments = action.arguments || filters.arguments;
        return self.exists(id).then((exists) => exists ? Promise.resolve() : Promise.reject(new Error(`Src for filter id ${id.id} not found`)));
      });
  };
};
// Suffix of the redis hash key holding a namespace's filters ("<ns>.filters").
Filters.NAME_KEY = 'filters';
// Filter types accepted by validate().
Filters.TYPES = [
  'index',
  'type'
];
module.exports = Filters; |
import java.util.HashMap;
import java.util.Map;
/**
 * In-memory registry of {@link FileDescriptor} entries, keyed by file name.
 * Missing-name operations print a notice instead of throwing.
 */
public class FileDescriptorManager {

    /** Registered descriptors, keyed by their unique file name. */
    private final Map<String, FileDescriptor> fileDescriptorSet;

    /** Creates an empty manager. */
    public FileDescriptorManager() {
        fileDescriptorSet = new HashMap<>();
    }

    /**
     * Registers a descriptor under {@code name}, replacing any existing entry
     * with the same name.
     *
     * @param name unique file name (map key)
     * @param size file size
     * @param type file type, e.g. "txt"
     */
    public void addFileDescriptor(String name, int size, String type) {
        fileDescriptorSet.put(name, new FileDescriptor(name, size, type));
    }

    /**
     * Updates size and type of an existing descriptor; prints a notice when
     * no descriptor with that name exists.
     *
     * @param name name of the descriptor to update
     * @param size new size
     * @param type new type
     */
    public void updateFileDescriptor(String name, int size, String type) {
        // Single lookup instead of containsKey() + get().
        FileDescriptor fileDescriptor = fileDescriptorSet.get(name);
        if (fileDescriptor != null) {
            fileDescriptor.setSize(size);
            fileDescriptor.setType(type);
        } else {
            System.out.println("File descriptor with name " + name + " does not exist.");
        }
    }

    /**
     * Removes the descriptor registered under {@code name}; prints a notice
     * when no such descriptor exists.
     *
     * @param name name of the descriptor to remove
     */
    public void deleteFileDescriptor(String name) {
        // remove() returns the previous value, so one call both deletes and
        // tells us whether the entry existed.
        if (fileDescriptorSet.remove(name) == null) {
            System.out.println("File descriptor with name " + name + " does not exist.");
        }
    }

    /** Prints every registered descriptor (iteration order unspecified). */
    public void printFileDescriptors() {
        for (FileDescriptor fileDescriptor : fileDescriptorSet.values()) {
            System.out.println(fileDescriptor);
        }
    }

    /** Small demo of add/update/delete/print. */
    public static void main(String[] args) {
        FileDescriptorManager manager = new FileDescriptorManager();
        manager.addFileDescriptor("file1", 100, "txt");
        manager.addFileDescriptor("file2", 200, "pdf");
        manager.printFileDescriptors();
        manager.updateFileDescriptor("file1", 150, "doc");
        manager.printFileDescriptors();
        manager.deleteFileDescriptor("file2");
        manager.printFileDescriptors();
    }
}
/**
 * Value object describing a file: immutable name, mutable size and type.
 */
class FileDescriptor {
    /** Unique file name; fixed at construction. */
    private final String name;
    /** Current file size. */
    private int size;
    /** Current file type, e.g. "txt". */
    private String type;

    /**
     * @param name unique file name
     * @param size initial size
     * @param type initial type
     */
    public FileDescriptor(String name, int size, String type) {
        this.name = name;
        this.size = size;
        this.type = type;
    }

    /** @return the file name */
    public String getName() {
        return name;
    }

    /** @return the current size */
    public int getSize() {
        return size;
    }

    /** @return the current type */
    public String getType() {
        return type;
    }

    public void setSize(int size) {
        this.size = size;
    }

    public void setType(String type) {
        this.type = type;
    }

    @Override
    public String toString() {
        return "FileDescriptor{" +
                "name='" + name + '\'' +
                ", size=" + size +
                ", type='" + type + '\'' +
                '}';
    }
}
// Minimal HTTP entry point: build the Express app, let ./handler register all
// routes on it, then listen on port 5200 on every interface.
const express = require('express');
const app = express();
require('./handler')(app);
app.listen(5200, '0.0.0.0');
import React, { Component, PropTypes } from 'react'
import { connect } from 'react-redux'
import { fetchScores } from '../actions'
class ScoresPage extends Component {
constructor(props) {
super(props)
}
componentWillMount() {
this.props.fetchScores()
}
renderItem(score, idx) {
return (
<tr key={idx}>
<td>
<span className="user-name">{ score.user }</span>
</td>
<td>{ score.score }</td>
</tr>
)
}
render() {
const {scores} = this.props;
return (
<div className="scores">
<h2>Highscores</h2>
<table className="table">
<thead>
<tr>
<th>User</th>
<th>Score</th>
</tr>
</thead>
<tbody>
{scores.map(this.renderItem)}
</tbody>
</table>
</div>
)
}
}
// Exposes the `scores` slice of the Redux store as a prop.
function mapStateToProps(state, ownProps) {
  const { scores } = state
  return { scores }
}
// Wire the component to the store: scores come from state, fetchScores is
// passed in object-shorthand form so connect pre-binds it to dispatch.
export default connect(
  mapStateToProps,
  {
    fetchScores
  }
)(ScoresPage)
|
/**
* Copyright 2014 isandlaTech
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.psem2m.isolates.ui.admin.panels;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.table.AbstractTableModel;
/**
* @author ogattaz
*
* @param <T>
*/
/**
 * Mutable holder that associates a table row with its backing entity.
 *
 * @author ogattaz
 *
 * @param <T> the type of the wrapped entity
 */
class CEntityBean<T> {

    /** The wrapped entity. */
    private T pEntity;

    /**
     * Wraps the given entity.
     *
     * @param aEntity the entity to wrap
     */
    public CEntityBean(final T aEntity) {
        pEntity = aEntity;
    }

    /**
     * @return the currently wrapped entity
     */
    public T getEntity() {
        return pEntity;
    }

    /**
     * Replaces the wrapped entity.
     *
     * @param entity the new entity to wrap
     */
    public void setEntity(final T entity) {
        pEntity = entity;
    }
}
/**
* @author ogattaz
*
*/
/**
 * Generic Swing table model backing a {@link CJPanelTable}. Each row is kept
 * as a String[] of cell values in {@code pList}; the original entity that
 * produced the row is kept in {@code pMap}, keyed by the value of the column
 * at {@code pColKeyIdx} (which must therefore be unique per row).
 *
 * Mutating methods synchronize on this model and fire the matching
 * AbstractTableModel events after releasing the internal lock.
 *
 * @author ogattaz
 */
public abstract class CTableModel<T> extends AbstractTableModel {

    private static final long serialVersionUID = -3434843033520198107L;

    // Index of the column whose value serves as the unique row key.
    private final int pColKeyIdx;

    // Cell data: one String[] per row, in display order.
    private final List<String[]> pList = new ArrayList<String[]>();

    // Row key -> bean wrapping the entity that produced the row.
    private final Map<String, CEntityBean<T>> pMap = new HashMap<String, CEntityBean<T>>();

    // Owning panel: builds row data and keys, and filters rows (acceptRow).
    private final CJPanelTable<T> pPanel;

    // Column titles; the array length defines the column count.
    private final String[] pTitles;

    /**
     * @param aPanel     the owning panel used to build, key and filter rows
     * @param aTitles    the column titles (defines the column count)
     * @param aColKeyIdx index of the column used as the unique row key
     */
    public CTableModel(final CJPanelTable<T> aPanel, final String[] aTitles,
            final int aColKeyIdx) {

        super();
        pPanel = aPanel;
        pTitles = aTitles;
        pColKeyIdx = aColKeyIdx;
        traceDebug("%15s| ColKeyIdx=[%d], Titles=[%s]", "<init>", pColKeyIdx,
                Arrays.asList(pTitles));
    }

    /**
     * Builds the row data for the entity and, if the panel accepts the row,
     * appends it to the model (list + key map).
     *
     * @param aEntity the entity to add
     * @return the index of the new row, or -1 when the panel rejected it
     */
    private int addEntity(final T aEntity) {

        final String[] wData = pPanel.buildRowData(aEntity);
        if (pPanel.acceptRow(aEntity, wData)) {
            pList.add(wData);
            final String wKey = getRowKey(wData);
            pMap.put(wKey, buildEntityBean(aEntity));
            traceDebug("%15s| adds row=[%s]", "addEntity", wKey);
            return pList.size() - 1;
        }
        return -1;
    }

    /**
     * Adds one row for the entity and fires a rows-inserted event on success.
     *
     * @param aEntity the entity to add (null is ignored)
     * @return true when a row was actually added
     */
    public synchronized boolean addRow(final T aEntity) {

        if (aEntity == null) {
            return false;
        }

        traceDebug("%15s| NbExistingRows=[%d], NbRowsToAdd=[%d]", "addRow",
                pList.size(), 1);

        int wFirstRow = -1;
        int wLastRow = -1;

        synchronized (this) {
            wFirstRow = pList.size();
            wLastRow = addEntity(aEntity);
        }

        // Notifies all listeners that rows in the range [firstRow, lastRow],
        // inclusive, have been inserted.
        if (wLastRow > -1) {
            traceDebug(
                    "%15s| fireTableRowsInserted FirstRow=[%d], wLastRow=[%d]",
                    "addRow", wFirstRow, wLastRow);

            fireTableRowsInserted(wFirstRow, wLastRow);
            return true;
        }
        return false;
    }

    /**
     * Adds one row per entity and fires a single rows-inserted event covering
     * the inserted range (rejected entities are skipped).
     *
     * @param aEntities the entities to add (null/empty is ignored)
     * @return true when at least one row was added
     */
    public synchronized boolean addRows(final T[] aEntities) {

        if (aEntities == null || aEntities.length == 0) {
            return false;
        }

        traceDebug("%15s| NbExistingRows=[%d], NbRowsToAdd=[%d]", "addRows",
                pList.size(), aEntities.length);

        int wFirstRow = -1;
        int wLastRow = -1;
        int wNbAdded = 0;

        synchronized (this) {
            wFirstRow = pList.size();

            for (final T wEntity : aEntities) {
                final int wNewRowIdx = addEntity(wEntity);
                if (wNewRowIdx != -1) {
                    wLastRow = wNewRowIdx;
                    wNbAdded++;
                }
            }
        }

        // Notifies all listeners that rows in the range [firstRow, lastRow],
        // inclusive, have been inserted.
        if (wLastRow > -1) {
            traceDebug(
                    "%15s| fireTableRowsInserted FirstRow=[%d], wLastRow=[%d] wNbAdded=[%d]",
                    "addRows", wFirstRow, wLastRow, wNbAdded);

            fireTableRowsInserted(wFirstRow, wLastRow);
            return true;
        }
        return false;
    }

    /**
     * Wraps an entity in its bean; subclasses may override to use a richer
     * bean.
     *
     * @param aEntity the entity to wrap
     * @return a new bean holding the entity
     */
    CEntityBean<T> buildEntityBean(final T aEntity) {

        return new CEntityBean<T>(aEntity);
    }

    /**
     * Builds the detail text for the given row by delegating to the panel.
     *
     * @param aRowIdx the row index
     * @return the panel's text for the row's entity, or an error message when
     *         the index is out of bounds
     */
    String buildTextInfos(final int aRowIdx) {

        if (aRowIdx < 0 || aRowIdx > getRowCount() - 1) {
            return String.format("OutBound row index ! RowIdx=[%d] max=[%d]",
                    aRowIdx, getRowCount() - 1);
        }

        return pPanel.buildTextInfos(getEntity(aRowIdx));
    }

    /**
     * Clears all rows and entities without firing any table event.
     */
    public synchronized void destroy() {

        pMap.clear();
        pList.clear();
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.TableModel#getColumnCount()
     */
    @Override
    public int getColumnCount() {

        return pTitles.length;
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.AbstractTableModel#getColumnName(int)
     */
    @Override
    public String getColumnName(final int aColIdx) {

        return pTitles[aColIdx];
    }

    /**
     * @param aRowIdx the row index
     * @return the entity backing the row
     */
    T getEntity(final int aRowIdx) {

        return getEntityBean(aRowIdx).getEntity();
    }

    /**
     * @param aRowIdx the row index
     * @return the bean holding the row's entity (looked up via the row key)
     */
    synchronized CEntityBean<T> getEntityBean(final int aRowIdx) {

        return pMap.get(getRowKey(pList.get(aRowIdx)));
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.TableModel#getRowCount()
     */
    @Override
    public synchronized int getRowCount() {

        return pList.size();
    }

    /**
     * @param aRowData one row's cell values
     * @return the row's unique key (the cell at the key column index)
     */
    private String getRowKey(final String[] aRowData) {

        return aRowData[pColKeyIdx];
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.TableModel#getValueAt(int, int)
     */
    @Override
    public synchronized Object getValueAt(final int row, final int col) {

        // traceDebug("%15s| NbExistingRows=[%d] row=[%d] col=[%d]",
        // "getValueAt",
        // pList.size(), row, col);

        if (row > -1 && row < pList.size()) {
            final String[] rowContent = pList.get(row);
            if (rowContent != null && col > -1 && col < rowContent.length) {
                return rowContent[col];
            }
        }
        return null;
    }

    /**
     * Linear scan for the row whose key column equals the given key.
     *
     * @param aKey the row key to look for
     * @return the row index, or -1 when not found
     */
    synchronized int indexOf(final String aKey) {

        int wRowIdx = -1;
        int wI = 0;
        for (final String[] wRowData : pList) {
            if (wRowData[pColKeyIdx].equals(aKey)) {
                wRowIdx = wI;
                break;
            }
            wI++;
        }
        return wRowIdx;
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.AbstractTableModel#isCellEditable(int, int)
     */
    @Override
    public boolean isCellEditable(final int row, final int col) {

        return false;
    }

    /**
     * Removes every row and fires one rows-deleted event covering the old
     * range (no event when the model was already empty).
     */
    public synchronized void removeAllRows() {

        traceDebug("%15s| NbExistingRows=[%d]", "removeAllRows", pList.size());

        if (pList.size() > 0) {

            final int wFirstRow = 0;
            final int wLastRow = pList.size() - 1;

            pMap.clear();
            pList.clear();

            traceDebug(
                    "%15s| fireTableRowsDeleted FirstRow=[%d], wLastRow=[%d]",
                    "removeAllRows", wFirstRow, wLastRow);

            fireTableRowsDeleted(wFirstRow, wLastRow);
        }
    }

    /**
     * Removes the row backed by the given entity (located via the panel-built
     * key) and fires a rows-deleted event when a row was actually removed.
     *
     * @param aEntity the entity whose row should be removed
     */
    public synchronized void removeRow(final T aEntity) {

        traceDebug("%15s| NbExistingRows=[%d]", "removeRow", pList.size());

        final int wRowIdx;

        synchronized (this) {
            final String wKey = pPanel.buildRowKey(aEntity);
            wRowIdx = indexOf(wKey);
            if (wRowIdx > -1) {
                pMap.remove(wKey);
                pList.remove(wRowIdx);
            }
        }
        if (wRowIdx > -1) {
            traceDebug(
                    "%15s| fireTableRowsDeleted FirstRow=[%d], wLastRow=[%d]",
                    "removeRow", wRowIdx, wRowIdx);

            fireTableRowsDeleted(wRowIdx, wRowIdx);
        }
    }

    /**
     * Updates the row matching the entity's key in place (cells + bean) and
     * fires a rows-updated event; falls back to addRow() when no row with
     * that key exists yet.
     *
     * @param aEntity the entity to update or insert
     */
    public synchronized void setRow(final T aEntity) {

        traceDebug("%15s| NbExistingRows=[%d]", "setRow", pList.size());

        int wRowIdx = -1;

        synchronized (this) {
            final String[] wData = pPanel.buildRowData(aEntity);
            final String wKey = getRowKey(wData);
            wRowIdx = indexOf(wKey);
            if (wRowIdx > -1) {
                int wI = 0;
                for (final String wCellData : wData) {
                    pList.get(wRowIdx)[wI] = wCellData;
                    wI++;
                }
                pMap.put(wKey, buildEntityBean(aEntity));
                traceDebug("%15s| sets row=[%s]", "setRow", wKey);
            }
        }
        if (wRowIdx > -1) {
            traceDebug(
                    "%15s| fireTableRowsUpdated FirstRow=[%d], wLastRow=[%d]",
                    "setRow", wRowIdx, wRowIdx);

            fireTableRowsUpdated(wRowIdx, wRowIdx);
        } else {
            addRow(aEntity);
        }
    }

    /**
     * Replaces the whole model content: removes every row, then adds one row
     * per entity (each step fires its own table events).
     *
     * @param aEntities the new content
     */
    public synchronized void setRows(final T[] aEntities) {

        traceDebug("%15s| NbExistingRows=[%d]", "setRows", pList.size());

        synchronized (this) {
            removeAllRows();
            addRows(aEntities);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see javax.swing.table.AbstractTableModel#setValueAt(java.lang.Object,
     * int, int)
     */
    @Override
    public synchronized void setValueAt(final Object value, final int row,
            final int col) {

        final String[] rowContent = pList.get(row);
        if (rowContent != null && col > -1 && col < rowContent.length) {
            pList.get(row)[col] = (String) value;
            fireTableCellUpdated(row, col);
        }
    }

    /**
     * Debug tracing hook; currently disabled (body commented out).
     *
     * @param aFormat a String.format pattern
     * @param aArgs   the format arguments
     */
    private void traceDebug(final String aFormat, final Object... aArgs) {

        // System.out.println(String.format("[%25s] %s", pPanel.getClass()
        // .getSimpleName(), String.format(aFormat, aArgs)));
    }
}
|
import React, { Component,PropTypes } from 'react';
import styles from './BookSearchItem.css';
// Dumb React Component
// Presentational ("dumb") component for a single book search result: shows
// the title, cover thumbnail and a truncated description, with a button that
// reports the book back to the parent via the addBook callback.
export default class BookItem extends Component {
  // Expectation to render BookItem method
  static propTypes = {
    title: PropTypes.string,
    description: PropTypes.string,
    imageLinks: PropTypes.object,
    addBook: PropTypes.func.isRequired,
    id: PropTypes.number.isRequired
  };

  // Button click handler: suppress the default submit and hand the selected
  // book (title + id only) to the parent.
  _handleClick(e) {
    e.preventDefault();
    this.props.addBook({
      title: this.props.title,
      id: this.props.id
    });
  }

  // Truncates the description to 255 characters with an ellipsis; renders an
  // empty <div> when there is no description.
  createShortDescription() {
    const {description} = this.props;

    return description ? (
      <div>
        {description.length > 255 ? description.substring(0, 255) + '...' : description}
      </div>
    ) : (
      <div></div>
    )
  }

  render() {
    const {title, imageLinks} = this.props;
    return (
      <div>
        <div className='book--title'>
          <span>{title}</span>
        </div>
        <section className='book--details'>
          <div className='book--cover'>{imageLinks ?
            <img src={imageLinks.thumbnail} alt={title} /> :
            null}
          </div>
          <div className='book--description'>
            <span>{this.createShortDescription()}</span>
            <button className='book--description__button' type='submit' onClick={(e) => this._handleClick(e)} >Add Book</button>
          </div>
        </section>
      </div>
    );
  }
}
|
<reponame>cxq257990/sweet81<gh_stars>1-10
package com.benmu.framework.extend.module;
import android.support.annotation.Nullable;
import android.util.Log;
import com.alibaba.fastjson.JSON;
import com.amap.api.location.AMapLocation;
import com.amap.api.location.AMapLocationClient;
import com.amap.api.location.AMapLocationClientOption;
import com.amap.api.location.AMapLocationListener;
import com.amap.api.maps2d.AMapUtils;
import com.amap.api.maps2d.model.LatLng;
import com.amap.api.services.core.AMapException;
import com.amap.api.services.core.PoiItem;
import com.amap.api.services.poisearch.PoiResult;
import com.amap.api.services.poisearch.PoiSearch;
import com.amap.api.services.weather.LocalWeatherForecastResult;
import com.amap.api.services.weather.LocalWeatherLive;
import com.amap.api.services.weather.LocalWeatherLiveResult;
import com.amap.api.services.weather.WeatherSearch;
import com.amap.api.services.weather.WeatherSearchQuery;
import com.benmu.framework.manager.ManagerFactory;
import com.benmu.framework.manager.impl.PermissionManager;
import com.benmu.framework.extend.components.map.WXMapPolygonComponent;
import com.taobao.weex.WXEnvironment;
import com.taobao.weex.annotation.JSMethod;
import com.taobao.weex.bridge.JSCallback;
import com.taobao.weex.common.WXModule;
import com.taobao.weex.ui.component.WXComponent;
import org.json.JSONArray;
import org.json.JSONException;
import java.util.HashMap;
import java.util.List;
/**
* Created by budao on 2017/1/24.
*/
/**
 * Weex JS bridge module wrapping the AMap SDK: distance calculation, polygon
 * hit-testing, one-shot location, POI search and weather queries. Results are
 * delivered to JS through the supplied {@link JSCallback}s.
 */
public class WXMapModule extends WXModule {
    private static final String RESULT = "result";
    private static final String DATA = "data";
    private static final String RESULT_OK = "success";
    private static final String RESULT_FAILED = "failed";

    // Runtime permissions required before requesting a location fix.
    private static String[] permissions = new String[] {
            "android.permission.ACCESS_FINE_LOCATION",
            "android.permission.ACCESS_LOCATION_EXTRA_COMMANDS"
    };

    /**
     * get line distance between to POI.
     *
     * @param posA     JSON array "[lng, lat]" of the first point
     * @param posB     JSON array "[lng, lat]" of the second point
     * @param callback receives {result, data:{distance}}; distance is -1 and
     *                 result "failed" when either input cannot be parsed
     */
    @JSMethod
    public void getLineDistance(String posA, String posB, @Nullable final JSCallback callback) {
        Log.v("getDistance", posA + ", " + posB);
        float distance = -1;
        try {
            // Input order is [longitude, latitude]; LatLng wants (lat, lng).
            JSONArray jsonArray = new JSONArray(posA);
            LatLng latLngA = new LatLng(jsonArray.optDouble(1), jsonArray.optDouble(0));
            JSONArray jsonArrayB = new JSONArray(posB);
            LatLng latLngB = new LatLng(jsonArrayB.optDouble(1), jsonArrayB.optDouble(0));
            distance = AMapUtils.calculateLineDistance(latLngA, latLngB);
        } catch (JSONException e) {
            e.printStackTrace();
        }

        if (callback != null) {
            HashMap map = new HashMap(2);
            HashMap data = new HashMap(1);
            data.put("distance", distance);
            map.put(DATA, data);
            map.put(RESULT, distance >= 0 ? RESULT_OK : RESULT_FAILED);
            callback.invoke(map);
        }
    }

    /**
     * Tests whether a point lies inside the polygon component with the given
     * component id.
     *
     * @param position JSON array "[lng, lat]" of the point
     * @param id       id of a WXMapPolygonComponent on the page
     * @param callback receives {result, data:boolean}; result "failed" when
     *                 the component is missing or the input is malformed
     */
    @JSMethod
    public void polygonContainsMarker(String position, String id, @Nullable final JSCallback callback) {
        boolean contains = false;
        boolean success = false;
        try {
            JSONArray jsonArray = new JSONArray(position);
            LatLng latLng = new LatLng(jsonArray.optDouble(1), jsonArray.optDouble(0));
            WXComponent component = findComponent(id);
            if (component != null && component instanceof WXMapPolygonComponent) {
                contains = ((WXMapPolygonComponent) component).contains(latLng);
                success = true;
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }

        if (callback != null) {
            HashMap map = new HashMap(2);
            map.put(DATA, contains);
            map.put(RESULT, success ? RESULT_OK : RESULT_FAILED);
            callback.invoke(map);
        }
    }

    /**
     * get user location.
     *
     * Requests the location permissions first when they are not yet granted;
     * the actual fix is started from the grant listener in that case.
     *
     * @param callback receives the location payload, or {"status": 119} when
     *                 the user denies the permission request
     */
    @JSMethod
    public void getUserLocation(@Nullable final JSCallback callback) {
        PermissionManager permissionManager = ManagerFactory.getManagerService(PermissionManager.class);
        if (!permissionManager.hasPermissions(mWXSDKInstance.getContext(), permissions)) {
            permissionManager.requestPermissions(mWXSDKInstance.getContext(),
                    new PermissionManager.PermissionListener() {
                        @Override
                        public void onPermissionsGranted(List<String> perms) {
                            getLocation(callback);
                        }

                        @Override
                        public void onPermissionsDenied(List<String> perms) {
                            if (callback != null) callback.invoke(JSON.parse("{\"status\": 119}"));
                        }

                        @Override
                        public void onPermissionRequestRejected() {
                        }
                    }, permissions);
            // BUGFIX: stop here. The original fell through and also called
            // getLocation() immediately, starting a locate attempt before the
            // permission dialog resolved and invoking the callback twice once
            // the grant listener fired as well.
            return;
        }
        getLocation(callback);
    }

    /**
     * Performs a single high-accuracy location fix and reports it to the
     * callback, then stops and destroys the client.
     *
     * @param callback receives {data, status:0} on success, or a non-zero
     *                 status (SDK error code, 400 or 404) on failure
     */
    private void getLocation(final JSCallback callback) {
        final AMapLocationClient client = new AMapLocationClient(
                WXEnvironment.getApplication().getApplicationContext());
        final AMapLocationClientOption clientOption = new AMapLocationClientOption();
        // Register the location listener.
        client.setLocationListener(new AMapLocationListener() {
            public void onLocationChanged(AMapLocation aMapLocation) {
                if (aMapLocation != null && aMapLocation.getErrorCode() == 0) {
                    if (callback != null) {
                        HashMap map = new HashMap(2);
                        map.put(DATA, aMapLocation);
                        // Status 400 flags a fix with non-positive coordinates.
                        map.put("status", aMapLocation.getLongitude() > 0 && aMapLocation.getLatitude() > 0 ? 0 : 400);
                        callback.invoke(map);
                    }
                } else {
                    HashMap map = new HashMap(2);
                    if (aMapLocation != null) {
                        map.put("status", aMapLocation.getErrorCode());
                        map.put("errorMsg", aMapLocation.getErrorInfo());
                    } else {
                        map.put("status",404);
                        map.put("errorMsg"," ");
                    }
                    if (callback != null) callback.invoke(map);
                }
                if (client != null) {
                    client.stopLocation();
                    client.onDestroy();
                }
            }
        });
        // High-accuracy, single-shot mode with a 2s HTTP timeout.
        clientOption.setLocationMode(AMapLocationClientOption.AMapLocationMode.Hight_Accuracy);
        clientOption.setOnceLocation(true);
        clientOption.setHttpTimeOut(2000);
        // Apply the options before starting.
        client.setLocationOption(clientOption);
        // In single-shot mode the SDK removes the request itself when the fix
        // completes (success or failure), so no explicit stopLocation() is
        // needed for the request; the client is still destroyed in the
        // listener above to release resources.
        client.startLocation();
    }

    /**
     * Asynchronous POI keyword search limited to one city (first page, up to
     * 20 results).
     *
     * @param keyWord  search keyword
     * @param city     city to restrict the search to
     * @param callback receives {data:[PoiItem...], status:0} on success, or
     *                 {"status": code} on SDK failure
     */
    @JSMethod
    public void GetPoiSearch(String keyWord, String city, @Nullable final JSCallback callback) {
        PoiSearch.Query query = new PoiSearch.Query(keyWord,"",city);
        query.setPageSize(20);
        query.setPageNum(0);
        query.setCityLimit(true);
        final PoiSearch search = new PoiSearch(WXEnvironment.getApplication().getApplicationContext(), query);
        search.setOnPoiSearchListener(new PoiSearch.OnPoiSearchListener() {
            @Override
            public void onPoiSearched(PoiResult result, int code) {
                if(code == AMapException.CODE_AMAP_SUCCESS) {
                    if (result != null && result.getQuery() != null) {
                        List<PoiItem> poiItems = result.getPois();
                        HashMap map = new HashMap(2);
                        map.put("data", poiItems);
                        map.put("status", 0);
                        if (callback != null)
                            callback.invoke(map);
                    }
                } else {
                    if (callback != null) callback.invoke(JSON.parse("{\"status\": "+code+"}"));
                }
            }

            @Override
            public void onPoiItemSearched(PoiItem poiItem, int i) {
            }
        });
        search.searchPOIAsyn();
    }

    /**
     * Asynchronous weather-forecast query for a city.
     *
     * NOTE(review): the forecast listener body was commented out in the
     * original, so this method never invokes its callback — restore the
     * LocalWeatherForecastResult handling (see GetLiveWeather for the
     * pattern) if forecasts are actually needed.
     *
     * @param cityname city name to query
     * @param callback currently never invoked (see note above)
     */
    @JSMethod
    public void GetForcastsWeather(String cityname, @Nullable final JSCallback callback) {
        WeatherSearchQuery query = new WeatherSearchQuery(cityname, WeatherSearchQuery.WEATHER_TYPE_FORECAST);
        final WeatherSearch search = new WeatherSearch(WXEnvironment.getApplication().getApplicationContext());
        search.setOnWeatherSearchListener(new WeatherSearch.OnWeatherSearchListener() {
            @Override
            public void onWeatherLiveSearched(LocalWeatherLiveResult localWeatherLiveResult, int i) {
            }

            @Override
            public void onWeatherForecastSearched(LocalWeatherForecastResult result, int code) {
                // TODO: forecast results are currently dropped; parse
                // LocalWeatherForecastResult and invoke the callback.
            }
        });
        search.setQuery(query);
        search.searchWeatherAsyn();
    }

    /**
     * Asynchronous live-weather query for a city.
     *
     * @param cityname city name to query
     * @param callback receives {data:LocalWeatherLive, status:0} on success,
     *                 or {"status": code} on SDK failure
     */
    @JSMethod
    public void GetLiveWeather(String cityname, @Nullable final JSCallback callback) {
        WeatherSearchQuery query = new WeatherSearchQuery(cityname, WeatherSearchQuery.WEATHER_TYPE_LIVE);
        final WeatherSearch search = new WeatherSearch(WXEnvironment.getApplication().getApplicationContext());
        search.setOnWeatherSearchListener(new WeatherSearch.OnWeatherSearchListener() {
            @Override
            public void onWeatherLiveSearched(LocalWeatherLiveResult result, int code) {
                if (code == AMapException.CODE_AMAP_SUCCESS) {
                    if (result != null && result.getLiveResult() != null ) {
                        LocalWeatherLive live = result.getLiveResult();
                        HashMap map = new HashMap(2);
                        map.put("data", live);
                        map.put("status", 0);
                        if (callback != null)
                            callback.invoke(map);
                    }
                } else {
                    if (callback != null) {
                        callback.invoke(JSON.parse("{\"status\": "+code+"}"));
                    }
                }
            }

            @Override
            public void onWeatherForecastSearched(LocalWeatherForecastResult result, int code) {
            }
        });
        search.setQuery(query);
        search.searchWeatherAsyn();
    }
}
|
import { withLoad } from '../basic/withLoad';
import { PayloadFunction } from '../types/common';
import {
ActionCreatorsBag,
AsyncActionCreatorWithPayload,
InjectAsyncPayload,
} from '../types/asyncCreator';
/**
 * Builds an injector that attaches payload creators to a bag of async step
 * action creators. The returned function takes a map of step -> payload
 * creator, validates that each step exists in the base bag and that each
 * payload is a function, then returns a new bag where those steps are
 * replaced by load-enabled creators carrying the payload.
 */
export const getAsyncPayloadInjector = <ActionType extends string, Steps extends string>(
  baseCreators: ActionCreatorsBag<ActionType, Steps>,
) => {
  const injectAsyncPayload: InjectAsyncPayload<ActionType, Steps> = (payloads) => {
    const knownSteps = Object.keys(baseCreators);

    // Reducer: fold one payloaded step into the accumulated creator bag.
    const attachPayload = (
      creators: ActionCreatorsBag<ActionType, Steps>,
      step: string,
    ) => {
      if (!knownSteps.includes(step)) throw new Error('Original steps should include step with payload');
      if (typeof payloads[step as keyof typeof payloads] !== 'function') {
        throw new Error('Payload creator must be a function');
      }
      const payloadFn = payloads[step as keyof typeof payloads] as PayloadFunction;
      const baseCreator = creators[step as keyof ActionCreatorsBag<ActionType, Steps>];
      return { ...creators, [step]: withLoad(baseCreator).load(payloadFn) };
    };

    return Object.keys(payloads).reduce(
      attachPayload,
      baseCreators,
    ) as AsyncActionCreatorWithPayload<ActionType, Steps, typeof payloads>;
  };
  return injectAsyncPayload;
};
|
import React, { Component } from "react";
import { select } from "d3-selection";
import { scaleLinear, scaleQuantize } from "d3-scale";
import { colorScale } from "./color";
import SourceView from "./SourceView";
import { fisheye } from "./util";
import { annotation, annotationCallout } from "d3-svg-annotation";
import { stripHashes, deferWork, removeMap } from "./util";
const width = 200;
const height = 500;
// Returns the last path segment of an id, e.g. "src/a/b.js" -> "b.js".
function getLastFile(id) {
  return id.split("/").pop();
}
// Vertical midpoint of a file's rect: halfway between the scaled top
// (totalCount) and scaled bottom (totalCount + adjustedCount).
function getRectMiddle(yScale, d) {
  const top = yScale(d.totalCount);
  const bottom = yScale(d.totalCount + d.adjustedCount);
  return top + (bottom - top) / 2;
}
// Module-level render state shared between drawFile() and highlightSelected():
// the computed per-file list and the d3-svg-annotation label group.
let files;
let sourceLabels;
// Builds callout annotation descriptors for the file map: only the first six
// files are labeled, plus the currently selected source (which also gets a
// "Selected Source" title and the "selected" class).
function createAnnotations(files, source) {
  const visible = files.filter((d, i) => i < 6 || d.name === source);
  return visible.map(d => {
    const isSelected = d.name === source;
    return {
      className: isSelected ? "selected" : "",
      note: {
        title: isSelected ? "Selected Source" : null,
        label: getLastFile(d.name)
      },
      data: d,
      type: annotationCallout,
      x: 100,
      dx: -5,
      disable: ["connector"]
    };
  });
}
// Renders the per-file duplication map into svg#fileMap: one rect per source
// file that appears in more than one bundle, stacked vertically and sized by
// inBundleCount * count, with a fisheye y-scale driven by the mouse, callout
// labels for the top files, and hover/click annotations.
function drawFile({ outputFile, updateSelectedSource, selectedSource }) {
  const svg = select("svg#fileMap");
  if (outputFile) {
    let totalCount = 0;
    // Flatten outputFile[1] into {name, adjustedCount, ...file} records, keep
    // only files duplicated across bundles, sort largest first, and assign
    // each record its running offset (totalCount) for stacking.
    files = Object.keys(outputFile[1])
      .map(d => {
        const file = outputFile[1][d];
        return {
          name: d,
          adjustedCount: file.inBundleCount * file.count,
          ...file
        };
      })
      .filter(d => d.inBundleCount > 1)
      .sort((a, b) => b.adjustedCount - a.adjustedCount)
      .map(d => {
        d.totalCount = totalCount;
        totalCount += d.adjustedCount;
        return d;
      });

    // Fisheye-wrapped linear scale: mouse position distorts the stack.
    const yScale = fisheye(scaleLinear())
      .domain([0, totalCount])
      .range([0, height]);

    // Static labels for the top files plus the selected source.
    sourceLabels = annotation()
      .annotations(createAnnotations(files, selectedSource))
      .accessors({ y: d => getRectMiddle(yScale, d) });

    svg.select("g.labels").call(sourceLabels);

    // Transient label shown while hovering an unlabeled rect.
    const hoverAnnotations = annotation().accessors({
      y: d => getRectMiddle(yScale, d)
    });

    svg.select("g.hoverAnnotations").call(hoverAnnotations);

    // Standard d3 enter/merge join for the file rects.
    const chunks = svg.select("g.chunks").selectAll("rect").data(files);

    chunks
      .enter()
      .append("rect")
      .attr("class", "chunk")
      .merge(chunks)
      .on("click", d => {
        // Clicking toggles selection; label rebuild is deferred off the
        // click handler via deferWork.
        updateSelectedSource(d.name);

        deferWork(() => {
          sourceLabels.annotations(
            createAnnotations(
              files,
              d.name === selectedSource ? undefined : d.name
            )
          );

          svg.select("g.hoverAnnotations").selectAll("g").remove();
        });
      })
      .on("mouseover", function(hover) {
        // Only show a hover label if the file isn't already labeled.
        const existingAnnotation = svg
          .selectAll(".annotation")
          .data()
          .some(d => d.data.name === hover.name);

        if (!existingAnnotation) {
          hoverAnnotations.annotations([
            {
              note: {
                label: getLastFile(hover.name)
              },
              data: hover,
              type: annotationCallout,
              x: 100,
              dx: -5,
              disable: ["connector"]
            }
          ]);
        }
      })
      .on("mouseout", function() {
        hoverAnnotations.annotations([]);
      })
      .attr("width", 100)
      .attr("fill", d => colorScale(d.inBundleCount))
      .attr("x", 100)
      .attr("y", d => yScale(d.totalCount))
      .attr(
        "height",
        d => yScale(d.totalCount + d.adjustedCount) - yScale(d.totalCount)
      );

    // Mouse move refocuses the fisheye distortion and re-lays-out rects,
    // separator lines and annotation positions.
    svg.node().addEventListener("mousemove", function(d) {
      const mouseY = d.layerY - svg.node().getBoundingClientRect().top;
      yScale.distortion(2.5).focus(mouseY);
      updateRects(svg, yScale);
      sourceLabels.updatedAccessors();
      hoverAnnotations.updatedAccessors();
    });

    // Leaving the svg resets the distortion focus to the top.
    svg.node().addEventListener("mouseout", function(d) {
      yScale.distortion(3).focus(0);
      updateRects(svg, yScale);
      svg.select("g.hoverAnnotations").selectAll("g").remove();
      sourceLabels.updatedAccessors();
    });

    chunks.exit().remove();

    // White separator lines between stacked rects.
    const lines = svg.select("g.chunks").selectAll("line").data(files);
    lines
      .enter()
      .append("line")
      .attr("stroke", "white")
      .merge(lines)
      .attr("x1", 0)
      .attr("x2", 200)
      .attr("y1", d => yScale(d.totalCount))
      .attr("y2", d => yScale(d.totalCount));
    lines.exit().remove();

    highlightSelected(selectedSource);
  }
}
// Re-applies vertical geometry to rects and separator lines after the
// fisheye scale's focus/distortion changed.
function updateRects(svg, yScale) {
  const topOf = d => yScale(d.totalCount);
  const bottomOf = d => yScale(d.totalCount + d.adjustedCount);

  svg
    .selectAll("rect.chunk")
    .attr("y", topOf)
    .attr("height", d => bottomOf(d) - topOf(d));

  svg
    .selectAll("line")
    .attr("y1", topOf)
    .attr("y2", topOf);
}
// Dims every rect except the selected source's and refreshes the callout
// labels; with no selection, all rects are restored.
function highlightSelected(selectedSource) {
  const svg = select("svg#fileMap");

  // Reset first, then re-dim the non-selected rects when a source is chosen.
  svg.selectAll("rect").classed("unselected", false);
  if (selectedSource) {
    svg
      .selectAll("rect")
      .filter(d => d.name !== selectedSource)
      .classed("unselected", true);
  }

  sourceLabels.annotations(createAnnotations(files, selectedSource));
  svg.select("g.hoverAnnotations").selectAll("g").remove();
}
class BottomPanel extends Component {
  // Initial D3 render once the svg element exists in the DOM.
  componentDidMount() {
    drawFile(this.props);
  }
  // Full redraw when the bundle selection changes; cheaper highlight-only
  // pass when just the selected source changes.
  componentDidUpdate(prevProps) {
    if (prevProps.selectedBundles !== this.props.selectedBundles) {
      drawFile(this.props);
    } else if (prevProps.selectedSource !== this.props.selectedSource) {
      highlightSelected(this.props.selectedSource);
    }
  }
buildRangeString(bundleGroup) {
const sortedLines = bundleGroup.lines.sort((a, b) => a - b);
const ranges = [];
const ret = [];
for (const line of sortedLines) {
if (ranges.length === 0) {
ranges.push({
start: line,
end: line
});
continue;
}
if (ranges[ranges.length - 1].end + 1 === line) {
ranges[ranges.length - 1].end++;
continue;
}
ranges.push({
start: line,
end: line
});
}
for (const range of ranges) {
if (range.start === range.end) {
ret.push(`${range.start}`);
} else {
ret.push(`${range.start}-${range.end}`);
}
}
return ret.join(",");
}
buildSingleBundleSummary(selectedSource, bundle, attachLineInfo = false) {
const bundles = bundle.bundles;
return (
<div>
<p>
Duplicated Lines for file <b>{selectedSource}</b> appears in{" "}
<b>{bundles.length}</b> bundles:{" "}
</p>
<ul>
{bundles.map(bundle =>
<li key={bundle}>
{removeMap(bundle)}
</li>
)}
</ul>
{attachLineInfo
? <p className="line-info">
<span className="line-info-title">On The following lines:</span>
<p className="raw-lines">
{this.buildRangeString(bundle)}
</p>
</p>
: null}
</div>
);
}
summarizeOverlapInfo(selectedSource, sourceOverlapInfo) {
if (sourceOverlapInfo === undefined) {
return null;
}
const sourceLinesWithOverlaps = {};
for (const bundleGroupKey of Object.keys(sourceOverlapInfo)) {
if (sourceOverlapInfo[bundleGroupKey].bundles.length > 1) {
sourceLinesWithOverlaps[bundleGroupKey] =
sourceOverlapInfo[bundleGroupKey];
}
}
// If we only have a single common bundle overlap list, no need to list line nums.
if (Object.keys(sourceLinesWithOverlaps).length === 1) {
return this.buildSingleBundleSummary(
selectedSource,
Object.values(sourceLinesWithOverlaps)[0]
);
}
let ret = [];
for (const bundleGroupKey of Object.keys(sourceLinesWithOverlaps)) {
const bundle = sourceLinesWithOverlaps[bundleGroupKey];
ret.push(this.buildSingleBundleSummary(selectedSource, bundle, true));
}
return ret;
}
render() {
const {
summarySentence,
selectedSource,
sourceFiles,
perFileStats,
selectedBundles
} = this.props;
let sourceFile, bundleInfo, sourceTitles, sourceDetails;
if (this.sourceContainer != null) {
this.sourceContainer.scrollTop = 0;
}
if (!selectedBundles) {
bundleInfo = (
<div>
<p>
<b>See details by:</b> Clicking on a file in the left nav, or a
bundle in the network graph
</p>
</div>
);
} else {
sourceTitles = (
<div className="col-xs-12 sourceTitles">
<div
style={{
width: 200,
borderRight: "1px solid #ccc"
}}
>
<p>Bundle Breakdown</p>
</div>
<div style={{ paddingLeft: 10 }}>
<p>Selected Source</p>
</div>
</div>
);
if (!selectedSource) {
sourceFile = (
<p style={{ marginLeft: 20 }}>
Click on a source file on the left to look at the shared lines of
code
</p>
);
} else if (selectedSource) {
sourceFile = (
<div
className="source-container"
ref={container => (this.sourceContainer = container)}
style={{
display: selectedSource === null ? "none" : "block"
}}
>
<p className="overlap-info">
{this.summarizeOverlapInfo(
selectedSource,
this.props.sourceFileLinesGroupedByCommonBundle[
this.props.selectedSource
]
)}
</p>
<SourceView
selectedSource={selectedSource}
perFileStats={perFileStats}
sourceFiles={sourceFiles}
/>
</div>
);
}
sourceDetails = (
<div className="col-xs-12">
<div className="source-details">
<svg id="fileMap" width={width} height={height}>
<g className="chunks" />
<g className="labels" />
<g className="hoverAnnotations" />
</svg>
{sourceFile}
</div>
</div>
);
}
return (
<div className="fullWidth">
<div className="col-xs-12 bottomSummary">
{summarySentence}
{bundleInfo}
</div>
{sourceTitles}
{sourceDetails}
</div>
);
}
}
export default BottomPanel;
|
#!/bin/bash
# Build/clippy-check the crate across a wide matrix of targets.
# Usage: script [+toolchain] [target]...
# With no target arguments, the default matrix below is used.
set -euo pipefail
IFS=$'\n\t'
# Run from the repository root (the script lives one level below it).
cd "$(dirname "$0")"/..
# A Ctrl-C during a long matrix run is a deliberate stop, not a failure.
trap -- 'exit 0' SIGINT
default_targets=(
    # no atomic load/store (16-bit)
    avr-unknown-gnu-atmega328
    msp430-none-elf
    # no atomic load/store (32-bit)
    riscv32i-unknown-none-elf
    riscv32im-unknown-none-elf
    riscv32imc-unknown-none-elf
    # no atomic CAS (32-bit)
    thumbv4t-none-eabi
    thumbv6m-none-eabi
    # no atomic CAS (64-bit)
    bpfeb-unknown-none
    bpfel-unknown-none
    # no-std 32-bit
    thumbv7m-none-eabi
    # no-std 64-bit
    x86_64-unknown-none
    # no-std 64-bit has Atomic{I,U}128
    aarch64-unknown-none
    # aarch64 no Atomic{I,U}128
    aarch64-pc-windows-msvc
    # aarch64 has Atomic{I,U}128
    aarch64-unknown-freebsd
    # aarch64 always support lse
    aarch64-apple-darwin
    # aarch64_be
    aarch64_be-unknown-linux-gnu
    # riscv32 with atomic
    riscv32imac-unknown-none-elf
    riscv32imc-esp-espidf
    # tier 1 targets
    aarch64-unknown-linux-gnu
    i686-pc-windows-gnu
    i686-pc-windows-msvc
    i686-unknown-linux-gnu
    x86_64-apple-darwin
    x86_64-pc-windows-gnu
    x86_64-pc-windows-msvc
    x86_64-unknown-linux-gnu
    # tier 2 targets we run tests on CI
    arm-unknown-linux-gnueabi
    armv5te-unknown-linux-gnueabi
    armv7-unknown-linux-gnueabi
    armv7-unknown-linux-gnueabihf
    i586-unknown-linux-gnu
    mips-unknown-linux-gnu
    mips64-unknown-linux-gnuabi64
    mips64el-unknown-linux-gnuabi64
    mipsel-unknown-linux-gnu
    powerpc-unknown-linux-gnu
    powerpc64-unknown-linux-gnu
    powerpc64le-unknown-linux-gnu
    riscv64gc-unknown-linux-gnu
    s390x-unknown-linux-gnu
    sparc64-unknown-linux-gnu
    thumbv7neon-unknown-linux-gnueabihf
)
# An optional leading "+toolchain" argument is peeled off and forwarded to
# every rustup/rustc/cargo invocation below.
pre_args=()
if [[ "${1:-}" == "+"* ]]; then
    pre_args+=("$1")
    shift
fi
# Any remaining arguments override the default target matrix.
if [[ $# -gt 0 ]]; then
    targets=("$@")
else
    targets=("${default_targets[@]}")
fi
rustup_target_list=$(rustup ${pre_args[@]+"${pre_args[@]}"} target list)
rustc_target_list=$(rustc ${pre_args[@]+"${pre_args[@]}"} --print target-list)
rustc_version=$(rustc ${pre_args[@]+"${pre_args[@]}"} -Vv | grep 'release: ' | sed 's/release: //')
subcmd=build
# On nightly/dev toolchains, switch to clippy and (when new enough) enable
# -Z check-cfg so unknown cfg names are flagged.
if [[ "${rustc_version}" == *"nightly"* ]] || [[ "${rustc_version}" == *"dev"* ]]; then
    rustup ${pre_args[@]+"${pre_args[@]}"} component add rust-src &>/dev/null
    case "${rustc_version}" in
        # -Z check-cfg-features requires 1.61.0-nightly
        1.[0-5]* | 1.60.*) ;;
        *)
            # Collect the cfgs the build script may emit so check-cfg
            # recognizes them as valid names.
            # shellcheck disable=SC2207
            build_script_cfg=($(grep -E 'cargo:rustc-cfg=' build.rs | sed -E 's/^.*cargo:rustc-cfg=//' | sed -E 's/".*$//' | LC_ALL=C sort | uniq))
            check_cfg="-Z unstable-options --check-cfg=names(docsrs,portable_atomic_unsafe_assume_single_core,$(IFS=',' && echo "${build_script_cfg[*]}"))"
            echo "base rustflags='${check_cfg}'"
            rustup ${pre_args[@]+"${pre_args[@]}"} component add clippy &>/dev/null
            subcmd=clippy
            ;;
    esac
fi
# Run a command, echoing it first (`set -x` inside a subshell so the trace
# flag does not leak into the caller's shell).
x() {
    (
        set -x
        "$@"
    )
}
# Build (or clippy-check) the crate for a single target, exercising the
# feature powerset plus target-specific RUSTFLAGS variants. Relies on the
# globals set in the preamble: rustc_target_list, rustup_target_list,
# rustc_version, pre_args, subcmd, check_cfg.
build() {
    local target="$1"
    shift
    args=()
    # Skip targets this compiler does not know about.
    if ! grep <<<"${rustc_target_list}" -Eq "^${target}$"; then
        echo "target '${target}' not available on ${rustc_version}"
        return 0
    fi
    # AVR is only buildable on a narrow band of compiler versions.
    if [[ "${target}" == "avr-"* ]]; then
        # https://github.com/rust-lang/compiler-builtins/issues/400
        case "${rustc_version}" in
            1.4* | 1.50.* | 1.51.*) ;;
            *) return 0 ;;
        esac
    fi
    args+=(${pre_args[@]+"${pre_args[@]}"} hack "${subcmd}")
    # Prefer a prebuilt std from rustup; otherwise fall back to -Z build-std,
    # which requires a nightly compiler.
    if grep <<<"${rustup_target_list}" -Eq "^${target}( |$)"; then
        x rustup ${pre_args[@]+"${pre_args[@]}"} target add "${target}" &>/dev/null
    elif [[ "${rustc_version}" == *"nightly"* ]] || [[ "${rustc_version}" == *"dev"* ]]; then
        case "${target}" in
            *-none* | avr-* | *-esp-espidf) args+=(-Z build-std="core,alloc") ;;
            *) args+=(-Z build-std) ;;
        esac
    else
        echo "target '${target}' requires nightly compiler"
        return 0
    fi
    if [[ -n "${check_cfg:-}" ]]; then
        args+=(-Z check-cfg-features)
    fi
    args+=(--target "${target}")
    # x cargo "${args[@]}" --manifest-path tests/no-std/Cargo.toml "$@"
    args+=(
        --workspace --ignore-private
        --no-dev-deps --feature-powerset --depth 3 --optional-deps
    )
    case "${target}" in
        x86_64* | aarch64*) ;;
        # outline-atomics feature only affects x86_64 and aarch64.
        *) args+=(--exclude-features "outline-atomics") ;;
    esac
    case "${target}" in
        *-none* | avr-* | riscv32imc-esp-espidf)
            args+=(--exclude-features "std")
            # If the target reports no atomics at all, additionally check the
            # single-core fallback cfg build in its own target dir.
            cfgs=$(RUSTC_BOOTSTRAP=1 rustc ${pre_args[@]+"${pre_args[@]}"} --print cfg --target "${target}")
            if ! grep <<<"${cfgs}" -q "target_has_atomic="; then
                case "${target}" in
                    bpf* | thumbv4t-*) ;; # TODO
                    *)
                        RUSTFLAGS="${RUSTFLAGS:-} ${check_cfg:-} --cfg portable_atomic_unsafe_assume_single_core" \
                            x cargo "${args[@]}" --target-dir target/assume-single-core "$@"
                        ;;
                esac
            fi
            ;;
    esac
    RUSTFLAGS="${RUSTFLAGS:-} ${check_cfg:-}" \
        x cargo "${args[@]}" "$@"
    # Extra passes with target features that unlock 128-bit atomics; separate
    # target dirs keep the feature-flag builds from clobbering each other.
    if [[ "${target}" == "x86_64"* ]]; then
        RUSTFLAGS="${RUSTFLAGS:-} ${check_cfg:-} -C target-feature=+cmpxchg16b" \
            x cargo "${args[@]}" --target-dir target/cmpxchg16b "$@"
    fi
    if [[ "${target}" == "aarch64"* ]]; then
        RUSTFLAGS="${RUSTFLAGS:-} ${check_cfg:-} -C target-feature=+lse" \
            x cargo "${args[@]}" --target-dir target/lse "$@"
    fi
}
# Build every requested target sequentially.
for target in "${targets[@]}"; do
    build "${target}"
done
|
// Appears to be a Doxygen-generated search index (do not edit by hand):
// each entry maps a lowercased search term to its display name and the
// documentation page anchor(s) where it is defined.
var searchData=
[
  ['hasname_23',['hasName',['../class_device_name_helper.html#a2df6f562cf9b26f4272ef1f638c16dd1',1,'DeviceNameHelper']]],
  ['hassubscribed_24',['hasSubscribed',['../class_device_name_helper.html#accba8f666f7fc9b5f40963a023da6737',1,'DeviceNameHelper']]]
];
|
<gh_stars>1-10
# -------------------------------------------------------------------------------
# Copyright IBM Corp. 2016
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------------
from pyspark.sql import SQLContext
from pyspark.sql import Row
from pyspark.mllib.regression import LabeledPoint
from pyspark.mllib.linalg import Vectors
from pyspark.mllib.evaluation import MulticlassMetrics
from IPython.display import display, HTML
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import sys
if sys.version >= '3':
from functools import reduce
#global variables
#credentials -- must be assigned by the caller before loadDataSet() is used
cloudantHost=None
cloudantUserName=None
cloudantPassword=None
# SQLContext injected by the hosting notebook; required by loadDataSet/loadLabeledDataRDD
sqlContext=None
weatherUrl=None
# Weather attributes used as features; each appears twice per sample
# (departure "_1" and arrival "_2" suffixes -- see buildLabeledPoint).
attributes=['dewPt','rh','vis','wc',
            #'wdir',
            'wspd','feels_like','uv_index']
# Human-readable names, kept index-aligned with `attributes`.
attributesMsg = ['Dew Point', 'Relative Humidity', 'Prevailing Hourly visibility', 'Wind Chill',
            #'Wind direction',
            'Wind Speed','Feels Like Temperature', 'Hourly Maximum UV Index']
#Function used to customize classification of data
customTrainingHandler=None
#Display Confusion Matrix as an HTML table when computing metrics
displayConfusionTable=False
def loadDataSet(dbName, sqlTable):
    """Load a Cloudant database into a cached Spark DataFrame and register
    it as a temporary SQL table.

    Fix: the password option previously contained a literal ``<PASSWORD>``
    placeholder (a syntax error); it now passes the ``cloudantPassword``
    global, matching the host/username options.

    :param dbName: name of the Cloudant database to read.
    :param sqlTable: name under which the temp SQL table is registered.
    :returns: the cached DataFrame.
    :raises Exception: if sqlContext or any Cloudant credential global is unset.
    """
    if sqlContext is None:
        raise Exception("sqlContext not set")
    if cloudantHost is None:
        raise Exception("cloudantHost not set")
    if cloudantUserName is None:
        raise Exception("cloudantUserName not set")
    if cloudantPassword is None:
        raise Exception("cloudantPassword not set")
    cloudantdata = sqlContext.read.format("com.cloudant.spark")\
        .option("cloudant.host", cloudantHost)\
        .option("cloudant.username", cloudantUserName)\
        .option("cloudant.password", cloudantPassword)\
        .option("schemaSampleSize", "-1")\
        .load(dbName)
    # Cache: the table is scanned repeatedly by the training queries.
    cloudantdata.cache()
    print("Successfully cached dataframe")
    cloudantdata.registerTempTable(sqlTable)
    print("Successfully registered SQL table " + sqlTable)
    return cloudantdata
def buildLabeledPoint(s, classification, handler):
    """Build a LabeledPoint from a row.

    Feature order: all `attributes` with the "_1" (departure) suffix, then
    all with the "_2" (arrival) suffix, then any handler-provided custom
    features.
    """
    feature_values = []
    for suffix in ('_1', '_2'):
        for attr in attributes:
            feature_values.append(getattr(s, attr + suffix))
    feature_values.extend(handler.customTrainingFeatures(s))
    return LabeledPoint(classification, Vectors.dense(feature_values))
# Default training handler: maps the numeric delay classification to a
# human-readable label and contributes no extra training features.
class defaultTrainingHandler:
    def getClassLabel(self, value):
        """Return the display label for a numeric class; values outside the
        five known classes are returned unchanged."""
        labels = {
            0: "Canceled",
            1: "On Time",
            2: "Delayed less than 2 hours",
            3: "Delayed between 2 and 4 hours",
            4: "Delayed more than 4 hours",
        }
        return labels.get(int(value), value)

    def numClasses(self):
        """Number of distinct classification labels."""
        return 5

    def computeClassification(self, s):
        """Classification comes straight from the row's own column."""
        return s.classification

    def customTrainingFeaturesNames(self):
        """No extra feature columns by default."""
        return []

    def customTrainingFeatures(self, s):
        """No extra feature values by default."""
        return []
def getTrainingHandler():
    """Return the active training handler, lazily creating the default one.

    Uses ``is None`` (identity) rather than ``== None`` — the idiomatic and
    safer comparison, since a custom handler could overload ``__eq__``.
    """
    global customTrainingHandler
    if customTrainingHandler is None:
        customTrainingHandler = defaultTrainingHandler()
    return customTrainingHandler
def getNumClasses():
    """Return the number of classes reported by the active training handler."""
    return getTrainingHandler().numClasses()
def loadLabeledDataRDD(sqlTable):
    """Build a cached RDD of LabeledPoints from the registered SQL table.

    Selects the departure-weather attributes (aliased *_1), the
    deltaDeparture/classification columns, the arrival-weather attributes
    (aliased *_2) and any custom feature columns declared by the handler.
    """
    select = 'select '
    comma=''
    # After the first departure attribute, `comma` stays "," for every
    # later column -- including the arrival and custom-feature loops below.
    for attr in attributes:
        select += comma + 'departureWeather.' + attr + ' as ' + attr + '_1'
        comma=','
    select += ',deltaDeparture'
    select += ',classification'
    for attr in attributes:
        select += comma + 'arrivalWeather.' + attr + ' as ' + attr + '_2'
    for attr in getTrainingHandler().customTrainingFeaturesNames():
        select += comma + attr
    select += ' from ' + sqlTable
    df = sqlContext.sql(select)
    handler=getTrainingHandler()
    # NOTE(review): df.map is the Spark 1.x DataFrame API; Spark 2+ would
    # need df.rdd.map -- confirm the target Spark version.
    datardd = df.map(lambda s: buildLabeledPoint(s, handler.computeClassification(s), handler))
    datardd.cache()
    return datardd
def runMetrics(labeledDataRDD, *args):
    """Evaluate one or more trained models against the labeled RDD and
    display an HTML table of accuracy / precision / recall per model.

    When the module-level ``displayConfusionTable`` flag is set, a confusion
    matrix table is appended for each model.
    """
    html='<table width=100%><tr><th>Model</th><th>Accuracy</th><th>Precision</th><th>Recall</th></tr>'
    confusionHtml = '<p>Confusion Tables for each Model</p>'
    for model in args:
        label= model.__class__.__name__
        predictionAndLabels = model.predict(labeledDataRDD.map(lambda lp: lp.features))
        # zip predictions back with the true labels to feed MulticlassMetrics.
        metrics = MulticlassMetrics(\
            predictionAndLabels.zip(labeledDataRDD.map(lambda lp: lp.label)).map(lambda t: (float(t[0]),float(t[1])))\
        )
        html+='<tr><td>{0}</td><td>{1:.2f}%</td><td>{2:.2f}%</td><td>{3:.2f}%</td></tr>'\
            .format(label,metrics.weightedFMeasure(beta=1.0)*100, metrics.weightedPrecision*100,metrics.weightedRecall*100 )
        if ( displayConfusionTable ):
            #get labels from RDD
            # NOTE(review): distinct() gives no ordering guarantee, so these
            # row/column headers may not align with the confusion matrix's
            # internal label order -- verify against metrics.call("labels").
            handler=getTrainingHandler()
            classLabels = labeledDataRDD.map(lambda t: t.label).distinct().map(lambda l: handler.getClassLabel(l)).collect()
            confusionMatrix = metrics.call("confusionMatrix")
            confusionMatrixArray = confusionMatrix.toArray()
            #labels = metrics.call("labels")
            confusionHtml += "<p>" + label + "<p>"
            confusionHtml += "<table>"
            confusionHtml+="<tr><td></td>"
            for classLabel in classLabels:
                confusionHtml+="<td>" + str(classLabel) + "</td>"
            confusionHtml+="</tr>"
            for i, row in enumerate(confusionMatrixArray):
                confusionHtml += "<tr>"
                confusionHtml += "<td>" + classLabels[i] + "</td>"
                for j, cell in enumerate(row):
                    # Bold the diagonal (correct predictions).
                    confusionHtml+="<td style='text-align:center'>" + ("<b>" if (i==j) else "") + str(cell) + ("</b>" if (i==j) else "") + "</td>"
                confusionHtml += "</tr>"
            confusionHtml += "</table>"
    html+='</table>'
    if ( displayConfusionTable ):
        html+=confusionHtml
    display(HTML(html))
def makeList(value):
    """Return `value` unchanged when it is already a list, else wrap it in one."""
    if isinstance(value, list):
        return value
    return [value]
def scatterPlotForFeatures(df, f1,f2,legend1,legend2):
    """Scatter-plot two (possibly nested, dot-separated) feature columns of
    `df`, one color per classification class.

    :param df: Spark DataFrame of samples.
    :param f1: name of the x-axis feature; "a.b" reaches into nested Rows.
    :param f2: name of the y-axis feature, same dotted syntax.
    :param legend1: x-axis label.
    :param legend2: y-axis label.
    """
    f1=f1.split(".")
    f2=f2.split(".")
    handler=getTrainingHandler()
    # Key each row by its class, pairing the two feature values; the dotted
    # path is resolved with reduce over getattr for nested Rows.
    darr=df.map(lambda s: ( handler.computeClassification(s),(\
            reduce(lambda x,y: getattr(x,y) if isinstance(x, Row) else getattr(getattr(s,x),y), f1) if len(f1)>1 else getattr(s,f1[0]),\
            reduce(lambda x,y: getattr(x,y) if isinstance(x, Row) else getattr(getattr(s,x),y), f2) if len(f2)>1 else getattr(s,f2[0])\
        )))\
        .reduceByKey(lambda x,y: makeList(x) + makeList(y))\
        .collect()
    numClasses=getTrainingHandler().numClasses()
    # One distinct rainbow color and legend entry per class.
    citer=iter(cm.rainbow(np.linspace(0, 1, numClasses)))
    colors = [next(citer) for i in range(0, numClasses)]
    legends= [getTrainingHandler().getClassLabel(i) for i in range(0,numClasses)]
    sets=[]
    for t in darr:
        sets.append((plt.scatter([x[0] for x in t[1]],[x[1] for x in t[1]],
                     color=colors[t[0]],alpha=0.5),legends[t[0]]))
    # Enlarge the default figure (3x wider, 2x taller) for readability.
    params = plt.gcf()
    plSize = params.get_size_inches()
    params.set_size_inches( (plSize[0]*3, plSize[1]*2) )
    plt.ylabel(legend2)
    plt.xlabel(legend1)
    plt.legend([x[0] for x in sets],
               [x[1] for x in sets],
               scatterpoints=1,
               loc='lower left',
               ncol=numClasses,
               fontsize=12)
    plt.show()
<gh_stars>1-10
/* eslint import/first: 0 */
// @flow
import './utils/dotenv';
import server from './server';
// Read the HTTP port from the environment (populated by ./utils/dotenv,
// imported above for its side effect) and boot the server.
// NOTE(review): PORT is undefined when the env var is missing — presumably
// server() applies a default; confirm.
const { PORT }: any = process.env;
server(PORT);
|
#!/bin/sh
# Local folder (under ./data) holding the experiment results.
data_folder="data_wsm"
# Fetch the data over ssh only if it has not been downloaded yet.
if [ ! -d ./data/${data_folder} ] ; then
scp -r xiaohan.zhang@xiaohanzha-wsm:~/Codes/sderl/data ./data/${data_folder}
fi
# Render plots from the (hard-coded) results directory.
python -m sderl.run plot /Users/xiaohan.zhang/Downloads/Graphics_Scratch/toplot
#/Users/xiaohan.zhang/Codes/sderl/data/${data_folder}/vpg
|
<reponame>Saitop/TWU_Biblioteca
package com.twu.biblioteca;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.junit.Assert.assertEquals;
/**
* Created by hxlin on 9/20/15.
*/
/**
 * Verifies that stdout/stderr can be captured during tests.
 *
 * <p>Fix: the previous {@code @After} method called {@code System.setOut(null)}
 * and {@code System.setErr(null)}, which makes any later call to
 * {@code System.out}/{@code System.err} throw a {@link NullPointerException}.
 * The original streams are now saved in {@code @Before} and restored in
 * {@code @After}.
 */
public class WelcomeInfoTests {
    private final ByteArrayOutputStream welcomeInfo = new ByteArrayOutputStream();
    private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();

    /** Real streams, saved so they can be restored after each test. */
    private PrintStream originalOut;
    private PrintStream originalErr;

    @Before
    public void setUpStreams() {
        originalOut = System.out;
        originalErr = System.err;
        System.setOut(new PrintStream(welcomeInfo));
        System.setErr(new PrintStream(errContent));
    }

    @After
    public void cleanUpStreams() {
        // Restore rather than null out — see class Javadoc.
        System.setOut(originalOut);
        System.setErr(originalErr);
    }

    @Test
    public void out() {
        System.out.print("Welcome to Bibiloteca");
        assertEquals("Welcome to Bibiloteca", welcomeInfo.toString());
    }

    @Test
    public void err() {
        System.err.print("something wrong, please reboot again");
        assertEquals("something wrong, please reboot again", errContent.toString());
    }
}
|
class OrderRecord {
    /** @var string Backing table name. */
    protected $table = 'ly_order_record';

    /** @var \PDO|null Database connection used by the query helpers. */
    protected $db;

    /**
     * @param \PDO|null $db Connection to use. Previously both methods read an
     *                      undefined local $db, which was a fatal error at
     *                      runtime; the connection is now injected here.
     */
    public function __construct($db = null) {
        $this->db = $db;
    }

    // Method to retrieve all records from the table
    public function getAllRecords() {
        $stmt = $this->db->query("SELECT * FROM {$this->table}");
        return $stmt->fetchAll(PDO::FETCH_ASSOC);
    }

    // Method to insert a new record into the table
    public function insertRecord($data) {
        // Bind values through a prepared statement instead of concatenating
        // them into the SQL (previous code was injectable). Column names
        // still come from array keys, so callers must not pass untrusted keys.
        $columns = implode(', ', array_keys($data));
        $placeholders = implode(', ', array_fill(0, count($data), '?'));
        $query = "INSERT INTO {$this->table} ($columns) VALUES ($placeholders)";
        $stmt = $this->db->prepare($query);
        $stmt->execute(array_values($data));
    }
}
//
// Copyright (c) 2015-2020 Microsoft Corporation and Contributors.
// SPDX-License-Identifier: Apache-2.0
//
#include "Logger.hpp"
#include "CommonFields.h"
#include "LogSessionData.hpp"
#include "NullObjects.hpp"
#include "utils/Utils.hpp"
#include <algorithm>
#include <array>
using namespace MAT;
namespace MAT_NS_BEGIN
{
    /// RAII guard that marks one public Logger call as "in flight".
    /// Construction snapshots the logger's active flag under
    /// m_shutdown_mutex and, if still active, increments m_active_count;
    /// destruction decrements it and notifies m_shutdown_condition when the
    /// count reaches zero, so a shutting-down owner can wait for all
    /// outstanding calls to drain.
    class ActiveLoggerCall
    {
       public:
        const Logger& m_logger;
        bool m_active;

        /// Copy: re-checks the logger's active flag and takes its own
        /// reference on the active count (released in its own destructor).
        ActiveLoggerCall(ActiveLoggerCall const& source) : m_logger(source.m_logger)
        {
            std::lock_guard<std::mutex> lock(m_logger.m_shutdown_mutex);
            m_active = m_logger.m_active;
            if (m_active)
            {
                m_logger.m_active_count += 1;
            }
        }

        /// Record current state on construction; increment active
        /// count if we are active.
        explicit ActiveLoggerCall(const Logger& parent) :
            m_logger(parent)
        {
            std::lock_guard<std::mutex> lock(m_logger.m_shutdown_mutex);
            m_active = m_logger.m_active;
            if (m_active)
            {
                m_logger.m_active_count += 1;
            }
        }

        /// If active, decrement active count, wake any listeners when
        /// active count reaches zero (usually there are no listeners).
        ~ActiveLoggerCall()
        {
            if (m_active)
            {
                std::lock_guard<std::mutex> lock(m_logger.m_shutdown_mutex);
                if (m_logger.m_active_count > 0)
                {
                    m_logger.m_active_count -= 1;
                    if (m_logger.m_active_count == 0)
                    {
                        // const_cast: the guard only holds a const Logger&,
                        // but notify_all is non-const on condition_variable.
                        const_cast<std::condition_variable*>(&m_logger.m_shutdown_condition)->notify_all();
                    }
                }
            }
        }

        /// True when the logger was already shut down at construction time;
        /// callers bail out early in that case.
        bool LoggerIsDead() const noexcept
        {
            return !m_active;
        }
    };
    // Shared null-object log manager instance.
    static NullLogManager nullManager;

    /// Constructs a logger bound to one tenant token and source, wired to
    /// the owning log manager's context, configuration and decorators.
    Logger::Logger(
        const std::string& tenantToken,
        const std::string& source,
        const std::string& scope,
        ILogManagerInternal& logManager,
        ContextFieldsProvider& parentContext,
        IRuntimeConfig& runtimeConfig) :
        m_tenantToken(tenantToken),
        m_source(source),
        // TODO: scope parameter can be used to rewire the logger to alternate context.
        // Scope must uniquely identify the "shared context" instance id.
        m_scope(scope),
        m_level(DIAG_LEVEL_DEFAULT),
        m_logManager(logManager),
        m_context(&parentContext),
        m_config(runtimeConfig),
        m_baseDecorator(logManager),
        m_eventPropertiesDecorator(logManager),
        m_semanticContextDecorator(logManager, m_context),
        m_semanticApiDecorators(logManager),
        m_sessionStartTime(0),
        m_allowDotsInType(false),
        m_resetSessionOnEnd(false)
    {
        std::string tenantId = tenantTokenToId(m_tenantToken);
        LOG_TRACE("%p: New instance (tenantId=%s)", this, tenantId.c_str());
        // iKey is the tenant id prefixed with "o:".
        m_iKey = "o:" + tenantId;
        m_allowDotsInType = m_config[CFG_MAP_COMPAT][CFG_BOOL_COMPAT_DOTS];
        m_resetSessionOnEnd = m_config[CFG_BOOL_SESSION_RESET_ENABLED];
        // Special scope "-" - means opt-out from parent context variables auto-capture.
        // It allows to detach the logger from its parent context.
        // This is the default mode for C API guests.
        if (m_scope == CONTEXT_SCOPE_NONE)
        {
            SetParentContext(nullptr);
        }
    }

    Logger::~Logger() noexcept
    {
        LOG_TRACE("%p: Destroyed", this);
    }

    /// Returns the logger's own context as a semantic context.
    /// NOTE(review): the C-style cast also strips const from &m_context;
    /// presumably ContextFieldsProvider implements ISemanticContext.
    ISemanticContext* Logger::GetSemanticContext() const
    {
        return (ISemanticContext*)(&m_context);
    }
    /******************************************************************************
     * Logger::SetContext
     *
     * Set app/session context fields.
     *
     * Could be used to overwrite the auto-populated(Part A) context fields
     * (ie. m_commonContextFields)
     *
     ******************************************************************************/
    void Logger::SetContext(const std::string& name, const EventProperty& prop)
    {
        // Bail out if the logger has already been shut down.
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }

        LOG_TRACE("%p: SetContext( properties.name=\"%s\", properties.value=\"%s\", PII=%u, ...)",
                  this, name.c_str(), prop.to_string().c_str(), prop.piiKind);

        // Reject invalid property names and surface the rejection to any
        // attached debug-event listeners.
        const EventRejectedReason isValidPropertyName = validatePropertyName(name);
        if (isValidPropertyName != REJECTED_REASON_OK)
        {
            LOG_ERROR("Context name is invalid: %s", name.c_str());
            DebugEvent evt;
            evt.type = DebugEventType::EVT_REJECTED;
            evt.param1 = isValidPropertyName;
            DispatchEvent(evt);
            return;
        }

        // Always overwrite the stored value.
        // Empty string is allowed to remove the previously set value.
        // If the value is empty, the context will not be added to event.
        m_context.SetCustomField(name, prop);
    }
void Logger::SetContext(const std::string& k, const char v[], PiiKind pii)
{
SetContext(k, EventProperty(v, pii));
};
void Logger::SetContext(const std::string& k, const std::string& v, PiiKind pii)
{
SetContext(k, EventProperty(v, pii));
};
void Logger::SetContext(const std::string& k, double v, PiiKind pii)
{
SetContext(k, EventProperty(v, pii));
};
void Logger::SetContext(const std::string& k, int64_t v, PiiKind pii)
{
SetContext(k, EventProperty(v, pii));
};
void Logger::SetContext(const std::string& k, time_ticks_t v, PiiKind pii)
{
SetContext(k, EventProperty(v, pii));
};
void Logger::SetContext(const std::string& k, GUID_t v, PiiKind pii)
{
SetContext(k, EventProperty(v, pii));
};
void Logger::SetContext(const std::string& k, bool v, PiiKind pii)
{
SetContext(k, EventProperty(v, pii));
};
    // The goal of this method is to rewire the logger instance to any other ISemanticContext issued by SDK.
    // SDK may provide a future option for a guest logger to opt-in into its own semantic context. The method will then
    // rewire from the default parent (Host LogManager context) to guest's sandbox context, i.e. enabling scenario where
    // several guests are attached to one host, but each guest has their own 'local' LogManager semantic context sandbox.
    // ...
    // LogManager<T>::SetContext(...); // issued by guests would also allow to set context variable on guest's sandbox.
    //
    // C API does not expose shared context to the callers. Default option for C API 'guest' customers is to detach them
    // from the parent logger via ILogger::SetParentContext(nullptr)
    //
    void Logger::SetParentContext(ISemanticContext* context)
    {
        // Bail out if the logger has already been shut down.
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }

        if (context == nullptr)
        {
            // Since common props would typically be populated by the root-level
            // LogManager instance and we are detaching from that one, we need
            // to populate this context with common props directly.
            PAL::registerSemanticContext(&m_context);
        }
        m_context.SetParentContext(static_cast<ContextFieldsProvider*>(context));
    }
    /// <summary>
    /// Logs the application lifecycle.
    /// </summary>
    /// <param name="state">The state.</param>
    /// <param name="properties">The properties.</param>
    void Logger::LogAppLifecycle(AppLifecycleState state, EventProperties const& properties)
    {
        // Bail out if the logger has already been shut down.
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }

        LOG_TRACE("%p: LogAppLifecycle(state=%u, properties.name=\"%s\", ...)",
                  this, state, properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());

        // Honor any installed event filter before doing real work.
        if (!CanEventPropertiesBeSent(properties))
        {
            DispatchEvent(DebugEventType::EVT_FILTERED);
            return;
        }

        EventLatency latency = EventLatency_Normal;
        ::CsProtocol::Record record;
        const bool decorated =
            applyCommonDecorators(record, properties, latency) &&
            m_semanticApiDecorators.decorateAppLifecycleMessage(record, state);
        if (!decorated)
        {
            LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
                      "AppLifecycle", tenantTokenToId(m_tenantToken).c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
            return;
        }
        submit(record, properties);
        DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_LIFECYCLE, size_t(latency), size_t(0), static_cast<void*>(&record), sizeof(record)));
    }

    /// <summary>
    /// Logs the custom event with the specified name.
    /// </summary>
    /// <param name="name">A string that contains the name of the custom event.</param>
    void Logger::LogEvent(std::string const& name)
    {
        // Shutdown check here avoids constructing EventProperties needlessly;
        // the LogEvent overload below performs its own check as well.
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }

        EventProperties event(name);
        LogEvent(event);
    }

    /// <summary>
    /// Logs the event.
    /// </summary>
    /// <param name="properties">The properties.</param>
    void Logger::LogEvent(EventProperties const& properties)
    {
        // Bail out if the logger has already been shut down.
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }

        // SendAsJSON(properties, m_tenantToken);

        LOG_TRACE("%p: LogEvent(properties.name=\"%s\", ...)",
                  this, properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());

        if (!CanEventPropertiesBeSent(properties))
        {
            DispatchEvent(DebugEventType::EVT_FILTERED);
            return;
        }

        // Caller-specified latency wins over the default when it is explicit.
        EventLatency latency = EventLatency_Normal;
        if (properties.GetLatency() > EventLatency_Unspecified)
        {
            latency = properties.GetLatency();
        }

        ::CsProtocol::Record record;
        if (!applyCommonDecorators(record, properties, latency))
        {
            LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
                      "custom",
                      tenantTokenToId(m_tenantToken).c_str(),
                      properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
            return;
        }
        submit(record, properties);
        DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_EVENT, size_t(latency), size_t(0), static_cast<void*>(&record), sizeof(record)));
    }
    /// <summary>
    /// Logs a failure event - such as an application exception.
    /// </summary>
    /// <param name="signature">A string that contains the signature that identifies the bucket of the failure.</param>
    /// <param name="detail">A string that contains a description of the failure.</param>
    /// <param name="category">A string that contains the category of the failure - such as an application error,
    /// application not responding, or application crash</param>
    /// <param name="id">A string that contains the identifier that uniquely identifies this failure.</param>
    /// <param name="properties">Properties of this failure event, specified using an EventProperties object.</param>
    void Logger::LogFailure(
        std::string const& signature,
        std::string const& detail,
        std::string const& category,
        std::string const& id,
        EventProperties const& properties)
    {
        // Bail out if the logger has already been shut down.
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }

        LOG_TRACE("%p: LogFailure(signature=\"%s\", properties.name=\"%s\", ...)",
                  this, signature.c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());

        if (!CanEventPropertiesBeSent(properties))
        {
            DispatchEvent(DebugEventType::EVT_FILTERED);
            return;
        }

        EventLatency latency = EventLatency_Normal;
        ::CsProtocol::Record record;
        const bool decorated =
            applyCommonDecorators(record, properties, latency) &&
            m_semanticApiDecorators.decorateFailureMessage(record, signature, detail, category, id);

        if (!decorated)
        {
            LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
                      "Failure",
                      tenantTokenToId(m_tenantToken).c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
            return;
        }
        submit(record, properties);
        DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_FAILURE, size_t(latency), size_t(0), static_cast<void*>(&record), sizeof(record)));
    }

    /// Convenience overload: logs a failure with empty category and id.
    void Logger::LogFailure(
        std::string const& signature,
        std::string const& detail,
        EventProperties const& properties)
    {
        // this inner call to LogFailure will instantiate ActiveCall
        LogFailure(signature, detail, "", "", properties);
    }
void Logger::LogPageView(
std::string const& id,
std::string const& pageName,
std::string const& category,
std::string const& uri,
std::string const& referrer,
EventProperties const& properties)
{
ActiveLoggerCall active(*this);
if (active.LoggerIsDead())
{
return;
}
LOG_TRACE("%p: LogPageView(id=\"%s\", properties.name=\"%s\", ...)",
this, id.c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
if (!CanEventPropertiesBeSent(properties))
{
DispatchEvent(DebugEventType::EVT_FILTERED);
return;
}
EventLatency latency = EventLatency_Normal;
::CsProtocol::Record record;
const bool decorated =
applyCommonDecorators(record, properties, latency) &&
m_semanticApiDecorators.decoratePageViewMessage(record, id, pageName, category, uri, referrer);
if (!decorated)
{
LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
"PageView", tenantTokenToId(m_tenantToken).c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
return;
}
submit(record, properties);
DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_PAGEVIEW, size_t(latency), size_t(0), (void*)(&record), sizeof(record)));
}
    /// Convenience overload: logs a page view without category/uri/referrer.
    void Logger::LogPageView(
        std::string const& id,
        std::string const& pageName,
        EventProperties const& properties)
    {
        LogPageView(id, pageName, "", "", "", properties);
    }

    /// Convenience overload: wraps the id/action pair in a PageActionData
    /// and defers to the full overload below.
    void Logger::LogPageAction(
        std::string const& pageViewId,
        ActionType actionType,
        EventProperties const& properties)
    {
        PageActionData pageActionData(pageViewId, actionType);
        LogPageAction(pageActionData, properties);
    }
void Logger::LogPageAction(
PageActionData const& pageActionData,
EventProperties const& properties)
{
ActiveLoggerCall active(*this);
if (active.LoggerIsDead())
{
return;
}
LOG_TRACE("%p: LogPageAction(pageActionData.actionType=%u, properties.name=\"%s\", ...)",
this, pageActionData.actionType, properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
if (!CanEventPropertiesBeSent(properties))
{
DispatchEvent(DebugEventType::EVT_FILTERED);
return;
}
EventLatency latency = EventLatency_Normal;
::CsProtocol::Record record;
const bool decorated =
applyCommonDecorators(record, properties, latency) &&
m_semanticApiDecorators.decoratePageActionMessage(record, pageActionData);
if (!decorated)
{
LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
"PageAction", tenantTokenToId(m_tenantToken).c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
return;
}
submit(record, properties);
DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_PAGEACTION, size_t(latency), size_t(0), (void*)(&record), sizeof(record)));
}
    /// <summary>
    /// Applies the common decorators.
    /// </summary>
    /// <param name="record">The record.</param>
    /// <param name="properties">The properties.</param>
    /// <param name="latency">The latency.</param>
    /// <returns>true when every decorator accepted the record; false means
    /// the event must not be sent.</returns>
    bool Logger::applyCommonDecorators(::CsProtocol::Record& record, EventProperties const& properties, EventLatency& latency)
    {
        // Bail out if the logger has already been shut down.
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return false;
        }

        record.name = properties.GetName();
        record.baseType = EVENTRECORD_TYPE_CUSTOM_EVENT;

        // Append the custom event type as a suffix; dots are replaced with
        // underscores unless the compat config allows them.
        std::string evtType = properties.GetType();
        if (!evtType.empty())
        {
            record.baseType.append(".");
            if (!m_allowDotsInType)
            {
                std::replace(evtType.begin(), evtType.end(), '.', '_');
            }
            record.baseType.append(evtType);
        }

        // Unnamed events get a placeholder name.
        if (record.name.empty())
        {
            record.name = "NotSpecified";
        }
        record.iKey = m_iKey;

        // All three decorators must succeed for the record to be usable.
        return m_baseDecorator.decorate(record) && m_semanticContextDecorator.decorate(record) && m_eventPropertiesDecorator.decorate(record, latency, properties);
    }
    /// <summary>
    /// Applies diagnostic-level filtering and latency checks, then hands the
    /// fully decorated record to the owning LogManager for transmission.
    /// </summary>
    /// <param name="record">The decorated record to send.</param>
    /// <param name="props">The event properties the record was built from.</param>
    void Logger::submit(::CsProtocol::Record& record, const EventProperties& props)
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }
        const auto policyBitFlags = props.GetPolicyBitFlags();
        const auto persistence = props.GetPersistence();
        const auto latency = props.GetLatency();
        auto levelFilter = m_logManager.GetLevelFilter();
        if (levelFilter.IsLevelFilterEnabled())
        {
            const auto& m_props = props.GetProperties();
            const auto it = m_props.find(COMMONFIELDS_EVENT_LEVEL);
            //
            // Level policy:
            // * get level from the COMMONFIELDS_EVENT_LEVEL property if set
            // * if not set, then get level from the ILogger instance
            // * if not set, then get level from the LogManager instance
            // * if still not set (no default assigned at LogManager scope),
            //   then prefer to drop. This is user error: user set the range
            //   restriction, but didn't specify the defaults.
            //
            uint8_t level = (it != m_props.cend()) ? static_cast<uint8_t>(it->second.as_int64) : m_level;
            if (level == DIAG_LEVEL_DEFAULT)
            {
                level = levelFilter.GetDefaultLevel();
                if (level == DIAG_LEVEL_DEFAULT)
                {
                    // If no default level, but restrictions are in effect, then prefer to drop event
                    LOG_INFO("Event %s/%s dropped: no diagnostic level assigned!",
                        tenantTokenToId(m_tenantToken).c_str(), record.baseType.c_str());
                    DispatchEvent(DebugEventType::EVT_FILTERED);
                    return;
                }
            }
            if (!levelFilter.IsLevelEnabled(level))
            {
                DispatchEvent(DebugEventType::EVT_FILTERED);
                return;
            }
        }
        // Latency "Off" means the event is disabled entirely.
        if (latency == EventLatency_Off)
        {
            DispatchEvent(DebugEventType::EVT_DROPPED);
            LOG_INFO("Event %s/%s dropped: calculated latency 0 (Off)",
                tenantTokenToId(m_tenantToken).c_str(), record.baseType.c_str());
            return;
        }
        // TODO: [MG] - check if optimization is possible in generateUuidString
        IncomingEventContext event(PAL::generateUuidString(), m_tenantToken, latency, persistence, &record);
        event.policyBitFlags = policyBitFlags;
        m_logManager.sendEvent(&event);
    }
    /// <summary>
    /// Callback invoked after submission; runs on a worker thread
    /// (not the caller's thread). Currently only logs for diagnostics.
    /// </summary>
    void Logger::onSubmitted()
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }
        LOG_INFO("This method is executed from worker thread");
    }
    /// <summary>
    /// Logs a sampled metric event (a named numeric measurement with units),
    /// optionally qualified by instance name, object class and object id.
    /// Filtered events dispatch EVT_FILTERED; decoration failure aborts the log.
    /// </summary>
    void Logger::LogSampledMetric(
        std::string const& name,
        double value,
        std::string const& units,
        std::string const& instanceName,
        std::string const& objectClass,
        std::string const& objectId,
        EventProperties const& properties)
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }
        LOG_TRACE("%p: LogSampledMetric(name=\"%s\", properties.name=\"%s\", ...)",
            this, name.c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
        // Honor per-logger and per-manager event filters.
        if (!CanEventPropertiesBeSent(properties))
        {
            DispatchEvent(DebugEventType::EVT_FILTERED);
            return;
        }
        EventLatency latency = EventLatency_Normal;
        ::CsProtocol::Record record;
        const bool decorated =
            applyCommonDecorators(record, properties, latency) &&
            m_semanticApiDecorators.decorateSampledMetricMessage(record, name, value, units, instanceName, objectClass, objectId);
        if (!decorated)
        {
            LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
                "SampledMetric", tenantTokenToId(m_tenantToken).c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
            return;
        }
        submit(record, properties);
        DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_SAMPLEMETR, size_t(latency), size_t(0), (void*)(&record), sizeof(record)));
    }
void Logger::LogSampledMetric(
std::string const& name,
double value,
std::string const& units,
EventProperties const& properties)
{
LogSampledMetric(name, value, units, "", "", "", properties);
}
void Logger::LogAggregatedMetric(
std::string const& name,
long duration,
long count,
EventProperties const& properties)
{
AggregatedMetricData metricData(name, duration, count);
LogAggregatedMetric(metricData, properties);
}
    /// <summary>
    /// Logs an aggregated metric event described by the given metric data.
    /// Filtered events dispatch EVT_FILTERED; decoration failure aborts the log.
    /// </summary>
    void Logger::LogAggregatedMetric(
        AggregatedMetricData const& metricData,
        EventProperties const& properties)
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return;
        }
        LOG_TRACE("%p: LogAggregatedMetric(name=\"%s\", properties.name=\"%s\", ...)",
            this, metricData.name.c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
        if (!CanEventPropertiesBeSent(properties))
        {
            DispatchEvent(DebugEventType::EVT_FILTERED);
            return;
        }
        EventLatency latency = EventLatency_Normal;
        ::CsProtocol::Record record;
        const bool decorated =
            applyCommonDecorators(record, properties, latency) &&
            m_semanticApiDecorators.decorateAggregatedMetricMessage(record, metricData);
        if (!decorated)
        {
            LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
                "AggregatedMetric", tenantTokenToId(m_tenantToken).c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
            return;
        }
        submit(record, properties);
        DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_AGGRMETR, size_t(latency), size_t(0), (void*)(&record), sizeof(record)));
    }
void Logger::LogTrace(
TraceLevel level,
std::string const& message,
EventProperties const& properties)
{
ActiveLoggerCall active(*this);
if (active.LoggerIsDead())
{
return;
}
LOG_TRACE("%p: LogTrace(level=%u, properties.name=\"%s\", ...)",
this, level, properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
if (!CanEventPropertiesBeSent(properties))
{
DispatchEvent(DebugEventType::EVT_FILTERED);
return;
}
EventLatency latency = EventLatency_Normal;
::CsProtocol::Record record;
bool decorated =
applyCommonDecorators(record, properties, latency) &&
m_semanticApiDecorators.decorateTraceMessage(record, level, message);
if (!decorated)
{
LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
"Trace", tenantTokenToId(m_tenantToken).c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
return;
}
submit(record, properties);
DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_TRACE, size_t(latency), size_t(0), (void*)(&record), sizeof(record)));
}
void Logger::LogUserState(
UserState state,
long timeToLiveInMillis,
EventProperties const& properties)
{
ActiveLoggerCall active(*this);
if (active.LoggerIsDead())
{
return;
}
LOG_TRACE("%p: LogUserState(state=%u, properties.name=\"%s\", ...)",
this, state, properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
if (!CanEventPropertiesBeSent(properties))
{
DispatchEvent(DebugEventType::EVT_FILTERED);
return;
}
EventLatency latency = EventLatency_Normal;
::CsProtocol::Record record;
bool decorated =
applyCommonDecorators(record, properties, latency) &&
m_semanticApiDecorators.decorateUserStateMessage(record, state, timeToLiveInMillis);
if (!decorated)
{
LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
"UserState", tenantTokenToId(m_tenantToken).c_str(), properties.GetName().empty() ? "<unnamed>" : properties.GetName().c_str());
return;
}
submit(record, properties);
DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_USERSTATE, size_t(latency), size_t(0), (void*)(&record), sizeof(record)));
}
/******************************************************************************
* Logger::LogSession
*
* Log a user's Session.
*
******************************************************************************/
void Logger::LogSession(SessionState state, const EventProperties& props)
{
ActiveLoggerCall active(*this);
if (active.LoggerIsDead())
{
return;
}
if (!CanEventPropertiesBeSent(props))
{
DispatchEvent(DebugEventType::EVT_FILTERED);
return;
}
auto logSessionData = m_logManager.GetLogSessionData();
std::string sessionSDKUid;
unsigned long long sessionFirstTime = 0;
if (logSessionData!=nullptr)
{
sessionSDKUid = logSessionData->getSessionSDKUid();
sessionFirstTime = logSessionData->getSessionFirstTime();
}
if (sessionSDKUid == "" || sessionFirstTime == 0)
{
LOG_WARN("We don't have a first time so no session logged");
return;
}
EventRejectedReason isValidEventName = validateEventName(props.GetName());
if (isValidEventName != REJECTED_REASON_OK)
{
LOG_ERROR("Invalid event properties!");
DebugEvent evt;
evt.type = DebugEventType::EVT_REJECTED;
evt.param1 = isValidEventName;
DispatchEvent(evt);
return;
}
int64_t sessionDuration = 0;
switch (state)
{
case SessionState::Session_Started:
{
if (m_sessionStartTime > 0)
{
LOG_ERROR("LogSession The order is not the correct one in calling LogSession");
return;
}
m_sessionStartTime = PAL::getUtcSystemTime();
m_sessionId = PAL::generateUuidString();
break;
}
case SessionState::Session_Ended:
{
if (m_sessionStartTime == 0)
{
LOG_WARN("LogSession We don't have session start time");
return;
}
sessionDuration = PAL::getUtcSystemTime() - m_sessionStartTime;
if (m_resetSessionOnEnd)
{
// reset the time of the session to 0 and get a new sessionId
m_sessionStartTime = 0;
if (logSessionData!=nullptr)
{
m_logManager.ResetLogSessionData();
LOG_TRACE("Resetting session data on session end");
}
}
break;
}
}
EventLatency latency = EventLatency_RealTime;
::CsProtocol::Record record;
bool decorated = applyCommonDecorators(record, props, latency) &&
m_semanticApiDecorators.decorateSessionMessage(record, state, m_sessionId, PAL::formatUtcTimestampMsAsISO8601(sessionFirstTime), sessionSDKUid, sessionDuration);
if (!decorated)
{
LOG_ERROR("Failed to log %s event %s/%s: invalid arguments provided",
"Trace", tenantTokenToId(m_tenantToken).c_str(), props.GetName().empty() ? "<unnamed>" : props.GetName().c_str());
return;
}
submit(record, props);
DispatchEvent(DebugEvent(DebugEventType::EVT_LOG_SESSION, size_t(latency), size_t(0), (void*)(&record), sizeof(record)));
}
    /// <summary>Gets the mutable event filter collection for this logger.</summary>
    IEventFilterCollection& Logger::GetEventFilters() noexcept
    {
        return m_filters;
    }
    /// <summary>Gets the read-only event filter collection for this logger.</summary>
    const IEventFilterCollection& Logger::GetEventFilters() const noexcept
    {
        return m_filters;
    }
    /// <summary>
    /// Gets the owning LogManager, or the null manager when the logger
    /// has already been shut down.
    /// </summary>
    ILogManager& Logger::GetParent()
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return nullManager;
        }
        return m_logManager;
    }
    /// <summary>
    /// Gets the session data from the owning LogManager; falls back to the
    /// null manager after shutdown.
    /// </summary>
    LogSessionData* Logger::GetLogSessionData()
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return nullManager.GetLogSessionData();
        }
        return m_logManager.GetLogSessionData();
    }
    /// <summary>
    /// Gets the auth tokens controller from the owning LogManager; falls
    /// back to the null manager after shutdown.
    /// </summary>
    IAuthTokensController* Logger::GetAuthTokensController()
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return nullManager.GetAuthTokensController();
        }
        return m_logManager.GetAuthTokensController();
    }
    /// <summary>
    /// Forwards a debug event to the owning LogManager's event dispatcher;
    /// routes to the null manager after shutdown.
    /// </summary>
    bool Logger::DispatchEvent(DebugEvent evt)
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return nullManager.DispatchEvent(std::move(evt));
        }
        return m_logManager.DispatchEvent(std::move(evt));
    }
    /// <summary>Gets the source name this logger was created with.</summary>
    std::string Logger::GetSource()
    {
        return m_source;
    }
    /// <summary>
    /// Sets this logger's default diagnostic level, used by submit() when
    /// an event carries no COMMONFIELDS_EVENT_LEVEL property.
    /// </summary>
    void Logger::SetLevel(uint8_t level)
    {
        m_level = level;
    }
    /// <summary>
    /// Returns true only when both the logger-level and the manager-level
    /// filter collections accept the event. Returns false after shutdown.
    /// </summary>
    bool Logger::CanEventPropertiesBeSent(EventProperties const& properties) const noexcept
    {
        ActiveLoggerCall active(*this);
        if (active.LoggerIsDead())
        {
            return false;
        }
        return m_filters.CanEventPropertiesBeSent(properties) && m_logManager.GetEventFilters().CanEventPropertiesBeSent(properties);
    }
    /// <summary>
    /// Marks the logger as shut down and blocks until all in-flight
    /// ActiveLoggerCall guards have completed. After this returns, every
    /// public method short-circuits via LoggerIsDead().
    /// </summary>
    void Logger::RecordShutdown()
    {
        std::unique_lock<std::mutex> shutdownLock(m_shutdown_mutex);
        // Flip the flag under the lock so no new call can start.
        m_active = false;
        if (m_active_count > 0)
        {
            // wait for idle before continuing
            // as with all condition variables, we hold the lock,
            // so no thread can decrement m_active_count until
            // wait() releases (and later reacquires) the lock.
            m_shutdown_condition.wait(shutdownLock, [this]() {
                return m_active_count == 0; // we hold the lock for this call
            });
        }
    }
}
MAT_NS_END
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from qframer.qt import QtCore
from . import logo_ico
# Toolbar dock areas: where a toolbar may attach to the main window
# Maps a human-friendly side name to the corresponding Qt toolbar area flag.
dockAreas = {
    'left': QtCore.Qt.LeftToolBarArea,
    'right': QtCore.Qt.RightToolBarArea,
    'top': QtCore.Qt.TopToolBarArea,
    'bottom': QtCore.Qt.BottomToolBarArea
}
# Declarative configuration for the main window: geometry, central pages,
# menus, toolbar buttons and status bar. Consumed by the qframer UI layer.
mainwindowsettings = {
    'title': u'',
    # NOTE(review): 'postion' (sic) is the key the consumers read —
    # do not rename it without updating every reader.
    'postion': (300, 300),
    'minsize': (800, 600),
    'size': (800, 600),
    'windowicon': logo_ico,
    'fullscreenflag': False,
    # Central widget: tabbed pages and their localized display names.
    'centralwindow': {
        'pagetags': [['Markdown', 'QChrome', 'About']],
        'pagetags_zh': {
            'Markdown': u'Markdown编辑(Edit)',
            'QChrome': u'Markdown预览(Preview)',
            'About': u'关于(About)',
        }
    },
    # Menu bar definition; hidden by default ('visual': False).
    'menusettings': {
        'visual': False,
        'menus': [
            {
                'name': u'File',
                'name_zh': u'',
                'actions': [
                    {
                        'name': u'Exit',
                        'name_zh': u'退出',
                        'icon': u'',
                        'shortcut': u'Ctrl+K',
                        'trigger': 'Exit',
                    },
                ]
            },
            {
                'name': u'Preference',
                'name_zh': u'',
                'actions': [
                    {
                        'name': u'Settings',
                        'name_zh': u'界面设定',
                        'icon': u'',
                        'shortcut': u'',
                        'trigger': 'Settings',
                    },
                    {
                        'name': u'Key Bindings',
                        'name_zh': u'快捷键',
                        'icon': u'',
                        'shortcut': u'',
                        'trigger': 'KeyBindings',
                    },
                ]
            },
            {
                'name': u'Help',
                'name_zh': u'',
                'actions': [
                    {
                        'name': u'Documentation',
                        'name_zh': u'文档',
                        'icon': u'',
                        'shortcut': u'',
                        'trigger': 'Documentation',
                    },
                    {
                        'name': u'Activity Logging',
                        'name_zh': u'活动日志',
                        'icon': u'',
                        'shortcut': u'',
                        'trigger': 'ActivityLogging',
                    },
                    {
                        'name': u'Enter License',
                        'name_zh': u'认证',
                        'icon': u'',
                        'shortcut': u'',
                        'trigger': 'EnterLicense',
                    },
                    {
                        'name': u'About',
                        'name_zh': u'关于',
                        'icon': u'',
                        'shortcut': u'',
                        'trigger': 'About',
                    },
                ]
            }
        ]
    },
    # Toolbar buttons; docked to the left edge, hidden by default.
    'toolbarsettings': {
        'visual': False,
        'dockArea': dockAreas['left'],
        'movable': False,
        'toolbars': [
            {
                'id': u'ImportButton',
                'icon': u'',
                'name': u'',
                'name_zh': u'',
                'shortcut': u'Ctrl+K',
                'trigger': u'',
                'tooltip': u'Import Markdown File'
            },
            {
                'id': u'ExportButton',
                'icon': u'',
                'name': u'',
                'name_zh': u'',
                'shortcut': u'Ctrl+M',
                'trigger': u'',
                'tooltip': u'Export Markdown File'
            },
            {
                'id': u'PreviewButton',
                'icon': u'',
                'name': u'',
                'name_zh': u'',
                'shortcut': u'Ctrl+P',
                'trigger': u'',
                'tooltip': u'Preview Markdown File in fullscreen'
            },
            {
                'id': u'ShareButton',
                'icon': u'',
                'name': u'',
                'name_zh': u'',
                'shortcut': u'Ctrl+L',
                'trigger': u'',
                'tooltip': u'Share Markdown File in Interent'
            },
            {
                'id': u'MinButton',
                'icon': u'',
                'name': u'',
                'name_zh': u'',
                'shortcut': u'',
                'trigger': u'',
                'tooltip': u'Min Window'
            },
            {
                'id': u'CloseButton',
                'icon': u'',
                'name': u'',
                'name_zh': u'',
                'shortcut': u'',
                'trigger': u'',
                'tooltip': u'Close Window'
            },
        ]
    },
    # Status bar shown at the bottom of the window.
    'statusbarsettings': {
        'initmessage': u'Ready',
        'minimumHeight': 30,
        'visual': True
    },
    'navigationvisual': True
}
|
from selenium.webdriver import Chrome
from user_registration_page import UserRegistrationPage

# Create a web driver instance
web_driver = Chrome()
try:
    # Instantiate the page object and clean the registration database
    registration_page = UserRegistrationPage()
    registration_page.method_registration_page_clean_database(web_driver)
finally:
    # Always release the browser, even when the cleanup call raises,
    # so no orphaned Chrome process is left behind.
    web_driver.quit()
# Upload the local ./data folder to the workspace's default datastore under the
# "diabetes" target path, overwriting files that already exist there.
# ($(azureml.workspaceName) / $(azureml.resourceGroup) are pipeline variables.)
az ml datastore upload -w $(azureml.workspaceName) -g $(azureml.resourceGroup) -n $(az ml datastore show-default -w $(azureml.workspaceName) -g $(azureml.resourceGroup) --query name -o tsv) -p data -u diabetes --overwrite true
|
const CustomError = require("../extensions/custom-error");
module.exports = function timeFornatter(sec) {
if(sec < 3600 && sec > 0) {
let min = Math.trunc(sec / 60);
sec = sec % 60;
min > 0 ? min = min + ' min' : min = '';
sec > 0 ? sec = sec + ' sec' : sec = '';
return `${min} ${sec}`.trim();
} else {
throw new Error('wrong value')
}
};
|
/*
* Copyright (c) 2017 Sprint
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <string>
#include <iostream>
#include <sstream>
#include "st_impl.h"
#include "tssf.h"
namespace st {
// Member functions that customize the individual application
// Constructs the Diameter application, keeping a reference to the owning
// TSSF and wiring up the command handlers this node listens for.
Application::Application( TSSF &tssf )
    : ApplicationBase()
    , m_cmd_tsr( *this )
    , m_cmd_setr( *this )
    , m_tssf( tssf )
{
    registerHandlers();
}
// Destructor: nothing to release explicitly; members clean up themselves.
Application::~Application()
{
}
// Registers the commands this application receives (TSR and SETR).
// The framework routes incoming requests to the matching handler.
void Application::registerHandlers()
{
    // Remove the comments for any Command this application should
    // listen for (receive).
    std::cout << "Registering st command handlers" << std::endl;
    std::cout << "Registering TSR command handler" << std::endl;
    registerHandler( m_cmd_tsr );
    std::cout << "Registering SETR command handler" << std::endl;
    registerHandler( m_cmd_setr );
}
// TSR Request (req) Command member functions
// A handler for Answers corresponding to this specific Request
// Intentionally empty: this node does not originate TSR requests, so no
// answer processing is required.
void TSRreq::processAnswer( FDMessageAnswer &ans )
{
}
// TSR Command (cmd) member function
// Function invoked when a TSR Command is received
// Handles an incoming TSR request: extracts the session id, then answers
// with Vendor-Specific-Application-Id, origin AVPs and DIAMETER_SUCCESS.
int TSRcmd::process( FDMessageRequest *req )
{
    req->dump();
    TsRequestExtractor tsr( *req, getDict() );
    // Remember the session id for this application instance.
    tsr.session_id.get( getApplication().getSessionId() );
    FDMessageAnswer tsa( req );
    FDAvp vsai( getDict().avpVendorSpecificApplicationId() );
    vsai.add( getDict().avpVendorId(), getDict().vnd3GPP().getId() );
    vsai.add( getDict().avpAuthApplicationId(), getDict().app().getId() );
    tsa.add( vsai );
    tsa.addOrigin();
    // 2001 == DIAMETER_SUCCESS
    tsa.add( getDict().avpResultCode(), 2001 );
    tsa.dump();
    tsa.send();
    return 0;
}
// SETR Request (req) Command member functions
// A handler for Answers corresponding to this specific Request
// Intentionally empty: this node does not originate SETR requests, so no
// answer processing is required.
void SETRreq::processAnswer( FDMessageAnswer &ans )
{
}
// SETR Command (cmd) member function
// Function invoked when a SETR Command is received
// Handles an incoming session-termination request: acknowledges with
// origin AVPs and DIAMETER_SUCCESS (2001).
int SETRcmd::process( FDMessageRequest *req )
{
    req->dump();
    SessionTerminationRequestExtractor str( *req, getDict() );
    FDMessageAnswer sta( req );
    sta.addOrigin();
    sta.add( getDict().avpResultCode(), 2001 );
    sta.dump();
    sta.send();
    return 0;
}
// TNR Request (req) Command member functions
// Sends a TNR Request to the corresponding Peer
// Builds and sends a TNR request to the given peer.
// Returns true when the request was handed to the framework; on send
// failure the request object is deleted here, otherwise ownership passes
// to the framework (deleted after the answer is processed).
bool Application::sendTNRreq(FDPeer &peer)
{
    //TODO - This code may be modified based on specific
    //       processing needs to send the TNR Command
    TNRreq *s = createTNRreq( peer );
    try
    {
        if ( s )
        {
            s->send();
        }
    }
    catch ( FDException &ex )
    {
        std::cout << SUtility::currentTime() << " - EXCEPTION - " << ex.what() << std::endl;
        delete s;
        s = NULL;
    }
    // DO NOT free the newly created TNRreq object!!
    // It will be deleted by the framework after the
    // answer is received and processed.
    return s != NULL;
}
// A factory for TNR requests
// Factory: allocates a TNR request bound to this application. Callers
// (see sendTNRreq) take over error handling and ownership transfer.
TNRreq *Application::createTNRreq(FDPeer &peer)
{
    // creates the TNRreq object
    TNRreq *s = new TNRreq( *this );
    //TODO - Code must be added to correctly
    //       populate the TNR request object
    // return the newly created request object
    return s;
}
// A handler for Answers corresponding to this specific Request
// Placeholder for handling answers to TNR requests sent by this node.
void TNRreq::processAnswer( FDMessageAnswer &ans )
{
    // TODO - This code must be implemented IF the application
    //        receives Answers for this command, i.e. it sends the
    //        TNR Command
}
// TNR Command (cmd) member function
// Function invoked when a TNR Command is received
// TNR is not handled by this node; returning a negative value tells the
// framework the command was not processed.
int TNRcmd::process( FDMessageRequest *req )
{
    return -1;
}
}
|
<gh_stars>1-10
package tbc.bookworm;
import android.os.AsyncTask;
import android.support.v7.app.AppCompatActivity;
import android.widget.TextView;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.lang.String;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import java.io.StringReader;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
 * Looks up a scanned ISBN barcode against the Goodreads search API,
 * extracts the author id from the XML response and forwards it to
 * {@link AuthorLookupActivity}.
 */
public class ISBNLookupActivity extends AppCompatActivity {

    private static final String TAG = "ISBNLookupActivity";

    private TextView statusMessage;
    private TextView title;
    private String barcode;
    private String searchQuery;
    private String API_KEY = BuildConfig.API_KEY;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.search);
        statusMessage = (TextView) findViewById(R.id.status_message);
        title = (TextView) findViewById(R.id.title);
        // The barcode is handed over by the scanning activity.
        Intent intent = getIntent();
        barcode = intent.getStringExtra("barcode");
        Log.d("error", barcode);
        new FindBookTitle().execute();
    }

    /** Background task: queries Goodreads and extracts the author id. */
    private class FindBookTitle extends AsyncTask<String, String, String> {

        protected String doInBackground(String... params) {
            String id = "";
            String query = "https://www.goodreads.com/search/index.xml?key=" + API_KEY + "&q=" + barcode;
            HttpURLConnection con = null;
            try {
                URL url = new URL(query);
                con = (HttpURLConnection) url.openConnection();
                con.setRequestMethod("GET");
                con.connect();
                // Read the whole XML body into a string.
                StringBuilder sb = new StringBuilder();
                BufferedReader br = new BufferedReader(new InputStreamReader(con.getInputStream()));
                try {
                    String read;
                    while ((read = br.readLine()) != null) {
                        sb.append(read);
                    }
                } finally {
                    // Close even when readLine throws (was leaked before).
                    br.close();
                }
                String xml = sb.toString();
                DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
                InputSource src = new InputSource();
                src.setCharacterStream(new StringReader(xml));
                Document doc = builder.parse(src);
                // The first <author> element contains the numeric author id;
                // strip everything that is not a digit.
                String authorDetails = doc.getElementsByTagName("author").item(0).getTextContent();
                id = authorDetails.replaceAll("[^0-9]", "");
            } catch (IOException | ParserConfigurationException | SAXException e) {
                // BUGFIX: the exception was silently swallowed before, making
                // lookup failures impossible to diagnose.
                Log.e(TAG, "ISBN lookup failed for barcode " + barcode, e);
            } finally {
                if (con != null) {
                    con.disconnect();
                }
            }
            return id;
        }

        protected void onPostExecute(String result) {
            // Forward the extracted author id to the author lookup screen.
            searchQuery = result;
            Intent intent = new Intent(ISBNLookupActivity.this, AuthorLookupActivity.class);
            intent.putExtra("id", searchQuery);
            startActivity(intent);
        }
    }
}
|
#!/bin/bash
# Launches 8-device distributed training: clears old NPU host logs, exports
# the rank-table environment, then starts one training process per device
# and waits for all of them to finish.

# Clear stale NPU host-side logs from previous runs.
rm -rf /var/log/npu/slog/host-0/*

currentDir=$(cd "$(dirname "$0")"; pwd)
#source ${currentDir}/env.sh

# user env
export JOB_ID=9999001
export RANK_TABLE_FILE=${currentDir}/8p.json
export RANK_SIZE=8
export RANK_ID=npu8p
export SLOG_PRINT_TO_STDOUT=0

# Physical device ids to train on (one background process each).
device_group="0 1 2 3 4 5 6 7"

for device_phy_id in ${device_group}
do
    echo "[`date +%Y%m%d-%H:%M:%S`] [INFO] start: train.sh ${device_phy_id} & " >> main.log
    ${currentDir}/train_wideanddeep_criteo_8p.sh ${device_phy_id} &
done

# Block until every per-device training process exits.
wait
echo "[`date +%Y%m%d-%H:%M:%S`] [INFO] all train.sh exit " >> main.log
|
package com.huatuo.base;
import org.json.JSONObject;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.support.v4.app.FragmentActivity;
import android.text.TextUtils;
import android.widget.Toast;
import com.huatuo.R;
import com.huatuo.custom_widget.CustomProgressDialog;
import com.huatuo.dictionary.MsgId;
import com.huatuo.util.CommonUtil;
import com.huatuo.util.DialogUtils;
import com.umeng.analytics.MobclickAgent;
public abstract class BaseFragmentActivity extends FragmentActivity {
protected Context mContext;
/** 是否正在加载 */
protected boolean isLoading;
protected CustomProgressDialog customDialog;
protected Handler mHandler;
protected JSONObject inJson;
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
mContext = this;
mHandler = new MyHandler();
inJson = new JSONObject();
super.onCreate(savedInstanceState);
initWindowPixel();
}
/*
* client 接入方 ANDROID sign 签名 deviceId 设备号 version 客户端版本
*/
private void initWindowPixel() {
//客户端版本
if(TextUtils.isEmpty(CommonUtil.SCREENPIXEL)){
CommonUtil.initScreen(this);
CommonUtil.SCREENPIXEL = CommonUtil.WIDTH_SCREEN+"x"+ CommonUtil.HEIGHT_SCREEN;
}
}
@Override
public void onResume() {
super.onResume();
MobclickAgent.onResume(this);
}
@Override
protected void onDestroy() {
// TODO Auto-generated method stub
super.onDestroy();
}
// 离开Fragment时一定要关闭对话框窗口
@Override
public void onPause() {
super.onPause();
MobclickAgent.onPause(this);
if (customDialog != null) {
try {
customDialog.cancel();
customDialog = null;
} catch (Exception e) {
}
}
}
// 显示自定义加载对话框
public CustomProgressDialog showCustomCircleProgressDialog(String title, String msg) {
if (customDialog != null) {
try {
customDialog.cancel();
customDialog = null;
} catch (Exception e) {
}
}
customDialog = CustomProgressDialog.createDialog(mContext);
// dialog.setIndeterminate(false);
// dialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
customDialog.setTitle(title);
customDialog.setMessage(msg);
try {
customDialog.show();
} catch (Exception e) {
}
return customDialog;
}
// 显示自定义加载对话框
public CustomProgressDialog showCustomCircleProgressDialog(String title, String msg, boolean isCancelable) {
if (customDialog != null) {
try {
customDialog.cancel();
customDialog = null;
} catch (Exception e) {
}
}
customDialog = CustomProgressDialog.createDialog(mContext);
// dialog.setIndeterminate(false);
// dialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
customDialog.setCancelable(isCancelable);// 是否可用用"返回键"取消
customDialog.setTitle(title);
customDialog.setMessage(msg);
try {
customDialog.show();
} catch (Exception e) {
}
return customDialog;
}
// 关闭自定义加载对话框
public void closeCustomCircleProgressDialog() {
if (customDialog != null) {
try {
customDialog.cancel();
customDialog = null;
} catch (Exception e) {
e.printStackTrace();
}
}
}
public void setCustomDialog(String msg, boolean cancelable) {
customDialog.setImageView();
customDialog.setMessage(msg);
customDialog.setCancelable(true);// 可以用“返回键”取消
}
public abstract void netCall(JSONObject inJson);
public abstract void netCallBack(Message msg);
class MyHandler extends Handler {
@Override
public void handleMessage(Message msg) {
isLoading = false;
switch (msg.what) {
case MsgId.DOWN_DATA_S:
closeCustomCircleProgressDialog();
netCallBack(msg);
break;
case MsgId.DOWN_DATA_F:
closeCustomCircleProgressDialog();
DialogUtils.showToastMsg(mContext, getString(R.string.common_toast_net_down_data_fail), Toast.LENGTH_SHORT);
break;
case MsgId.NET_NOT_CONNECT:
setCustomDialog(getString(R.string.common_toast_net_not_connect), true);
break;
default:
break;
}
}
}
} |
# Benchmarks each decoupler method on a series of input matrices, first via
# the Python runner, then via the R runner. For every (matrix, method) pair
# it prints: language, matrix index, method, elapsed seconds, max RSS.
# Once the cumulative time for a method exceeds max_time, the remaining
# matrices are reported as NA.
methods=(aucell wmean wsum ulm mlm viper gsva ora gsea udt mdt);
path_raw=data/raw
time=0
max_time=300
echo "lang" "mat" "method" "time" "memr";
# For Python   (BUGFIX: the '#' was missing, so this line ran as a command)
for method in ${methods[*]}; do
    # BUGFIX: was "$(seq45)" (no space), so the loop body never executed.
    # Using "seq 4" to match the R loop below — confirm the intended count.
    for i_mat in $(seq 4); do
        if [ "$time" -lt "$max_time" ]; then
            # Capture elapsed wall-clock time (line 5 of `time --verbose`)
            # and max resident set size (line 10).
            test="$(/usr/bin/time --verbose python R/process/run_single_method.py \
                $path_raw/${i_mat}_mat.csv \
                $path_raw/net.csv \
                $method 2>&1 | \
                awk '{if(NR==5){print $8};if(NR==10){print $6}}')";
            time="$(echo $test | awk '{print $1}')";
            memr="$(echo $test | awk '{print $2}')";
            # Normalize h:mm:ss or m:ss.ss to plain seconds.
            has_h="$(echo $time | grep '\(.*:\)\{2\}')"
            if [[ $has_h ]]; then
                hours="$(echo $time | awk -F: '{print $1 * 3600}')"
                minutes="$(echo $time | awk -F: '{print $2 * 60}')"
                seconds="$(echo $time | awk -F: '{print $3}')"
                time="$(echo $hours $minutes $seconds | awk '{print $1+$2+$3}')"
            else
                minutes="$(echo $time | awk -F: '{print $1 * 60}')"
                seconds="$(echo $time | awk -F: '{print int( $2 )}')"
                time="$(echo $minutes $seconds | awk '{print $1+$2}')"
            fi
            echo "Python" $i_mat $method $time $memr;
        else
            echo "Python" $i_mat $method "NA" "NA";
        fi
    done
    time=0;
done
# For R
for method in ${methods[*]}; do
    for i_mat in $(seq 4); do
        if [ "$time" -lt "$max_time" ]; then
            test="$(/usr/bin/time --verbose Rscript R/process/run_single_method.R \
                $path_raw/${i_mat}_mat.csv \
                $path_raw/net.csv \
                $method 2>&1 | \
                awk '{if(NR==5){print $8};if(NR==10){print $6}}')";
            time="$(echo $test | awk '{print $1}')";
            memr="$(echo $test | awk '{print $2}')";
            has_h="$(echo $time | grep '\(.*:\)\{2\}')"
            if [[ $has_h ]]; then
                hours="$(echo $time | awk -F: '{print $1 * 3600}')"
                minutes="$(echo $time | awk -F: '{print $2 * 60}')"
                seconds="$(echo $time | awk -F: '{print $3}')"
                time="$(echo $hours $minutes $seconds | awk '{print $1+$2+$3}')"
            else
                minutes="$(echo $time | awk -F: '{print $1 * 60}')"
                seconds="$(echo $time | awk -F: '{print int( $2 )}')"
                time="$(echo $minutes $seconds | awk '{print $1+$2}')"
            fi
            echo "R" $i_mat $method $time $memr;
        else
            echo "R" $i_mat $method "NA" "NA";
        fi
    done
    time=0;
done
|
package com.example.ivan.crystallball;
import java.util.Random;
/**
* Created by ivan on 01/08/15.
*/
/**
 * A Magic 8-Ball style answer generator: returns one of a fixed set of
 * fortune-teller responses chosen uniformly at random.
 */
public class CrystallBall {

    //Member variables

    /** Possible responses; kept public for existing callers. */
    public String[] mAnswers = {
            "It is certain",
            "It is decidedly so",
            "All signs say YES",
            "The stars are not aligned",
            "My reply is no",
            "It is doubtful",
            "Better not tell you now",
            "Concentrate and ask again",
            "Unable to answer now"};

    /** Single RNG reused across calls (was re-created on every call). */
    private final Random mRandomGenerator = new Random();

    //Methods

    /**
     * Returns a uniformly random answer from {@code mAnswers}.
     *
     * @return one of the predefined answer strings, never null
     */
    public String getAnAnswer() {
        int randomNumber = mRandomGenerator.nextInt(mAnswers.length);
        return mAnswers[randomNumber];
    }
}
|
#!/bin/bash
# Downloads the besttrace route-tracing tool (if not already present) and
# runs a single-probe traceroute to a fixed list of Chinese ISP endpoints,
# printing a labeled section per destination.
# apt -y install unzip

# install besttrace
if [ ! -f "besttrace" ]; then
    wget https://github.com/zq/shell/raw/master/besttrace
    # unzip besttrace4linux.zip
    chmod +x besttrace
fi

## start to use besttrace
# Prints a 70-character horizontal separator line.
next() {
    printf "%-70s\n" "-" | sed 's/\s/-/g'
}

clear
next
# Destination IPs and their human-readable carrier/region labels
# (arrays are index-aligned: ip_list[i] corresponds to ip_addr[i]).
ip_list=(14.215.116.1 101.95.120.109 117.28.254.129 113.207.25.138 119.6.6.6 183.192.160.3 183.221.253.100 202.112.14.151)
ip_addr=(广州电信 上海电信 厦门电信 重庆联通 成都联通 上海移动 成都移动 成都教育网)
# ip_len=${#ip_list[@]}

for i in {0..7}
do
    echo ${ip_addr[$i]}
    # -q 1: send a single probe per hop
    ./besttrace -q 1 ${ip_list[$i]}
    next
done
import os
from time import sleep
import logging
import json
import joblib
import pandas as pd
def init():
    """Load the serialized risk model once at container start-up.

    Invoked by the Azure ML inference server before any scoring request;
    stores the deserialized model in the module-level ``model`` global
    used by :func:`run`.
    """
    global model
    # construct the model path from the model directory mounted by Azure ML
    model_path = os.path.join(os.getenv("AZUREML_MODEL_DIR"), "outputs", "risk_model.joblib")
    # deserialize the model file back into a sklearn model
    model = joblib.load(model_path)
    logging.info("model loaded!")
    logging.info("Init complete")
def run(raw_data):
    """Score one scoring request.

    Parses the JSON payload's "data" field into a DataFrame and returns
    class probabilities from the model loaded by ``init``.

    :param raw_data: JSON string of the form ``{"data": [...]}``
    :return: nested list of per-class probabilities
    """
    logging.info("Request received")
    records = json.loads(raw_data)["data"]
    frame = pd.DataFrame(records)
    probabilities = model.predict_proba(frame)
    logging.info("Request processed")
    return probabilities.tolist()
|
<gh_stars>0
// lambdas need at least the [] square brackets; the parameter list and the
// trailing return type are each optional, but a trailing return type
// without a parameter list is ill-formed.
int main()
{
    auto f = [] () -> void { }; // OK: parameter list and return type present
    auto g = [] () { }; // OK: return type omitted
    auto h = [] { }; // OK: parameter list omitted as well
#ifdef SHOW_SYNTAX_ERRORS
    auto i = [] -> void { }; // syntax error, but it previously compiled
    auto j = () { }; // syntax error
    auto k = { }; // syntax error
#endif
}
|
<gh_stars>0
import { isReactComponent } from '../../Utilities/componentDetect';
// hàm tạo option screen
// Builds a navigator option entry for one route element.
// Accepts either a React component, or an object that already carries a
// { screen: Component } shape; anything else yields null.
const generateOption = (element) => {
    if (isReactComponent(element)) {
        return { screen: element };
    }
    const isScreenConfig =
        typeof element === "object"
        && element.screen
        && isReactComponent(element.screen);
    return isScreenConfig ? element : null;
};
/**
 * Builds a route-configuration map for a navigation group.
 * A bare React component maps to a single `{ [group]: { screen } }` entry;
 * an object of routes is expanded to `group/key` entries (duplicate slashes
 * collapsed), skipping values that are not valid screens.
 */
export default ( group, routes = {} ) => {
    if (isReactComponent(routes)) {
        // Useless `routeConfiguration = {...}` assignment removed; the
        // object is returned directly.
        return {
            [group]: {
                screen: routes
            }
        };
    }
    const routeConfiguration = {};
    for (const key of Object.keys(routes)) {
        const option = generateOption(routes[key]);
        if (option) {
            const routeName = `${group}/${key}`.replace(/[\/]+/g, "/");
            routeConfiguration[routeName] = option;
        }
    }
    return routeConfiguration;
};
<reponame>tdm1223/Algorithm<gh_stars>1-10
// 15829. Hashing
// 2020.05.05
// 해싱
#include<iostream>
#include<string>
using namespace std;
// Reads a length n and a lowercase string, then prints its polynomial
// hash: sum of (s[i]-'a'+1) * 31^i, everything modulo 1234567891.
int main()
{
    const long long MOD = 1234567891;
    int n;
    string s;
    cin >> n >> s;
    long long power = 1;   // 31^i mod MOD
    long long hashValue = 0;
    for (int i = 0; i < n; ++i)
    {
        long long letter = (long long)(s[i] - 'a') + 1; // 'a' -> 1, ...
        hashValue = (hashValue + letter * power) % MOD;
        power = (power * 31) % MOD;
    }
    cout << hashValue << endl;
    return 0;
}
|
<reponame>guigallo/mymoney-client
import { appBarSpacer, content } from './mixins/main';
// Style rules for the main layout: shared app-bar spacer and content
// mixins plus page-title and floating-action-button tweaks.
const styles = theme => ({
  ...appBarSpacer(theme),
  ...content(theme),
  // Header row: title on the left, actions pushed to the right.
  titleContainer: {
    display: 'flex',
    justifyContent: 'space-between',
    alignItems: 'center',
  },
  title: {
    alignSelf: 'flex-end',
  },
  // "Create" action button spacing.
  create: {
    margin: theme.spacing.unit,
  },
  // Gap between the FAB icon and its label.
  extendedIcon: {
    marginRight: theme.spacing.unit,
  },
})

export default styles;
#!/bin/sh
# Prepares the ITAG3.2 tomato genome annotation for downstream use:
# fixes the GFF gene models, copies the genome FASTA, and prefixes the
# CDS/protein FASTA ids with "mRNA:" so they match the GFF mRNA ids.

zcat="gunzip -c"

# Download FASTA
#wget ftp://ftp.solgenomics.net/tomato_genome/annotation/ITAG3.2_release/S_lycopersicum_chromosomes.3.00.fa.gz
#wget ftp://ftp.solgenomics.net/tomato_genome/annotation/ITAG3.2_release/ITAG3.2_cds.fasta
#wget ftp://ftp.solgenomics.net/tomato_genome/annotation/ITAG3.2_release/ITAG3.2_proteins.fasta

# Download GFF3
# wget ftp://ftp.solgenomics.net/tomato_genome/annotation/ITAG3.2_release/ITAG3.2_gene_models.gff

# Create genes.gff: normalize start/end coordinates and drop exon frames.
echo Fixing start-end problem
$zcat ORI/ITAG3.2_gene_models.gff.gz | ./fixStartEnd.pl | ./removeExonFrame.pl > genes.gff

# Append FASTA Sequences
echo Sequence FASTA
cp ORI/S_lycopersicum_chromosomes.3.00.fa.gz sequences.fa.gz
echo Create CDS file
$zcat ORI/ITAG3.2_cds.fasta.gz | sed "s/^>/>mRNA:/" > cds.fa
echo Create Proteins file
$zcat ORI/ITAG3.2_proteins.fasta.gz | sed "s/^>/>mRNA:/" > protein.fa
# Compress the generated outputs in place.
gzip cds.fa protein.fa genes.gff
|
<gh_stars>0
package addon
import (
"context"
"fmt"
"sync"
"time"
"github.com/go-logr/logr"
operatorsv1 "github.com/operator-framework/api/pkg/operators/v1"
operatorsv1alpha1 "github.com/operator-framework/api/pkg/operators/v1alpha1"
monitoringv1 "github.com/prometheus-operator/prometheus-operator/pkg/apis/monitoring/v1"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/runtime"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/controller/controllerutil"
"sigs.k8s.io/controller-runtime/pkg/event"
"sigs.k8s.io/controller-runtime/pkg/handler"
"sigs.k8s.io/controller-runtime/pkg/source"
addonsv1alpha1 "github.com/openshift/addon-operator/apis/addons/v1alpha1"
internalhandler "github.com/openshift/addon-operator/internal/controllers/addon/handler"
"github.com/openshift/addon-operator/internal/ocm"
)
// Default timeout when we do a manual RequeueAfter
const (
	// defaultRetryAfterTime is the delay used for manual ctrl.Result{RequeueAfter: ...}.
	defaultRetryAfterTime = 10 * time.Second
	// cacheFinalizer blocks Addon deletion until locally cached CSV state is freed.
	cacheFinalizer = "addons.managed.openshift.io/cache"
)

// AddonReconciler reconciles Addon custom resources through their
// installation phases and reports status to OCM.
type AddonReconciler struct {
	client.Client
	Log    logr.Logger
	Scheme *runtime.Scheme

	// csvEventHandler maps ClusterServiceVersion events back to owning Addons.
	csvEventHandler csvEventHandler

	// globalPause suspends reconciliation of all Addons; guarded by globalPauseMux.
	globalPause    bool
	globalPauseMux sync.RWMutex

	// addonRequeueCh feeds generic events into the controller to force requeues
	// (used when pause state or the OCM client changes).
	addonRequeueCh chan event.GenericEvent

	// ocmClient reports upgrade-policy status to OCM; guarded by ocmClientMux.
	ocmClient    ocmClient
	ocmClientMux sync.RWMutex
}

// ocmClient is the narrow subset of the OCM API this reconciler needs.
type ocmClient interface {
	PatchUpgradePolicy(
		ctx context.Context,
		req ocm.UpgradePolicyPatchRequest,
	) (res ocm.UpgradePolicyPatchResponse, err error)
}
// InjectOCMClient installs the OCM client on the reconciler.
// On the first injection all cached Addons are requeued so that
// upgrade-policy status reporting can run for them. Concurrency safe.
func (r *AddonReconciler) InjectOCMClient(ctx context.Context, c *ocm.Client) error {
	r.ocmClientMux.Lock()
	defer r.ocmClientMux.Unlock()

	if r.ocmClient == nil {
		r.Log.Info("ocm client initialized for the first time")

		// Requeue all addons for the first time that the ocm client becomes available.
		if err := r.requeueAllAddons(ctx); err != nil {
			return fmt.Errorf("requeue all Addons: %w", err)
		}
	}

	r.ocmClient = c
	return nil
}
// EnableGlobalPause pauses reconciliation of all Addon objects. Concurrency safe.
func (r *AddonReconciler) EnableGlobalPause(ctx context.Context) error {
	return r.setGlobalPause(ctx, true)
}

// DisableGlobalPause resumes reconciliation of all Addon objects. Concurrency safe.
func (r *AddonReconciler) DisableGlobalPause(ctx context.Context) error {
	return r.setGlobalPause(ctx, false)
}

// setGlobalPause flips the global pause flag and requeues every cached Addon
// so the new pause state is reflected in each Addon's status.
func (r *AddonReconciler) setGlobalPause(ctx context.Context, paused bool) error {
	r.globalPauseMux.Lock()
	defer r.globalPauseMux.Unlock()

	r.globalPause = paused
	if err := r.requeueAllAddons(ctx); err != nil {
		return fmt.Errorf("requeue all Addons: %w", err)
	}
	return nil
}
// requeueAllAddons requeues all addons that are currently in the local cache
// by emitting one GenericEvent per Addon onto addonRequeueCh.
// NOTE(review): the channel send blocks until the watch source registered in
// SetupWithManager is consuming — confirm callers only run after manager start.
func (r *AddonReconciler) requeueAllAddons(ctx context.Context) error {
	addonList := &addonsv1alpha1.AddonList{}
	if err := r.List(ctx, addonList); err != nil {
		return fmt.Errorf("listing Addons, %w", err)
	}
	for i := range addonList.Items {
		r.addonRequeueCh <- event.GenericEvent{Object: &addonList.Items[i]}
	}
	return nil
}
// csvEventHandler routes CSV events to Addons and tracks which CSV keys each
// Addon currently owns (Free releases them, ReplaceMap swaps the set).
type csvEventHandler interface {
	handler.EventHandler
	Free(addon *addonsv1alpha1.Addon)
	ReplaceMap(addon *addonsv1alpha1.Addon, csvKeys ...client.ObjectKey) (changed bool)
}
// SetupWithManager wires the reconciler into the controller manager:
// it owns all objects created per Addon and watches CSVs (via the custom
// event handler) plus an internal channel used to force requeues.
func (r *AddonReconciler) SetupWithManager(mgr ctrl.Manager) error {
	r.csvEventHandler = internalhandler.NewCSVEventHandler()
	r.addonRequeueCh = make(chan event.GenericEvent)

	return ctrl.NewControllerManagedBy(mgr).
		For(&addonsv1alpha1.Addon{}).
		Owns(&corev1.Namespace{}).
		Owns(&operatorsv1.OperatorGroup{}).
		Owns(&operatorsv1alpha1.CatalogSource{}).
		Owns(&operatorsv1alpha1.Subscription{}).
		Owns(&addonsv1alpha1.AddonInstance{}).
		Owns(&monitoringv1.ServiceMonitor{}).
		// CSVs are not owned; a custom handler maps them back to Addons.
		Watches(&source.Kind{
			Type: &operatorsv1alpha1.ClusterServiceVersion{},
		}, r.csvEventHandler).
		Watches(&source.Channel{ // Requeue everything when entering/leaving global pause.
			Source: r.addonRequeueCh,
		}, &handler.EnqueueRequestForObject{}).
		Complete(r)
}
// Reconcile is the AddonReconciler/Controller entrypoint. It drives a single
// Addon through its installation phases in order; each phase either errors,
// requests a requeue (handleExit), or falls through to the next phase.
// Status is reported exactly once, in the deferred block below.
func (r *AddonReconciler) Reconcile(
	ctx context.Context, req ctrl.Request) (res ctrl.Result, err error) {
	log := r.Log.WithValues("addon", req.NamespacedName.String())

	addon := &addonsv1alpha1.Addon{}
	if err := r.Get(ctx, req.NamespacedName, addon); err != nil {
		// Addon is gone; nothing to reconcile.
		return ctrl.Result{}, client.IgnoreNotFound(err)
	}

	defer func() {
		// Ensure we report to the UpgradePolicy endpoint, when we are done with whatever we are doing.
		if err != nil {
			return
		}
		err = r.handleUpgradePolicyStatusReporting(ctx, log, addon)

		// Finally, update the Status back to the kube-api
		// This is the only place where Status is being reported.
		if err != nil {
			return
		}
		err = r.Status().Update(ctx, addon)
	}()

	// check for global pause
	r.globalPauseMux.RLock()
	defer r.globalPauseMux.RUnlock()
	if r.globalPause {
		reportAddonPauseStatus(addon, addonsv1alpha1.AddonOperatorReasonPaused)
		// TODO: figure out how we can continue to report status
		return ctrl.Result{}, nil
	}

	// check for Addon pause
	if addon.Spec.Paused {
		reportAddonPauseStatus(addon, addonsv1alpha1.AddonReasonPaused)
		return ctrl.Result{}, nil
	}

	// Make sure Pause condition is removed
	r.removeAddonPauseCondition(addon)

	// Deletion in progress: clean up cached state and strip the finalizer.
	if !addon.DeletionTimestamp.IsZero() {
		return ctrl.Result{}, r.handleAddonDeletion(ctx, addon)
	}

	// Phase 0.
	// Ensure cache finalizer
	if !controllerutil.ContainsFinalizer(addon, cacheFinalizer) {
		controllerutil.AddFinalizer(addon, cacheFinalizer)
		if err := r.Update(ctx, addon); err != nil {
			return ctrl.Result{}, fmt.Errorf("failed to add finalizer: %w", err)
		}
	}

	// Phase 1.
	// Ensure wanted namespaces
	if requeueResult, err := r.ensureWantedNamespaces(ctx, addon); err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to ensure wanted Namespaces: %w", err)
	} else if requeueResult != resultNil {
		return r.handleExit(requeueResult), nil
	}

	// Phase 2.
	// Ensure unwanted namespaces are removed
	if err := r.ensureDeletionOfUnwantedNamespaces(ctx, addon); err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to ensure deletion of unwanted Namespaces: %w", err)
	}

	// Phase 3.
	// Ensure the creation of the corresponding AddonInstance in .spec.install.olmOwnNamespace/.spec.install.olmAllNamespaces namespace
	if err := r.ensureAddonInstance(ctx, log, addon); err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to ensure the creation of addoninstance: %w", err)
	}

	// Phase 4.
	// Ensure OperatorGroup
	if requeueResult, err := r.ensureOperatorGroup(ctx, log, addon); err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to ensure OperatorGroup: %w", err)
	} else if requeueResult != resultNil {
		return r.handleExit(requeueResult), nil
	}

	// Phase 5.
	// Ensure CatalogSource; the result is needed by the Subscription phase.
	var (
		catalogSource *operatorsv1alpha1.CatalogSource
		requeueResult requeueResult
	)
	if requeueResult, catalogSource, err = r.ensureCatalogSource(ctx, log, addon); err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to ensure CatalogSource: %w", err)
	} else if requeueResult != resultNil {
		return r.handleExit(requeueResult), nil
	}

	// Phase 6.
	// Ensure Subscription for this Addon.
	requeueResult, currentCSVKey, err := r.ensureSubscription(
		ctx, log.WithName("phase-ensure-subscription"),
		addon, catalogSource)
	if err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to ensure Subscription: %w", err)
	} else if requeueResult != resultNil {
		return r.handleExit(requeueResult), nil
	}

	// Phase 7.
	// Observe current csv
	if requeueResult, err := r.observeCurrentCSV(ctx, addon, currentCSVKey); err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to observe current CSV: %w", err)
	} else if requeueResult != resultNil {
		return r.handleExit(requeueResult), nil
	}

	// Phase 7.
	// Possibly ensure monitoring federation
	// Normally this would be configured before the addon workload is installed
	// but currently the addon workload creates the monitoring stack by itself
	// thus we want to create the service monitor as late as possible to ensure that
	// cluster-monitoring prom does not try to scrape a non-existent addon prometheus.
	if stop, err := r.ensureMonitoringFederation(ctx, addon); err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to ensure ServiceMonitor: %w", err)
	} else if stop {
		log.Info("stopping", "reason", "monitoring federation namespace or servicemonitor owned by something else")
		return ctrl.Result{}, nil
	}

	// Phase 8
	// Remove possibly unwanted monitoring federation
	if err := r.ensureDeletionOfUnwantedMonitoringFederation(ctx, addon); err != nil {
		return ctrl.Result{}, fmt.Errorf("failed to ensure deletion of unwanted ServiceMonitors: %w", err)
	}

	// After last phase and if everything is healthy
	reportReadinessStatus(addon)
	return ctrl.Result{}, nil
}
|
package project

// sbt build helper: centralises library dependency declarations.
// NOTE(review): Setting, ModuleID, %% and libraryDependencies are presumably
// provided by sbt's auto-imports for project/*.scala build sources — confirm.
object Dependencies {

  // Single place for dependency version numbers.
  object Version {
    val scalaTest = "3.1.0"
  }

  // Settings contributed to the root project.
  val root: Seq[Setting[_]] = deps(
    "org.scalatest" %% "scalatest" % Version.scalaTest,
  )

  // Wraps plain module IDs into a libraryDependencies += setting.
  private def deps(modules: ModuleID*): Seq[Setting[_]] = Seq(libraryDependencies ++= modules)
}
|
#!/bin/bash

# Copyright 2013 (Authors: Bagher BabaAli, Daniel Povey, Arnab Ghoshal)
#           2014 Brno University of Technology (Author: Karel Vesely)
#           2015 Pei-wen Huang -- Verna
# Apache 2.0.

# CUSENT data preparation for Kaldi: builds file lists, wav.scp, utt2spk,
# spk2utt, spk2gender, text and STM/GLM scoring files for train/dev/test.
# Argument: absolute path to the CUSENT corpus directory.

if [ $# -ne 1 ]; then
   echo "Argument should be the CUSENT directory, see ../run.sh for example."
   exit 1;
fi

tmpdir=`pwd`/data/local/tmpdir
dir=`pwd`/data/local/data
lmdir=`pwd`/data/local/nist_lm
mkdir -p $dir $lmdir $tmpdir
local=`pwd`/local
utils=`pwd`/utils
conf=`pwd`/conf

. ./path.sh # Needed for KALDI_ROOT
export PATH=$PATH:$KALDI_ROOT/tools/irstlm/bin
sph2pipe=$KALDI_ROOT/tools/sph2pipe_v2.5/sph2pipe
if [ ! -x $sph2pipe ]; then
   echo "Could not find (or execute) the sph2pipe program at $sph2pipe";
   exit 1;
fi

# Speaker lists for the dev and test sets must exist in conf/.
# Fix: check the files that are actually read below (dev_spk.list and
# test_spk.list) — the script previously tested spk_eval.list/spk_dev.list,
# which are never used, and called an undefined error_exit helper.
[ -f $conf/dev_spk.list ] || { echo "$0: dev-set speaker list not found."; exit 1; }
[ -f $conf/test_spk.list ] || { echo "$0: test-set speaker list not found."; exit 1; }

# First check if the train & test directories exist (these can either be upper-
# or lower-cased
if [ ! -d $*/TRAIN -o ! -d $*/TEST ] && [ ! -d $*/train -o ! -d $*/test ]; then
   echo "cusent_data_prep.sh: Spot check of command line argument failed"
   echo "Command line argument must be absolute pathname to CUSENT directory"
   echo "with name like /home/verna/kaldi-trunk/egs/cusent/CUSENT"
   exit 1;
fi

# Now check what case the directory structure is
uppercased=false
train_dir=train
test_dir=test
if [ -d $*/TRAIN ]; then
  uppercased=true
  train_dir=TRAIN
  test_dir=TEST
fi

#tmpdir=$(mktemp -d);
#trap 'rm -rf "$tmpdir"' EXIT

# Get the list of speakers. The list of speakers in the 8-speaker core test
# set and the 4-speaker development set must be supplied to the script. All
# speakers in the 'train' directory are used for training.
if $uppercased; then
  tr '[:lower:]' '[:upper:]' < $conf/dev_spk.list >$tmpdir/dev_spk
  tr '[:lower:]' '[:upper:]' < $conf/test_spk.list >$tmpdir/test_spk
  ls -d "$*"/TRAIN/* | sed -e "s:^.*/::" > $tmpdir/train_spk
else
  tr '[:upper:]' '[:lower:]' < $conf/dev_spk.list >$tmpdir/dev_spk
  tr '[:upper:]' '[:lower:]' < $conf/test_spk.list >$tmpdir/test_spk
  ls -d "$*"/train/* | sed -e "s:^.*/::" > $tmpdir/train_spk
fi

cd $dir
for x in train dev test; do
  # First, find the list of audio files .
  # Note: train & test sets are under different directories, but doing find on
  # both and grepping for the speakers will work correctly.
  find $*/{$train_dir,$test_dir} -iname '*.wav' \
    | grep -f $tmpdir/${x}_spk > ${x}_sph.flist

  # Derive utterance ids of the form <speaker>_<file> from the paths.
  sed -e 's:.*/\(.*\)/\(.*\).wav$:\1_\2:i' ${x}_sph.flist \
    > $tmpdir/${x}_sph.uttids
  paste $tmpdir/${x}_sph.uttids ${x}_sph.flist \
    | sort -k1,1 > ${x}_sph.scp
  cat ${x}_sph.scp | awk '{print $1}' > ${x}.uttids

  # Create wav.scp
  awk '{printf("%s '$sph2pipe' -f wav %s |\n", $1, $2);}' < ${x}_sph.scp > ${x}_wav.scp

  # Make the utt2spk and spk2utt files.
  cut -f1 -d'_' ${x}.uttids | paste -d' ' ${x}.uttids - > ${x}.utt2spk
  cat ${x}.utt2spk | $utils/utt2spk_to_spk2utt.pl > ${x}.spk2utt || exit 1;

  # Prepare gender mapping (last character of the speaker id encodes gender).
  cat ${x}.spk2utt | awk '{print $1}' | perl -ane 'chomp($_); $g = substr($_, -1, 1); print "$_ $g\n";' > ${x}.spk2gender

  # Prepare text
  cp $*/${x}.text .

  # Prepare STM file for sclite:
  wav-to-duration scp:${x}_wav.scp ark,t:${x}_dur.ark || exit 1
  awk -v dur=${x}_dur.ark \
  'BEGIN{
     while(getline < dur) { durH[$1]=$2; }
     print ";; LABEL \"O\" \"Overall\" \"Overall\"";
     print ";; LABEL \"F\" \"Female\" \"Female speakers\"";
     print ";; LABEL \"M\" \"Male\" \"Male speakers\"";
   }
   { wav=$1; spk=gensub(/_.*/,"",1,wav); $1=""; ref=$0;
     gender=(substr(spk,5,1) == "f" ? "F" : "M");
     printf("%s 1 %s 0.0 %f <O,%s> %s\n", wav, spk, durH[wav], gender, ref);
   }
  ' ${x}.text >${x}.stm || exit 1

  # Create dummy GLM file for sclite:
  echo ';; empty.glm
  [FAKE]     =>  %HESITATION     / [ ] __ [ ] ;; hesitation token
  ' >${x}.glm
done

echo "Data preparation succeeded"
#!/bin/bash
# Convert a dotted-decimal netmask (e.g. 255.255.254.0) to its CIDR prefix
# length (number of set bits).
#
# The function is invoked in a subshell with IFS=. so that the unquoted
# expansion `for dec in $1` splits the mask into octets.
mask2cidr() {
    local nbits=0
    local dec
    for dec in $1 ; do
        case $dec in
            255) (( nbits+=8 ));;
            254) (( nbits+=7 )) ; break ;;
            252) (( nbits+=6 )) ; break ;;
            248) (( nbits+=5 )) ; break ;;
            240) (( nbits+=4 )) ; break ;;
            224) (( nbits+=3 )) ; break ;;
            192) (( nbits+=2 )) ; break ;;
            128) (( nbits+=1 )) ; break ;;
            0);;
            # Fix: report bad octets on stderr and return non-zero. The old
            # code echoed the error to stdout (captured into the caller's
            # variable) and called `exit 1`, which only left the
            # command-substitution subshell — the script then printed the
            # error text as the result and exited 0.
            *) echo "Error: $dec is not recognised" >&2; return 1;;
        esac
    done
    echo "$nbits"
}

## main ##
# MASK=255.255.254.0
MASK=$1
if ! numbits=$(IFS=. ; mask2cidr "$MASK"); then
    # mask2cidr already reported the invalid octet on stderr.
    exit 1
fi
echo "$numbits"
exit 0
|
<filename>db/migrate/20160308165607_create_distribution_by_prison_and_calendar_dates.rb<gh_stars>10-100
# Recreates the distribution_by_prison_and_calendar_dates database view.
class CreateDistributionByPrisonAndCalendarDates < ActiveRecord::Migration[4.2]
  def change
    # Drop any stale copy first; create_view then loads the SQL definition
    # from db/views (scenic-style view management).
    execute 'DROP VIEW IF EXISTS distribution_by_prison_and_calendar_dates;'
    create_view :distribution_by_prison_and_calendar_dates
  end
end
|
<filename>Examples/seive.js
// Example: print the result of the library's seive() helper.
// NOTE(review): the output depends entirely on ../lib/cpmath.js, which is
// not visible here — presumably a prime sieve; confirm against the library.
var mathlib = require('../lib/cpmath.js');
console.log(mathlib.seive());
|
import React, {useState} from 'react';
function App() {
const [loginCredentials, setLoginCredentials] = useState();
const [userDetails, setUserDetails] = useState();
const logInHandler = () => {
// Code to prompt user to enter their credentials
// Code to validate credentials
// Login Successful
setLoginCredentials(true);
};
const signUpHandler = () => {
// Code to prompt user to enter their details
// Code to validate details
// Sign up Successful
setUserDetails(true);
};
return (
<div>
<button onClick={logInHandler}>Log In</button>
<button onClick={signUpHandler}>Sign Up</button>
</div>
)
}
export default App; |
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package propertyhistorydb;
import java.net.URL;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.ResourceBundle;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.Node;
import javafx.scene.control.Label;
import javafx.scene.control.ListCell;
import javafx.scene.control.ListView;
import javafx.scene.control.Slider;
import javafx.scene.control.TextArea;
/**
*
* @author Iaroslav
*/
/**
 * JavaFX controller demonstrating an undo/redo {@code EditingHistory} over a
 * nested list structure: a "big" list of observable lists, each shown in a
 * "small" list whose selected {@link StringProperty} is edited in a text area.
 *
 * @author Iaroslav
 */
public class PropertyTesterFormController implements Initializable {

    // NOTE(review): injected nowhere visible and never used — confirm it can be removed.
    private Label label;

    /** Outer list: each row is itself an observable list of string properties. */
    @FXML
    private ListView<ObservableList<StringProperty>> bigList;
    /** Inner list: shows the properties of the currently selected big-list row. */
    @FXML
    private ListView<StringProperty> smallList;
    /** Editor bidirectionally bound to the selected small-list property. */
    @FXML
    private TextArea textEditor;
    // NOTE(review): declared but never wired up below — confirm intended use.
    @FXML
    private TextArea textEditor2;

    /** Records property/list changes so they can be undone/redone. */
    EditingHistory history = new EditingHistory();
    SimpleStringProperty prop = new SimpleStringProperty();
    List<ObservableList<StringProperty>> list = new ArrayList<>();
    /** Backing model for {@link #bigList}. */
    ObservableList<ObservableList<StringProperty>> primalList = FXCollections.observableList(list);

    /** Slider that scrubs through the recorded history positions. */
    @FXML
    private Slider historySlider;

    /**
     * Builds the initial model, registers it with the history, binds the
     * history slider, seeds demo data and selects the first row.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        // TODO
        ObservableList<StringProperty> olist = FXCollections.observableList(new ArrayList<>());
        primalList.add(olist);
        bigList.setItems(primalList);
        SetupSmallList();
        history.AddObservableList(primalList);
        history.AddObservableList(olist);
        // Slider range/position track the history length and cursor.
        historySlider.maxProperty().bind(history.historyLength);
        historySlider.valueProperty().bindBidirectional(history.historyPosition);
        AddDataToList(olist);
        bigList.getSelectionModel().select(0);
    }

    /**
     * Wires the small list to the big list's selection and rebinds the text
     * editor whenever the small-list selection changes.
     */
    private void SetupSmallList() {
        smallList.itemsProperty().bind(bigList.getSelectionModel().selectedItemProperty());
        smallList.getSelectionModel().selectedItemProperty().addListener((ov, a1, a2) -> {
            // Unbind the previous property before binding the new one.
            if(a1 != null) textEditor.textProperty().unbindBidirectional(a1);
            if(a2 != null) textEditor.textProperty().bindBidirectional(a2);
        });
        smallList.setCellFactory((param) -> new PropertyListCell());
    }

    /**
     * Seeds N properties and mutates them in two pseudo-random passes so the
     * history has something to undo/redo.
     */
    private void AddDataToList(ObservableList<StringProperty> olist) {
        int N = 13;
        int M1 = 7;
        int M2 = 5;
        for (int i = 0; i < N; i++) {
            prop = new SimpleStringProperty();
            prop.set(i + " ");
            olist.add(prop);
            history.AddProperty(prop);
        }
        for (int i = 0; i < N; i++) {
            int idx = (int) (Math.pow(i+1, M1)) % N;
            olist.get(idx).set(olist.get(idx).get()+" item");
        }
        for (int i = 0; i < N; i++) {
            int idx = (int) (Math.pow(i+1, M2)) % N;
            olist.get(idx).set(olist.get(idx).get()+" data");
        }
    }

    /** Undoes the most recent recorded change. */
    @FXML
    private void undoAction(ActionEvent event) {
        history.undo();
    }

    /** Redoes the most recently undone change. */
    @FXML
    private void redoAction(ActionEvent event) {
        history.redo();
    }

    /** Appends a new (empty) inner list to the big list and tracks it. */
    @FXML
    private void addToBigList(ActionEvent event) {
        List<StringProperty> blst = new ArrayList<>();
        ObservableList<StringProperty> olist = FXCollections.observableList(blst);
        history.AddObservableList(olist);
        primalList.add(olist);
    }

    /** Removes the currently selected inner list from the big list. */
    @FXML
    private void deleteFromBigList(ActionEvent event) {
        primalList.remove(bigList.getSelectionModel().getSelectedItem());
    }

    /** Adds a placeholder property to the selected inner list and tracks it. */
    @FXML
    private void addToSmallList(ActionEvent event) {
        ObservableList<StringProperty> olist
                = bigList.getSelectionModel().getSelectedItem();
        if (olist != null) {
            SimpleStringProperty tmp = new SimpleStringProperty();
            tmp.set("edit me!");
            olist.add(tmp);
            history.AddProperty(tmp);
        }
    }

    /** Removes the selected property from the selected inner list. */
    @FXML
    private void deleteFromSmallList(ActionEvent event) {
        ObservableList<StringProperty> olist
                = bigList.getSelectionModel().getSelectedItem();
        if (olist != null) {
            olist.removeAll(smallList.getSelectionModel().getSelectedItem());
        }
    }

    // class used for proper display of properties
    private static class PropertyListCell extends ListCell<StringProperty> {
        // Last property this cell was bound to, to avoid needless rebinding.
        StringProperty previous = null;

        @Override
        public void updateItem(StringProperty item, boolean empty) {
            super.updateItem(item, empty);
            if (item != null) {
                if (item != previous) {
                    Label lbl = null;
                    Node graphic = getGraphic();
                    if (graphic == null) {
                        // First use of this cell: create the label graphic.
                        lbl = new Label();
                        setGraphic(lbl);
                    } else {
                        graphic.setVisible(true);
                        lbl = (Label) graphic;
                    }
                    // Rebind the label text to the new property.
                    lbl.textProperty().unbind();
                    lbl.textProperty().bind(item);
                }
            } else {
                // Empty cell: hide the stale graphic instead of discarding it.
                if (getGraphic() != null) {
                    getGraphic().setVisible(false);
                }
            }
            previous = item;
        }
    }
}
|
import { Component, ComponentInterface, h } from '@stencil/core';
/**
 * Site footer web component (Stencil) with two columns:
 * a disclaimer paragraph and a list of service links.
 */
@Component({
  tag: 'mohole-footer',
  styleUrl: 'mohole-footer.scss',
  shadow: true,
})
export class MoholeFooter implements ComponentInterface {
  /** Renders the static footer markup (no props or state). */
  render() {
    return (
      <footer>
        <div class='foot-col'>
          <p class='big'>Disclaimer</p>
          <hr />
          <p>
            È il nuovo operatore telefonico sul mercato con la rete più grande.
            La rete 4.5G è la più grande in termini di numero di celle e di volumi di traffico dati.
            WINDTRE è il principale operatore nel mercato delle Telecomunicazioni mobili in Italia con il 30,7%
            delle linee Human. Maggiori informazioni.
            Sito realizzato per il corso di web design e media della scuola Mohole.
          </p>
        </div>
        <div class="foot-col">
          <p class="big">Servizi</p>
          <hr />
          <div class="cont-info">
            <p>Servizio Clienti</p>
            <p>Espansione della rete</p>
            <p>Maggiori informazioni</p>
            <p>Connessione</p>
            <p>Stato dell'ordine</p>
            <p>Vendi cellulare</p>
            <p>All'estero in rooming</p>
            <p>Magenta tube</p>
            <p>Costruttori</p>
            <p>Espansione della rete</p>
            <p>Centro assistenza</p>
            <p>Connessione</p>
          </div>
        </div>
      </footer>
    );
  }
}
|
<reponame>jamestiotio/esc
import org.junit.Before;
import org.junit.Test;
// This test suite obtains 100% method coverage of BiSectionExample.
public class BiSectionTest {
    // Fresh instance per test; state is reset by the @Before hook.
    private BiSectionExample bi;

    @Before
    public void runBeforeEachTest() {
        bi = new BiSectionExample();
    }

    // Coarse range check on the root found in [0.5, 100.3] with tolerance 0.1.
    @Test
    public void test4MethodCoverage() {
        assert (bi.root(0.5, 100.3, 0.1) >= 100);
        // Question: Should we assert the returned value is the exact value we expect?
    }

    // Wide tolerance forces the bisection loop to run exactly once.
    @Test
    public void test4LoopCoverage1() { // Loop once
        assert (bi.root(0, 100, 80) > 50);
    }
}
|
/**
* @module React
*/
import React from 'react'
/**
* @module TextButton
*/
import TextButton from 'components/buttons/TextButton'
/**
* @module Link
*/
import { Link } from 'react-router-dom'
import LoginFormContainer from 'containers/LoginForm'
import TextCtaButton from 'components/buttons/TextCtaButton'
import { FadeIn } from 'components/animation'
import Icon from 'components/icon'
/**
 * Public (logged-out) site header: browse/search link, a login toggle and a
 * register call-to-action. Renders separate desktop (`visible-sm-up`) and
 * mobile (`hidden-sm-up`) layouts, plus the login form when open.
 *
 * @param {object}   props
 * @param {function} props.onLoginButtonClick toggles the login form open/closed
 * @param {boolean}  props.showLogin          whether the login form is open
 */
const HeaderPublic = (props) => {
  const {
    onLoginButtonClick,
    showLogin
  } = props
  // The register button swaps style (TextButton vs TextCtaButton) depending
  // on whether the login form is currently open.
  return (
    <div className='header__content'>
      <div className='visible-sm-up'>
        <div className='header__search'>
          <Link to='/browse-horses'>
            <Icon
              modifier='magnifying-glass' />
          </Link>
        </div>
        <TextCtaButton
          onClick={onLoginButtonClick}
          className='header__login-button uppercase semi-bold'
          text={'log in'}
          active={!showLogin} />
        <Link to='/register'>
          {
            !showLogin
              ? (
                <TextButton
                  className='header__register-button'
                  modifier='sm'
                  text='register free' />
              )
              : (
                <TextCtaButton
                  className='header__register-button uppercase semi-bold'
                  text={'register free'} />
              )
          }
        </Link>
      </div>
      <div className='hidden-sm-up'>
        <div className='header__search'>
          <Link to='/browse-horses'>
            <Icon
              modifier='magnifying-glass' />
          </Link>
        </div>
        <div className='header__search' onClick={onLoginButtonClick}>
          <Icon
            modifier='account' />
        </div>
      </div>
      <div className='hidden-sm-up'>
        <FadeIn>
          <div className='header__mobile-register section-shadow'>
            {
              !showLogin
                ? (
                  <Link to='/register'>
                    <TextButton
                      className='header__register-button'
                      modifier='md'
                      text='register free' />
                  </Link>
                )
                : (
                  <Link to='/register'>
                    <TextCtaButton
                      className='header__register-button uppercase semi-bold align-middle'
                      text={'register free'} />
                  </Link>
                )
            }
          </div>
        </FadeIn>
      </div>
      <div>
        <FadeIn>
          {
            showLogin && (
              <LoginFormContainer
                className='section-shadow'
                closeLogin={onLoginButtonClick} />
            )
          }
        </FadeIn>
      </div>
    </div>
  )
}

export default HeaderPublic
|
<filename>src/org/mocraft/Nagato/Gui/GuiExit.java<gh_stars>1-10
package org.mocraft.Nagato.Gui;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
/**
 * Exit-confirmation dialog. "Confirm" disposes both this dialog and the main
 * window; "Cancel" disposes only this dialog.
 */
public class GuiExit implements ActionListener {

    public JFrame frame;
    public JLabel lbl;
    public JButton confirm, cancel;

    /**
     * Builds the dialog: a prompt label spanning two columns with
     * Confirm/Cancel buttons underneath.
     * NOTE(review): the frame is never made visible here — presumably the
     * caller invokes frame.setVisible(true); confirm.
     */
    public GuiExit() {
        GridBagConstraints lblGrid = new GridBagConstraints();
        GridBagConstraints confirmGrid = new GridBagConstraints();
        GridBagConstraints cancelGrid = new GridBagConstraints();

        frame = new JFrame("Exit");
        frame.setSize(250, 100);
        frame.setLayout(new GridBagLayout());

        // Prompt label across the full width of the two-button row.
        lbl = new JLabel("Confirm to exit program ?");
        lblGrid.gridx = 0;
        lblGrid.gridy = 0;
        lblGrid.gridwidth = 2;
        lblGrid.gridheight = 1;
        frame.add(lbl, lblGrid);

        confirm = new JButton("Confirm");
        confirmGrid.gridx = 0;
        confirmGrid.gridy = 1;
        confirm.setActionCommand("confirm");
        confirm.addActionListener(this);
        frame.add(confirm, confirmGrid);

        cancel = new JButton("Cancel");
        cancelGrid.gridx = 1;
        cancelGrid.gridy = 1;
        cancel.setActionCommand("cancel");
        cancel.addActionListener(this);
        frame.add(cancel, cancelGrid);
    }

    /**
     * Handles both buttons by action command: "confirm" also disposes the
     * application's main frame (GuiMain.frame); anything else just closes
     * this dialog.
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        String cmd = e.getActionCommand();
        if(cmd.equals("confirm")) {
            frame.dispose();
            GuiMain.frame.dispose();
        } else {
            frame.dispose();
        }
    }
}
|
#!/usr/bin/env bash

# Fail fast: abort on command errors, unset variables and pipeline failures.
set -e
set -u
set -o pipefail

# Absolute path to the repository root (parent of this script's directory).
ROOTDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
readonly ROOTDIR

# shellcheck source=SCRIPTDIR/.util/tools.sh
source "${ROOTDIR}/scripts/.util/tools.sh"

# shellcheck source=SCRIPTDIR/.util/print.sh
source "${ROOTDIR}/scripts/.util/print.sh"
# Parse CLI flags and hand off to package::buildpack.
# Defaults: stack from config.json, cached=false, output=build/buildpack.zip.
# --version is mandatory.
function main() {
  local stack version cached output
  stack="$(jq -r -S .stack "${ROOTDIR}/config.json")"
  cached="false"
  output="${ROOTDIR}/build/buildpack.zip"

  while [[ "${#}" != 0 ]]; do
    case "${1}" in
      --stack)
        stack="${2}"
        shift 2
        ;;

      --version)
        version="${2}"
        shift 2
        ;;

      --cached)
        cached="true"
        shift 1
        ;;

      --output)
        output="${2}"
        shift 2
        ;;

      --help|-h)
        shift 1
        usage
        exit 0
        ;;

      "")
        # skip if the argument is empty
        shift 1
        ;;

      *)
        util::print::error "unknown argument \"${1}\""
    esac
  done

  # --version has no default; refuse to build without it.
  if [[ -z "${version:-}" ]]; then
    usage
    echo
    util::print::error "--version is required"
  fi

  package::buildpack "${version}" "${cached}" "${stack}" "${output}"
}
# Print command usage.
# Fix: the help text claimed --cached defaults to true, but main() initialises
# cached="false"; also document the previously-missing --stack/--output flags.
function usage() {
  cat <<-USAGE
	package.sh --version <version> [OPTIONS]

	Packages the buildpack into a .zip file.

	OPTIONS
	  --help -h                         prints the command usage
	  --version <version> -v <version>  specifies the version number to use when packaging the buildpack
	  --cached                          cache the buildpack dependencies (default: false)
	  --stack <stack>                   stack to package for (default: .stack from config.json)
	  --output <path>                   output file path (default: build/buildpack.zip)
	USAGE
}
# Build the buildpack .zip with buildpack-packager and move it to the
# requested output path.
# Args: 1=version 2=cached(true/false) 3=stack 4=output path
function package::buildpack() {
  local version cached stack output
  version="${1}"
  cached="${2}"
  stack="${3}"
  output="${4}"

  mkdir -p "$(dirname "${output}")"

  # Ensure the packager CLI is installed into the repo-local .bin directory.
  util::tools::buildpack-packager::install --directory "${ROOTDIR}/.bin"

  echo "Building buildpack (version: ${version}, stack: ${stack}, cached: ${cached}, output: ${output})"

  local file
  # The packager prints the generated archive name; extract the .zip token
  # from its output so we can relocate it.
  file="$(
    buildpack-packager build \
      "--version=${version}" \
      "--cached=${cached}" \
      "--stack=${stack}" \
        | xargs -n1 | grep -e '\.zip$'
  )"

  mv "${file}" "${output}"
}

main "${@:-}"
|
#!/bin/bash
# Report available disk space for each fixed partition.
# Refactor: the three copy-pasted echo/df pairs are replaced by a loop over
# the device list; the printed output is identical.
for dev in /dev/sda1 /dev/sdb1 /dev/sdc1; do
    echo "Available space on ${dev}:";
    df -h "${dev}"
done
package me.batizhao.uaa.config;
import lombok.SneakyThrows;
import me.batizhao.common.security.handler.MyAccessDeniedHandler;
import me.batizhao.common.security.handler.MyAuthenticationEntryPoint;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
/**
* @author batizhao
* @since 2020-02-26
*/
@Configuration
public class WebSecurityConfig extends WebSecurityConfigurerAdapter {

    /** Rejects authenticated users lacking permission (403 handling). */
    @Autowired
    private MyAccessDeniedHandler accessDeniedHandler;

    /** Rejects unauthenticated requests (401 handling). */
    @Autowired
    private MyAuthenticationEntryPoint authenticationEntryPoint;

    /**
     * Stateless token-based security: CSRF off (no session cookies),
     * /token, /captcha and actuator endpoints open, everything else
     * authenticated; custom 401/403 handlers.
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.csrf().disable()
                .authorizeRequests(authz -> authz
                        .antMatchers("/token", "/captcha", "/actuator/**").permitAll()
                        .anyRequest().authenticated()
                )
                // No HTTP session; every request must carry its own credentials.
                .sessionManagement(session -> session.sessionCreationPolicy(SessionCreationPolicy.STATELESS))
                .exceptionHandling(exceptions -> exceptions
                        .authenticationEntryPoint(authenticationEntryPoint)
                        .accessDeniedHandler(accessDeniedHandler)
                );
    }

    /** BCrypt hashing for stored passwords. */
    @Bean
    public PasswordEncoder passwordEncoder() {
        return new BCryptPasswordEncoder();
    }

    /** Exposes the AuthenticationManager as a bean for the token endpoint. */
    @Bean
    @Override
    @SneakyThrows
    public AuthenticationManager authenticationManagerBean() {
        return super.authenticationManagerBean();
    }
}
|
#!/bin/sh
# Run all queued /tmp/*.POST hook scripts, then clear both hook queues.
# Fix: guard the glob so an empty queue does not execute the literal
# pattern string, and use rm -f so cleanup does not error when the
# queues are already empty.
for i in /tmp/*.POST; do
    [ -e "$i" ] && bash "$i"
done
rm -f /tmp/*.POST
rm -f /tmp/*.PRE
|
<reponame>darrindickey/Sandbox
// Manual Jest mock for `react-native-firebase` so tests run without a real
// Firebase backend.
jest.mock('react-native-firebase', () => {
  return {
    // messaging(): permission checks/requests resolve true; getToken yields
    // a fixed fake token; topic (un)subscription are plain spies.
    messaging: jest.fn(() => {
      return {
        hasPermission: jest.fn(() => Promise.resolve(true)),
        subscribeToTopic: jest.fn(),
        unsubscribeFromTopic: jest.fn(),
        requestPermission: jest.fn(() => Promise.resolve(true)),
        getToken: jest.fn(() => Promise.resolve('myMockToken'))
      };
    }),
    // notifications(): listener registration points are no-op spies.
    notifications: jest.fn(() => {
      return {
        onNotification: jest.fn(),
        onNotificationDisplayed: jest.fn()
      };
    })
  };
});
# NOTE(review): `dict_values` must be defined elsewhere (e.g. d.values()).
all(dict_values)  # True iff every value is truthy (also True for an empty view).
<filename>riakcs/tests/test_riakcs.py
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import logging
import pytest
from ast import literal_eval
from socket import error as SocketError
from datadog_checks.riakcs import RiakCs
from . import common
log = logging.getLogger(__file__)


def test_parser(mocked_check):
    """Parsed RiakCS stats JSON must match the expected python-literal fixture."""
    input_json = common.read_fixture('riakcs_in.json')
    output_python = common.read_fixture('riakcs_out.python')
    assert mocked_check.load_json(input_json) == literal_eval(output_python)


def test_metrics(mocked_check, aggregator):
    """A check run must emit every metric listed in the fixture (name/value/tags)."""
    mocked_check.check(common.CONFIG)
    expected = literal_eval(common.read_fixture('riakcs_metrics.python'))
    for m in expected:
        aggregator.assert_metric(m[0], m[2], m[3].get('tags', []))


def test_service_checks(check, aggregator):
    """With no server listening, the check raises and reports a CRITICAL service check."""
    with pytest.raises(SocketError):
        check.check(common.CONFIG)

    scs = aggregator.service_checks(common.SERVICE_CHECK_NAME)
    assert len(scs) == 1
    aggregator.assert_service_check(common.SERVICE_CHECK_NAME,
                                    status=RiakCs.CRITICAL,
                                    tags=['aggregation_key:localhost:8080', 'optional:tag1'])


def test_21_parser(mocked_check21):
    """Same as test_parser, against the RiakCS 2.1 stats format."""
    input_json = common.read_fixture('riakcs21_in.json')
    output_python = common.read_fixture('riakcs21_out.python')
    assert mocked_check21.load_json(input_json) == literal_eval(output_python)


def test_21_metrics(mocked_check21, aggregator):
    """2.1 metrics must match the fixture; legacy one-minute metric must be absent."""
    mocked_check21.check(common.CONFIG_21)
    expected = literal_eval(common.read_fixture('riakcs21_metrics.python'))
    for m in expected:
        aggregator.assert_metric(m[0], m[2], m[3].get('tags', []))
    assert len(aggregator.metrics("riakcs.bucket_policy_get_in_one")) == 0
|
'use strict';

const
    // Audit outcome ratings, from unknown to healthy.
    RATING_UNKNOWN = 'unknown',
    RATING_ERROR = 'error',
    RATING_WARN = 'warn',
    RATING_OK = 'ok',

    // npm invocations as [command, args]: lockfile-only install (no scripts,
    // no bins, no audit), then `npm audit` with JSON output.
    NpmInstall = ['npm', ['i', '--silent', '--package-lock-only', '--no-bin-links', '--no-audit', '--ignore-scripts']],
    NpmAudit = ['npm', ['audit', '--json']],

    fs = require('fs'),
    path = require('path'),
    /** @var {function<Promise>} series */
    series = require('p-series'),
    exec = require('execa'),
    rimraf = require('rimraf');
/**
 * Runs `npm audit` against a package.json in a throw-away temp folder and
 * maps the vulnerability counts to a coarse rating string.
 *
 * Fix: _calcResult previously fell through without a return when the audit
 * process failed or produced no stdout, so the callback received `undefined`
 * instead of a rating; it now returns RATING_ERROR, consistent with the
 * other failure paths.
 */
class Auditer
{
    static get RATING_UNKNOWN () { return RATING_UNKNOWN; }

    /**
     * Audits the given package.json and reports a RATING_* string via cb.
     * The temp working folder is always cleaned up, on success and failure.
     *
     * @param {object} logger
     * @param {Package} packageJson parsed object or raw JSON string
     * @param {function} cb receives one of the RATING_* strings
     */
    static getAuditReport (logger, packageJson, cb)
    {
        let tmpFolder = fs.mkdtempSync(path.join(__dirname, 'tmp'));
        series([
            Auditer._copy.bind(null, packageJson, tmpFolder),
            Auditer._exec(NpmInstall, tmpFolder),
            Auditer._exec(NpmAudit, tmpFolder)
        ]).then(r => {
            Auditer._cleanup(tmpFolder);
            cb(Auditer._calcResult(logger, r));
        }).catch(err => {
            logger.error(err);
            Auditer._cleanup(tmpFolder);
            cb(RATING_ERROR);
        });
    }

    /**
     * Extracts the audit JSON from the last series result and rates it.
     *
     * @param {object} logger
     * @param {array} result series results; last entry is the npm-audit exec result
     * @returns {string} one of the RATING_* strings
     * @private
     */
    static _calcResult (logger, result)
    {
        /**
         * @typedef {object} Vulnerabilities
         * @property {number} info
         * @property {number} low
         * @property {number} moderate
         * @property {number} high
         * @property {number} critical
         */
        /**
         * @typedef {object} AuditReport
         * @property {object} metadata
         * @property {Vulnerabilities} metadata.vulnerabilities
         */
        let auditResult = result.pop();
        if (auditResult && !auditResult.failed && auditResult.code === 0 && auditResult.stdout) {
            try {
                return Auditer._rating(JSON.parse(auditResult.stdout).metadata.vulnerabilities);
            } catch(err) {
                logger.error(err);
                return RATING_ERROR;
            }
        }
        // Failed/empty audit run: rate as an error instead of returning undefined.
        return RATING_ERROR;
    }

    /**
     * Folds the per-severity counts into a single rating: any high/critical
     * finding (or enough lower-severity ones) is an error, moderate/low is a
     * warning, info-only is ok.
     *
     * @param {Vulnerabilities} v
     * @returns {string} one of the RATING_* strings
     * @private
     */
    static _rating (v)
    {
        // noinspection JSUnusedAssignment
        let map = {info: 1, low: 2, moderate: 4, high: 8, critical: 16},
            rating = Object.entries(map).reduce((p, [sev, fac]) => p += v[sev] === 0 ? 0 : fac, 0);
        return rating >= 8 ? RATING_ERROR : (rating >= 2 ? RATING_WARN : RATING_OK);
    }

    /**
     * Writes package.json into the temp folder (stringifying if needed).
     *
     * @param {Package} packageJson
     * @param {string} tmpFolder
     * @returns {Promise}
     * @private
     */
    static _copy (packageJson, tmpFolder)
    {
        return new Promise((ok, nok) => {
            packageJson = typeof packageJson !== 'string' ? JSON.stringify(packageJson) : packageJson;
            fs.writeFile(path.join(tmpFolder, 'package.json'), packageJson, err => err ? nok(err) : ok());
        });
    }

    /**
     * Builds a thunk running the given [command, args] in the temp folder
     * (deferred execution, as required by p-series).
     *
     * @param {array} cmd
     * @param {string} tmpFolder
     * @returns {function(): Promise}
     * @private
     */
    static _exec (cmd, tmpFolder)
    {
        return () => exec.apply(null, cmd.concat({cwd: tmpFolder}));
    }

    /**
     * Recursively removes the temp folder.
     *
     * @param {string} tmpFolder
     * @private
     */
    static _cleanup (tmpFolder)
    {
        rimraf.sync(tmpFolder, {disableGlob: true});
    }
}
module.exports = Auditer;
|
using System;

/// <summary>
/// Console program that reads integers until the user enters 0, then prints
/// the average of the numbers entered before the terminating 0.
/// </summary>
public class Program
{
    public static void Main()
    {
        int currentNumber;
        int sum = 0;
        int counter = 0;

        Console.WriteLine("Please, enter numbers. Enter 0 when you're done:");
        while (true)
        {
            // Fix: int.Parse threw FormatException on malformed input;
            // re-prompt instead of crashing.
            if (!int.TryParse(Console.ReadLine(), out currentNumber))
            {
                Console.WriteLine("Invalid number, please try again:");
                continue;
            }
            // 0 is the sentinel and is not part of the average.
            if (currentNumber == 0)
            {
                break;
            }
            sum += currentNumber;
            counter++;
        }

        // Fix: entering 0 first previously decremented the count to zero and
        // printed NaN (0.0 / 0); report the empty case explicitly instead.
        if (counter == 0)
        {
            Console.WriteLine("No numbers were entered.");
            return;
        }

        // Cast to double so the division is not truncating integer division.
        double average = (double)sum / counter;
        Console.WriteLine("The average of the entered numbers is: {0}", average);
    }
}
"""
Develop a Python script that connects to a MySQL database to retrieve certain data information
"""
import pymysql
# Establish a connection to a MySQL database
conn = pymysql.connect(
host='localhost',
user='username',
password='password',
db='dbname',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor
)
# Create and execute a MySQL query
sql = 'SELECT * FROM table WHERE condition=value'
cur = conn.cursor()
cur.execute(sql)
# Retrieve results from the query
result = cur.fetchall()
# Close the cursor and connection before exiting the script
cur.close()
conn.close() |
<!DOCTYPE html>
<html lang="en">
<head>
    <!-- Declare the encoding explicitly so the page is not mis-decoded. -->
    <meta charset="utf-8"/>
    <title>Shopping Cart</title>
</head>
<body>
    <h1>Shopping Cart</h1>
    <div>
        <div class="item-preview">
            <!-- Fix: images need alt text for accessibility / broken-image fallback. -->
            <img src="{{ item.imageUrl }}" alt="{{ item.name }}"/>
            <h3>{{ item.name }}</h3>
            <span>${{ item.price }}</span>
        </div>
        <form action="/cart" method="post">
            <!-- Fix: min was 0, which allowed submitting an add-to-cart of zero
                 items; require at least 1 and default the field accordingly. -->
            <label>Quantity: <input type="number" name="quantity" min="1" value="1" required/></label>
            <button type="submit">Add to Cart</button>
        </form>
    </div>
</body>
</html>
#!/bin/sh

# file      : build.sh
# copyright : Copyright (c) 2014-2019 Code Synthesis Ltd
# license   : MIT; see accompanying LICENSE file

usage="Usage: $0 [-h|--help] [<options>] <c++-compiler> [<compile-options>]"

# Package repository URL (or path).
#
# Honors a BUILD2_REPO override from the environment; the commented
# alternatives are other known public repository locations.
if test -z "$BUILD2_REPO"; then
  BUILD2_REPO="https://stage.build2.org/1"
  # BUILD2_REPO="https://pkg.cppget.org/1/queue/alpha"
  # BUILD2_REPO="https://pkg.cppget.org/1/alpha"
fi

# The bpkg configuration directory.
#
# Toolchain version; also names the sibling bpkg configuration directory
# that is created further below.
cver="0.9-a.0"
cdir="build2-toolchain-$cver"
# Print all arguments, joined into a single line, on stderr.
diag ()
{
  echo "$*" 1>&2
}
# Note that this function will execute a command with arguments that contain
# spaces but it will not print them as quoted (and neither does set -x).
#
# Echo the command being executed to stderr, run it, and abort the whole
# script if it exits with a non-zero status.
run ()
{
  diag "+ $@"
  if ! "$@"; then
    exit 1
  fi
}
owd="$(pwd)"
cxx=
idir=
jobs=
sudo=
trust=
timeout=
make=
make_options=
verbose=
# Parse options until the first non-option argument, which is taken to be the
# compiler executable; anything after it is left in $@ as <compile-options>.
while test $# -ne 0; do
  case "$1" in
    -h|--help)
      diag
      diag "$usage"
      diag "Options:"
      diag " --install-dir <dir> Alternative installation directory."
      diag " --sudo <prog> Optional sudo program to use (pass false to disable)."
      diag " --jobs|-j <num> Number of jobs to perform in parallel."
      diag " --repo <loc> Alternative package repository location."
      diag " --trust <fp> Repository certificate fingerprint to trust."
      diag " --timeout <sec> Network operations timeout in seconds."
      diag " --make <arg> Bootstrap using GNU make instead of script."
      diag " --verbose <level> Diagnostics verbosity level between 0 and 6."
      diag
      diag "By default the script will install into /usr/local using sudo(1)."
      diag "To use sudo for a custom installation directory you need to specify"
      diag "the sudo program explicitly, for example:"
      diag
      diag "$0 --install-dir /opt/build2 --sudo sudo g++"
      diag
      diag "If --jobs|-j is unspecified, then the bootstrap step is performed"
      diag "serially with the rest of the process using the number of available"
      diag "hardware threads."
      diag
      diag "The --trust option recognizes two special values: 'yes' (trust"
      diag "everything) and 'no' (trust nothing)."
      diag
      diag "The --make option can be used to bootstrap using GNU make. The"
      diag "first --make value should specify the make executable optionally"
      diag "followed by additional make options, for example:"
      diag
      diag "$0 --make gmake --make -j8 g++"
      diag
      diag "If --jobs|-j is specified, then its value is passed to make before"
      diag "any additional options."
      diag
      diag "If specified, <compile-options> override the default (-O3) compile"
      diag "options (config.cc.coptions) in the bpkg configuration used to build"
      diag "and install the final toolchain. For example, to build with the debug"
      diag "information (and without optimization):"
      diag
      diag "$0 g++ -g"
      diag
      diag "See the BOOTSTRAP-UNIX file for details."
      diag
      exit 0
      ;;
    --install-dir)
      shift
      # Every value-taking option checks that its argument is present.
      if test $# -eq 0; then
        diag "error: installation directory expected after --install-dir"
        diag "$usage"
        exit 1
      fi
      idir="$1"
      shift
      ;;
    -j|--jobs)
      shift
      if test $# -eq 0; then
        diag "error: number of jobs expected after --jobs|-j"
        diag "$usage"
        exit 1
      fi
      # Stored with the -j flag so it can be passed verbatim later.
      jobs="-j $1"
      shift
      ;;
    --sudo)
      shift
      if test $# -eq 0; then
        diag "error: sudo program expected after --sudo"
        diag "$usage"
        exit 1
      fi
      sudo="$1"
      shift
      ;;
    --repo)
      shift
      if test $# -eq 0; then
        diag "error: repository location expected after --repo"
        diag "$usage"
        exit 1
      fi
      BUILD2_REPO="$1"
      shift
      ;;
    --trust)
      shift
      if test $# -eq 0; then
        diag "error: certificate fingerprint expected after --trust"
        diag "$usage"
        exit 1
      fi
      trust="$1"
      shift
      ;;
    --timeout)
      shift
      if test $# -eq 0; then
        diag "error: value in seconds expected after --timeout"
        diag "$usage"
        exit 1
      fi
      timeout="$1"
      shift
      ;;
    --make)
      shift
      if test $# -eq 0; then
        diag "error: argument expected after --make"
        diag "$usage"
        exit 1
      fi
      # First --make names the executable; later ones accumulate options.
      if test -z "$make"; then
        make="$1"
      else
        make_options="$make_options $1"
      fi
      shift
      ;;
    --verbose)
      shift
      if test $# -eq 0; then
        diag "error: diagnostics level between 0 and 6 expected after --verbose"
        diag "$usage"
        exit 1
      fi
      verbose="$1"
      shift
      ;;
    *)
      # First non-option argument: the compiler. Stop parsing here so the
      # remaining arguments stay in $@ as <compile-options>.
      cxx="$1"
      shift
      break
      ;;
  esac
done
if test -z "$cxx"; then
diag "error: compiler executable expected"
diag "$usage"
exit 1
fi
# Place default <compile-options> into the $@ array.
#
if test $# -eq 0; then
set -- -O3
fi
# Merge jobs and make_options into make.
#
if test -n "$make"; then
if test -n "$jobs"; then
make="$make $jobs"
fi
if test -n "$make_options"; then
make="$make$make_options" # Already has leading space.
fi
fi
# Only use default sudo for the default installation directory and only if
# it wasn't specified by the user.
#
if test -z "$idir"; then
idir="/usr/local"
if test -z "$sudo"; then
sudo="sudo"
fi
fi
if test "$sudo" = false; then
sudo=
fi
if test -f build/config.build; then
diag "error: current directory already configured, start with clean source"
exit 1
fi
if test -d "../$cdir"; then
diag "error: ../$cdir/ bpkg configuration directory already exists, remove it"
exit 1
fi
# Add $idir/bin to PATH in case it is not already there.
#
PATH="$idir/bin:$PATH"
export PATH
sys="$(build2/config.guess | sed -n 's/^[^-]*-[^-]*-\(.*\)$/\1/p')"
case "$sys" in
mingw32 | mingw64 | msys | msys2 | cygwin)
conf_rpath="[null]"
conf_sudo="[null]"
;;
*)
conf_rpath="$idir/lib"
if test -n "$sudo"; then
conf_sudo="$sudo"
else
conf_sudo="[null]"
fi
;;
esac
# We don't have arrays in POSIX shell but we should be ok as long as none of
# the option values contain spaces. Note also that the expansion must be
# unquoted.
#
bpkg_fetch_ops=
bpkg_build_ops=
if test -n "$timeout"; then
bpkg_fetch_ops="--fetch-timeout $timeout"
bpkg_build_ops="--fetch-timeout $timeout"
fi
if test "$trust" = "yes"; then
bpkg_fetch_ops="$bpkg_fetch_ops --trust-yes"
elif test "$trust" = "no"; then
bpkg_fetch_ops="$bpkg_fetch_ops --trust-no"
elif test -n "$trust"; then
bpkg_fetch_ops="$bpkg_fetch_ops --trust $trust"
fi
if test -n "$verbose"; then
verbose="--verbose $verbose"
fi
# Bootstrap, stage 1.
#
# Build a minimal b-boot with either the shell script or GNU make.
run cd build2

if test -z "$make"; then
  run ./bootstrap.sh "$cxx"
else
  run $make -f ./bootstrap.gmake "CXX=$cxx"
fi

run build2/b-boot --version

# Bootstrap, stage 2.
#
# Rebuild the build system with itself (statically linked), then promote the
# result to be the new b-boot.
run build2/b-boot $verbose $jobs config.cxx="$cxx" config.bin.lib=static build2/exe{b}
# NOTE(review): deliberately not wrapped in run — presumably a failed mv is
# caught by the --version check on the next line; confirm before changing.
mv build2/b build2/b-boot
run build2/b-boot --version

# Build and stage the build system and the package manager.
#
# Installed with a -stage suffix so they do not clash with the final tools.
run cd ..

run build2/build2/b-boot $verbose configure \
  config.cxx="$cxx" \
  config.bin.suffix=-stage \
  config.bin.rpath="$conf_rpath" \
  config.install.root="$idir" \
  config.install.data_root=root/stage \
  config.install.sudo="$conf_sudo"

run build2/build2/b-boot $verbose $jobs install: build2/ bpkg/

# Sanity-check that the staged tools are on PATH and runnable.
run which b-stage
run which bpkg-stage

run b-stage --version
run bpkg-stage --version

# Build the entire toolchain from packages.
#
# Done in a sibling bpkg configuration directory created above the source.
run cd ..

run mkdir "$cdir"
run cd "$cdir"

cdir="$(pwd)" # Save full path for later.

run bpkg-stage $verbose create \
  cc \
  config.cxx="$cxx" \
  config.cc.coptions="$*" \
  config.bin.rpath="$conf_rpath" \
  config.install.root="$idir" \
  config.install.sudo="$conf_sudo"

run bpkg-stage $verbose add "$BUILD2_REPO"
run bpkg-stage $verbose $bpkg_fetch_ops fetch
run bpkg-stage $verbose $jobs $bpkg_build_ops build --for install --yes --plan= build2 bpkg bdep
run bpkg-stage $verbose $jobs install build2 bpkg bdep

# Verify the final tools.
run which b
run which bpkg
run which bdep

run b --version
run bpkg --version
run bdep --version

# Clean up stage.
#
run cd "$owd"
run b $verbose $jobs uninstall: build2/ bpkg/

diag
diag "Toolchain installation: $idir/bin"
diag "Upgrade configuration:  $cdir"
diag
|
#!/bin/sh
# Thin wrapper around the klte-common extractor: pins the vendor/device pair
# and forwards all command-line arguments.

set -e

export VENDOR=samsung
export DEVICE=kltedcmactive

# Fix: "$@" must be quoted; an unquoted $@ undergoes field splitting and
# pathname expansion, corrupting any argument containing spaces or wildcards.
./../../$VENDOR/klte-common/extract-files.sh "$@"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.