text stringlengths 1 1.05M |
|---|
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import imghdr
from calibre.ebooks.mobi import MAX_THUMB_DIMEN, MAX_THUMB_SIZE
from calibre.ebooks.mobi.utils import (rescale_image, mobify_image,
write_font_record)
from calibre.ebooks import generate_masthead
from calibre.ebooks.oeb.base import OEB_RASTER_IMAGES
# Smallest valid transparent 1x1 GIF. Substituted for image records that end
# up unreferenced, so record numbering stays intact while the payload shrinks.
PLACEHOLDER_GIF = b'GIF89a\x01\x00\x01\x00\x80\x00\x00\x00\x00\x00\xff\xff\xff!\xf9\x04\x01\x00\x00\x00\x00,\x00\x00\x00\x00\x01\x00\x01\x00@\x02\x01D\x00;'


class Resources(object):

    """Collect the image and font resource records for a MOBI file.

    Record 0 is reserved for the masthead of periodicals. ``item_map``
    maps a manifest href to its 1-based record index, while
    ``cover_offset``/``thumbnail_offset`` are 0-based record offsets.
    """

    def __init__(self, oeb, opts, is_periodical, add_fonts=False,
            process_images=True):
        self.oeb, self.log, self.opts = oeb, oeb.log, opts
        self.is_periodical = is_periodical
        self.process_images = process_images
        self.item_map = {}        # href -> 1-based record index
        self.records = []         # raw record payloads, in output order
        self.mime_map = {}        # href -> detected image mime type
        self.masthead_offset = 0  # masthead always occupies record 0
        self.used_image_indices = set()  # records that must never be stripped
        self.image_indices = set()       # all records holding image data
        self.cover_offset = self.thumbnail_offset = None
        self.add_resources(add_fonts)

    def process_image(self, data):
        """Return *data* converted for MOBI use.

        When image processing is disabled the data passes through
        untouched; otherwise it is either only format-converted
        (``mobi_keep_original_images``) or rescaled as well.
        """
        if not self.process_images:
            return data
        return (mobify_image(data) if self.opts.mobi_keep_original_images else
                rescale_image(data))

    def add_resources(self, add_fonts):
        """Walk the OEB manifest and build the resource records.

        Handles the masthead (reserved record 0 for periodicals), all
        raster images (tracking cover and a generated thumbnail
        separately) and, when *add_fonts* is true, embedded TTF/OTF
        fonts.
        """
        oeb = self.oeb
        oeb.logger.info('Serializing resources...')
        index = 1

        mh_href = None
        if 'masthead' in oeb.guide and oeb.guide['masthead'].href:
            # The masthead is one of the manifest images: reserve record 0
            # now and fill it in while walking the manifest below.
            mh_href = oeb.guide['masthead'].href
            self.records.append(None)
            index += 1
            self.used_image_indices.add(0)
            self.image_indices.add(0)
        elif self.is_periodical:
            # Generate a default masthead
            data = generate_masthead(unicode(self.oeb.metadata['title'][0]))
            self.records.append(data)
            self.used_image_indices.add(0)
            self.image_indices.add(0)
            index += 1

        cover_href = self.cover_offset = self.thumbnail_offset = None
        if (oeb.metadata.cover and
                unicode(oeb.metadata.cover[0]) in oeb.manifest.ids):
            cover_id = unicode(oeb.metadata.cover[0])
            item = oeb.manifest.ids[cover_id]
            cover_href = item.href

        for item in self.oeb.manifest.values():
            if item.media_type not in OEB_RASTER_IMAGES: continue
            try:
                data = self.process_image(item.data)
            except:
                self.log.warn('Bad image file %r' % item.href)
                continue
            else:
                if mh_href and item.href == mh_href:
                    # Fill the reserved masthead slot; the masthead is not
                    # entered into item_map.
                    self.records[0] = data
                    continue

                self.image_indices.add(len(self.records))
                self.records.append(data)
                self.item_map[item.href] = index
                self.mime_map[item.href] = 'image/%s'%imghdr.what(None, data)
                index += 1

                if cover_href and item.href == cover_href:
                    # Record offsets are 0-based while item_map is 1-based.
                    self.cover_offset = self.item_map[item.href] - 1
                    self.used_image_indices.add(self.cover_offset)
                    try:
                        # Thumbnail is generated from the original (not the
                        # already processed) cover data.
                        data = rescale_image(item.data, dimen=MAX_THUMB_DIMEN,
                            maxsizeb=MAX_THUMB_SIZE)
                    except:
                        self.log.warn('Failed to generate thumbnail')
                    else:
                        self.image_indices.add(len(self.records))
                        self.records.append(data)
                        self.thumbnail_offset = index - 1
                        self.used_image_indices.add(self.thumbnail_offset)
                        index += 1
            finally:
                # Keep memory usage bounded for books with many images.
                item.unload_data_from_memory()

        if add_fonts:
            for item in self.oeb.manifest.values():
                if item.href and item.href.rpartition('.')[-1].lower() in {
                        'ttf', 'otf'} and isinstance(item.data, bytes):
                    self.records.append(write_font_record(item.data))
                    self.item_map[item.href] = len(self.records)

    def add_extra_images(self):
        '''
        Add any images that were created after the call to add_resources()
        '''
        for item in self.oeb.manifest.values():
            if (item.media_type not in OEB_RASTER_IMAGES or item.href in
                    self.item_map): continue
            try:
                data = self.process_image(item.data)
            except:
                self.log.warn('Bad image file %r' % item.href)
            else:
                self.records.append(data)
                self.item_map[item.href] = len(self.records)
            finally:
                item.unload_data_from_memory()

    def serialize(self, records, used_images):
        """Append this object's records to *records*.

        Image records whose href is not in *used_images* and that are not
        otherwise required (masthead, cover, thumbnail) are replaced by a
        1x1 placeholder GIF to save space while preserving indices.
        """
        used_image_indices = self.used_image_indices | {
                v-1 for k, v in self.item_map.iteritems() if k in used_images}
        for i in self.image_indices-used_image_indices:
            self.records[i] = PLACEHOLDER_GIF
        records.extend(self.records)

    def __bool__(self):
        # Truthy once any resource record has been collected.
        return bool(self.records)
    __nonzero__ = __bool__
package com.wix.pay.fatzebra
import com.wix.pay.fatzebra.model.CaptureRequest
import org.json4s.DefaultFormats
import org.json4s.native.Serialization
/** JSON (de)serialization of [[CaptureRequest]] payloads via json4s native. */
object CaptureRequestParser {
  private implicit val formats = DefaultFormats

  /** Deserializes a capture request from its JSON representation. */
  def parse(str: String): CaptureRequest =
    Serialization.read[CaptureRequest](str)

  /** Serializes a capture request to its JSON representation. */
  def stringify(obj: CaptureRequest): String =
    Serialization.write(obj)
}
|
package com.udacity.vehicles.api;
import com.google.common.collect.Lists;
import com.udacity.vehicles.VehiclesApiApplication;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.CrudRepository;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.web.servlet.MockMvc;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Objects;
import java.util.ResourceBundle;
import java.util.stream.Collectors;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Common base class for vehicles API tests. Boots the full application on a
 * random port with the address preload job disabled, auto-configures a
 * {@link MockMvc} instance, and offers helpers for loading test resources,
 * building URLs against the embedded server, pausing, and clearing
 * repositories between tests.
 */
@ExtendWith(SpringExtension.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT,
        classes = VehiclesApiApplication.class,
        properties = {
                "job.autorun.enabled=false" // don't run preload of addresses into database
        })
@AutoConfigureMockMvc
public abstract class AbstractTest implements ITestResource {

    @Autowired
    protected MockMvc mockMvc;

    // Random port the embedded server listens on; consumed by getUrl().
    @LocalServerPort
    private Integer port;

    // Test configuration values, loaded once from the "test" bundle.
    protected ResourceBundle bundle;

    public AbstractTest() {
        // NOTE(review): getResourceBundle(String) is presumably inherited
        // from ITestResource -- confirm against that interface.
        bundle = getResourceBundle("test");
    }

    @Override
    public ResourceBundle getResourceBundle() {
        return bundle;
    }

    @Disabled
    @Test
    void printSystemProperties() {
        //remove @Disabled to see System properties
        System.getProperties().forEach((key, value) -> System.out.println(key+" - "+value));
    }

    @Disabled
    @Test
    void printEnvironmentProperties() {
        // Remove @Disabled to see environment properties
        System.getenv().forEach((key, value) -> System.out.println(key+" - "+value));
    }

    /**
     * Load a resource file.
     * @param filename - resource file name
     * @return file contents as byte array; empty array when an I/O error occurs
     */
    protected byte[] loadFileBytes(String filename) {
        ClassLoader classLoader = getClass().getClassLoader();
        byte[] fileBytes = new byte[0];
        try (InputStream inputStream = classLoader.getResourceAsStream(filename)) {
            // requireNonNull turns a missing resource into an NPE rather
            // than silently returning an empty array.
            fileBytes = Objects.requireNonNull(inputStream).readAllBytes();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return fileBytes;
    }

    /**
     * Load a resource file.
     * @param inputStream - file input stream; may be null, yielding an empty array
     * @return file contents as byte array
     */
    protected byte[] loadFileBytes(InputStream inputStream) {
        byte[] fileBytes = new byte[0];
        try {
            if (inputStream != null) {
                fileBytes = inputStream.readAllBytes();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return fileBytes;
    }

    /**
     * Load a resource file.
     * @param filename - resource file name
     * @return file contents as list of strings; empty list on I/O error
     */
    protected List<String> loadFileLines(String filename) {
        ClassLoader classLoader = getClass().getClassLoader();
        List<String> lines = Lists.newArrayList();
        try (InputStream inputStream = classLoader.getResourceAsStream(filename)) {
            lines = loadFileLines(inputStream);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return lines;
    }

    /**
     * Load a resource file.
     * @param inputStream - file input stream; must not be null
     * @return file contents as list of strings (UTF-8); empty list on I/O error
     */
    protected List<String> loadFileLines(InputStream inputStream) {
        List<String> lines = Lists.newArrayList();
        try (InputStreamReader streamReader = new InputStreamReader(Objects.requireNonNull(inputStream), StandardCharsets.UTF_8);
                BufferedReader reader = new BufferedReader(streamReader)) {
            lines = reader.lines().collect(Collectors.toList());
        } catch (IOException e) {
            e.printStackTrace();
        }
        return lines;
    }

    @BeforeAll
    public static void beforeAll() {
        // no-op
    }

    @AfterAll
    public static void afterAll() {
        // no-op
    }

    /**
     * Build an absolute URL pointing at the embedded test server.
     * @param path - request path
     * @param query - query string, may be null
     * @param fragment - fragment, may be null
     * @return the URL as a string; the test fails on a syntax error
     */
    protected String getUrl(String path,
                            String query,
                            String fragment) {
        URI uri = null;
        try {
            uri = new URI("http", null, "localhost", port, path, query, fragment);
        } catch (URISyntaxException e) {
            e.printStackTrace();
            // fail() throws, so uri cannot be null at the return below.
            fail();
        }
        return uri.toString();
    }

    /** Build an absolute URL with only a path component. */
    protected String getUrl(String path) {
        return getUrl(path, null, null);
    }

    /**
     * Pause test for specified time.
     * @param timeout the length of time to sleep in milliseconds
     * @throws InterruptedException see {@link Thread#sleep(long)}
     */
    protected void pause(int timeout) throws InterruptedException {
        Thread.sleep(timeout);
    }

    /**
     * Pause test for specified time.
     * @param timeout resource key for the length of time to sleep in milliseconds
     * @throws InterruptedException see {@link Thread#sleep(long)}
     */
    protected void pause(String timeout) throws InterruptedException {
        pause(timeout, 1);
    }

    /**
     * Pause test for specified time.
     * @param timeout resource key for the length of time to sleep in milliseconds
     * @param multiplier number of multiples of timeout to wait
     * @throws InterruptedException see {@link Thread#sleep(long)}
     */
    protected void pause(String timeout, int multiplier) throws InterruptedException {
        // NOTE(review): getResourceInt presumably resolves the key via the
        // bundle -- confirm against ITestResource.
        pause(getResourceInt(timeout) * multiplier);
    }

    /**
     * Pause test using the default end-of-test timeout.
     * @throws InterruptedException see {@link Thread#sleep(long)}
     */
    protected void pause() throws InterruptedException {
        pause("defaultEoTTimeout");
    }

    /** Delete all entities and assert the repository is really empty. */
    protected void clearRepository(CrudRepository<?, ? extends Number> repository) {
        repository.deleteAll();
        assertEquals(0, repository.count(),() -> "Repository not empty: " + repository.getClass().getSimpleName());
        assertFalse(repository.findAll().iterator().hasNext(), () -> "Repository not empty: " + repository.getClass().getSimpleName());
    }

    /**
     * Delete all entities and assert the repository is really empty.
     * NOTE(review): JpaRepository extends CrudRepository, so this overload
     * duplicates the one above -- candidate for removal.
     */
    protected void clearRepository(JpaRepository<?, ? extends Number> repository) {
        repository.deleteAll();
        assertEquals(0, repository.count(),() -> "Repository not empty: " + repository.getClass().getSimpleName());
        assertFalse(repository.findAll().iterator().hasNext(), () -> "Repository not empty: " + repository.getClass().getSimpleName());
    }
}
|
<filename>nuitka/codegen/CallCodes.py
# Copyright 2020, <NAME>, mailto:<EMAIL>
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Code generation for calls.
The different kinds of calls get dedicated code. Most notable, calls with
only positional arguments, are attempted through helpers that might be
able to execute them without creating the argument dictionary at all.
"""
from .CodeHelpers import (
generateChildExpressionCode,
generateExpressionCode,
withObjectCodeTemporaryAssignment,
)
from .ConstantCodes import getConstantAccess
from .ErrorCodes import getErrorExitCode
from .LineNumberCodes import emitLineNumberUpdateCode
from .templates.CodeTemplatesCalls import (
template_call_function_with_args_decl,
template_call_function_with_args_impl,
template_call_method_with_args_decl,
template_call_method_with_args_impl,
)
from .templates.CodeTemplatesModules import (
template_header_guard,
template_helper_impl_decl,
)
def _generateCallCodePosOnly(
    to_name, expression, called_name, called_attribute_name, emit, context
):
    """Generate code for a call that uses only positional arguments.

    Dispatches on the shape of the argument node: constant tuples are
    either unrolled element by element (when mutable, so each call gets
    fresh copies) or passed as a shared constant tuple; tuple-building
    expressions have their elements evaluated into separate temporaries;
    anything else goes through the generic positional-args helper. A
    non-None ``called_attribute_name`` selects the method-call variants.
    """
    # We have many variants for this to deal with, pylint: disable=too-many-branches

    assert called_name is not None
    # TODO: Not yet specialized for method calls.
    # assert called_attribute_name is None

    call_args = expression.getCallArgs()

    if call_args is None or call_args.isExpressionConstantRef():
        context.setCurrentSourceCodeReference(expression.getCompatibleSourceReference())

        # No argument node at all means an empty argument tuple.
        if call_args is not None:
            call_args_value = call_args.getConstant()
        else:
            call_args_value = ()

        assert type(call_args_value) is tuple

        if call_args is not None and call_args.isMutable():
            # Mutable constants must be copied per call, so each element
            # is materialized into its own temporary.
            call_arg_names = []

            for call_arg_element in call_args_value:
                call_arg_name = context.allocateTempName("call_arg_element")

                getConstantAccess(
                    to_name=call_arg_name,
                    constant=call_arg_element,
                    emit=emit,
                    context=context,
                )

                call_arg_names.append(call_arg_name)

            if called_attribute_name is None:
                getCallCodePosArgsQuick(
                    to_name=to_name,
                    called_name=called_name,
                    arg_names=call_arg_names,
                    needs_check=expression.mayRaiseException(BaseException),
                    emit=emit,
                    context=context,
                )
            else:
                _getInstanceCallCodePosArgsQuick(
                    to_name=to_name,
                    called_name=called_name,
                    called_attribute_name=called_attribute_name,
                    arg_names=call_arg_names,
                    needs_check=expression.mayRaiseException(BaseException),
                    emit=emit,
                    context=context,
                )
        elif call_args_value:
            # Immutable, non-empty constant arguments: pass the constant
            # tuple itself.
            if called_attribute_name is None:
                _getCallCodeFromTuple(
                    to_name=to_name,
                    called_name=called_name,
                    args_value=call_args_value,
                    needs_check=expression.mayRaiseException(BaseException),
                    emit=emit,
                    context=context,
                )
            else:
                _getInstanceCallCodeFromTuple(
                    to_name=to_name,
                    called_name=called_name,
                    called_attribute_name=called_attribute_name,
                    arg_tuple=context.getConstantCode(constant=call_args_value),
                    arg_size=len(call_args_value),
                    needs_check=expression.mayRaiseException(BaseException),
                    emit=emit,
                    context=context,
                )
        else:
            # Empty argument tuple: use the dedicated no-args helpers.
            if called_attribute_name is None:
                getCallCodeNoArgs(
                    to_name=to_name,
                    called_name=called_name,
                    needs_check=expression.mayRaiseException(BaseException),
                    emit=emit,
                    context=context,
                )
            else:
                _getInstanceCallCodeNoArgs(
                    to_name=to_name,
                    called_name=called_name,
                    called_attribute_name=called_attribute_name,
                    needs_check=expression.mayRaiseException(BaseException),
                    emit=emit,
                    context=context,
                )
    elif call_args.isExpressionMakeTuple():
        # Tuple construction in the call: evaluate the elements directly
        # into temporaries, never building the tuple object at all.
        call_arg_names = []

        for call_arg_element in call_args.getElements():
            call_arg_name = generateChildExpressionCode(
                child_name=call_args.getChildName() + "_element",
                expression=call_arg_element,
                emit=emit,
                context=context,
            )

            call_arg_names.append(call_arg_name)

        context.setCurrentSourceCodeReference(expression.getCompatibleSourceReference())

        if called_attribute_name is None:
            getCallCodePosArgsQuick(
                to_name=to_name,
                called_name=called_name,
                arg_names=call_arg_names,
                needs_check=expression.mayRaiseException(BaseException),
                emit=emit,
                context=context,
            )
        else:
            _getInstanceCallCodePosArgsQuick(
                to_name=to_name,
                called_name=called_name,
                called_attribute_name=called_attribute_name,
                arg_names=call_arg_names,
                needs_check=expression.mayRaiseException(BaseException),
                emit=emit,
                context=context,
            )
    else:
        # Generic case: the arguments are an arbitrary expression that
        # evaluates to a tuple object.
        args_name = generateChildExpressionCode(
            expression=call_args, emit=emit, context=context
        )

        context.setCurrentSourceCodeReference(expression.getCompatibleSourceReference())

        if called_attribute_name is None:
            _getCallCodePosArgs(
                to_name=to_name,
                called_name=called_name,
                args_name=args_name,
                needs_check=expression.mayRaiseException(BaseException),
                emit=emit,
                context=context,
            )
        else:
            _getInstanceCallCodePosArgs(
                to_name=to_name,
                called_name=called_name,
                called_attribute_name=called_attribute_name,
                args_name=args_name,
                needs_check=expression.mayRaiseException(BaseException),
                emit=emit,
                context=context,
            )
def _generateCallCodeKwOnly(
    to_name, expression, call_kw, called_name, called_attribute_name, emit, context
):
    """Generate code for a call that has only keyword arguments."""
    # TODO: Not yet specialized for method calls.
    assert called_name is not None
    assert called_attribute_name is None

    # Evaluate the keyword dictionary expression first.
    call_kw_name = generateChildExpressionCode(
        expression=call_kw, emit=emit, context=context
    )

    context.setCurrentSourceCodeReference(expression.getCompatibleSourceReference())

    _getCallCodeKeywordArgs(
        to_name=to_name,
        called_name=called_name,
        call_kw_name=call_kw_name,
        emit=emit,
        context=context,
    )
def generateCallCode(to_name, expression, emit, context):
    """Main entry point: generate code for a call expression.

    First detects the pattern of an attribute lookup that is immediately
    called (a method call), which can use combined lookup-and-call
    helpers; otherwise evaluates the called object normally. Then
    dispatches on the presence of positional and keyword arguments to
    the specialized generators.
    """
    # There is a whole lot of different cases, for each of which, we create
    # optimized code, constant, with and without positional or keyword arguments
    # each, so there is lots of branches involved.

    called = expression.getCalled()
    call_kw = expression.getCallKw()
    call_args = expression.getCallArgs()

    # TODO: Make this work for all cases. Currently, the method calls that do
    # a combined lookup and call, do a re-ordering of things, and therefore it
    # must be disabled until this is solved.
    if (
        called.isExpressionAttributeLookup()
        and not called.isExpressionAttributeLookupSpecial()
        and called.getAttributeName() not in ("__class__", "__dict__")
        and (
            call_args is None
            or not call_args.mayHaveSideEffects()
            or not called.mayHaveSideEffects()
        )
        and call_kw is None
    ):
        # Method-call pattern: evaluate only the object; the attribute
        # name is passed as a constant to the combined helpers.
        called_name = context.allocateTempName("called_instance")
        generateExpressionCode(
            to_name=called_name,
            expression=called.subnode_expression,
            emit=emit,
            context=context,
        )
        called_attribute_name = context.getConstantCode(
            constant=called.getAttributeName()
        )
    else:
        called_attribute_name = None

        called_name = generateChildExpressionCode(
            expression=called, emit=emit, context=context
        )

    with withObjectCodeTemporaryAssignment(
        to_name, "call_result", expression, emit, context
    ) as result_name:
        # No keyword arguments (or a constant empty dict): positional-only.
        if call_kw is None or (
            call_kw.isExpressionConstantRef() and call_kw.getConstant() == {}
        ):
            _generateCallCodePosOnly(
                to_name=result_name,
                called_name=called_name,
                called_attribute_name=called_attribute_name,
                expression=expression,
                emit=emit,
                context=context,
            )
        else:
            call_args = expression.getCallArgs()

            # No positional arguments (or a constant empty tuple): kw-only.
            if call_args is None or (
                call_args.isExpressionConstantRef() and call_args.getConstant() == ()
            ):
                _generateCallCodeKwOnly(
                    to_name=result_name,
                    called_name=called_name,
                    called_attribute_name=called_attribute_name,
                    expression=expression,
                    call_kw=call_kw,
                    emit=emit,
                    context=context,
                )
            else:
                # Both positional and keyword arguments: fully generic call.
                call_args_name = generateChildExpressionCode(
                    expression=call_args, emit=emit, context=context
                )

                call_kw_name = generateChildExpressionCode(
                    expression=call_kw, emit=emit, context=context
                )

                context.setCurrentSourceCodeReference(
                    expression.getCompatibleSourceReference()
                )

                _getCallCodePosKeywordArgs(
                    to_name=result_name,
                    called_name=called_name,
                    call_args_name=call_args_name,
                    call_kw_name=call_kw_name,
                    emit=emit,
                    context=context,
                )
def getCallCodeNoArgs(to_name, called_name, needs_check, emit, context):
    """Emit the code for calling an object with zero arguments."""
    emitLineNumberUpdateCode(emit, context)

    call_code = "%s = CALL_FUNCTION_NO_ARGS(%s);" % (to_name, called_name)
    emit(call_code)

    getErrorExitCode(
        context=context,
        needs_check=needs_check,
        emit=emit,
        release_name=called_name,
        check_name=to_name,
    )

    context.addCleanupTempName(to_name)
def _getInstanceCallCodeNoArgs(
    to_name, called_name, called_attribute_name, needs_check, emit, context
):
    """Emit the code for a method call with zero arguments, combining the
    attribute lookup and the call in one helper."""
    emitLineNumberUpdateCode(emit, context)

    call_code = "%s = CALL_METHOD_NO_ARGS(%s, %s);" % (
        to_name,
        called_name,
        called_attribute_name,
    )
    emit(call_code)

    getErrorExitCode(
        context=context,
        needs_check=needs_check,
        emit=emit,
        release_names=(called_name, called_attribute_name),
        check_name=to_name,
    )

    context.addCleanupTempName(to_name)
# Outside helper code relies on some quick call to be present.
quick_calls_used = {2, 3, 4, 5}
# Arities of method ("instance") quick calls actually emitted; grows as
# code generation encounters them.
quick_instance_calls_used = set()
def _getInstanceCallCodePosArgsQuick(
    to_name, called_name, called_attribute_name, arg_names, needs_check, emit, context
):
    """Emit a method call with a fixed, small number of positional
    arguments through the ``CALL_METHOD_WITH_ARGSn`` helpers.

    Records the arity so the matching helper gets generated later.
    """
    arg_size = len(arg_names)
    quick_instance_calls_used.add(arg_size)

    # For 0 arguments, NOARGS is supposed to be used.
    assert arg_size > 0

    emitLineNumberUpdateCode(emit, context)

    # The argument temporaries are packed into a C array on the stack.
    emit(
        """\
{
    PyObject *call_args[] = {%s};
    %s = CALL_METHOD_WITH_ARGS%d(%s, %s, call_args);
}
"""
        % (
            ", ".join(str(arg_name) for arg_name in arg_names),
            to_name,
            arg_size,
            called_name,
            called_attribute_name,
        )
    )

    getErrorExitCode(
        check_name=to_name,
        release_names=[called_name] + arg_names,
        needs_check=needs_check,
        emit=emit,
        context=context,
    )

    context.addCleanupTempName(to_name)
def getCallCodePosArgsQuick(
    to_name, called_name, arg_names, needs_check, emit, context
):
    """Emit a plain call with a fixed, small number of positional
    arguments already held in temporaries.

    Uses a dedicated single-argument helper when possible, otherwise the
    ``CALL_FUNCTION_WITH_ARGSn`` family (recording the arity so the
    helper gets generated).
    """
    arg_size = len(arg_names)

    # For 0 arguments, NOARGS is supposed to be used.
    assert arg_size > 0

    emitLineNumberUpdateCode(emit, context)

    # For one argument, we have a dedicated helper function that might
    # be more efficient.
    if arg_size == 1:
        emit(
            """%s = CALL_FUNCTION_WITH_SINGLE_ARG(%s, %s);"""
            % (to_name, called_name, arg_names[0])
        )
    else:
        quick_calls_used.add(arg_size)

        # Pack the argument temporaries into a C array on the stack.
        emit(
            """\
{
    PyObject *call_args[] = {%s};
    %s = CALL_FUNCTION_WITH_ARGS%d(%s, call_args);
}
"""
            % (
                ", ".join(str(arg_name) for arg_name in arg_names),
                to_name,
                arg_size,
                called_name,
            )
        )

    getErrorExitCode(
        check_name=to_name,
        release_names=[called_name] + arg_names,
        needs_check=needs_check,
        emit=emit,
        context=context,
    )

    context.addCleanupTempName(to_name)
def _getInstanceCallCodeFromTuple(
    to_name,
    called_name,
    called_attribute_name,
    arg_tuple,
    arg_size,
    needs_check,
    emit,
    context,
):
    """Emit a method call whose arguments come from a constant tuple,
    passing a pointer into the tuple's item storage to avoid copying."""
    quick_instance_calls_used.add(arg_size)

    # For 0 arguments, NOARGS is supposed to be used.
    assert arg_size > 0

    emitLineNumberUpdateCode(emit, context)

    emit(
        """\
%s = CALL_METHOD_WITH_ARGS%d(%s, %s, &PyTuple_GET_ITEM(%s, 0));
"""
        % (to_name, arg_size, called_name, called_attribute_name, arg_tuple)
    )

    # The constant tuple itself needs no release, only the temporaries.
    getErrorExitCode(
        check_name=to_name,
        release_names=(called_name, called_attribute_name),
        needs_check=needs_check,
        emit=emit,
        context=context,
    )

    context.addCleanupTempName(to_name)
def _getCallCodeFromTuple(to_name, called_name, args_value, needs_check, emit, context):
    """Emit a plain call whose positional arguments are an immutable
    constant tuple, passing a pointer into the tuple's item storage."""
    arg_size = len(args_value)

    # For 0 arguments, NOARGS is supposed to be used.
    assert arg_size > 0

    emitLineNumberUpdateCode(emit, context)

    if arg_size == 1:
        # Dedicated single-argument helper avoids the array indirection.
        arg_name = context.getConstantCode(args_value[0])

        emit(
            """%s = CALL_FUNCTION_WITH_SINGLE_ARG(%s, %s);"""
            % (to_name, called_name, arg_name)
        )
    else:
        # TODO: Having to use a full tuple is wasteful, a PyObject ** would do.
        arg_tuple_name = context.getConstantCode(constant=args_value)

        quick_calls_used.add(arg_size)

        emit(
            """\
%s = CALL_FUNCTION_WITH_ARGS%d(%s, &PyTuple_GET_ITEM(%s, 0));
"""
            % (to_name, arg_size, called_name, arg_tuple_name)
        )

    getErrorExitCode(
        check_name=to_name,
        release_name=called_name,
        needs_check=needs_check,
        emit=emit,
        context=context,
    )

    context.addCleanupTempName(to_name)
def _getInstanceCallCodePosArgs(
    to_name, called_name, called_attribute_name, args_name, needs_check, emit, context
):
    """Emit a method call taking its positional arguments as a tuple object."""
    emitLineNumberUpdateCode(emit, context)

    call_code = "%s = CALL_METHOD_WITH_POSARGS(%s, %s, %s);" % (
        to_name,
        called_name,
        called_attribute_name,
        args_name,
    )
    emit(call_code)

    getErrorExitCode(
        context=context,
        needs_check=needs_check,
        emit=emit,
        release_names=(called_name, args_name),
        check_name=to_name,
    )

    context.addCleanupTempName(to_name)
def _getCallCodePosArgs(to_name, called_name, args_name, needs_check, emit, context):
    """Emit a plain call taking its positional arguments as a tuple object."""
    emitLineNumberUpdateCode(emit, context)

    call_code = "%s = CALL_FUNCTION_WITH_POSARGS(%s, %s);" % (
        to_name,
        called_name,
        args_name,
    )
    emit(call_code)

    getErrorExitCode(
        context=context,
        needs_check=needs_check,
        emit=emit,
        release_names=(called_name, args_name),
        check_name=to_name,
    )

    context.addCleanupTempName(to_name)
def _getCallCodeKeywordArgs(to_name, called_name, call_kw_name, emit, context):
    """Emit a call that only passes a keyword argument dictionary."""
    emitLineNumberUpdateCode(emit, context)

    call_code = "%s = CALL_FUNCTION_WITH_KEYARGS(%s, %s);" % (
        to_name,
        called_name,
        call_kw_name,
    )
    emit(call_code)

    getErrorExitCode(
        context=context,
        emit=emit,
        release_names=(called_name, call_kw_name),
        check_name=to_name,
    )

    context.addCleanupTempName(to_name)
def _getCallCodePosKeywordArgs(
    to_name, called_name, call_args_name, call_kw_name, emit, context
):
    """Emit the fully generic call with both a positional argument tuple
    and a keyword argument dictionary."""
    emitLineNumberUpdateCode(emit, context)

    call_code = "%s = CALL_FUNCTION(%s, %s, %s);" % (
        to_name,
        called_name,
        call_args_name,
        call_kw_name,
    )
    emit(call_code)

    getErrorExitCode(
        context=context,
        emit=emit,
        release_names=(called_name, call_args_name, call_kw_name),
        check_name=to_name,
    )

    context.addCleanupTempName(to_name)
def getCallsDecls():
    """Return the guarded header declaring every quick call helper used."""
    decls = [
        template_call_function_with_args_decl % {"args_count": count}
        for count in sorted(quick_calls_used | quick_instance_calls_used)
    ]

    decls.extend(
        template_call_method_with_args_decl % {"args_count": count}
        for count in sorted(quick_instance_calls_used)
    )

    return template_header_guard % {
        "header_body": "\n".join(decls),
        "header_guard_name": "__NUITKA_CALLS_H__",
    }
def getCallsCode():
    """Return the C implementations of every quick call helper used."""
    parts = [template_helper_impl_decl % {}]

    for count in sorted(quick_calls_used | quick_instance_calls_used):
        parts.append(template_call_function_with_args_impl % {"args_count": count})

    for count in sorted(quick_instance_calls_used):
        parts.append(template_call_method_with_args_impl % {"args_count": count})

    return "\n".join(parts)
|
#!/bin/sh
# Negative-path smoke requests against a locally running accounts/transactions
# API; each one is expected to be rejected. printf adds a newline after each
# response body for readable output.

# Transfer amount exceeding the allowed range.
curl -X POST http://127.0.0.1:8081/transactions --data 'sourceAccount=10000002&amount=1000000.7&destAccount=10000007' -H 'Authorization: 25184bc5947ed61556d5230a79394fdd43cdcc04'
printf "\n"
# Malformed source account. Single quotes keep "$123" literal: in the original
# double-quoted form the shell expanded the positional parameter "$1" (usually
# empty), silently turning the payload into "sourceAccount=23".
curl -X POST http://127.0.0.1:8081/transactions --data 'sourceAccount=$123&amount=1.7&destAccount=10000007' -H 'Authorization: 25184bc5947ed61556d5230a79394fdd43cdcc04'
printf "\n"
# Unknown form field name ("currencyzz").
curl -X POST http://127.0.0.1:8081/accounts --data 'currencyzz=USD' -H 'Authorization: 25184bc5947ed61556d5230a79394fdd43cdcc04'
printf "\n"
# Unknown routes and methods.
curl -X GET http://127.0.0.1:8081/fofofof -H 'Authorization: 25184bc5947ed61556d5230a79394fdd43cdcc04'
printf "\n"
curl -X POST http://127.0.0.1:8081/fofofof -H 'Authorization: 25184bc5947ed61556d5230a79394fdd43cdcc04'
printf "\n"
curl -X POST http://127.0.0.1:8081/fofofof/123 -H 'Authorization: 25184bc5947ed61556d5230a79394fdd43cdcc04'
printf "\n"
curl -X GET http://127.0.0.1:8081/accounts/abra/dfdf -H 'Authorization: 25184bc5947ed61556d5230a79394fdd43cdcc04'
printf "\n"
|
<filename>src/modules/fpm.search.js
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
define([], factory(root));
} else if (typeof exports === 'object') {
module.exports = factory(root);
} else {
root.fpm_search = factory(root);
}
})(typeof global !== 'undefined' ? global : this.window || this.global, function (root) {
'use strict';
//
// Variables
//
var fpm_search = {}; // Object for public APIs
var supports = !!document.querySelector && !!root.addEventListener; // Feature test
var settings;
var el;
var tabElements;
// Default settings
var defaults = {
saveQueryCookie: false,
searchPanel: document.querySelector('#flypanels-search .searchpanel'),
onInit: function () {},
onDestroy: function () {},
onEmptySearchResult: function () {},
onSearchError: function () {},
onSearchSuccess: function () {},
};
//
// Methods
//
// Run a search against the configured JSON endpoint and render the result
// list into the search panel.
var executeSearch = function (query) {
	searchError('hide');

	// Reset any previous result before firing the request.
	settings.searchPanel.querySelector('.flypanels-searchresult').innerHTML = '';
	settings.searchPanel.querySelector('.resultinfo .query').innerHTML = query;
	settings.searchPanel.querySelector('.resultinfo .num').innerHTML = 0;

	// Endpoint URL is read from the search box markup; the query is appended.
	// NOTE(review): query is not URL-encoded -- consider encodeURIComponent.
	var jsonURL = settings.searchPanel.querySelector('.searchbox').getAttribute('data-searchurl');
	jsonURL = jsonURL + '&q=' + query;

	var request = new XMLHttpRequest();
	request.open('GET', jsonURL, true);

	request.onload = function () {
		if (request.status >= 200 && request.status < 400) {
			// Success!
			var response = parseJSON(request.response);
			if (response !== false) {
				var foundResults = response.Items.length;
				if (foundResults > 0) {
					// Optionally remember the query in a session cookie.
					if (settings.saveQueryCookie === true) {
						cookies.set('searchQuery', query, null, '/');
					}
					var output = buildResultsList(response.Items);

					// Render html
					settings.searchPanel.querySelector('.resultinfo .query').innerHTML = query;
					settings.searchPanel.querySelector('.resultinfo .num').innerHTML = foundResults;
					settings.searchPanel.querySelector('.flypanels-searchresult').innerHTML = output;

					// Refresh the focus-trap element list so it now includes
					// the freshly rendered result links.
					tabElements = document.querySelectorAll(
						'.flypanels-button-right, .flypanels-right .searchbutton, #flypanels-searchfield, .flypanels-searchresult ul li a'
					);

					searchProgress('hide');
					settings.searchPanel.querySelector('.resultinfo').removeAttribute('hidden');
					settings.searchPanel.querySelector('.resultinfo').setAttribute('aria-hidden', 'false');
					settings.searchPanel.querySelector('.flypanels-searchresult').removeAttribute('hidden');
					settings.searchPanel.querySelector('.flypanels-searchresult').setAttribute('aria-hidden', 'false');
					hook('onSearchSuccess');
				} else {
					// Zero hits: forget the stored query, show the error box.
					hook('onEmptySearchResult');
					if (settings.saveQueryCookie === true) {
						cookies.remove('searchQuery', '/');
					}
					searchProgress('hide');
					searchError('show');
				}
			} else {
				// Unparseable JSON response; handled like an empty result.
				hook('onEmptySearchResult');
				if (settings.saveQueryCookie === true) {
					cookies.remove('searchQuery', '/');
				}
				searchProgress('hide');
				searchError('show');
			}
		} else {
			// We reached our target server, but it returned an error
			searchError('show');
			searchProgress('hide');
			hook('onSearchError');
		}
	};

	request.onerror = function () {
		// There was a connection error of some sort
		// NOTE(review): unlike the other failure paths this neither hides
		// the progress state nor fires the onSearchError hook -- confirm.
		searchError('show');
	};

	request.send();
};
// Render search hits as an HTML list. Pages and documents only differ in the
// icon class, so the two former copy-pasted branches are collapsed into one.
// Also closes each <li> (the original emitted unbalanced list items and
// relied on the browser auto-closing them), and iterates with a numeric
// index instead of for...in, which would also visit inherited/extra keys.
// NOTE(review): Header/LinkUrl are interpolated without HTML escaping --
// acceptable only if the search backend is trusted; confirm.
var buildResultsList = function (results) {
	var output = '<ul>';
	for (var i = 0; i < results.length; i++) {
		var icon = results[i].Type === 'Page' ? 'page' : 'doc';
		output +=
			'<li><a href="' + results[i].LinkUrl + '"><span class="type"><i class="fa ' + icon + '"></i></span><span class="link">' + results[i].Header + '</span></a></li>';
	}
	output += '</ul>';
	return output;
};
// Safely parse a JSON string. JSON.parse accepts primitives ('123',
// 'false', 'null') without throwing, so the result is type-checked and
// only real (non-null) objects are returned; everything else yields
// false after a console warning.
var parseJSON = function (jsonString) {
	try {
		var parsed = JSON.parse(jsonString);
		if (parsed !== null && typeof parsed === 'object') {
			return parsed;
		}
	} catch (ignore) {}
	console.warn('Error parsing JSON file');
	return false;
};
// Show or hide the search error message ('hide' hides, anything else shows),
// keeping the hidden attribute and aria-hidden in sync.
var searchError = function (state) {
	var errorBox = settings.searchPanel.querySelector('.errormsg');
	if (state === 'hide') {
		errorBox.setAttribute('hidden', '');
		errorBox.setAttribute('aria-hidden', 'true');
	} else {
		errorBox.removeAttribute('hidden');
		errorBox.setAttribute('aria-hidden', 'false');
	}
};
// Show or hide the search progress state.
// NOTE(review): this toggles '.errormsg', exactly like searchError() does;
// it looks like a copy-paste slip and was probably meant to target a
// progress/spinner element -- confirm against the panel markup.
var searchProgress = function (state) {
	if (state === 'hide') {
		settings.searchPanel.querySelector('.errormsg').setAttribute('hidden', '');
	} else {
		settings.searchPanel.querySelector('.errormsg').removeAttribute('hidden');
	}
};
// Cookie helper object (based on the well-known MDN document.cookie
// framework): get/set/remove/has/keys over document.cookie.
var cookies = {
	// Return the decoded value of cookie sKey, or null when absent.
	get: function (sKey) {
		if (!sKey) {
			return null;
		}
		return (
			decodeURIComponent(
				document.cookie.replace(
					// Extract "sKey=value" from the cookie string; regex
					// metacharacters in the key are escaped first.
					new RegExp('(?:(?:^|.*;)\\s*' + encodeURIComponent(sKey).replace(/[\-\.\+\*]/g, '\\$&') + '\\s*\\=\\s*([^;]*).*$)|^.*$'),
					'$1'
				)
			) || null
		);
	},
	// Set cookie sKey=sValue. vEnd may be a max-age in seconds (Number,
	// Infinity for "never"), an expires string, or a Date. Returns false
	// for missing or reserved keys.
	set: function (sKey, sValue, vEnd, sPath, sDomain, bSecure) {
		if (!sKey || /^(?:expires|max\-age|path|domain|secure)$/i.test(sKey)) {
			return false;
		}
		var sExpires = '';
		if (vEnd) {
			switch (vEnd.constructor) {
				case Number:
					sExpires = vEnd === Infinity ? '; expires=Fri, 31 Dec 9999 23:59:59 GMT' : '; max-age=' + vEnd;
					break;
				case String:
					sExpires = '; expires=' + vEnd;
					break;
				case Date:
					sExpires = '; expires=' + vEnd.toUTCString();
					break;
			}
		}
		document.cookie =
			encodeURIComponent(sKey) +
			'=' +
			encodeURIComponent(sValue) +
			sExpires +
			(sDomain ? '; domain=' + sDomain : '') +
			(sPath ? '; path=' + sPath : '') +
			(bSecure ? '; secure' : '');
		return true;
	},
	// Remove cookie sKey by expiring it in the past. Path/domain must match
	// the ones used when setting. Returns false when the cookie is absent.
	remove: function (sKey, sPath, sDomain) {
		if (!this.has(sKey)) {
			return false;
		}
		document.cookie =
			encodeURIComponent(sKey) + '=; expires=Thu, 01 Jan 1970 00:00:00 GMT' + (sDomain ? '; domain=' + sDomain : '') + (sPath ? '; path=' + sPath : '');
		return true;
	},
	// True when a cookie named sKey exists.
	has: function (sKey) {
		if (!sKey) {
			return false;
		}
		return new RegExp('(?:^|;\\s*)' + encodeURIComponent(sKey).replace(/[\-\.\+\*]/g, '\\$&') + '\\s*\\=').test(document.cookie);
	},
	// Return the decoded names of all cookies.
	keys: function () {
		var aKeys = document.cookie.replace(/((?:^|\s*;)[^\=]+)(?=;|$)|^\s*|\s*(?:\=[^;]*)?(?:\1|$)/g, '').split(/\s*(?:\=[^;]*)?;\s*/);
		for (var nLen = aKeys.length, nIdx = 0; nIdx < nLen; nIdx++) {
			aKeys[nIdx] = decodeURIComponent(aKeys[nIdx]);
		}
		return aKeys;
	},
};
var isAndroid = function () {
    // True when the user-agent string identifies an Android device.
    return navigator.userAgent.toLowerCase().indexOf('android') > -1;
};
var isIOS = function () {
    // True when the user-agent string identifies an iPhone, iPad or iPod.
    var ua = navigator.userAgent;
    return Boolean(ua.match(/iPhone/i) || ua.match(/iPad/i) || ua.match(/iPod/i));
};
var initTabNavigation = function () {
    // Lazily resolve the focusable elements inside the search panel.
    tabElements = tabElements || document.querySelectorAll('#flypanels-searchfield, #flypanels-search .searchbutton');
    document.addEventListener('keydown', function (event) {
        // Only trap Tab while the panel is open and the search view visible.
        if (!hasClass(document.querySelector('body'), 'flypanels-open') ||
            !hasClass(document.querySelector('#flypanels-search'), 'visible')) {
            return;
        }
        if (event.key !== 'Tab') {
            return;
        }
        var idx = Array.prototype.indexOf.call(tabElements, event.target);
        var target;
        if (event.shiftKey) {
            // Shift+Tab: wrap from the first element to the last; unknown
            // targets (idx === -1) fall back to the first element.
            if (idx === 0) {
                target = tabElements[tabElements.length - 1];
            } else if (idx - 1 < 0) {
                target = tabElements[0];
            } else {
                target = tabElements[idx - 1];
            }
        } else {
            // Tab: unknown targets and the last element wrap to the first.
            if (idx === -1 || idx + 1 === tabElements.length) {
                target = tabElements[0];
            } else {
                target = tabElements[idx + 1];
            }
        }
        target.focus();
        event.preventDefault();
    });
};
var hasClass = function (element, classname) {
    // True when the element supports classList and carries the class.
    return typeof element.classList !== 'undefined' && element.classList.contains(classname);
};
/**
* Callback hooks.
* Usage: In the defaults object specify a callback function:
* hookName: function() {}
* Then somewhere in the plugin trigger the callback:
* hook('hookName');
*/
var hook = function (hookName) {
    // Invoke a user-supplied callback from settings, if defined, with `this`
    // bound to the container element the plugin operates on.
    var callback = settings[hookName];
    if (callback !== undefined) {
        callback.call(el);
    }
};
/**
* Merge defaults with user options
* @private
* @param {Object} defaults Default settings
* @param {Object} options User options
* @returns {Object} Merged values of defaults and options
*/
/**
 * Merge defaults with user options into a new object; properties in
 * `options` win over those in `defaults`. Neither argument is mutated.
 * @private
 * @param {Object} defaults Default settings
 * @param {Object} options User options
 * @returns {Object} Merged values of defaults and options
 */
var extend = function (defaults, options) {
    var merged = {};
    Object.keys(defaults).forEach(function (prop) {
        merged[prop] = defaults[prop];
    });
    Object.keys(options).forEach(function (prop) {
        merged[prop] = options[prop];
    });
    return merged;
};
/**
* A simple forEach() implementation for Arrays, Objects and NodeLists
* @private
* @param {Array|Object|NodeList} collection Collection of items to iterate
* @param {Function} callback Callback function for each iteration
* @param {Array|Object|NodeList} scope Object/NodeList/Array that forEach is iterating over (aka `this`)
*/
/**
 * Iterate an Array, Object or NodeList, invoking `callback(value, key,
 * collection)` for each entry with `this` set to `scope`.
 * @private
 */
var forEach = function (collection, callback, scope) {
    var isPlainObject = Object.prototype.toString.call(collection) === '[object Object]';
    if (isPlainObject) {
        // Objects: own enumerable properties only.
        Object.keys(collection).forEach(function (prop) {
            callback.call(scope, collection[prop], prop, collection);
        });
    } else {
        // Arrays / NodeLists: visit every index (including holes), like the
        // original indexed loop.
        for (var index = 0, total = collection.length; index < total; index++) {
            callback.call(scope, collection[index], index, collection);
        }
    }
};
/**
* Destroy the current initialization.
* @public
*/
/**
 * Destroy the current initialization: remove the init class, detach the
 * click handler, fire the onDestroy hook and reset state.
 * @public
 */
fpm_search.destroy = function () {
    // If plugin isn't already initialized, stop
    if (!settings) {
        return;
    }
    // Remove init class for conditional CSS
    document.documentElement.classList.remove(settings.initClass);
    // @todo Undo any other init functions...
    // Remove event listeners
    document.removeEventListener('click', eventHandler, false);
    // BUGFIX: fire the destroy hook *before* clearing settings — hook() reads
    // settings[hookName], so calling it after `settings = null` threw a
    // TypeError and the user's onDestroy callback never ran.
    hook('onDestroy');
    // Reset variables
    settings = null;
};
/**
* Initialize Plugin
* @public
* @param {Object} options User settings
*/
/**
 * Initialize Plugin: merge options, wire up the search button / Enter key,
 * optionally replay a cookie-saved query, and set up Tab trapping.
 * @public
 * @param {Object} options User settings
 */
fpm_search.init = function (options) {
    // feature test
    if (!supports) {
        return;
    }
    // Destroy any existing initializations
    fpm_search.destroy();
    // Merge user options with defaults
    settings = extend(defaults, options || {});
    el = document.querySelector(settings.container);
    // Touch devices get a dedicated class on the result list.
    if (isAndroid() || isIOS()) {
        document.querySelector('.flypanels-searchresult').classList.add('touch');
    }
    // Clicking the search button runs a search on the input's value.
    settings.searchPanel.querySelector('.searchbutton').addEventListener('click', function (event) {
        event.preventDefault();
        searchProgress('show');
        executeSearch(settings.searchPanel.querySelector('.searchbox input').value);
    });
    // Enter in the search box triggers the same search.
    // NOTE(review): event.which is deprecated (13 === Enter); consider
    // event.key === 'Enter' when the supported-browser baseline allows it.
    settings.searchPanel.querySelector('.searchbox input').addEventListener('keydown', function (event) {
        if (event.which === 13) {
            searchProgress('show');
            executeSearch(this.value);
        }
    });
    // Replay the last query saved in a cookie, when that feature is enabled.
    if (cookies.has('searchQuery') === true && settings.saveQueryCookie === true) {
        executeSearch(cookies.get('searchQuery'));
    }
    initTabNavigation();
    hook('onInit');
};
//
// Public APIs
//
return fpm_search;
});
|
// Define the BaseConversionProtocol
/// Abstraction for converting the textual representation of a number
/// between two radices (the bases are passed as plain Ints).
protocol BaseConversionProtocol {
    func convertString(_ string: String, fromBase: Int, toBase: Int) -> String
}
// Implement the BaseConversionMock class conforming to BaseConversionProtocol
/// Test double for `BaseConversionProtocol`: counts invocations and returns
/// a canned value — it performs no real base conversion.
class BaseConversionMock: BaseConversionProtocol {
    /// Number of times `convertString(_:fromBase:toBase:)` has been called.
    var convertStringCallCount = 0

    func convertString(_ string: String, fromBase: Int, toBase: Int) -> String {
        convertStringCallCount += 1
        // Placeholder result; replace with a real implementation if the mock
        // ever needs to produce meaningful conversions.
        return "ConvertedString"
    }
}
package back_tracking;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.LinkedList;
/**
*
* @author minchoba
* 백준 2023번: 신기한 소수
*
* @see https://www.acmicpc.net/problem/2023/
*
*/
public class Boj2023 {
    private static final char NEW_LINE = '\n';

    /** Target number of digits, read from stdin. */
    private static int N = 0;
    /** Digits of the candidate currently being built, most significant first. */
    private static LinkedList<Character> res = null;
    /** Accumulates every answer so the output is written in one go. */
    private static StringBuilder sb = new StringBuilder();

    public static void main(String[] args) throws Exception {
        // Read N through a buffered reader.
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        N = Integer.parseInt(br.readLine());

        // Only 2, 3, 5 and 7 can start a "curious prime": every prefix —
        // including the single leading digit — must itself be prime.
        boolean[] isPrime = new boolean[10];
        isPrime[2] = isPrime[3] = isPrime[5] = isPrime[7] = true;

        for (int i = 0; i < 9; i++) {
            if (isPrime[i]) {
                res = new LinkedList<>(); // typed instead of the raw LinkedList
                res.add((char) (i + '0')); // seed the search with digit i
                dfs(i); // start backtracking
            }
        }
        System.out.println(sb.toString()); // print all results at once
    }

    /**
     * Backtracking: append one digit at a time, recursing only while the
     * prefix stays prime, until the number has N digits.
     */
    private static void dfs(int depth) {
        if (res.size() == N) { // built an N-digit curious prime
            for (int i = 0; i < N; i++) {
                sb.append(res.get(i));
            }
            sb.append(NEW_LINE);
            return;
        }

        for (int next = 0; next < 10; next++) { // try every possible next digit
            res.add((char) (next + '0'));
            if (isPrime()) {
                dfs(next); // prefix is prime: go one digit deeper
            }
            res.removeLast(); // undo the choice before trying the next digit
        }
    }

    /**
     * Checks whether the number currently held in {@code res} is prime.
     * The value is rebuilt with Horner's scheme (no Math.pow), and trial
     * division stops at sqrt(num) instead of num/2 — sufficient and much
     * faster for large prefixes.
     */
    private static boolean isPrime() {
        int leng = res.size();
        int num = 0;
        for (int i = 0; i < leng; i++) {
            num = num * 10 + (res.get(i) - '0');
        }

        for (int i = 2; (long) i * i <= num; i++) {
            if (num % i == 0) {
                return false; // found a divisor: not prime
            }
        }
        return true;
    }
}
|
<filename>src/SimpleLogger.ts
/**
 * Tiny console logger with a fixed prefix and a per-instance random id.
 * When `debug` is false every logging method is a no-op.
 */
export class SimpleLogger {
  // Short random id so concurrent logger instances can be told apart.
  private loggerID = Math.random()
    .toString(32)
    .slice(2, 6);

  constructor(private prefix: string, private debug: boolean) {}

  /** Prefix string prepended to every log line. */
  private getLogString() {
    return `🌟 react-admin-import-csv:: ${this.prefix} [${this.loggerID}] `;
  }

  /**
   * Return the given console method pre-bound with the logger prefix, or a
   * no-op when debug logging is disabled. Deduplicates the three getters
   * below, which previously carried identical copies of this body.
   */
  private bindConsole(fn: (...args: any[]) => void): (...args: any[]) => void {
    if (!this.debug) {
      return (...args: any[]) => {};
    }
    return fn.bind(console, this.getLogString());
  }

  public get log() {
    return this.bindConsole(console.log);
  }

  public get warn() {
    return this.bindConsole(console.warn);
  }

  public get error() {
    return this.bindConsole(console.error);
  }

  /** Enable or disable logging after construction. */
  setEnabled(logging: boolean) {
    this.debug = logging;
  }
}
|
'use strict';

/**
 * egg-city default config
 * @member Config#city
 * @property {String} SOME_KEY - some description
 */
exports.city = {
};

/**
 * proxyworker default config
 * @member Config#proxyworker
 * @property {Number} port - worker port (presumably the local listen port —
 *   confirm against the proxyworker plugin docs)
 */
exports.proxyworker = {
    port: 10086,
};
|
<filename>src/main/scala/nl/egulden/discordbot/services/discord/DiscordMessageSender.scala
package nl.egulden.discordbot.services.discord
import com.google.zxing.EncodeHintType
import javax.inject.Inject
import net.dv8tion.jda.api.{EmbedBuilder, JDA}
import net.dv8tion.jda.api.entities.{Message, MessageChannel, User => DiscordUser}
import net.glxn.qrgen.core.image.ImageType
import net.glxn.qrgen.javase.QRCode
import play.api.{Configuration, Logger}
/**
 * Helper around JDA for delivering bot messages: channel messages, mention
 * replies, private messages, and QR-code attachments for payment addresses.
 */
class DiscordMessageSender @Inject()(configuration: Configuration,
                                     jda: JDA) {

  val logger = Logger(getClass)

  /** `String.format` template producing a block-explorer link for an address. */
  def addressLinkTemplate: String = configuration.get[String]("app.linktemplates.address")

  /** Discord user id of the bot administrator. */
  def adminUserId: Long = configuration.get[Long]("discord.adminUserId")

  /** `String.format` template producing a block-explorer link for a transaction. */
  def transactionLinkTemplate: String = configuration.get[String]("app.linktemplates.transaction")

  def getAdminUser: DiscordUser = jda.getUserById(adminUserId)

  /** Private-message the administrator. */
  def sendToAdmin(text: String): Unit = pmToUser(getAdminUser, text)

  def sendInChannel(msg: Message, text: String): Unit =
    sendInChannel(msg.getChannel, text)

  def sendInChannel(channel: MessageChannel, text: String): Unit =
    channel.sendMessage(text).queue()

  /** Reply in-channel, prefixed with a mention of the original author. */
  def replyToMessage(msg: Message, reply: String): Unit =
    this.sendInChannel(msg, s"${msg.getAuthor.getAsMention} $reply")

  def pmToAuthor(msg: Message, text: String): Unit =
    pmToUser(msg.getAuthor, text)

  def pmToUser(discordUser: DiscordUser, text: String): Unit =
    discordUser.openPrivateChannel().queue(_.sendMessage(text).queue())

  /** Attach a QR code image for `address` together with an explorer link embed. */
  def sendAddressQrCode(channel: MessageChannel, address: String): Unit = {
    val qrFileName = s"$address.jpg"
    val paymentUri = s"egulden:$address"

    val qrFile = QRCode
      .from(paymentUri)
      .withSize(256, 256)
      .withHint(EncodeHintType.MARGIN, "1")
      .to(ImageType.JPG)
      .file()

    channel.sendFile(qrFile, qrFileName)
      .embed(new EmbedBuilder()
        .setImage(s"attachment://$qrFileName")
        .setDescription(String.format(addressLinkTemplate, address))
        .build())
      .queue()
  }
}
|
#!/usr/bin/env bash
# Implement polycubectl CLI autocompletion.
# Inspired by https://github.com/scop/bash-completion
# parse_yaml <file> <prefix>
# Flatten a simple (2-space-indented) YAML file into shell variable
# assignments: nested keys are joined with '_' and prefixed with $2, e.g.
#   a:
#     b: "v"    ->    <prefix>a_b="v"
# The output is intended to be eval'ed by the caller.
parse_yaml() {
    local prefix=$2
    # $fs is an unlikely field separator (ASCII 0x1C) placed between
    # indent / key / value in the intermediate sed output.
    local s='[[:space:]]*' w='[a-zA-Z0-9_]*' fs=$(echo @|tr @ '\034')
    # First sed expression matches quoted values, second matches bare values.
    sed -ne "s|^\($s\)\($w\)$s:$s\"\(.*\)\"$s\$|\1$fs\2$fs\3|p" \
        -e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" $1 |
    awk -F$fs '{
        indent = length($1)/2;
        vname[indent] = $2;
        for (i in vname) {if (i > indent) {delete vname[i]}}
        if (length($3) > 0) {
            vn=""; for (i=0; i<indent; i++) {vn=(vn)(vname[i])("_")}
            printf("%s%s%s=\"%s\"\n", "'$prefix'",vn, $2, $3);
        }
    }'
}
# Use bash-completion, if available (provides _get_comp_words_by_ref).
[[ $PS1 && -f /usr/share/bash-completion/bash_completion ]] && \
    . /usr/share/bash-completion/bash_completion

# Default REST endpoint of the local polycubed daemon.
BASE_URL_="http://localhost:9000/polycube/v1/"
# https://stackoverflow.com/a/10660730
# Percent-encode $1 per RFC 3986: unreserved characters ([-_.~a-zA-Z0-9])
# pass through, everything else becomes %xx with lowercase hex. The encoded
# string is written to stdout.
# https://stackoverflow.com/a/10660730
rawurlencode() {
    local raw="${1}"
    local length=${#raw}
    local result=""
    local idx ch enc

    for (( idx=0 ; idx<length ; idx++ )); do
        ch=${raw:$idx:1}
        case "$ch" in
            [-_.~a-zA-Z0-9] ) enc="${ch}" ;;
            # "'$ch" makes printf use the character's numeric code point.
            * ) printf -v enc '%%%02x' "'$ch" ;;
        esac
        result+="${enc}"
    done

    echo "${result}"
}
# Bash completion entry point for polycubectl.
# Rebuilds a REST URL from the words typed so far and asks the local
# polycubed daemon ("OPTIONS <url>?help=TYPE&completion") for the candidate
# next words, which are placed in COMPREPLY.
_polycubectl_completions() {
    # Load user config (config_url, ...) if the YAML config file exists.
    if [ -a "$HOME/.config/polycube/polycubectl_config.yaml" ]
    then
        eval $(parse_yaml "$HOME/.config/polycube/polycubectl_config.yaml" "config_")
    fi
    # Endpoint precedence: $POLYCUBECTL_URL, then config file, then default.
    BASE_URL=${POLYCUBECTL_URL:-$config_url}
    BASE_URL=${BASE_URL:-$BASE_URL_}
    local cur prev
    _get_comp_words_by_ref -n ":,=" cur prev
    local words
    _get_comp_words_by_ref -n ":,=" -w words
    OLDIFS=$IFS
    COMP=""
    URL0=""
    HELP_TYPE="NONE"
    i=0
    # Translate the already-typed words into a URL path. Skipped words:
    # the word being completed, key=value pairs, "a:b" tokens, the command
    # name itself, and the verbs (which select the help type instead).
    for X in ${words[@]}
    do
        let i++
        # do not consider the current word the user is typing
        #if [ $i == $((COMP_CWORD+1)) ]
        if [ "$X" == "$cur" ]
        then
            #break
            continue
        fi
        if [[ $X == *"="* ]]
        then
            continue
        fi
        if [[ $X == *":"* ]]
        then
            continue
        fi
        if [[ "polycubectl" == $X ]]
        then
            continue
        fi
        if [[ "add" == $X || "del" == $X || "show" == $X || "set" == $X ]]
        then
            HELP_TYPE=`echo $X | awk '{print toupper($0)}'`
            continue
        fi
        #echo $X
        URL0+=$(rawurlencode $X)
        URL0+='/'
    done
    # When completing "key=...", route the key itself into the URL path.
    #if [ "${cur: -1}" == "=" ]
    if [[ $cur == *"="* ]]
    then
        URL0+="${cur%%=*}"
        URL0+='/'
    fi
    URL0="${URL0%?}" # remove last '/'
    URL=$BASE_URL$URL0"?help="$HELP_TYPE"&completion"
    #echo "URL is: " $URL
    #return
    # Query the daemon; bail out silently when it is unreachable (-f makes
    # curl fail on HTTP errors too).
    JSON=`curl -f -L -s -X OPTIONS $URL`
    if [ $? -ne 0 ]
    then
        return
    fi
    #echo $JSON
    # Flatten the JSON array of candidates into a tab-separated list.
    VEC="`echo $JSON | jq -cr '.? // [] | @tsv'`"
    if [ ${#VEC[@]} == 1 ]
    then
        VEC0=${VEC[0]}
        # A single "<placeholder>" answer means a free-form key is expected.
        if [[ ${VEC0:0:1} == "<" ]]
        then
            # if the command is ADD then print the keyname that is expected, but
            # without creating a completion
            printf "\n${VEC0}" >/dev/tty
            # Re-print the prompt and the current command line after the hint.
            expandedPrompt=$(PS1="$PS1" debian_chroot="$debian_chroot" "$BASH" --norc -i </dev/null 2>&1 | sed -n '${s/^\(.*\)exit$/\1/p;}')
            printf '\n%s%s' "$expandedPrompt" "$COMP_LINE" >/dev/tty
            return 0;
        fi
    fi
    IFS=$'\t'
    for CMD in ${VEC[@]}
    do
        # add space for commands that doesn't end in "="
        if [ ${CMD: -1} != "=" ]
        then
            CMD+=" "
        fi
        if [[ $cur == *":"* ]]
        then
            CMD="${CMD#*:}"
        fi
        COMP+="$CMD"$IFS
    done
    IFS=$'\t\n'
    local cur0=$cur
    # if "=" or ":" are present in the current word, get the substring after it,
    # otherwise the completion is going to give wrong results
    if [[ $cur == *"="* ]]
    then
        cur0="${cur#*=}"
    fi
    if [[ $cur == *":"* ]]
    then
        cur0="${cur#*:}"
    fi
    COMPREPLY=( $(compgen -W "$COMP" -- $cur0) )
    IFS=$OLDIFS
}
# Register the completion function; -o nospace lets the function control
# trailing spaces itself (options ending in "=" get none).
complete -o nospace -F _polycubectl_completions polycubectl
|
#!/bin/sh -f
# Vivado (2015.2) simulation helper: elaborates the lab3_2_1_tb testbench
# with xelab. Script follows the layout Vivado auto-generates.
xv_path="/home/huchao/vivado/Vivado/2015.2"

# Run a command; abort the whole script with the command's exit status on
# failure.
ExecStep()
{
    "$@"
    RETVAL=$?
    if [ $RETVAL -ne 0 ]
    then
        exit $RETVAL
    fi
}

ExecStep $xv_path/bin/xelab -wto 0303ea328e574a799f2cae8747a5374c -m64 --debug typical --relax --mt 8 -L xil_defaultlib -L unisims_ver -L unimacro_ver -L secureip --snapshot lab3_2_1_tb_behav xil_defaultlib.lab3_2_1_tb xil_defaultlib.glbl -log elaborate.log
|
package io.opensphere.csv.parse;
import java.awt.Color;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.Set;
import org.apache.log4j.Logger;
import io.opensphere.core.Toolbox;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.csv.config.v2.CSVDataSource;
import io.opensphere.csvcommon.config.v2.CSVParseParameters;
import io.opensphere.csvcommon.parse.CsvProviderBase;
import io.opensphere.mantle.data.element.MetaDataProvider;
import io.opensphere.mantle.data.impl.DefaultDataTypeInfo;
import io.opensphere.mantle.util.InputStreamMonitorTaskActivity;
import io.opensphere.mantle.util.MantleToolboxUtils;
import io.opensphere.mantle.util.MonitorInputStream;
/** The Class CSVDataElementProvider. */
@SuppressWarnings("PMD.GodClass")
public class CSVDataElementProvider extends CsvProviderBase
{
    /** The Constant ourLogger. */
    private static final Logger LOGGER = Logger.getLogger(CSVDataElementProvider.class);

    /** The data source describing the CSV file and how to parse it. */
    private final CSVDataSource myFileSource;

    /**
     * Instantiates a new CSV data element provider. Wires the base-class
     * fields, opens the CSV file behind a progress-monitored stream, skips
     * ahead to the first data row and primes the first element.
     *
     * @param tb the tb
     * @param dti the dti
     * @param configSaver saves the config
     * @param source the source
     * @param useDeterminedDataTypes the use determined data types
     * @param useDynamicEnumerations the use dynamic enumerations
     * @throws FileNotFoundException the file not found exception
     * @throws IOException Signals that an I/O exception has occurred.
     */
    public CSVDataElementProvider(Toolbox tb, DefaultDataTypeInfo dti, Runnable configSaver, CSVDataSource source,
            boolean useDeterminedDataTypes, boolean useDynamicEnumerations)
        throws FileNotFoundException, IOException
    {
        // Populate the protected state the CsvProviderBase machinery reads.
        geomFact = new CSVMapLocationGeoemtrySupportFactory();
        myToolbox = tb;
        myFileSource = source;
        myConfigSaver = configSaver;
        myErrorMessages = New.linkedList();
        myWarningMessages = New.linkedList();
        myUseDeterminedDataTypes = useDeterminedDataTypes;
        myUseDynamicEnumerations = useDynamicEnumerations;
        myDynamicEnumerationRegistry = MantleToolboxUtils.getMantleToolbox(tb).getDynamicEnumerationRegistry();
        File aFile = new File(myFileSource.getFileLocalPath(tb));
        myTypeInfo = dti;
        mySpecialColumnMap = io.opensphere.csvcommon.common.Utilities
                .createSpecialColumnMap(getParseParams().getSpecialColumns());
        setupExtraction(myTypeInfo.getTypeKey());
        // Task activity shows load progress in the UI while the monitored
        // stream below is consumed.
        myTaskActivity = new InputStreamMonitorTaskActivity("Loading CSV File", "Loading CSV File");
        LOGGER.info("Reading CSV file: " + aFile);
        myReader = createCSVLineReader(myFileSource.getParseParameters(), new InputStreamReader(
                new MonitorInputStream(new FileInputStream(aFile), myTaskActivity), StringUtilities.DEFAULT_CHARSET));
        tb.getUIRegistry().getMenuBarRegistry().addTaskActivity(myTaskActivity);
        // Skip header/preamble lines until the first data row is reached.
        if (myReader.ready())
        {
            while (myLineIndex < myFirstDataRowNum)
            {
                myCurrLine = myReader.readLine();
                myLineIndex++;
            }
        }
        // Prime the iterator-style base class with the first element.
        myNextElementToReturn = getNextDataElement();
    }

    /** {@inheritDoc} Source URI comes from the configured file source. */
    @Override
    protected URI getSourceUri()
    {
        return myFileSource.getSourceUri();
    }

    /** {@inheritDoc} Layer color comes from the type's visualization info. */
    @Override
    protected Color getLayerColor()
    {
        return myTypeInfo.getBasicVisualizationInfo().getTypeColor();
    }

    /** {@inheritDoc} Parse parameters come from the configured file source. */
    @Override
    protected CSVParseParameters getParseParams()
    {
        return myFileSource.getParseParameters();
    }

    /** {@inheritDoc} Column filter comes from the configured file source. */
    @Override
    protected Set<String> getColumnFilter()
    {
        return myFileSource.getColumnFilter();
    }

    /**
     * Some applications may choose to suppress warnings selectively.
     * @param parts the parts
     * @return true if and only if a warning is allowed
     */
    @Override
    protected boolean checkAllowWarning(String[] parts)
    {
        // default to issuing a warning on any perceived problem
        return true;
    }

    /**
     * Apply labels to the CSV features. By default, this does nothing;
     * override to provide this capability in subclasses.
     * @param dtiKey the type key
     * @param metaDataProvider the feature metadata
     */
    @Override
    protected void applyLabels(String dtiKey, MetaDataProvider metaDataProvider)
    {
        // nothing
    }
}
|
<reponame>MccreeFei/jframe<gh_stars>10-100
/**
*
*/
package jframe.jedis.service;
import jframe.core.plugin.annotation.Service;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisCluster;
/**
* @author dzh
* @date Dec 2, 2014 10:37:37 AM
* @since 1.0
*/
@Service(clazz = "jframe.jedis.service.JedisServiceImpl", id = "jframe.service.jedis")
public interface JedisService {

    /**
     * @return a Jedis connection for the default host ("jedis.host.default")
     */
    Jedis getJedis();

    /**
     * @param id host identifier
     * @return a Jedis connection for the given id
     */
    Jedis getJedis(String id);

    /**
     * Give back a connection obtained from {@link #getJedis()}.
     * Deprecated: use {@link redis.clients.jedis.Jedis#close()} instead.
     *
     * @param jedis the connection to give back
     */
    @Deprecated
    void recycleJedis(Jedis jedis);

    /**
     * Give back a connection obtained from {@link #getJedis(String)}.
     * Deprecated: use {@link redis.clients.jedis.Jedis#close()} instead.
     *
     * @param id host identifier the connection came from
     * @param jedis the connection to give back
     */
    @Deprecated
    void recycleJedis(String id, Jedis jedis);

    /**
     * Unsupported (declared but not implemented, per the original docs).
     *
     * @param name cluster name
     * @return the JedisCluster for {@code name}
     */
    JedisCluster getJedisCluster(String name);

    /************** simple single-key convenience operations ***************/

    /** GET {@code key} on the host identified by {@code id}. */
    String get(String id, String key);

    /** SETEX: set {@code key=value} with a TTL of {@code expiredSeconds}. */
    void setex(String id, String key, String value, Integer expiredSeconds);

    /** DEL {@code key} on the host identified by {@code id}. */
    void del(String id, String key);
}
|
<filename>nuxt.config.js
export default {
    // Pre-render the site as static HTML (`nuxt generate`).
    target: 'static',
    head: {
        title: "Febri Hidayan",
        meta: [
            { charset: 'utf-8' },
            { name: 'viewport', content: 'width=device-width, initial-scale=1' },
            {
                hid: 'description',
                name: 'description',
                content: "Mengenali lebih jauh tentang saya lewat situs ini dan belajar lebih banyak pengetahuan disini."
            }
        ],
        link: [
            // Bulma is loaded from a CDN instead of being bundled.
            {
                rel: "stylesheet",
                href: "https://cdnjs.cloudflare.com/ajax/libs/bulma/0.9.0/css/bulma.min.css"
            }
        ]
    },
    env: {
        // NOTE(review): '<NAME>' looks like an unexpanded template
        // placeholder — confirm and replace with the real application name.
        appName: '<NAME>'
    },
    // https://id.nuxtjs.org/guides/configuration-glossary/configuration-router
    router: {
        // linkActiveClass: 'is-active',
        linkExactActiveClass: 'is-active'
    },
    // Auto-import components from the components/ directory.
    components: true,
    modules: [
        '@nuxt/content'
    ]
}
<filename>arch/risc-v/src/esp32c3/esp32c3_wireless.c<gh_stars>0
/****************************************************************************
* arch/risc-v/src/esp32c3/esp32c3_wireless.c
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
#include <nuttx/kmalloc.h>
#include <semaphore.h>
#include <debug.h>
#include "riscv_internal.h"
#include "hardware/esp32c3_system.h"
#include "hardware/esp32c3_soc.h"
#include "hardware/esp32c3_syscon.h"
#include "esp32c3.h"
#include "esp32c3_irq.h"
#include "esp32c3_attr.h"
#include "esp32c3_wireless.h"
#include "espidf_wifi.h"
/****************************************************************************
* Pre-processor Definitions
****************************************************************************/
/* Software Interrupt */
#define SWI_IRQ ESP32C3_IRQ_FROM_CPU_INT0
#define SWI_PERIPH ESP32C3_PERIPH_FROM_CPU_INT0
/****************************************************************************
* Private Types
****************************************************************************/
/* ESP32-C3 Wireless Private Data */
struct esp32c3_wl_priv_s
{
  volatile int ref;         /* Reference count (esp32c3_wl_init() users) */
  int cpuint;               /* CPU interrupt assigned to SWI */
  struct list_node sc_list; /* Semaphore cache list drained by the SWI */
};
/****************************************************************************
* Private Function Prototypes
****************************************************************************/
static inline void phy_digital_regs_store(void);
static inline void phy_digital_regs_load(void);
static void esp32c3_phy_enable_clock(void);
static void esp32c3_phy_disable_clock(void);
/****************************************************************************
* Extern Functions declaration
****************************************************************************/
#ifdef CONFIG_ESP32C3_BLE
extern void coex_pti_v2(void);
#endif
/****************************************************************************
* Private Data
****************************************************************************/
/* Wi-Fi sleep private data */
static uint32_t g_phy_clk_en_cnt;
/* Reference count of enabling PHY */
static uint8_t g_phy_access_ref;
/* Memory to store PHY digital registers */
static uint32_t *g_phy_digital_regs_mem = NULL;
/* Indicate PHY is calibrated or not */
static bool g_is_phy_calibrated = false;
static struct esp32c3_wl_priv_s g_esp32c3_wl_priv;
/****************************************************************************
* Private Functions
****************************************************************************/
/****************************************************************************
* Name: phy_digital_regs_store
*
* Description:
* Store PHY digital registers.
*
****************************************************************************/
static inline void phy_digital_regs_store(void)
{
  /* Lazily allocate the backup buffer on first use */

  if (g_phy_digital_regs_mem == NULL)
    {
      g_phy_digital_regs_mem =
          (uint32_t *)kmm_malloc(SOC_PHY_DIG_REGS_MEM_SIZE);
    }

  DEBUGASSERT(g_phy_digital_regs_mem != NULL);

  /* true selects the "backup" direction of the PHY register copy */

  phy_dig_reg_backup(true, g_phy_digital_regs_mem);
}
/****************************************************************************
* Name: phy_digital_regs_load
*
* Description:
* Load PHY digital registers.
*
****************************************************************************/
static inline void phy_digital_regs_load(void)
{
  /* Nothing to restore if no snapshot was ever taken */

  if (g_phy_digital_regs_mem == NULL)
    {
      return;
    }

  /* false selects the "restore" direction of the PHY register copy */

  phy_dig_reg_backup(false, g_phy_digital_regs_mem);
}
/****************************************************************************
* Name: esp32c3_phy_enable_clock
*
* Description:
* Enable PHY hardware clock
*
* Input Parameters:
* None
*
* Returned Value:
* None
*
****************************************************************************/
static void esp32c3_phy_enable_clock(void)
{
  irqstate_t flags = enter_critical_section();

  /* Gate the shared Wi-Fi/BT clock on only on the 0 -> 1 transition of
   * the enable count.
   */

  if (g_phy_clk_en_cnt++ == 0)
    {
      modifyreg32(SYSTEM_WIFI_CLK_EN_REG, 0,
                  SYSTEM_WIFI_CLK_WIFI_BT_COMMON_M);
    }

  leave_critical_section(flags);
}
/****************************************************************************
* Name: esp32c3_phy_disable_clock
*
* Description:
* Disable PHY hardware clock
*
* Input Parameters:
* None
*
* Returned Value:
* None
*
****************************************************************************/
static void esp32c3_phy_disable_clock(void)
{
  irqstate_t flags = enter_critical_section();

  /* Ignore unbalanced calls (count already zero); gate the shared
   * Wi-Fi/BT clock off on the 1 -> 0 transition.
   */

  if (g_phy_clk_en_cnt > 0 && --g_phy_clk_en_cnt == 0)
    {
      modifyreg32(SYSTEM_WIFI_CLK_EN_REG,
                  SYSTEM_WIFI_CLK_WIFI_BT_COMMON_M,
                  0);
    }

  leave_critical_section(flags);
}
/****************************************************************************
* Name: esp32c3_wl_swi_irq
*
* Description:
* Wireless software interrupt callback function.
*
* Parameters:
* cpuint - CPU interrupt index
* context - Context data from the ISR
* arg - NULL
*
* Returned Value:
* Zero (OK) is returned on success. A negated errno value is returned on
* failure.
*
****************************************************************************/
static int esp32c3_wl_swi_irq(int irq, void *context, FAR void *arg)
{
  int i;
  int ret;
  struct esp32c3_wl_semcache_s *sc;
  struct esp32c3_wl_semcache_s *tmp;
  struct esp32c3_wl_priv_s *priv = &g_esp32c3_wl_priv;

  /* Acknowledge/clear the CPU0 software interrupt */

  putreg32(0, SYSTEM_CPU_INTR_FROM_CPU_0_REG);

  /* Drain the semaphore cache: post each cached semaphore as many times
   * as esp32c3_wl_post_semcache() recorded, then unlink the entry.
   */

  list_for_every_entry_safe(&priv->sc_list, sc, tmp,
                            struct esp32c3_wl_semcache_s, node)
    {
      for (i = 0; i < sc->count; i++)
        {
          ret = nxsem_post(sc->sem);
          if (ret < 0)
            {
              wlerr("ERROR: Failed to post sem ret=%d\n", ret);
            }
        }

      sc->count = 0;
      list_delete(&sc->node);
    }

  return OK;
}
/****************************************************************************
* Public Functions
****************************************************************************/
/****************************************************************************
* Name: esp32c3_phy_disable
*
* Description:
* Deinitialize PHY hardware
*
* Input Parameters:
* None
*
* Returned Value:
* None
*
****************************************************************************/
void esp32c3_phy_disable(void)
{
  irqstate_t flags = enter_critical_section();

  /* Power the PHY down only when the last user releases its reference */

  if (--g_phy_access_ref == 0)
    {
      /* Save the digital register state so a later enable can restore it */

      phy_digital_regs_store();

      /* Disable PHY and RF */

      phy_close_rf();
      phy_xpd_tsens();

      /* Disable the Wi-Fi/BT common peripheral clock; the clock for the
       * hardware RNG is deliberately left running.
       */

      esp32c3_phy_disable_clock();
    }

  leave_critical_section(flags);
}
/****************************************************************************
* Name: esp32c3_phy_enable
*
* Description:
* Initialize PHY hardware
*
* Input Parameters:
* None
*
* Returned Value:
* None
*
****************************************************************************/
void esp32c3_phy_enable(void)
{
static bool debug = false;
irqstate_t flags;
esp_phy_calibration_data_t *cal_data;
char *phy_version = get_phy_version_str();
if (debug == false)
{
debug = true;
wlinfo("phy_version %s\n", phy_version);
}
cal_data = kmm_zalloc(sizeof(esp_phy_calibration_data_t));
if (!cal_data)
{
wlerr("ERROR: Failed to kmm_zalloc");
DEBUGASSERT(0);
}
flags = enter_critical_section();
if (g_phy_access_ref == 0)
{
esp32c3_phy_enable_clock();
if (g_is_phy_calibrated == false)
{
register_chipv7_phy(&phy_init_data, cal_data, PHY_RF_CAL_FULL);
g_is_phy_calibrated = true;
}
else
{
phy_wakeup_init();
phy_digital_regs_load();
}
#ifdef CONFIG_ESP32C3_BLE
coex_pti_v2();
#endif
}
g_phy_access_ref++;
leave_critical_section(flags);
kmm_free(cal_data);
}
/****************************************************************************
* Name: esp32c3_wl_init_semcache
*
* Description:
* Initialize semaphore cache.
*
* Parameters:
* sc - Semaphore cache data pointer
* sem - Semaphore data pointer
*
* Returned Value:
* None.
*
****************************************************************************/
void esp32c3_wl_init_semcache(struct esp32c3_wl_semcache_s *sc,
                              sem_t *sem)
{
  /* Bind the cache entry to its semaphore with no pending posts and an
   * unlinked list node.
   */

  sc->count = 0;
  sc->sem   = sem;
  list_initialize(&sc->node);
}
/****************************************************************************
* Name: esp32c3_wl_post_semcache
*
* Description:
* Store posting semaphore action into semaphore cache.
*
* Parameters:
* sc - Semaphore cache data pointer
*
* Returned Value:
* None.
*
****************************************************************************/
void IRAM_ATTR esp32c3_wl_post_semcache(
  struct esp32c3_wl_semcache_s *sc)
{
  struct esp32c3_wl_priv_s *priv = &g_esp32c3_wl_priv;

  /* First pending post for this entry: link it into the list that
   * esp32c3_wl_swi_irq() drains.
   */

  if (!sc->count)
    {
      list_add_tail(&priv->sc_list, &sc->node);
    }

  sc->count++;

  /* Raise the CPU0 software interrupt so the actual nxsem_post() happens
   * in the SWI handler rather than in this (IRAM) context.
   */

  putreg32(SYSTEM_CPU_INTR_FROM_CPU_0_M, SYSTEM_CPU_INTR_FROM_CPU_0_REG);
}
/****************************************************************************
* Name: esp32c3_wl_init
*
* Description:
* Initialize ESP32-C3 wireless common components for both BT and Wi-Fi.
*
* Parameters:
* None
*
* Returned Value:
* Zero (OK) is returned on success. A negated errno value is returned on
* failure.
*
****************************************************************************/
int esp32c3_wl_init(void)
{
  int ret;
  irqstate_t flags;
  struct esp32c3_wl_priv_s *priv = &g_esp32c3_wl_priv;

  flags = enter_critical_section();

  /* Already initialized: just bump the reference count */

  if (priv->ref != 0)
    {
      priv->ref++;
      leave_critical_section(flags);
      return OK;
    }

  /* Allocate a CPU interrupt for the wireless software interrupt (SWI) */

  priv->cpuint = esp32c3_request_irq(SWI_PERIPH,
                                     ESP32C3_INT_PRIO_DEF,
                                     ESP32C3_INT_LEVEL);

  ret = irq_attach(SWI_IRQ, esp32c3_wl_swi_irq, NULL);
  if (ret < 0)
    {
      /* Undo the CPU interrupt allocation on failure */

      esp32c3_free_cpuint(SWI_PERIPH);
      leave_critical_section(flags);
      wlerr("ERROR: Failed to attach IRQ ret=%d\n", ret);
      return ret;
    }

  list_initialize(&priv->sc_list);

  up_enable_irq(priv->cpuint);

  priv->ref++;

  leave_critical_section(flags);

  return OK;
}
/****************************************************************************
* Name: esp32c3_wl_deinit
*
* Description:
* De-initialize ESP32-C3 wireless common components.
*
* Parameters:
* None
*
* Returned Value:
* Zero (OK) is returned on success. A negated errno value is returned on
* failure.
*
****************************************************************************/
int esp32c3_wl_deinit(void)
{
  struct esp32c3_wl_priv_s *priv = &g_esp32c3_wl_priv;
  irqstate_t flags = enter_critical_section();

  /* Tear the SWI machinery down only if it was ever initialized */

  if (priv->ref != 0)
    {
      up_disable_irq(priv->cpuint);
      irq_detach(SWI_IRQ);
      esp32c3_free_cpuint(SWI_PERIPH);
      priv->ref--;
    }

  leave_critical_section(flags);
  return OK;
}
|
#!/bin/sh
# Build picavista.zip from everything under src/ (paths stored relative
# to src/; dotfiles are intentionally excluded by the * glob).

# Fail fast: without this, a failed `cd src` would zip the wrong directory.
set -e

ZIP_NAME=picavista.zip

rm -f "$ZIP_NAME"
cd src
zip -r "../$ZIP_NAME" *
cd ..
|
#! /bin/bash
##### scaling FSM #######################
# Scale-in step: point SAP2 back at squid1 only, then stop the extra
# squid2 instance in datacenter dc2 (son-emu emulator commands).
#remove new squid
son-exec vCDN-SAP2 configure_sap2_squid1.sh
son-emu-cli compute stop -d dc2 -n squid2
#######################################
<reponame>kostovmichael/react-examples
const path = require('path');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const CleanWebpackPlugin = require('clean-webpack-plugin');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const UglifyJSPlugin = require('uglifyjs-webpack-plugin');
const webpack = require('webpack');
const alias = require('./scripts/alias');
const tsconfig = require('./tsconfig.json');
const pkgConfig = require('./package.json');
// Build mode: anything other than NODE_ENV=production counts as development.
const env = process.env.NODE_ENV === 'production' ? 'production' : 'development';
// BUILD_ENV selects the asset publicPath ('local' vs anything else).
const build_env = process.env.BUILD_ENV;
const __PROD__ = env === 'production';
// Resolve paths relative to the project root.
const join = (...args) => path.resolve(process.cwd(), ...args);
const PORT = 2222;
const publicPathMap = {
    local: `//localhost:${PORT}/${pkgConfig.name}/docs/`,
    remote: `/`
};
const publicPath = publicPathMap[build_env] || publicPathMap.remote;
const cssLoaderOptions = {
    sourceMap: true,
    importLoaders: 2
};
// CSS-modules variant: hashed local class names, camelCase exports.
const cssLoaderModuleOptions = Object.assign({
    modules: true,
    camelCase: true,
    localIdentName: '[local]_[hash:base64:5]',
}, cssLoaderOptions);
const sassLoaderOptions = {
    includePaths: [join('src/styles')],
    sourceMap: true
};
const postcssLoaderOptions = {
    plugins: (loader) => [
        require('autoprefixer')()
    ],
    sourceMap: true
};
/**
 * Build the loader chain (css -> postcss -> sass -> sass-resources) shared by
 * the plain and CSS-module style rules.
 * @param {boolean} [cssModule] use the CSS-modules options on css-loader
 */
function getStyleLoaders(cssModule) {
    const cssLoader = {
        loader: 'css-loader',
        options: cssModule ? cssLoaderModuleOptions : cssLoaderOptions
    };
    const postcssLoader = {
        loader: 'postcss-loader',
        options: postcssLoaderOptions
    };
    const sassLoader = {
        loader: 'sass-loader',
        options: sassLoaderOptions
    };
    // Prepend shared variables/mixins to every compiled stylesheet
    const sassResourcesLoader = {
        loader: 'sass-resources-loader',
        options: {
            resources: [
                join('src/styles/variables.scss'),
                join('src/styles/mixins.scss')
            ]
        }
    };
    return [cssLoader, postcssLoader, sassLoader, sassResourcesLoader];
}
// Module rules: lint + compile TypeScript, plus two style pipelines
// (plain stylesheets vs. *.module.* CSS-modules)
const rules = [
    {
        // Lint TS before compilation; never fail the build on lint hints
        test: /\.(ts|tsx)$/,
        enforce: 'pre',
        loader: 'tslint-loader',
        options: {
            failOnHint: false
        }
    },
    {
        test: /\.(tsx|ts)?$/,
        loader: 'ts-loader',
        exclude: /node_modules/
    },
    {
        // Plain styles: extracted to files in production, inlined in dev
        test: /\.(scss|css)$/,
        exclude: [
            /\.module\.(scss|css)$/
        ],
        use: env === 'production' ? ExtractTextPlugin.extract({
            fallback: 'style-loader',
            use: getStyleLoaders()
        }) : ['style-loader'].concat(getStyleLoaders())
    },
    {
        // CSS-modules styles (*.module.scss/css) get scoped class names
        test: /\.(scss|css)$/,
        include: [/\.module\.(scss|css)$/],
        use: env === 'production' ? ExtractTextPlugin.extract({
            fallback: 'style-loader',
            use: getStyleLoaders(true)
        }) : ['style-loader'].concat(getStyleLoaders(true))
    }
];
const plugins = [
    new HtmlWebpackPlugin({
        template: './src/index.html',
        cache: true
    }),
    // Split shared dependencies into 'vendor' and the runtime into 'manifest'
    new webpack.optimize.CommonsChunkPlugin({
        name: ['vendor', 'manifest']
    }),
    // TODO: HMR does not seem to work in watch mode
    new webpack.HotModuleReplacementPlugin(),
    new webpack.NamedModulesPlugin(),
    // Expose the build environment to application code
    new webpack.DefinePlugin({
        'process.env': {
            NODE_ENV: JSON.stringify(env)
        },
        __PROD__
    }),
    // new webpack.ProvidePlugin({
    //     Chance: 'chance'
    // }),
    // Extract CSS to files (disabled in development so HMR keeps working)
    new ExtractTextPlugin({
        filename: 'styles/[name].[contenthash:8].css',
        allChunks: true,
        disable: env === 'development'
    }),
    new CleanWebpackPlugin(['dist', 'docs', 'build'])
];
if (env === 'production') {
    plugins.push(
        new UglifyJSPlugin()
    )
}
const config = {
    target: 'web',
    cache: true,
    entry: {
        app: './src/index.tsx',
        // Third-party libraries bundled into the long-lived 'vendor' chunk
        vendor: [
            'react',
            'react-dom',
            'react-router',
            'redux',
            'react-redux',
            'redux-actions',
            'redux-thunk',
            "redux-promise-middleware",
            'classnames',
            'isomorphic-fetch',
            'seamless-immutable'
        ]
    },
    output: {
        path: join('docs'),
        filename: env === 'development' ? 'scripts/[name].js' : 'scripts/[name].[hash:8].js',
        publicPath,
        pathinfo: !__PROD__
    },
    resolve: {
        // resolve.root from webpack 1.x was replaced by resolve.modules in webpack 2.x
        modules: [
            'node_modules',
            // Work around TypeScript not honoring webpack's module-resolution aliases
            // https://github.com/s-panferov/awesome-typescript-loader/issues/156
            // https://stackoverflow.com/questions/40443806/webpack-resolve-alias-does-not-work-with-typescript
            join(tsconfig.compilerOptions.baseUrl)
        ],
        alias: Object.assign({}, alias(join('src'), ['typings']), {
            'swiper-css': join('node_modules/swiper/dist/css/swiper.min.css'),
            'swiper': join('node_modules/swiper/dist/js/swiper.min.js')
        }),
        extensions: ["*", ".tsx", ".ts", ".js", ".jsx", ".scss", ".css", ".sass", ".json"],
        mainFiles: ["index"]
    },
    module: {
        rules
    },
    devtool: __PROD__ ? 'source-map' : 'inline-source-map',
    plugins
};
if (env !== 'production') {
    // Dev server with HMR enabled, reachable from other hosts (0.0.0.0)
    config.devServer = {
        contentBase: './src/',
        historyApiFallback: false,
        host: '0.0.0.0',
        hot: true,
        inline: true,
        port: PORT,
        proxy: {
            // Proxy API requests to work around CORS issues:
            // http://localhost:2222/douban/api/movie/in_theaters?city=上海 -> (proxied to) -> http://api.douban.com/v2/movie/in_theaters?city=上海
            "/douban/api": {
                target: "http://api.douban.com",
                // [HPM] Error occurred while trying to proxy request /v2/movie/in_theaters from localhost:2222 to http://api.douban.com (ECONNRESET) (https://nodejs.org/api/errors.html#errors_common_system_errors)
                // changeOrigin: true fixes the ECONNRESET error above
                changeOrigin: true,
                pathRewrite: { '^/douban/api': '/v2' }
            }
        }
    }
}
module.exports = config;
|
<filename>__init__.py
"""Package root: declares the submodules exported by ``from <package> import *``."""

__all__ = ["SDEprocesses", "SDEsystems", "exceptions", "analysis"]
|
#!/bin/sh
# niv: Read the Word of God from your terminal
# License: Public domain
SELF="$0"

# Extract a member file from the gzipped tarball appended to this script
# after the '#EOF' marker (self-extracting archive pattern).
get_data() {
    sed '1,/^#EOF$/d' < "$SELF" | tar xz -O "$1"
}
# Honor $PAGER if set; otherwise prefer less, falling back to cat
if [ -z "$PAGER" ]; then
    if command -v less >/dev/null; then
        PAGER="less"
    else
        PAGER="cat"
    fi
fi
# Print usage on stderr and exit with status 2
show_help() {
    exec >&2
    echo "usage: $(basename "$0") [flags] [reference...]"
    echo
    echo "  -l      list books"
    echo "  -W      no line wrap"
    echo "  -h      show help"
    echo
    echo "  Reference types:"
    echo "      <Book>"
    echo "          Individual book"
    echo "      <Book>:<Chapter>"
    echo "          Individual chapter of a book"
    echo "      <Book>:<Chapter>:<Verse>[,<Verse>]..."
    echo "          Individual verse(s) of a specific chapter of a book"
    echo "      <Book>:<Chapter>-<Chapter>"
    echo "          Range of chapters in a book"
    echo "      <Book>:<Chapter>:<Verse>-<Verse>"
    echo "          Range of verses in a book chapter"
    echo "      <Book>:<Chapter>:<Verse>-<Chapter>:<Verse>"
    echo "          Range of chapters and verses in a book"
    echo
    echo "      /<Search>"
    echo "          All verses that match a pattern"
    echo "      <Book>/<Search>"
    echo "          All verses in a book that match a pattern"
    echo "      <Book>:<Chapter>/<Search>"
    echo "          All verses in a chapter of a book that match a pattern"
    exit 2
}
# Parse leading flags; stop at the first non-flag argument or '--'
while [ $# -gt 0 ]; do
    isFlag=0
    # First character of $1 (POSIX-safe substring trick)
    firstChar="${1%"${1#?}"}"
    if [ "$firstChar" = "-" ]; then
        isFlag=1
    fi
    if [ "$1" = "--" ]; then
        shift
        break
    elif [ "$1" = "-l" ]; then
        # List all book names with their abbreviations
        get_data niv.tsv | awk -v cmd=list "$(get_data niv.awk)"
        exit
    elif [ "$1" = "-W" ]; then
        # NOTE(review): env vars are still named KJV_* -- this script looks
        # like a kjv fork; the awk program presumably reads these names.
        export KJV_NOLINEWRAP=1
        shift
    elif [ "$1" = "-h" ] || [ "$isFlag" -eq 1 ]; then
        show_help
    else
        break
    fi
done
# Constrain output width to the terminal, when one is attached
cols=$(tput cols 2>/dev/null)
if [ $? -eq 0 ]; then
    export KJV_MAX_WIDTH="$cols"
fi

if [ $# -eq 0 ]; then
    # No reference given: interactive mode requires stdin to be a terminal
    if [ ! -t 0 ]; then
        show_help
    fi
    # Interactive mode
    while true; do
        printf "niv> "
        if ! read -r ref; then
            break
        fi
        get_data niv.tsv | awk -v cmd=ref -v ref="$ref" "$(get_data niv.awk)" | ${PAGER}
    done
    exit 0
fi

# One-shot mode: treat all remaining arguments as a single reference
get_data niv.tsv | awk -v cmd=ref -v ref="$*" "$(get_data niv.awk)" | ${PAGER}
|
#!/bin/bash
# Flash a firmware image to an ESP32 over COM7.
# Usage: <script> <flash-offset> <image.bin> -- both args go to write_flash.
#. ${IDF_PATH}/add_path.sh |Had to change this two lines to
#esptool.py --chip esp32 --port "COM4" --baud $((230400*4)) write_flash -fs 4MB 0x100000 "$1" |the following (otherwise i got: "esptool.py command not found"!??)
${IDF_PATH}/components/esptool_py/esptool/esptool.py --chip esp32 --port "COM7" --baud $((230400*4)) write_flash -fs 4MB "$1" "$2"
package yijun.sun.plugin.eclipse;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.function.Consumer;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.IPath;
import org.eclipse.egit.ui.ICommitMessageProvider;
/**
 * EGit commit-message provider that reads the message template from
 * {@code git-message-template.txt} in the workspace root. On first use the
 * file is seeded with a default template so users can customize it.
 */
public class UseGitCommitTemplateMsg implements ICommitMessageProvider {

    @Override
    public String getMessage(IResource[] arg0) {
        IPath rootPath = ResourcesPlugin.getWorkspace().getRoot().getLocation();
        if (rootPath == null) {
            return "";
        }
        String dir = rootPath.toString();
        String message = getDefaultMessage();
        File file = new File(dir, "git-message-template.txt");
        try {
            if (!file.exists()) {
                // Seed the template file with the default message.
                if (file.createNewFile()) {
                    // try-with-resources: the original only closed the writer
                    // in the catch block, leaking it on the success path.
                    try (FileWriter writer = new FileWriter(file)) {
                        writer.write(message);
                        writer.flush();
                    }
                }
            } else if (file.isFile() && file.canRead()) {
                // Read the whole template, normalizing line endings to "\n".
                try (BufferedReader buffer = new BufferedReader(new FileReader(file))) {
                    StringBuilder s = new StringBuilder();
                    String line;
                    while ((line = buffer.readLine()) != null) {
                        s.append(line).append("\n");
                    }
                    message = s.toString();
                }
            }
        } catch (IOException e1) {
            // Best effort: fall back to the default template on any I/O error.
            e1.printStackTrace();
        }
        return message;
    }

    /** Built-in fallback template used until the on-disk file exists. */
    private String getDefaultMessage() {
        return "[type] subject\n" + "# Please remove comments. \n"
                + "# type: feat(new feature) / bug / docs / style(file style change) / refactor / test(only test code change) / chore(build or tool)\n"
                + "\n" + "Issues: \n" + "Description: \n" + "\n" + "RelateModule: ";
    }
}
|
<filename>Documentation/_load_model_8cpp.js<gh_stars>0
// Auto-generated Doxygen navigation data for _load_model_8cpp -- do not edit by hand.
var _load_model_8cpp =
[
    [ "ModelPtr", "_load_model_8cpp.xhtml#ab8262a23f76732432e30196fa09a5d4d", null ],
    [ "OperatorPtr", "_load_model_8cpp.xhtml#af3f6f44d7fdab7f4c61f59e4154dd696", null ],
    [ "SubgraphPtr", "_load_model_8cpp.xhtml#ab4ad5c0bbec93520b8210fd6d8e2051c", null ],
    [ "BOOST_AUTO_TEST_CASE", "_load_model_8cpp.xhtml#a78183790dc7c28aa493485715480bc39", null ],
    [ "BOOST_AUTO_TEST_CASE", "_load_model_8cpp.xhtml#a3496184393204134fcf4cdeeedc21cf6", null ],
    [ "BOOST_AUTO_TEST_CASE", "_load_model_8cpp.xhtml#a8afd4e4f33c8c5ea6ed0fcc2c8db1620", null ],
    [ "BOOST_AUTO_TEST_CASE", "_load_model_8cpp.xhtml#aecebec3130e12fbe592fac5113b01c6f", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_load_model_8cpp.xhtml#a95a7c926cb96a1c57de39a72a8209c94", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_load_model_8cpp.xhtml#a757da23bc4ad3305fe973413be85015f", null ]
];
def merge_list(list1, list2):
    """Concatenate two lists and drop duplicates, keeping first-seen order."""
    seen = {}
    for item in list1 + list2:
        # dict keys are unique and (since Python 3.7) insertion-ordered
        seen.setdefault(item, None)
    return list(seen)
# Demo: merge two fruit lists; the duplicate "apple" is dropped.
list1 = ["apple","orange","banana"]
list2 = ["cherry","pineapple","apple"]
result = merge_list(list1, list2)
print(result) # Output: ["apple", "orange", "banana", "cherry", "pineapple"]
module SW {
    interface EventWidgetEvent {
        data: any
        widget: EventWidget
    }

    /**
     * Minimal publish/subscribe helper: register callbacks by event name and
     * fire them later with an optional data payload.
     */
    export class EventWidget {
        private events: {[name: string]: Array<(e: EventWidgetEvent) => any>} = {};

        constructor() {
        }

        /** Register a callback for the named event; returns `this` for chaining. */
        on(eventName: string, callback: (e: EventWidgetEvent) => any) {
            const listeners = this.events[eventName];
            if (listeners) {
                listeners.push(callback);
            } else {
                this.events[eventName] = [callback];
            }
            return this;
        }

        /** Remove every callback registered for the event. */
        off(event: string) {
            delete this.events[event]
        }

        /** Invoke all callbacks registered for the event with the given payload. */
        trigger(eventName: string, data: any = null) {
            const listeners = this.events[eventName];
            if (!listeners) {
                return;
            }
            for (const callback of listeners) {
                callback({
                    data: data,
                    widget: this
                });
            }
        }
    }
}
|
// Static form-field definitions (label/value pairs) for a loan-product
// detail page. 'input2' renders two label/value columns; 'textarea' renders
// a single long-text field.
export default [
{
data: ['产品编号', 'CP201900000001', '产品名称', '快易贷'],
type: 'input2',
},
{
data: ['参考利率范围', '5.5%-7%', '贷款额度', '1万元-500万元'],
type: 'input2',
},
{
data: ['贷款期限', '1个月 -36个月', '抵押方式', '抵押'],
type: 'input2',
},
{
data: ['产品类型', '一般金融产品', '贷款类别', '经营贷款(含流动资金贷款、周转贷款)'],
type: 'input2',
},
{
data: ['是否网贷直联', '否', '是否政策性产品', '否'],
type: 'input2',
},
{
data: ['是否通用产品', '是', '是否人民币', '是'],
type: 'input2',
},
{
data: ['产品简介', '单户最高不超过500万,期限最长不超过3年,享受一般小企业贷款的优惠利率。'],
type: 'textarea',
},
{
data: [
'产品特点',
'1.采用专设的简化申报审批流程,业务办理效率高;\r\n2.最高可做到“一次授信,三年有效”, 授信期限长,大大节约了客户反复办理抵押手续的成本,使客户获得较长期限的资金支持;\r\n3.随借随还,资金使用灵活,满足企业快速使用资金的需求',
],
type: 'textarea',
},
{
data: [
'适用客户',
'经国家工商行政机关核准登记、信誉良好,且能够提供优质房地产抵押或有效保证担保的小微企业。',
],
type: 'textarea',
},
{
data: ['申请条件', ''],
type: 'textarea',
},
{
data: [
'提交材料',
`1.企业基础资料,营业执照、公司章程、财务报表等;
2.法定代表人(或实际控制人)基础材料,身份证件、户籍证明、婚姻状况证明等;
3.贷款用途资料,生产销售合同等;
4.有效抵押物的权属证明或担保证明等材料。`,
],
type: 'textarea',
},
];
|
<filename>src/main/Timer.java
package main;
/**
 * Fixed-rate frame timer. {@link #updateRequested()} polls the monotonic
 * clock and reports when at least one frame interval has elapsed since the
 * last accepted update.
 */
public class Timer {

    /** Seconds-to-nanoseconds conversion factor. */
    private static final int TO_NANOS = (int)1E9;
    /** Nanoseconds-to-seconds conversion factor. */
    private static final double TO_SECONDS = 1E-9;

    private long invFps;  // target frame interval, in nanoseconds
    private long time;    // timestamp of the last accepted update; 0 until first poll
    private double delta; // seconds between the last two accepted updates

    public Timer(int fps) {
        invFps = TO_NANOS / fps;
    }

    /**
     * Returns true when a full frame interval has elapsed. The very first
     * call only records a baseline timestamp and returns false.
     */
    public boolean updateRequested() {
        if (time == 0L) {
            time = System.nanoTime();
            return false;
        }
        long now = System.nanoTime();
        long elapsed = now - time;
        if (elapsed <= invFps) {
            return false;
        }
        time = now;
        delta = elapsed * TO_SECONDS;
        return true;
    }

    public void setFPS(int fps) {
        this.invFps = TO_NANOS / fps;
    }

    /** Seconds between the two most recent accepted updates. */
    public double getDelta() {
        return delta;
    }
}
|
#!/bin/bash
#Colours
# Bold ANSI colour escape sequences used by all output helpers below;
# endColour resets the terminal back to defaults.
greenColour="\e[0;32m\033[1m"
endColour="\033[0m\e[0m"
redColour="\e[0;31m\033[1m"
blueColour="\e[0;34m\033[1m"
yellowColour="\e[0;33m\033[1m"
purpleColour="\e[0;35m\033[1m"
turquoiseColour="\e[0;36m\033[1m"
grayColour="\e[0;37m\033[1m"
function banner(){
echo -e " ${redColour}##### ###### ###### ${endColour} "
echo -e "${yellowColour} ## # # ##### #### ${endColour} ${redColour}# # # # # # ${endColour}"
echo -e "${yellowColour} # # # # # # #${endColour} ${redColour}# # # # # ${endColour}"
echo -e "${yellowColour} # # # # # # #${endColour} ${redColour}###### ###### # ${endColour}"
echo -e "${yellowColour} ###### # # # # #${endColour} ${redColour}# # # # ${endColour}"
echo -e "${yellowColour} # # # # # # #${endColour} ${redColour}# # # # # ${endColour}"
echo -e "${yellowColour} # # #### # #### ${endColour} ${redColour}# # # ##### ${endColour}"
}
# Enumerate every domain user's name and description over an authenticated
# RPC session. Prompts for username, password and target IP.
function auth_descripion(){
    #Get decription of all users using valid RPC credentials
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}username${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read auth_username
    echo -e "${endColour}"
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}password${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read auth_password
    echo -e "${endColour}"
    clear
    # BUG FIX: this prompt previously said "username" although it reads the target IP
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}ip address${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read auth_ip
    echo -e "${endColour}"
    clear
    # Quote expansions so credentials containing spaces do not split the command line
    rpcclient -U "$auth_username%$auth_password" "$auth_ip" -c "enumdomusers" | sed 's/user:// ' | tr -d '[]' | awk '{print $1}' > /tmp/user
    for authenticated_rid in $(cat /tmp/user); do
        echo -e "\t---------------------------------------------" && rpcclient -U "$auth_username%$auth_password" "$auth_ip" -c "queryuser $authenticated_rid" | grep -E "Description|User Name" && echo -e "\t---------------------------------------------\n"
    done
    rm -r /tmp/user
}
# Enumerate Domain Admins group members over a null (unauthenticated) session
function unauth_domain_admins(){
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}ip address${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read unauth_ip
    echo -e "${endColour}"
    clear
    # Collect the RID(s) of groups whose name contains 'domain admin'
    rpcclient -U "" $unauth_ip -N -c 'enumdomgroups' | grep -i 'domain admin' | grep -oP '\[.*?]' | grep '0x' | tr -d '[]' > /tmp/rid
    for unauth_domain_users_rid in $(cat /tmp/rid); do
    rpcclient -U "" $unauth_ip -N -c "querygroupmem $unauth_domain_users_rid" | sed 's/rid://' | sed 's/attr:// ' | awk '{print $1}' | tr -d '[]' >> /tmp/rid_users ;
    done
    echo -e "[*] Domain Users:\n"
    # Resolve each member RID to its account name
    for rid_to_user_unauth in $(cat /tmp/rid_users); do
    rpcclient -U "" $unauth_ip -N -c "queryuser $rid_to_user_unauth" | grep "User Name" | sed 's/User Name//' | tr -d ':' | tr -d ' ' | tr -d '\t';
    done
    rm -r /tmp/rid && rm -r /tmp/rid_users
}
function auth_domain_admins(){
    #Get all domain admins using valid RPC credentials
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}username${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read domain_auth_username
    echo -e "${endColour}"
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}password${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read domain_auth_password
    echo -e "${endColour}"
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}ip address${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read domain_auth_ip
    echo -e "${endColour}"
    clear
    # RID(s) of groups whose name contains 'domain admin'
    rpcclient -U $domain_auth_username%$domain_auth_password $domain_auth_ip -c 'enumdomgroups' | grep -i 'domain admin' | grep -oP '\[.*?]' | grep '0x' | tr -d '[]' > /tmp/rid
    for domain_users_rid in $(cat /tmp/rid); do
    rpcclient -U $domain_auth_username%$domain_auth_password $domain_auth_ip -c "querygroupmem $domain_users_rid" | sed 's/rid://' | sed 's/attr:// ' | awk '{print $1}' | tr -d '[]' >> /tmp/rid_users ;
    done
    echo -e "[*] Domain Users:\n"
    # Resolve each member RID to its account name
    for rid_to_user in $(cat /tmp/rid_users); do
    rpcclient -U $domain_auth_username%$domain_auth_password $domain_auth_ip -c "queryuser $rid_to_user" | grep "User Name" | sed 's/User Name//' | tr -d ':' | tr -d ' ' | tr -d '\t';
    done
    rm -r /tmp/rid && rm -r /tmp/rid_users
}
# Enumerate user names and descriptions over a null (unauthenticated) session
function null_session_description(){
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}ip address${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read null_session_desc_ip
    echo -e "${endColour}"
    clear
    rpcclient -U "" $null_session_desc_ip -N -c "enumdomusers" | sed 's/user:// ' | tr -d '[' | tr -d ']' | awk '{print $1}' > /tmp/user
    for authenticated_rid in $(cat /tmp/user);do
    echo -e "\t---------------------------------------------" && rpcclient -U "" $null_session_desc_ip -N -c "queryuser $authenticated_rid" | grep -E "Description|User Name" && echo -e "\t---------------------------------------------\n";
    done
    rm -r /tmp/user
}
# List domain group names over a null (unauthenticated) session
function enumerate_groups_null(){
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}ip address${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read enum_groups_unauth
    echo -e "${endColour}"
    clear
    # Keep bracketed group names, dropping the bracketed 0x RIDs
    rpcclient -U "" $enum_groups_unauth -N -c 'enumdomgroups' | grep -oP '\[.*?\]' | grep -v 0x | tr -d '[]'
}
# List domain group names using valid RPC credentials
function enumerate_groups_auth(){
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}username${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read auth_groups_user
    echo -e "${endColour}"
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}password${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read auth_groups_pass
    echo -e "${endColour}"
    clear
    echo -e "${greenColour}┌─[${endColour}${redColour}autoRPC${endColour}${greenColour}]──[${endColour}${redColour}~${endColour}${greenColour}]─[${endColour}${blueColour}ip address${endColour}${endColour}${greenColour}]${endColour}${grayColour}:${endColour}"
    echo -ne "${greenColour}└─────► " && read auth_groups_ip
    echo -e "${endColour}"
    clear
    # Keep bracketed group names, dropping the bracketed 0x RIDs
    rpcclient -U $auth_groups_user%$auth_groups_pass $auth_groups_ip -c 'enumdomgroups' | grep -oP '\[.*?\]' | grep -v 0x | tr -d '[]'
}
# Dispatch on the first CLI argument. A 'case' handles an absent/empty
# argument safely -- the original unquoted '[ $1 == ... 2>/dev/null ]' tests
# produced (suppressed) errors when no argument was given.
case "$1" in
    -h)
        # banner
        echo -e "\n\t${blueColour}[${endColour}${yellowColour}*${endColour}${blueColour}]${endColour} ${grayColour}Help Panel:${endColour}\n"
        echo -e "\t${blueColour}[${endColour}${yellowColour}*${endColour}${blueColour}]${endColour} ${grayColour}Usage:${endColour}\n"
        # BUG FIX: '${endColourp}' was an undefined variable (typo for '${endColour}')
        echo -e "\t  ${yellowColour}bash autoRPC.sh${endColour} ${blueColour}[${endColour}${redColour}options${endColour}${blueColour}]${endColour}\n"
        echo -e "\t${blueColour}[${endColour}${yellowColour}*${endColour}${blueColour}]${endColour} ${grayColour}Recognized Options:${endColour}"
        echo -e "\n\t  ${redColour}-auth-desc${endColour}\t\t ${grayColour}:${endColour} ${yellowColour}Enumerate RPC Username and Description (Using Valid Credentials).${endColour}"
        echo -e "\t  ${redColour}-null-session-desc${endColour}\t ${grayColour}:${endColour} ${yellowColour}Enumerate RPC with null session (No Credentials).${endColour}"
        echo -e "\t  ${redColour}-auth-domain-admins${endColour}\t ${grayColour}:${endColour} ${yellowColour}Enumerate RPC domain admins (Using Valid Credentials).${endColour}"
        echo -e "\t  ${redColour}-unauth-domain-admins${endColour} ${grayColour}:${endColour}${yellowColour} Enumerate RPC domain admins (No credentials).${endColour}"
        echo -e "\t  ${redColour}-enumerate-groups-auth${endColour} ${grayColour}:${endColour} ${yellowColour}Enumerate RPC groups (Using Valid Credentials).${endColour}"
        echo -e "\t  ${redColour}-enumerate-groups-null${endColour} ${grayColour}:${endColour}${yellowColour} Enumerate RPC domain groups (No credentials).${endColour}"
        ;;
    -auth-desc) auth_descripion ;;
    -null-session-desc) null_session_description ;;
    -auth-domain-admins) auth_domain_admins ;;
    -unauth-domain-admins) unauth_domain_admins ;;
    -enumerate-groups-null) enumerate_groups_null ;;
    -enumerate-groups-auth) enumerate_groups_auth ;;
    *)
        echo -e "\n\t${blueColour}[${endColour}${redColour}*${endColour}${blueColour}]${endColour} ${yellowColour}bash autoRPC.sh -h${endColour} "
        ;;
esac
|
#development cas install script
# Refresh (or freshly clone) the rubycas-server working copy, then rebuild
# the navionics/cas-dev image and replace the running container.
DIRECTORY=/home/core/cas-dev/sinatra/
if [ -d "$DIRECTORY" ]; then
    cd /home/core/cas-dev/sinatra/
    # Discard any local changes before pulling
    git reset --hard HEAD
    git clean -f
    git pull
else
    git clone https://github.com/Navionics/rubycas-server.git /home/core/cas-dev/sinatra
    cd /home/core/cas-dev/sinatra/
    git checkout development
fi
# cd in repository just to be sure
cd /home/core/cas-dev/sinatra/
cp ../Dockerfile /home/core/cas-dev/sinatra/
docker build -t navionics/cas-dev:V4 .
# Stop and remove the old container, then start a fresh one on port 8080
docker stop rubycas-dev
docker rm rubycas-dev
docker create --name rubycas-dev -p 8080:8080 navionics/cas-dev:V4
docker start rubycas-dev
#include <iostream>
#include <filesystem>
// Report whether a file-system entry exists at the given path.
bool fileExists(const std::string& filepath) {
    namespace fs = std::filesystem;
    return fs::exists(fs::path{filepath});
}
int main() {
    std::string filepath = "path_to_your_file"; // Replace with the actual file path
    // Pick the message up front, then print it with the path appended.
    const bool present = fileExists(filepath);
    const char* prefix = present ? "File exists at path: "
                                 : "File does not exist at path: ";
    std::cout << prefix << filepath << std::endl;
    return 0;
}
<reponame>jpasserin/harbieNodes
#include "../headers/_Utils.h"
#include "../headers/CurveMultiAttachNode.h"
#include <maya/MPlug.h>
#include <maya/MDataBlock.h>
#include <maya/MDataHandle.h>
#include <maya/MArrayDataBuilder.h>
#include <maya/MFnTypedAttribute.h>
#include <maya/MFnMatrixAttribute.h>
#include <maya/MFnUnitAttribute.h>
#include <maya/MFnEnumAttribute.h>
#include <maya/MFnCompoundAttribute.h>
#include <maya/MFnNurbsSurface.h>
#include <maya/MQuaternion.h>
#include <maya/MEulerRotation.h>
#include <maya/MMatrix.h>
#include <maya/MVector.h>
#include <maya/MPointArray.h>
#include <maya/MGlobal.h>
#include <cfloat>
#include <cmath>
// Unique Maya node type id for this plugin node
MTypeId CurveMultiAttach::id(0x001226F1);

// Input Attribute Handles
MObject CurveMultiAttach::curve;
MObject CurveMultiAttach::u;
MObject CurveMultiAttach::curveMatrix;
MObject CurveMultiAttach::parentInverse;
MObject CurveMultiAttach::upVector;
MObject CurveMultiAttach::attach;
MObject CurveMultiAttach::isLoop;
MObject CurveMultiAttach::length;
MObject CurveMultiAttach::slide;
MObject CurveMultiAttach::start;
MObject CurveMultiAttach::end;
MObject CurveMultiAttach::reverse;

// output Attribute Handles
MObject CurveMultiAttach::translate;
MObject CurveMultiAttach::translateX;
MObject CurveMultiAttach::translateY;
MObject CurveMultiAttach::translateZ;
MObject CurveMultiAttach::rotate;
MObject CurveMultiAttach::rotateX;
MObject CurveMultiAttach::rotateY;
MObject CurveMultiAttach::rotateZ;
MObject CurveMultiAttach::output;

CurveMultiAttach::CurveMultiAttach() {}
CurveMultiAttach::~CurveMultiAttach() {}

// Factory used by Maya's plugin registration to instantiate the node
void* CurveMultiAttach::creator() {
    return new CurveMultiAttach();
}
// Declare the node's input/output attributes and their dependency links.
MStatus CurveMultiAttach::initialize() {
    // attributes are writable, storable, readable, not keyable by default
    MStatus stat;
    MFnTypedAttribute tAttr;
    MFnNumericAttribute nAttr;
    MFnMatrixAttribute mAttr;
    MFnUnitAttribute uAttr;
    MFnEnumAttribute eAttr;
    MFnCompoundAttribute cAttr;

    // Inputs
    curve = tAttr.create("curve", "curve", MFnData::kNurbsCurve);
    stat = addAttribute(curve);

    // Array of normalized curve parameters, one per attached transform.
    // NOTE(review): the default -0.5 lies outside the declared [0, 1]
    // min/max range -- confirm whether this is intentional.
    u = nAttr.create("u", "u", MFnNumericData::kDouble, -0.5);
    nAttr.setKeyable(true);
    nAttr.setArray(true);
    nAttr.setMin(0.0);
    nAttr.setMax(1.0);
    nAttr.setUsesArrayDataBuilder(true);
    stat = addAttribute(u);

    curveMatrix = mAttr.create("curveMatrix", "curveMatrix");
    mAttr.setKeyable(true);
    stat = addAttribute(curveMatrix);

    parentInverse = mAttr.create("parentInverse", "parentInverse");
    mAttr.setKeyable(true);
    stat = addAttribute(parentInverse);

    upVector = nAttr.createPoint("upVector", "upVector");
    nAttr.setStorable(false);
    nAttr.setWritable(true);
    stat = addAttribute(upVector);

    // Attachment mode: by parameter, by arc-length percentage, or fixed length
    attach = eAttr.create("attach", "attach", 0);
    eAttr.addField("Parametric", 0);
    eAttr.addField("Percentage", 1);
    eAttr.addField("FixedLength", 2);
    eAttr.setKeyable(true);
    stat = addAttribute(attach);

    isLoop = nAttr.create("isLoop", "isLoop", MFnNumericData::kBoolean, false);
    nAttr.setKeyable(true);
    stat = addAttribute(isLoop);

    length = nAttr.create("length", "length", MFnNumericData::kDouble, 1.0);
    nAttr.setKeyable(true);
    nAttr.setMin(0.0001);
    stat = addAttribute(length);

    slide = nAttr.create("slide", "slide", MFnNumericData::kDouble, 0.0);
    nAttr.setKeyable(true);
    stat = addAttribute(slide);

    start = nAttr.create("start", "start", MFnNumericData::kDouble, 0.0);
    nAttr.setKeyable(true);
    nAttr.setMin(0);
    nAttr.setMax(1);
    stat = addAttribute(start);

    end = nAttr.create("end", "end", MFnNumericData::kDouble, 1.0);
    nAttr.setKeyable(true);
    nAttr.setMin(0);
    nAttr.setMax(1);
    stat = addAttribute(end);

    reverse = nAttr.create("reverse", "reverse", MFnNumericData::kDouble, 0.0);
    nAttr.setKeyable(true);
    nAttr.setMin(0);
    nAttr.setMax(1);
    stat = addAttribute(reverse);

    // Outputs: per-element translate/rotate packed in a compound array
    translateX = uAttr.create("translateX", "translateX", MFnUnitAttribute::kDistance);
    uAttr.setWritable(false);
    uAttr.setStorable(true);
    translateY = uAttr.create("translateY", "translateY", MFnUnitAttribute::kDistance);
    uAttr.setWritable(false);
    uAttr.setStorable(true);
    translateZ = uAttr.create("translateZ", "translateZ", MFnUnitAttribute::kDistance);
    uAttr.setWritable(false);
    uAttr.setStorable(true);
    translate = nAttr.create("translate", "translate", translateX, translateY, translateZ);
    nAttr.setWritable(false);

    rotateX = uAttr.create("rotateX", "rotateX", MFnUnitAttribute::kAngle);
    uAttr.setWritable(false);
    uAttr.setStorable(true);
    rotateY = uAttr.create("rotateY", "rotateY", MFnUnitAttribute::kAngle);
    uAttr.setWritable(false);
    uAttr.setStorable(true);
    rotateZ = uAttr.create("rotateZ", "rotateZ", MFnUnitAttribute::kAngle);
    uAttr.setWritable(false);
    uAttr.setStorable(true);
    rotate = nAttr.create("rotate", "rotate", rotateX, rotateY, rotateZ);
    nAttr.setWritable(false);

    output = cAttr.create("output", "output");
    cAttr.setWritable(false);
    cAttr.setArray(true);
    cAttr.addChild(translate);
    cAttr.addChild(rotate);
    cAttr.setUsesArrayDataBuilder(true);
    stat = addAttribute(output);

    // Connect: every input dirties both translate and rotate outputs
    stat = attributeAffects(curve, translate);
    stat = attributeAffects(u, translate);
    stat = attributeAffects(curveMatrix, translate);
    stat = attributeAffects(parentInverse, translate);
    stat = attributeAffects(upVector, translate);
    stat = attributeAffects(attach, translate);
    stat = attributeAffects(isLoop, translate);
    stat = attributeAffects(length, translate);
    stat = attributeAffects(slide, translate);
    stat = attributeAffects(start, translate);
    stat = attributeAffects(end, translate);
    stat = attributeAffects(reverse, translate);
    stat = attributeAffects(curve, rotate);
    stat = attributeAffects(u, rotate);
    stat = attributeAffects(curveMatrix, rotate);
    stat = attributeAffects(parentInverse, rotate);
    stat = attributeAffects(upVector, rotate);
    stat = attributeAffects(attach, rotate);
    stat = attributeAffects(isLoop, rotate);
    stat = attributeAffects(length, rotate);
    stat = attributeAffects(slide, rotate);
    stat = attributeAffects(start, rotate);
    stat = attributeAffects(end, rotate);
    stat = attributeAffects(reverse, rotate);
    return MS::kSuccess;
}
// Evaluate the node: for each input u parameter, compute a transform on the
// curve and write it to the corresponding element of the 'output' array.
MStatus CurveMultiAttach::compute(const MPlug& plug, MDataBlock& dataBlock) {
    MStatus stat;
    MDataHandle curveHandle = dataBlock.inputValue(curve);
    MDataHandle curveMatrixHandle = dataBlock.inputValue(curveMatrix);
    MDataHandle parentInverseHandle = dataBlock.inputValue(parentInverse);
    MDataHandle upVectorHandle = dataBlock.inputValue(upVector);
    MDataHandle attachHandle = dataBlock.inputValue(attach);
    MDataHandle isLoopHandle = dataBlock.inputValue(isLoop);
    MDataHandle lengthHandle = dataBlock.inputValue(length);
    MDataHandle slideHandle = dataBlock.inputValue(slide);
    MDataHandle startHandle = dataBlock.inputValue(start);
    MDataHandle endHandle = dataBlock.inputValue(end);
    MDataHandle reverseHandle = dataBlock.inputValue(reverse);
    MArrayDataHandle uArrayHandle = dataBlock.inputArrayValue(u);
    MArrayDataHandle outputArrayHandle = dataBlock.outputArrayValue(output);

    // Inputs.
    // NOTE: these locals intentionally shadow the static MObject attribute
    // handles of the same names.
    MFnNurbsCurve curve = curveHandle.asNurbsCurve();
    MMatrix curveMatrix = curveMatrixHandle.asMatrix();
    MMatrix parentInverse = parentInverseHandle.asMatrix();
    MVector upVector = upVectorHandle.asFloatVector();
    short attach = attachHandle.asShort();
    bool isLoop = isLoopHandle.asBool();
    double length = lengthHandle.asDouble();
    double slide = slideHandle.asDouble();
    double start = startHandle.asDouble();
    double end = endHandle.asDouble();
    double reverse = reverseHandle.asDouble();

    // U value: gather all array elements into a flat list
    unsigned int uCount = uArrayHandle.elementCount();
    MDoubleArray uParams;
    MDataHandle uHandle;
    for(unsigned int i = 0; i<uCount; i++) {
        uHandle = uArrayHandle.inputValue(&stat);
        uParams.append(uHandle.asDouble());
        uArrayHandle.next();
    }

    // FixedLength mode scales parameters by requested length / curve length.
    // NOTE(review): 'ratio' is left uninitialized for the other modes --
    // rescaleParams is presumably expected to ignore it then; confirm.
    double ratio;
    if (attach == 2) ratio = length / curve.length();

    // Rescale the U Params depending on start/end/slide parameters
    rescaleParams(uParams, attach, start, end, slide, reverse, ratio, isLoop);

    // Process
    MMatrixArray outMatrix = getTransform(curve, uParams, upVector, (attach==0), curveMatrix);

    // Output: decompose each matrix into translate + euler rotation
    MArrayDataBuilder transBuilder(output, uCount, &stat);
    MDataHandle trHandle, tHandle, rHandle;
    MVector t;
    MEulerRotation r;
    MMatrix m;
    for (unsigned int i = 0; i<uCount; i++) {
        trHandle = transBuilder.addElement(i);
        tHandle = trHandle.child(translate);
        rHandle = trHandle.child(rotate);
        // Bring the world-space result into the target's parent space
        outMatrix[i] *= parentInverse;
        MTransformationMatrix tfm(outMatrix[i]);
        t = tfm.getTranslation(MSpace::kWorld);
        r = tfm.eulerRotation();
        tHandle.set(t.x, t.y, t.z);
        rHandle.set(r.x, r.y, r.z);
        trHandle.setClean();
    }
    stat = outputArrayHandle.set(transBuilder);
    stat = outputArrayHandle.setAllClean();
    return MS::kSuccess;
}
// Build one 4x4 transform per parameter: position on the curve with a frame
// made of the tangent, an up-based normal, and their cross product.
MMatrixArray CurveMultiAttach::getTransform(MFnNurbsCurve &curve, MDoubleArray &params, MVector &upVector,
        bool isParametric, MMatrix &matrix) {
    double currentLength, paramLength;
    if (!isParametric) // Percentage
        currentLength = curve.length();

    double startDomain, endDomain, rangeDomain;
    curve.getKnotDomain(startDomain, endDomain);
    rangeDomain = endDomain - startDomain;

    MMatrixArray output(params.length());
    MPoint point;
    MVector tangent, normal, cross;
    double param;
    for (unsigned int i=0; i < params.length(); i++){
        if (isParametric) // Parametric
            // Map the normalized input onto the knot domain
            param = params[i] * rangeDomain + startDomain;
        else {
            // Percentage: convert an arc-length fraction back to a parameter
            paramLength = params[i] * currentLength;
            param = curve.findParamFromLength(paramLength);
        }
        curve.getPointAtParam(param, point, MSpace::kWorld);
        tangent = curve.tangent(param, MSpace::kWorld);
        point *= matrix;
        tangent *= matrix;
        // NOTE(review): upVector is treated as a world position here (normal
        // points from the sample toward it), not as a direction -- confirm.
        normal = upVector - MVector(point);
        normal.normalize();
        cross = tangent ^ normal;
        // Row-major axes: X = tangent, Y = normal, Z = cross, T = point
        double mData[4][4] = { tangent.x, tangent.y, tangent.z, 0.0,
                               normal.x, normal.y, normal.z, 0.0,
                               cross.x, cross.y, cross.z, 0.0,
                               point.x, point.y, point.z, 1.0 };
        MMatrix mat {mData};
        output[i] = mat;
    }
    return output;
}
|
#!/usr/bin/env bash
# Get timestamps of subtitle matching expressions.
#
# Usage: ffmpeg_get_subtitle_timestamps VIDEO_FILE EXPRESSION
#
# Extracts VIDEO_FILE's subtitles to VIDEO_FILE.srt on first use (via
# ffmpeg_extract_subtitles), then prints the start timestamp (HH:MM:SS.mmm)
# of every subtitle cue whose text matches the case-insensitive extended
# regex EXPRESSION.
ffmpeg_get_subtitle_timestamps() {
    : "${2?"Usage: ${FUNCNAME[0]} VIDEO_FILE EXPRESSION"}";
    # Keep working variables out of the caller's scope.
    local video_file expression subtitle_file;
    video_file="${1}";
    expression="${2}";
    subtitle_file="${video_file}.srt";
    if [ ! -f "${subtitle_file}" ]; then
        # Fix: `return -1` is invalid — return accepts only 0-255; use 1.
        ffmpeg_extract_subtitles "${video_file}" || return 1;
    fi;
    # An SRT cue looks like:
    #   42
    #   00:01:02,345 --> 00:01:04,000
    #   matching text...
    # -B 2 pulls in the timing line two lines above a matching text line; the
    # second grep isolates the start timestamp and sed swaps ',' for '.'.
    grep -iE -B 2 "${expression}" "${subtitle_file}" \
        | grep -oE '^[0-9]{2}:[0-9]{2}:[0-9]{2},[0-9]{3}' \
        | sed 's/,/./';
}
|
@staticmethod
def _create_oozie_db(instance, databases, instances):
    """Create the Oozie service database if it is missing, then grant access.

    NOTE(review): this reads ``MySQL.METASTORE_SPECS`` even though the method
    is named for Oozie — confirm whether an OOZIE_SPECS constant was intended.
    """
    required_db = MySQL.METASTORE_SPECS.db_name
    if required_db not in databases:
        MySQL._create_service_db(instance, MySQL.METASTORE_SPECS)
    # Access is (re)granted regardless of whether the DB already existed.
    MySQL._grant_access(instance, MySQL.METASTORE_SPECS, instances)
# Lynx
# Build and install the Lynx text-mode browser (2.8.9rel.1), LFS-style.
# NOTE(review): assumes $PATCH_LYNX is set by the surrounding build scripts
# and that the source tree is the current directory — confirm in context.
patch -Np1 -i ../$(basename $PATCH_LYNX)
# Configure with wide-curses screen handling, SSL, and compressed-transfer
# support; docs go under the versioned /usr/share/doc directory.
./configure --prefix=/usr \
            --sysconfdir=/etc/lynx \
            --datadir=/usr/share/doc/lynx-2.8.9rel.1 \
            --with-zlib \
            --with-bzlib \
            --with-ssl \
            --with-screen=ncursesw \
            --enable-locale-charset
make
# install-full also installs the help files, not just the binary.
make install-full
# Normalize group ownership of the installed documentation.
chgrp -v -R root /usr/share/doc/lynx-2.8.9rel.1/lynx_doc
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.geometry.euclidean.threed;
import java.util.List;
import org.apache.commons.geometry.core.partitioning.BoundaryList;
/** A {@link BoundarySource3D} backed directly by a list of boundaries.
 * The list handed to the constructor is used as-is — it is never copied —
 * so thread safety and immutability are inherited from that list and from
 * how it is used elsewhere. The list cannot be modified through this class.
 */
public class BoundaryList3D extends BoundaryList<Vector3D, PlaneConvexSubset>
    implements BoundarySource3D {

    /** Create an instance wrapping the given boundary list directly (no copy).
     * @param boundaries boundaries backing this instance
     */
    public BoundaryList3D(final List<? extends PlaneConvexSubset> boundaries) {
        super(boundaries);
    }

    /** This type is already a boundary list, so simply return this instance.
     * @return this instance
     */
    @Override
    public BoundaryList3D toList() {
        return this;
    }
}
|
package com.pi4j.mvc.multicontrollerapp.view.pui;
import com.pi4j.mvc.multicontrollerapp.controller.ApplicationController;
import com.pi4j.mvc.multicontrollerapp.model.ExampleModel;
import com.pi4j.mvc.multicontrollerapp.view.pui.components.ButtonComponent;
import com.pi4j.mvc.util.Pi4JContext;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/** Tests driving the physical UI (PUI) through the application controller. */
public class ExamplePUITest {

    @Test
    void testLED() {
        // given: a model, its controller, and a PUI on a mocked Pi4J context
        ExampleModel exampleModel = new ExampleModel();
        ApplicationController appController = new ApplicationController(exampleModel);
        ExamplePUI examplePui = new ExamplePUI(appController, Pi4JContext.createMockContext());

        // when: switch the LED on and wait until all events are processed
        appController.setLedGlows(true);
        appController.awaitCompletion();
        examplePui.awaitCompletion();

        // then
        assertTrue(examplePui.led.glows());

        // when: switch the LED off again
        appController.setLedGlows(false);
        appController.awaitCompletion();
        examplePui.awaitCompletion();

        // then
        assertFalse(examplePui.led.glows());
    }

    @Test
    void testButton() {
        // given
        ExampleModel exampleModel = new ExampleModel();
        ApplicationController appController = new ApplicationController(exampleModel);
        ExamplePUI examplePui = new ExamplePUI(appController, Pi4JContext.createMockContext());
        int counterBefore = exampleModel.counter.getValue();

        // when: simulate releasing the hardware button
        examplePui.button.dispatchSimpleEvents(ButtonComponent.ButtonState.UP);
        appController.awaitCompletion();

        // then: a button-up event decrements the counter by one
        assertEquals(counterBefore - 1, exampleModel.counter.getValue());
    }
}
|
/*
* Copyright 2015 Samsung Electronics All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.oic.simulator.server;
import java.util.Map;
import org.oic.simulator.AttributeValue;
import org.oic.simulator.InvalidArgsException;
import org.oic.simulator.SimulatorException;
import org.oic.simulator.SimulatorResourceAttribute;
/**
 * Represents a simulated single-type (non-collection) resource. Provides the
 * single-resource specific operations: inspecting and manipulating the
 * resource model by adding/removing attributes, and updating attribute values
 * either manually or via automatic update ("automation") sessions. All
 * operations delegate to the native simulator implementation.
 */
public final class SimulatorSingleResource extends SimulatorResource {

    /**
     * Returns the named attribute of this resource.
     *
     * @param attrName name of the attribute.
     * @return the {@link SimulatorResourceAttribute}, or null if the resource
     *         has no attribute with this name.
     * @throws InvalidArgsException if the attribute name is invalid.
     * @throws SimulatorException if the resource model is not found or for
     *         some general errors.
     */
    public SimulatorResourceAttribute getAttribute(String attrName)
            throws InvalidArgsException, SimulatorException {
        return nativeGetAttribute(attrName);
    }

    /**
     * Returns all attributes of the resource.
     *
     * @return map keyed by attribute name, with the corresponding
     *         {@link SimulatorResourceAttribute} as value.
     * @throws InvalidArgsException if the attribute name is invalid.
     * @throws SimulatorException if the resource model is not found or for
     *         some general errors.
     */
    public Map<String, SimulatorResourceAttribute> getAttributes()
            throws InvalidArgsException, SimulatorException {
        return nativeGetAttributes();
    }

    /**
     * Adds an attribute to the resource's representation model. Observers are
     * notified on success.
     *
     * @param attribute attribute to add to the representation model.
     * @return true if the attribute was added properly.
     * @throws InvalidArgsException on invalid input.
     * @throws SimulatorException for other errors.
     */
    public boolean addAttribute(SimulatorResourceAttribute attribute)
            throws InvalidArgsException, SimulatorException {
        return nativeAddAttribute(attribute);
    }

    /**
     * Removes an attribute from the simulated resource. Observers are
     * notified on success.
     *
     * @param attrName name of the attribute to delete.
     * @return true if the attribute was removed properly.
     * @throws InvalidArgsException on invalid input.
     * @throws SimulatorException for other errors.
     */
    public boolean removeAttribute(String attrName)
            throws InvalidArgsException, SimulatorException {
        return nativeRemoveAttribute(attrName);
    }

    /**
     * Updates the value of an attribute. Observers are notified on success.
     *
     * @param attrName name of the attribute.
     * @param value new value for the attribute.
     * @return true if the attribute's value was updated properly.
     * @throws InvalidArgsException on invalid input.
     * @throws SimulatorException for other errors.
     */
    public boolean updateAttribute(String attrName, AttributeValue value)
            throws InvalidArgsException, SimulatorException {
        return nativeUpdateAttribute(attrName, value);
    }

    /**
     * Starts resource-level automation: all possible values for all
     * attributes are applied sequentially and automatically.
     *
     * @param type {@link AutoUpdateType} — one-time or recursive automation.
     * @param interval interval time in milliseconds.
     * @param listener listener notified when the automation ends.
     * @return automation ID that can be used to stop the automation.
     * @throws InvalidArgsException on invalid input.
     * @throws SimulatorException for other errors.
     */
    public int startResourceUpdation(AutoUpdateType type, int interval,
            AutoUpdateListener listener) throws InvalidArgsException,
            SimulatorException {
        return nativeStartResourceUpdation(type, interval, listener);
    }

    /**
     * Starts attribute-level automation: all possible values for the given
     * attribute are applied sequentially and automatically.
     *
     * @param attrName name of the attribute to automate.
     * @param type {@link AutoUpdateType} — one-time or recursive automation.
     * @param interval interval time in milliseconds.
     * @param listener listener notified when the automation ends.
     * @return automation ID that can be used to stop the automation.
     * @throws InvalidArgsException on invalid input.
     * @throws SimulatorException for other errors.
     */
    public int startAttributeUpdation(String attrName, AutoUpdateType type,
            int interval, AutoUpdateListener listener)
            throws InvalidArgsException, SimulatorException {
        return nativeStartAttributeUpdation(attrName, type, interval, listener);
    }

    /**
     * Stops a running automation.
     *
     * @param id ID of the automation to stop.
     * @throws SimulatorException for general errors.
     */
    public void stopUpdation(int id) throws SimulatorException {
        nativeStopUpdation(id);
    }

    // Instances are created only from native code with a native handle.
    private SimulatorSingleResource(long nativeHandle) {
        mNativeHandle = nativeHandle;
    }

    private native SimulatorResourceAttribute nativeGetAttribute(String attrName);

    private native Map<String, SimulatorResourceAttribute> nativeGetAttributes();

    private native boolean nativeAddAttribute(
            SimulatorResourceAttribute attribute);

    private native boolean nativeRemoveAttribute(String attrName);

    private native boolean nativeUpdateAttribute(String attrName,
            AttributeValue value);

    private native int nativeStartResourceUpdation(AutoUpdateType type,
            int interval, AutoUpdateListener listener);

    private native int nativeStartAttributeUpdation(String attrName,
            AutoUpdateType type, int interval, AutoUpdateListener listener);

    private native void nativeStopUpdation(int id);
}
|
<reponame>nanshuii/TurnTable
//
// LENSettingViewController.h
// TurnTable
//
// Created by 林南水 on 2019/7/22.
// Copyright © 2019 ledon. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
/// Settings screen view controller for the TurnTable app.
@interface LENSettingViewController : UIViewController
/// Switch wired up from Interface Builder.
/// NOTE(review): presumably toggles a "record" preference — confirm against
/// the implementation file.
@property (weak, nonatomic) IBOutlet UISwitch *recordSwitch;
@end
NS_ASSUME_NONNULL_END
|
<filename>tests/test-helpers.js
const nock = require('nock')
const issueAssignedPayload = require('./test-fixtures/issues.assigned.json')
const commentCreatedPayload = require('./test-fixtures/issue_comment.created.json')
const myProbotApp = require('../src/probot')
const { Probot, ProbotOctokit } = require('probot')
function issueAssignedWithLabelsPayload (...labels) {
return payloadWithLabels(issueAssignedPayload, labels)
}
function commentCreatedWithLabelsPayload (...labels) {
return payloadWithLabels(commentCreatedPayload, labels)
}
function payloadWithLabels (payload, labels) {
const issueCopy = JSON.parse(JSON.stringify(payload))
labels.forEach(l => issueCopy.issue.labels.push({ name: l }))
return issueCopy
}
function privateOrganizationRepoPayload (payload) {
const payloadCopy = JSON.parse(JSON.stringify(payload))
payloadCopy.repository.private = true
payloadCopy.repository.owner.type = 'Organization'
return payloadCopy
}
// Fresh nock scope against the GitHub REST API.
function githubApi () {
  return nock('https://api.github.com')
}

// No repo-level or org-level config file exists (yml or yaml).
function nockEmptyConfig () {
  githubApi()
    .persist()
    .get('/repos/robvanderleek/create-issue-branch/contents/.github%2Fissue-branch.yml')
    .reply(404)
    .get('/repos/robvanderleek/.github/contents/.github%2Fissue-branch.yml')
    .reply(404)
    .get('/repos/robvanderleek/create-issue-branch/contents/.github%2Fissue-branch.yaml')
    .reply(404)
    .get('/repos/robvanderleek/.github/contents/.github%2Fissue-branch.yaml')
    .reply(404)
}

// Repo-level config file exists with the given YAML content.
function nockConfig (yamlConfig) {
  githubApi()
    .persist()
    .get('/repos/robvanderleek/create-issue-branch/contents/.github%2Fissue-branch.yml')
    .reply(200, yamlConfig)
}

// Only the org-level (global) config file exists.
function nockGlobalConfig (yamlConfig) {
  githubApi()
    .persist()
    .get('/repos/robvanderleek/create-issue-branch/contents/.github%2Fissue-branch.yml')
    .reply(404)
    .get('/repos/robvanderleek/.github/contents/.github%2Fissue-branch.yml')
    .reply(200, yamlConfig)
}

function nockInstallation (installation) {
  githubApi()
    .persist()
    .get('/users/robvanderleek/installation')
    .reply(200, installation)
}

function nockMarketplacePlan (plan) {
  githubApi()
    .persist()
    .get('/marketplace_listing/accounts/5324924')
    .reply(200, plan)
}

function nockExistingBranch (name, sha) {
  githubApi()
    .get(`/repos/robvanderleek/create-issue-branch/git/ref/heads%2F${encodeURIComponent(name)}`)
    .reply(200, { object: { sha: sha } })
}

function nockNonExistingBranch (name) {
  githubApi()
    .get(`/repos/robvanderleek/create-issue-branch/git/ref/heads%2F${encodeURIComponent(name)}`)
    .reply(404)
}

function nockUpdateBranch (name) {
  githubApi()
    .patch(`/repos/robvanderleek/create-issue-branch/git/refs/heads%2F${name}`)
    .reply(200)
}

function nockCommentCreated () {
  githubApi()
    .post('/repos/robvanderleek/create-issue-branch/issues/1/comments')
    .reply(200)
}

function nockCommitTreeSha (sha, treeSha) {
  githubApi()
    .get(`/repos/robvanderleek/create-issue-branch/git/commits/${sha}`)
    .reply(200, { tree: { sha: treeSha } })
}

function nockCommit () {
  githubApi()
    .post('/repos/robvanderleek/create-issue-branch/git/commits')
    .reply(200)
}

function nockCreateBranch () {
  githubApi()
    .post('/repos/robvanderleek/create-issue-branch/git/refs')
    .reply(200)
}

function nockCreatePR () {
  githubApi()
    .post('/repos/robvanderleek/create-issue-branch/pulls')
    .reply(200, { number: 123 })
}

function nockIssueLabels () {
  githubApi()
    .post('/repos/robvanderleek/create-issue-branch/issues/123/labels')
    .reply(200)
}

function nockIssueAssignees () {
  githubApi()
    .post('/repos/robvanderleek/create-issue-branch/issues/123/assignees')
    .reply(200)
}
function getDefaultContext () {
return {
payload: {
repository: {
owner: {
login: 'robvanderleek'
}, //
name: 'create-issue-branch', //
default_branch: 'master'
}, //
issue: { number: 1, title: 'Hello world' }
}, //
octokit: {
pulls: {
create: () => {}
}, //
git: {
getCommit: () => ({ data: { tree: { sha: '1234abcd' } } }),
createCommit: () => ({ data: { sha: 'abcd1234' } }),
createRef: () => {},
updateRef: () => {}
}, //
issues: {
createComment: () => {}
}
}, //
issue: () => {}
}
}
// Block all real network traffic and log any request that slipped past the
// registered nock interceptors, to ease debugging of failing tests.
function initNock () {
  nock.disableNetConnect()
  nock.emitter.on('no match', req => {
    console.log(`No match: ${req.path}, method: ${req.method}, host: ${req.options.host}`)
  })
}

// Fresh Probot instance with the app loaded, throttling/retries disabled,
// a stubbed installation token, and a clean nock state.
function initProbot () {
  const probot = new Probot({
    appId: 1,
    githubToken: '<PASSWORD>', // Disable throttling & retrying requests for easier testing
    Octokit: ProbotOctokit.defaults({
      retry: { enabled: false },
      throttle: { enabled: false }
    })
  })
  const app = probot.load(myProbotApp)
  app.app = {
    getInstallationAccessToken: () => Promise.resolve('test')
  }
  nock.cleanAll()
  jest.setTimeout(10000)
  nockAccessToken()
  return probot
}

// Installation access-token endpoint used by initProbot().
function nockAccessToken () {
  nock('https://api.github.com')
    .post('/app/installations/1296032/access_tokens')
    .reply(200, { token: 'test' })
}
module.exports = {
issueAssignedWithLabelsPayload: issueAssignedWithLabelsPayload,
commentCreatedWithLabelsPayload: commentCreatedWithLabelsPayload,
privateOrganizationRepoPayload: privateOrganizationRepoPayload,
nockEmptyConfig: nockEmptyConfig,
nockConfig: nockConfig,
nockGlobalConfig: nockGlobalConfig,
nockInstallation: nockInstallation,
nockExistingBranch: nockExistingBranch,
nockNonExistingBranch: nockNonExistingBranch,
nockUpdateBranch: nockUpdateBranch,
nockCommitTreeSha: nockCommitTreeSha,
nockCommit: nockCommit,
nockCommentCreated: nockCommentCreated,
nockCreateBranch: nockCreateBranch,
nockCreatePR: nockCreatePR,
nockIssueLabels: nockIssueLabels,
nockIssueAssignees: nockIssueAssignees,
nockMarketplacePlan: nockMarketplacePlan,
getDefaultContext: getDefaultContext,
initNock: initNock,
initProbot: initProbot
}
|
<reponame>leaf-1993/tomcat
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.juli;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
/**
 * Micro-benchmark for {@code OneLineFormatter.addTimestamp} across timestamp
 * patterns with varying millisecond precision; prints the elapsed time per
 * pattern rather than asserting on it.
 */
@RunWith(Parameterized.class)
public class TestOneLineFormatterMillisPerformance {

    @Parameterized.Parameters(name = "{index}: format[{0}]")
    public static Collection<Object[]> parameters() {
        String[] patterns = {
                "dd-MMM-yyyy HH:mm:ss.SSS",
                "dd-MMM-yyyy HH:mm:ss.SS",
                "dd-MMM-yyyy HH:mm:ss.S",
                "dd-MMM-yyyy HH:mm:ss",
                "dd-MMM-yyyy HH:mm:ss Z",
                "dd-MMM-yyyy HH:mm:ss.SSSZ",
                "dd-MMM-yyyy HH:mm:ss.SSZ",
                "dd-MMM-yyyy HH:mm:ss.SZ",
                "SSS dd-MMM-yyyy HH:mm:ss",
                "SS dd-MMM-yyyy HH:mm:ss",
                "S dd-MMM-yyyy HH:mm:ss" };
        List<Object[]> parameterSets = new ArrayList<Object[]>();
        for (String pattern : patterns) {
            parameterSets.add(new String[] { pattern });
        }
        return parameterSets;
    }

    @Parameter(0)
    public String timestampFormat;

    @Test
    public void testMillisHandling() {
        OneLineFormatter formatter = new OneLineFormatter();
        formatter.setTimeFormat(timestampFormat);

        long fixedTimestamp = System.currentTimeMillis();
        StringBuilder out = new StringBuilder(64);

        long startNanos = System.nanoTime();
        // Reuse one buffer; only the formatting cost is measured.
        for (int i = 0; i < 10000000; i++) {
            out.setLength(0);
            formatter.addTimestamp(out, fixedTimestamp);
        }
        System.out.println("Format: [" + timestampFormat + "], Output: [" + out + "], Duration: [" + (System.nanoTime() - startNanos) + "] ns");
    }
}
|
#!/bin/bash
#
# install.sh
# iSstp
#
# Created by Zheng Shao on 2/26/15.
# Copyright (c) 2015 axot. All rights reserved.
#
# Installs the setuid "helper" binary located next to this script and drops
# an "installed" marker file. Requires sudo rights.
#
# Fix: the script uses the bash-only BASH_SOURCE array, so the shebang must
# invoke bash (the original "#!/bin/sh" breaks on POSIX shells like dash).

# Resolve the directory this script lives in, even when invoked via a
# relative path or through a symlinked CWD.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
cd "$DIR" || exit 1

# The helper must be root-owned and setuid (4755) to perform privileged work.
sudo chown root helper
sudo chmod 4755 helper
sudo touch installed
import * as React from 'react'
// import Story from '../fresco/fusion-story'
// No-op stand-in for the real fresco fusion Story class (its import above is
// commented out), so <Renderer> can run without the fresco runtime.
class Story {
  registerFactory (a, b) {}
  addEventListener (a, b) {}
  setData (a) {}
  remove (a) {}
}
const { useEffect, useRef } = React
// Renders either raw HTML content (type === 'html') or a fresco Story into a
// single container div, reporting lifecycle events through the callbacks:
// - onDrawn: content finished rendering
// - onEventTrigger: any event from an attached story child
// - onConfigured: called with the detected configuration (HTML attributes of
//   the first child, or the attached story child's config object)
const Renderer = ({
  type,
  content,
  onDrawn,
  onEventTrigger,
  onConfigured
}: {
  type: string
  content: Function
  onDrawn: Function
  onEventTrigger: Function
  onConfigured: (config: object) => void
}) => {
  const mainRef = useRef(null)
  // NOTE(review): this effect has no dependency array, so it re-runs after
  // every render (creating a new Story each time in the non-html branch),
  // and the html branch returns no cleanup. Confirm whether mount-once
  // semantics (deps []) were intended.
  useEffect(() => {
    if (type === 'html') {
      // Let the caller populate the container, then report the rendered
      // root element's attributes as the configuration.
      content(mainRef.current)
      onConfigured(
        Object.fromEntries(
          [...mainRef.current.children[0].attributes].map(({ name, value }) => [
            name,
            value
          ])
        )
      )
      onDrawn()
    } else {
      let story = new Story()
      // Size the story to the container's current on-screen box.
      const { width, height } = mainRef.current.getBoundingClientRect()
      story.registerFactory('content', content)
      story.addEventListener('drawn', onDrawn)
      story.addEventListener(
        'childattached',
        ({
          data: { attachedChild }
        }: {
          data: { attachedChild }
        }) => {
          // Forward every event except those of the internal animation
          // manager, and surface the child's config to the caller.
          if (attachedChild.getType() !== 'animationManager') {
            attachedChild.addEventListener('*', onEventTrigger)
            onConfigured(attachedChild.config)
          }
        }
      )
      story.setData({
        id: 'main',
        availableWidth: width,
        availableHeight: height
      })
      // Tear the story down when the effect re-runs or the component unmounts.
      return () => story.remove({ instant: false })
    }
  })
  return <div ref={mainRef} className="main" id="main"></div>
}
export default Renderer
|
import pytest
class DeepSpeedPrecisionPlugin:
    """Holds the numeric precision and AMP backend used with DeepSpeed.

    Raises:
        ValueError: if ``precision`` is not one of 16, 32 or 64.
    """

    def __init__(self, precision, amp_type):
        # Record the settings first, then validate (mirrors the original
        # assignment-before-raise order).
        self.precision = precision
        self.amp_type = amp_type
        if precision not in (16, 32, 64):
            raise ValueError(f"Precision {precision} is not supported. `precision` must be one of 16, 32, or 64.")
def test_invalid_precision_with_deepspeed_precision():
    """An unsupported precision must raise ValueError.

    Bug fix: the original passed precision=64, which the plugin accepts
    (its valid set is {16, 32, 64}), so no ValueError was raised and
    ``pytest.raises`` failed. Use 8, which is genuinely rejected.
    """
    with pytest.raises(ValueError, match="is not supported. `precision` must be one of"):
        DeepSpeedPrecisionPlugin(precision=8, amp_type="native")
import nltk
from nltk.tokenize import word_tokenize
from nltk.probability import FreqDist
from nltk.corpus import stopwords
# Preprocess the data
# NOTE(review): requires the NLTK 'punkt' and 'stopwords' corpora to be
# downloaded beforehand (nltk.download), otherwise tokenization fails.
data = "This is a special offer just for you."
words = word_tokenize(data)
# Keep alphabetic tokens only, lower-cased.
words = [word.lower() for word in words if word.isalpha()]
# Drop English stop words ("is", "a", "for", ...).
words = [word for word in words if word not in stopwords.words('english')]
# Create the frequency distribution of all the words
fdist = FreqDist(words)
# Define the keywords for spam and ham
spam_words = ["offer", "special", "just", "you"]
ham_words = ["hello", "hi", "howdy"]
# Classify the text
# NOTE(review): when a text contains both spam and ham keywords, the final
# value depends on which matching word is iterated last (dict order), so a
# ham word can silently override an earlier spam hit. Confirm whether the
# intended rule was "any spam keyword present and no ham keyword".
is_spam = False
for word in fdist.keys():
    if word in spam_words:
        is_spam = True
    if word in ham_words:
        is_spam = False
print(is_spam) # True
def state_value_function(state, num_episodes, gamma, generate_episode, rewards):
    """Estimate V(state) with first-visit Monte Carlo policy evaluation.

    Args:
        state: State whose value estimate is returned.
        num_episodes: Number of episodes to sample.
        gamma: Discount factor.
        generate_episode: Zero-argument callable returning the list of states
            visited in one episode (following the policy being evaluated).
        rewards: Per-step rewards; rewards[t] is the reward at step t of every
            episode (assumed aligned with the episode length).

    Returns:
        Average first-visit return observed for ``state``, or 0.0 if the
        state was never visited (the original raised KeyError in that case).
    """
    returns = {}  # state -> list of first-visit returns across episodes

    for _ in range(num_episodes):
        episode_states = generate_episode()
        G = 0  # return accumulated while walking the episode backwards

        for t in range(len(episode_states) - 1, -1, -1):
            s = episode_states[t]
            G = gamma * G + rewards[t]
            # First-visit check: record G only at the EARLIEST occurrence of
            # s in the episode. Bug fix: the original tracked a "visited" set
            # while iterating backwards, which records the LAST visit's
            # return instead of the first-visit return.
            if s not in episode_states[:t]:
                returns.setdefault(s, []).append(G)

    # Average the recorded returns per state to estimate the value function.
    V = {s: sum(gs) / len(gs) for s, gs in returns.items()}
    return V.get(state, 0.0)
package gex.newsml.nitf;
import lombok.ToString;
import java.util.HashMap;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
/**
 * JAXB binding for the NITF {@code <tobject.subject>} element: a subject
 * classification carrying an IPR owner, a mandatory reference number, and
 * optional code/type/matter/detail qualifiers, plus any foreign attributes.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
@XmlRootElement(name = "tobject.subject")
@ToString
public class TobjectSubject {

    /** IPR owner of the subject vocabulary; treated as "IPTC" when unset. */
    @XmlAttribute(name = "tobject.subject.ipr")
    protected String tobjectSubjectIpr;

    /** Mandatory subject reference number (NMTOKEN). */
    @XmlAttribute(name = "tobject.subject.refnum", required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlSchemaType(name = "NMTOKEN")
    protected String tobjectSubjectRefnum;

    /** Optional subject code. */
    @XmlAttribute(name = "tobject.subject.code")
    protected String tobjectSubjectCode;

    /** Optional subject type. */
    @XmlAttribute(name = "tobject.subject.type")
    protected String tobjectSubjectType;

    /** Optional subject matter. */
    @XmlAttribute(name = "tobject.subject.matter")
    protected String tobjectSubjectMatter;

    /** Optional subject detail. */
    @XmlAttribute(name = "tobject.subject.detail")
    protected String tobjectSubjectDetail;

    /** Document-unique XML ID. */
    @XmlAttribute(name = "id")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;

    /** Attributes from other namespaces not bound to any typed property. */
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /** @return the subject IPR, or the schema default "IPTC" when unset */
    public String getTobjectSubjectIpr() {
        return tobjectSubjectIpr == null ? "IPTC" : tobjectSubjectIpr;
    }

    /** @param value the subject IPR */
    public void setTobjectSubjectIpr(String value) {
        this.tobjectSubjectIpr = value;
    }

    /** @return the mandatory subject reference number */
    public String getTobjectSubjectRefnum() {
        return tobjectSubjectRefnum;
    }

    /** @param value the subject reference number */
    public void setTobjectSubjectRefnum(String value) {
        this.tobjectSubjectRefnum = value;
    }

    /** @return the subject code, possibly null */
    public String getTobjectSubjectCode() {
        return tobjectSubjectCode;
    }

    /** @param value the subject code */
    public void setTobjectSubjectCode(String value) {
        this.tobjectSubjectCode = value;
    }

    /** @return the subject type, possibly null */
    public String getTobjectSubjectType() {
        return tobjectSubjectType;
    }

    /** @param value the subject type */
    public void setTobjectSubjectType(String value) {
        this.tobjectSubjectType = value;
    }

    /** @return the subject matter, possibly null */
    public String getTobjectSubjectMatter() {
        return tobjectSubjectMatter;
    }

    /** @param value the subject matter */
    public void setTobjectSubjectMatter(String value) {
        this.tobjectSubjectMatter = value;
    }

    /** @return the subject detail, possibly null */
    public String getTobjectSubjectDetail() {
        return tobjectSubjectDetail;
    }

    /** @param value the subject detail */
    public void setTobjectSubjectDetail(String value) {
        this.tobjectSubjectDetail = value;
    }

    /** @return the XML ID, possibly null */
    public String getId() {
        return id;
    }

    /** @param value the XML ID */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Attributes not bound to any typed property on this class, keyed by
     * qualified attribute name. The returned map is live: adding entries
     * adds attributes, which is why there is no setter.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }
}
|
<gh_stars>0
from django.db import models
class Resource(models.Model):
    """A resource identified by a code and a path."""
    code = models.CharField(max_length=255)
    path = models.CharField(max_length=255)
    description = models.CharField(max_length=255, blank=True, null=True)

    def __str__(self):
        return self.path


class PurchaseTypeControl(models.Model):
    """Purchase type control entry (code plus optional description)."""
    code = models.CharField(max_length=255)
    description = models.CharField(max_length=255, blank=True, null=True)

    def __str__(self):
        return self.code


class PlacingWay(models.Model):
    """Way of placing a purchase (referenced by Purchase.placing_way)."""
    code = models.CharField(max_length=255)
    name = models.CharField(max_length=255)
    date_added = models.DateTimeField(auto_now_add=True)   # set once on creation
    date_modified = models.DateTimeField(auto_now=True)    # refreshed on every save

    def __str__(self):
        return self.name
class Organization(models.Model):
    """An organization participating in purchases, keyed by its unique INN."""
    reg_num = models.CharField(max_length=255)
    cons_registry_num = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255, blank=True, null=True)
    full_name = models.TextField(blank=True, null=True)
    post_address = models.CharField(max_length=255, blank=True, null=True)
    fact_address = models.CharField(max_length=255, blank=True, null=True)
    inn = models.CharField(max_length=255, unique=True)  # unique identifier
    kpp = models.CharField(max_length=255, blank=True, null=True)
    okato = models.CharField(max_length=255, blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        # Bug fix: full_name is nullable, and __str__ must return a str
        # (returning None raises TypeError). Fall back to name, then reg_num.
        return self.full_name or self.name or self.reg_num
class FinanceSource(models.Model):
name = models.TextField()
date_added = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
def __str__(self):
return self.name
class Purchase(models.Model):
    """A single purchase, identified by its registry number."""
    object_info = models.TextField(blank=True, null=True)
    purchase_number = models.CharField(max_length=255, unique=True)  # natural key
    placing_way = models.ForeignKey(PlacingWay, on_delete=models.SET_NULL, blank=True, null=True)
    # NOTE(review): FloatField for monetary values is lossy; DecimalField would
    # be safer, but changing it requires a schema migration — left as-is.
    max_price = models.FloatField(blank=True, null=True)
    price = models.FloatField(blank=True, null=True)
    sign_date = models.DateField(blank=True, null=True)
    protocol_date = models.DateField(blank=True, null=True)
    direct_date = models.DateTimeField(blank=True, null=True)
    doc_publish_date = models.DateTimeField(blank=True, null=True)
    finance_source = models.ForeignKey(FinanceSource, on_delete=models.SET_NULL, blank=True, null=True)

    def __str__(self):
        return self.purchase_number
class OrganizationRole(models.Model):
    """Reference table: the role an organization plays in a purchase."""
    name = models.CharField(max_length=255)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.name
class OrganizationPurchase(models.Model):
    """Many-to-many link between an organization and a purchase, with a role."""
    organization = models.ForeignKey(Organization, on_delete=models.CASCADE, blank=True, null=True)
    purchase = models.ForeignKey(Purchase, on_delete=models.CASCADE, blank=True, null=True)
    organization_role = models.ForeignKey(OrganizationRole, on_delete=models.CASCADE, blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        # Bug fix: this model has no `name` field, so the old `return self.name`
        # raised AttributeError. Compose a label from the two linked objects.
        return '%s / %s' % (self.organization, self.purchase)
class OKPD(models.Model):
    """OKPD product/service classifier entry."""
    code = models.CharField(max_length=255)
    name = models.TextField(blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        # `name` is nullable and __str__ must return a str; fall back to code.
        return self.name or self.code
class OKPD2(models.Model):
    """OKPD2 product/service classifier entry (successor of OKPD)."""
    code = models.CharField(max_length=255)
    name = models.TextField(blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        # `name` is nullable and __str__ must return a str; fall back to code.
        return self.name or self.code
class OKEI(models.Model):
    """OKEI unit-of-measure classifier entry."""
    code = models.CharField(max_length=255)
    national_code = models.CharField(max_length=255)
    full_name = models.CharField(max_length=255)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        # Bug fix: the model has no `name` field, so the old `return self.name`
        # raised AttributeError. Use the unit's full name (non-nullable).
        return self.full_name
class PurchaseObject(models.Model):
    """A line item of a purchase, classified by OKPD/OKPD2 and measured in OKEI units."""
    okpd = models.ForeignKey(OKPD, on_delete=models.SET_NULL, blank=True, null=True)
    okpd2 = models.ForeignKey(OKPD2, on_delete=models.SET_NULL, blank=True, null=True)
    okei = models.ForeignKey(OKEI, on_delete=models.SET_NULL, blank=True, null=True)
    purchase = models.ForeignKey(Purchase, on_delete=models.CASCADE)  # items die with the purchase
    price = models.FloatField(blank=True, null=True)
    quantity = models.FloatField(blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)
class Archive(models.Model):
    """A downloaded archive belonging to a Resource (declared elsewhere in this app)."""
    resource = models.ForeignKey(Resource, on_delete=models.CASCADE, blank=True, null=True)
    name = models.CharField(max_length=255)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.name
class File(models.Model):
    """A single file extracted from an Archive, optionally linked to a purchase.

    `PurchaseTypeControl` is declared elsewhere in this app.
    """
    name = models.CharField(max_length=255)
    archive = models.ForeignKey(Archive, on_delete=models.CASCADE)
    purchase = models.ForeignKey(Purchase, on_delete=models.SET_NULL, blank=True, null=True)
    purchase_type_control = models.ForeignKey(PurchaseTypeControl, on_delete=models.SET_NULL, blank=True, null=True)
    date_added = models.DateTimeField(auto_now_add=True)
    date_modified = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.name
|
#!/bin/bash
# Download the WeCross demo release package from GitHub/CDN.
set -e
LANG=en_US.utf8
default_compatibility_version=v1.0.0-rc3 # update this every release
compatibility_version=    # set via -t, otherwise resolved to the latest release
LOG_INFO()
{
    # Print an informational message in green.
    local message="${1}"
    echo -e "\033[32m[INFO] ${message}\033[0m"
}
LOG_ERROR()
{
    # Print an error message in red.
    local message="${1}"
    echo -e "\033[31m[ERROR] ${message}\033[0m"
}
# Print usage information and exit 0. $1 is an optional leading message.
help()
{
    echo "$1"
    cat << EOF
Usage: Download WeCross demo
-t <tag name> [Optional] download demo from a given tag
-h call for help
e.g
bash $0
EOF
    exit 0
}
# Parse command-line options: -t <tag> pins a release tag, -h prints usage.
parse_command()
{
    while getopts "t:h" option;do
        # shellcheck disable=SC2220
        case ${option} in
        t)
            # Pin the release tag instead of querying the GitHub API.
            compatibility_version=$OPTARG
            ;;
        h) help;;
        esac
    done
}
# Resolve the release tag (CLI flag > GitHub latest > baked-in default) and
# fetch the demo tarball plus its checksum file. Exits early if ./demo/ exists.
download_demo()
{
    local github_url=https://github.com/WeBankFinTech/WeCross/releases/download/
    local cdn_url=https://www.fisco.com.cn/cdn/wecross/releases/download/
    local release_pkg=demo.tar.gz
    local release_pkg_checksum_file=demo.tar.gz.md5

    if [ -d ./demo/ ];then
        LOG_INFO "./demo/ exists"
        exit 0
    fi

    LOG_INFO "Checking latest release"
    if [ -z "${compatibility_version}" ];then
        compatibility_version=$(curl -s https://api.github.com/repos/WeBankFinTech/WeCross/releases/latest | grep "tag_name"|awk -F '\"' '{print $4}')
    fi
    if [ -z "${compatibility_version}" ];then
        # Could not reach the GitHub API; fall back to the baked-in default.
        compatibility_version=${default_compatibility_version}
    fi

    LOG_INFO "Latest release: ${compatibility_version}"
    # Quote every argument (shellcheck SC2086): an empty or space-containing
    # value would otherwise shift download_release_pkg's positional parameters.
    download_release_pkg "${github_url}" "${cdn_url}" "${compatibility_version}" "${release_pkg}" "${release_pkg_checksum_file}"
}
# Fetch <release_pkg> for <compatibility_version>, preferring the CDN and
# falling back to GitHub, then verify it against its md5 checksum file and
# unpack it in the current directory.
download_release_pkg()
{
    local github_url=${1}
    local cdn_url=${2}
    local compatibility_version=${3}
    local release_pkg=${4}
    local release_pkg_checksum_file=${5}

    # download checksum
    LOG_INFO "Try to Download checksum from ${cdn_url}/${compatibility_version}/${release_pkg_checksum_file}"
    if ! curl --fail -LO "${cdn_url}/${compatibility_version}/${release_pkg_checksum_file}"; then
        LOG_INFO "Download checksum from ${github_url}/${compatibility_version}/${release_pkg_checksum_file}"
        curl -LO "${github_url}/${compatibility_version}/${release_pkg_checksum_file}"
    fi
    if [ ! -e "${release_pkg_checksum_file}" ] || ! grep -q "${release_pkg}" "${release_pkg_checksum_file}"; then
        LOG_ERROR "Download checksum file error"
        exit 1
    fi

    # Bug fix: the old test piped `md5sum -c` into `echo $?`, which prints the
    # exit status of the command *before* the pipeline, so verification always
    # "passed". Use md5sum's own exit status directly instead.
    if [ -f "${release_pkg}" ] && md5sum -c "${release_pkg_checksum_file}" >/dev/null 2>&1; then
        LOG_INFO "Latest release ${release_pkg} exists."
    else
        LOG_INFO "Try to download from: ${cdn_url}/${compatibility_version}/${release_pkg}"
        if ! curl --fail -LO "${cdn_url}/${compatibility_version}/${release_pkg}"; then
            # If CDN failed, download from github release (-C - resumes).
            LOG_INFO "Download from: ${github_url}/${compatibility_version}/${release_pkg}"
            curl -C - -LO "${github_url}/${compatibility_version}/${release_pkg}"
        fi
        if ! md5sum -c "${release_pkg_checksum_file}" >/dev/null 2>&1; then
            LOG_ERROR "Download package error"
            rm -f "${release_pkg}"
            exit 1
        fi
    fi
    tar -zxf "${release_pkg}"
}
# Main flow: download (and unpack) the demo package.
main()
{
    download_demo
}
# Print follow-up instructions after a successful download.
print_result()
{
    LOG_INFO "Download completed. WeCross Demo is in: ./demo/"
    LOG_INFO "Please: \"cd ./demo/ \" and \"bash build.sh\" to build the demo."
}
# Entry point: parse CLI options, download the demo, report the result.
# "$@" is quoted (SC2086) so arguments containing spaces survive intact.
parse_command "$@"
main
print_result
"""Train and evaluate a logistic-regression churn model from customer_data.csv."""
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.model_selection import train_test_split

# Load data and one-hot encode categorical variables.
data = pd.read_csv('customer_data.csv')
data = pd.get_dummies(data)

# Hold out 20% of rows for evaluation; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(
    data.drop(columns='churn'),
    data['churn'],
    test_size=0.2,
    random_state=0,
)

# Fit the model. max_iter raised so the default lbfgs solver converges on
# wider one-hot feature matrices instead of warning and stopping early.
logreg = LogisticRegression(max_iter=1000)
logreg.fit(X_train, y_train)

# Evaluate on the held-out set.
y_pred = logreg.predict(X_test)
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
#include <algorithm>
#include <iostream>
#include <vector>
// Sort `arr` in place, ascending, using insertion sort.
// Stable; O(n^2) worst case, O(n) on already-sorted input.
void insertionSort(std::vector<int>& arr)
{
    // std::size_t indices avoid the signed/unsigned comparison warning the
    // old `int i < arr.size()` produced (and the UB it risks for huge sizes).
    for (std::size_t i = 1; i < arr.size(); i++)
    {
        int value = arr[i];
        std::size_t hole = i;
        // Shift larger elements one slot right until the insertion point
        // for `value` is found.
        while (hole > 0 && arr[hole - 1] > value)
        {
            arr[hole] = arr[hole - 1];
            hole--;
        }
        arr[hole] = value;
    }
}
// Demo driver: sort a small vector and print the elements space-separated.
int main()
{
    std::vector<int> values = { 3, 5, 2, 4, 1 };
    insertionSort(values);
    for (int value : values)
        std::cout << value << ' ';
    return 0;
}
<reponame>noijd/.dot
// Property-page editor: shows the selected object's editable properties in a
// separate XUL dialog (propertyDialog.xul). State is set up via install().
function PropertyPageEditor() {
}
// Wire this editor up to a canvas; the canvas keeps a back-reference so
// other components can reach the editor.
PropertyPageEditor.prototype.install = function (canvas) {
    this.canvas = canvas;
    this.canvas.propertyPageEditor = this;
    this.dialogShown = false;
};
// Callback from the property dialog once it becomes visible: attach the
// object queued by showAndAttach().
PropertyPageEditor.prototype.onDialogShown = function () {
    this.dialogShown = true;
    this.attach(this._nextTargetObject);
};
// Show the property dialog (opening it on first use) and attach the given
// object. While the dialog is still opening, the target is stashed in
// _nextTargetObject and picked up by onDialogShown().
PropertyPageEditor.prototype.showAndAttach = function (targetObject) {
    if (!this.dialogShown) {
        this._nextTargetObject = targetObject;
        this.propertyWindow = window.openDialog("chrome://pencil/content/propertyDialog.xul", "propertyEditor" + Util.getInstanceToken(), "chrome,dialog,alwaysRaised,dependent", this);
    } else {
        this.attach(targetObject);
    }
};
// Replace the currently edited object and refresh the dialog contents.
// Errors are surfaced to the user via alert() rather than swallowed.
PropertyPageEditor.prototype.attach = function (targetObject) {
    if (!this.propertyWindow) return;
    try {
        this.dettach();  // historic misspelling of "detach", kept for compatibility
        this.targetObject = targetObject;
        this.invalidate();
    } catch (e) { alert(e); }
};
// Editor types that render compactly; a group consisting only of these is
// kept on a single tab instead of being split (see invalidateData's N).
PropertyPageEditor.SMALL_EDITOR_TYPES = ["pfonteditor", "paligneditor", "pstrokeeditor", "pplaintexteditor", "pshadowstyleeditor", "penumeditor"];
// Rebuild this.groups/this.properties from the attached object's property
// groups. Each defined group is split into chunks of at most N properties
// per tab: all of them when every property uses a "small" editor, three
// otherwise. Properties with no registered type editor are dropped.
// NOTE(review): the `targetObject` parameter is ignored; the method reads
// this.targetObject (set by attach()) instead — confirm callers.
PropertyPageEditor.prototype.invalidateData = function (targetObject) {
    var definedGroups = this.targetObject.getPropertyGroups();
    var strippedGroups = [];
    for (var i in definedGroups) {
        var group = definedGroups[i];
        var properties = [];
        var allSmall = true;
        for (var j in group.properties) {
            var property = group.properties[j];
            var editor = TypeEditorRegistry.getTypeEditor(property.type);
            if (editor) {
                properties.push(property);
                // NOTE(review): SMALL_EDITOR_TYPES holds type-name strings;
                // this comparison only works if getTypeEditor() returns such
                // a string — verify against TypeEditorRegistry.
                if (PropertyPageEditor.SMALL_EDITOR_TYPES.indexOf(editor) < 0) {
                    allSmall = false;
                }
            }
        }
        if (properties.length > 0) {
            var N = allSmall ? properties.length : 3; // N editors/tab
            for (var k = 0; k < properties.length; k+=N) {
                var strippedGroup = new PropertyGroup();
                strippedGroup.name = group.name;
                for (var l = k; l < k + N; l++) {
                    if (l < properties.length) {
                        strippedGroup.properties.push(properties[l]);
                    }
                }
                strippedGroups.push(strippedGroup);
            }
        }
    }
    this.groups = strippedGroups;
    this.properties = this.targetObject.getProperties();
};
// Recompute the property model and ask the dialog to rebuild its UI.
PropertyPageEditor.prototype.invalidate = function () {
    if (this.propertyWindow) {
        this.invalidateData();
        this.propertyWindow.setup();
    }
};
// Drop the current target and clear the dialog. (The name is a historic
// misspelling of "detach"; attach() depends on it, so it is kept.)
PropertyPageEditor.prototype.dettach = function () {
    try { this.targetObject = null; } catch (e) {}
    if (this.propertyWindow) {
        this.propertyWindow.clean();
    }
};
//@begin interface to PropertyDialog.js
// Read a named property from the attached object.
PropertyPageEditor.prototype.getPropertyValue = function (name) {
    return this.targetObject.getProperty(name);
};
// Write a named property and refresh the object's handles if it has any.
PropertyPageEditor.prototype.setPropertyValue = function (name, value) {
    this.targetObject.setProperty(name, value);
    if (this.targetObject.updateHandle) this.targetObject.updateHandle();
};
// Name of the attached object, used as the dialog title.
PropertyPageEditor.prototype.getTargetObjectName = function () {
    return this.targetObject.getName();
};
// This editor always uses the full dialog, never the quick/inline mode.
PropertyPageEditor.prototype.usingQuickMode = function () {
    return false;
};
// Register with the Pencil framework so the editor is installed per-canvas.
Pencil.registerEditor(PropertyPageEditor);
|
"use strict";
/* tslint:disable:no-unused-expression */
Object.defineProperty(exports, "__esModule", { value: true });
const chai_1 = require("chai");
require("mocha");
const util_test_1 = require("../util.test");
describe('lexer/Lexer', () => {
    describe('peek', () => {
        it('should return eof token when provider is empty', () => {
            const lexer = util_test_1.createLexerFromText('');
            chai_1.expect(lexer.peek().type).eq('eof');
        });
        // peek() must be idempotent: repeated calls return the same token
        // object without consuming input.
        it('should return next token without moving', () => {
            const lexer = util_test_1.createLexerFromText('Hello World');
            chai_1.expect(lexer.peek() === lexer.peek()).true;
        });
    });
describe('next', () => {
it('should return eof token when provider is empty', () => {
const lexer = util_test_1.createLexerFromText('');
for (let i = 0; i < 100; ++i) {
chai_1.expect(lexer.peek().type).eq('eof');
}
});
it('should move thru tokens', () => {
const lexer = util_test_1.createLexerFromText(`
Hello World
@section('a', 'b')
`.trim());
let tok;
tok = lexer.next();
chai_1.expect(tok.type).eq('text');
if (tok.type === 'text') {
chai_1.expect(tok.value).eq('Hello World\n');
}
tok = lexer.next();
chai_1.expect(tok.type).eq('function');
if (tok.type === 'function') {
chai_1.expect(tok.name).eq('section');
chai_1.expect(tok.args).deep.eq([`'a'`, `'b'`]);
}
});
it('should return first token even if peek was called before', () => {
const lexer = util_test_1.createLexerFromText(`
@for(let i = 0; i < 10; ++i)
@endfor
`.trim());
chai_1.expect(lexer.peek()).deep.eq(lexer.next());
});
});
    describe('samples', () => {
        // End-to-end lexing samples: each case feeds a whole template through
        // lexAssert and pins the exact token stream the lexer must produce.
        it('sample-1 (complex input)', () => {
            util_test_1.lexAssert(`
{{-- this example is taken from https://laravel.com/docs/5.8/blade#template-inheritance --}}
<!-- Stored in resources/views/layouts/app.blade.php -->
<html>
<head>
<title>App Name - @yield('title')</title>
</head>
<body>
@section('sidebar')
This is the master sidebar.
@show
<div class="container">
@yield('content')
</div>
</body>
</html>
`.trim(), [
                {
                    type: 'comment',
                    value: ' this example is taken from https://laravel.com/docs/5.8/blade#template-inheritance ',
                },
                {
                    type: 'text',
                    value: '\n\n<!-- Stored in resources/views/layouts/app.blade.php -->' +
                        '\n\n<html>\n <head>\n <title>App Name - ',
                },
                {
                    type: 'function',
                    name: 'yield',
                    args: [
                        `'title'`,
                    ],
                },
                {
                    type: 'text',
                    value: '</title>\n </head>\n <body>\n ',
                },
                {
                    type: 'function',
                    name: 'section',
                    args: [
                        `'sidebar'`,
                    ],
                },
                {
                    type: 'text',
                    value: '\n This is the master sidebar.\n ',
                },
                {
                    type: 'function',
                    name: 'show',
                },
                {
                    type: 'text',
                    value: '\n\n <div class="container">\n ',
                },
                {
                    type: 'function',
                    name: 'yield',
                    args: [
                        `'content'`,
                    ],
                },
                {
                    type: 'text',
                    value: '\n </div>\n </body>\n</html>',
                },
            ]);
        });
        // Argument scanning must balance braces/brackets and respect quotes.
        it('sample-2 (function arguments with nested braces and quotes)', () => {
            util_test_1.lexAssert(`
@include('view.name', {
'foo': ['hello )))', '(( world'],
'bar': 5 * (3 + 4 * (5 - 2)),
})
`.trim(), [
                {
                    type: 'function',
                    name: 'include',
                    args: [
                        `'view.name'`,
                        `{
'foo': ['hello )))', '(( world'],
'bar': 5 * (3 + 4 * (5 - 2)),
}`,
                    ],
                },
            ]);
        });
        // {{ }} produces escaped data tokens, {!! !!} unescaped ones.
        it('sample-3 (\'{{ }}\' and \'{!! !!}\')', () => {
            util_test_1.lexAssert(`
<div>
Username: {{ user.name }}
Status: {{ user.status }}
Badge: {!! user.badge !!}
</div>
`.trim(), [
                {
                    type: 'text',
                    value: '<div>\n Username: ',
                },
                {
                    type: 'data',
                    escaped: true,
                    value: 'user.name',
                },
                {
                    type: 'text',
                    value: '\n Status: ',
                },
                {
                    type: 'data',
                    escaped: true,
                    value: 'user.status',
                },
                {
                    type: 'text',
                    value: '\n Badge: ',
                },
                {
                    type: 'data',
                    escaped: false,
                    value: 'user.badge',
                },
                {
                    type: 'text',
                    value: '\n</div>',
                },
            ]);
        });
        // A leading '@' escapes the mustache syntax into literal text.
        it('sample-4 (escape with \'@\')', () => {
            util_test_1.lexAssert(`
<div>
Username: @{{ user.name }}
Status: @{{ user.status }}
Badge: @{!! user.badge !!}
</div>
`.trim(), [
                {
                    type: 'text',
                    value: '<div>\n Username: {{ user.name }}\n Status: {{ user.status }}\n Badge: {!! user.badge !!}\n</div>',
                },
            ]);
        });
        // rawFunctions keep their content untokenized ('raw-function' tokens).
        it('sample-5 (verbatim and js)', () => {
            util_test_1.lexAssert(`
@verbatim
Hello World
@endverbatim
@js
for (let i = 0; i < 10; ++i) {
print(i);
}
@endjs
@markdown('default')
Profile info:
- Name: Bill
- Number: *+1234567890*
- Email: <EMAIL>
@endmarkdown
`.trim(), [
                {
                    type: 'text',
                    value: '\nHello World\n',
                },
                {
                    type: 'text',
                    value: '\n\n',
                },
                {
                    type: 'raw-function',
                    name: 'js',
                    content: '\nfor (let i = 0; i < 10; ++i) {\n print(i);\n}\n',
                },
                {
                    type: 'text',
                    value: '\n\n',
                },
                {
                    type: 'raw-function',
                    name: 'markdown',
                    args: [`'default'`],
                    content: '\n\nProfile info:\n- Name: Bill\n- Number: *+1234567890*\n- Email: <EMAIL>\n\n',
                },
            ], {
                rawFunctions: ['verbatim', 'js', 'markdown'],
            });
        });
        // '|' applies filters to data tokens; ':' introduces filter arguments.
        it('sample-6 (filters)', () => {
            util_test_1.lexAssert(`
{{ 'test' | ucfirst }}
{{ 'hello' | ucfirst | uclast }}
{{ ' HeLlO ' | trim }}
{{ name | ucfirst | substr:0,1 }}
{!! time | format-time:'Y-m-d H:i:s' !!}
`.trim(), [
                {
                    type: 'data',
                    escaped: true,
                    value: '\'test\'',
                    filters: [
                        { name: 'ucfirst', args: [] },
                    ],
                },
                {
                    type: 'text',
                    value: '\n',
                },
                {
                    type: 'data',
                    escaped: true,
                    value: '\'hello\'',
                    filters: [
                        { name: 'ucfirst', args: [] },
                        { name: 'uclast', args: [] },
                    ],
                },
                {
                    type: 'text',
                    value: '\n',
                },
                {
                    type: 'data',
                    escaped: true,
                    value: '\' HeLlO \'',
                    filters: [
                        { name: 'trim', args: [] },
                    ],
                },
                {
                    type: 'text',
                    value: '\n',
                },
                {
                    type: 'data',
                    escaped: true,
                    value: 'name',
                    filters: [
                        { name: 'ucfirst', args: [] },
                        { name: 'substr', args: ['0', '1'] },
                    ],
                },
                {
                    type: 'text',
                    value: '\n',
                },
                {
                    type: 'data',
                    escaped: false,
                    value: 'time',
                    filters: [
                        { name: 'format-time', args: ['\'Y-m-d H:i:s\''] },
                    ],
                },
            ]);
        });
    });
});
//# sourceMappingURL=Lexer.test.js.map |
#!/usr/bin/env python3
import json
import os
import random
import secrets
import string
import subprocess
import sys
import tempfile
import textwrap
import time
from distutils.util import strtobool
from itertools import count
from pathlib import Path
from urllib.error import URLError
from urllib.parse import ParseResult, urlparse
from urllib.request import urlopen

import click
# Minimum recommended memory for Kubeflow, in GiB (checked in kubeflow()).
MIN_MEM_GB = 14
# URLs probed from the host and from inside the cluster before deploying.
CONNECTIVITY_CHECKS = [
    'https://api.jujucharms.com/charmstore/v5/istio-pilot-5/icon.svg',
]
def kubectl_exists(resource):
    """Return True when `resource` is present in the kubeflow namespace."""
    try:
        run('microk8s-kubectl.wrapper', 'get', '-nkubeflow', resource, die=False)
    except subprocess.CalledProcessError:
        return False
    return True
def retry_run(*args, die=True, debug=False, stdout=True, times=3):
    """Call run(*args) up to `times` times, retrying on CalledProcessError.

    Only the final attempt is allowed to terminate the process (honouring
    `die`); earlier failures are reported and retried.
    """
    remaining = times
    while True:
        remaining -= 1
        final_attempt = remaining == 0
        try:
            return run(*args, die=(final_attempt and die), debug=debug, stdout=stdout)
        except subprocess.CalledProcessError as err:
            if final_attempt:
                raise
            if debug and stdout:
                print(err)
            print("Retrying.")
def run(*args, die=True, debug=False, stdout=True):
    """Run a command, returning its decoded stdout.

    The snap directory ($SNAP) is appended to PATH so the microk8s wrapper
    scripts resolve. On failure: with die=True, print diagnostics and exit
    the process; otherwise re-raise CalledProcessError to the caller.
    """
    # Add wrappers to $PATH without mutating the parent environment.
    env = dict(os.environ)
    env["PATH"] += ":%s" % os.environ["SNAP"]
    if debug and stdout:
        print("\033[;1;32m+ %s\033[;0;0m" % " ".join(args))
    proc = subprocess.run(
        args,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=env,
    )
    try:
        proc.check_returncode()
    except subprocess.CalledProcessError as err:
        if not die:
            raise
        print("Kubeflow could not be enabled:")
        if proc.stderr:
            print(proc.stderr.decode("utf-8"))
        print(err)
        sys.exit(1)
    decoded = proc.stdout.decode("utf-8")
    if debug and stdout:
        print(decoded)
        if proc.stderr:
            print(proc.stderr.decode("utf-8"))
    return decoded
def get_random_pass():
    """Return a 30-character uppercase/digit password for the dashboard.

    Uses the `secrets` CSPRNG: the old implementation drew from `random`,
    which is not suitable for security-sensitive values.
    """
    alphabet = string.ascii_uppercase + string.digits
    return "".join(secrets.choice(alphabet) for _ in range(30))
def juju(*args, **kwargs):
    """Invoke the bundled juju wrapper, honouring $KUBEFLOW_DEBUG."""
    verbose = strtobool(os.environ.get("KUBEFLOW_DEBUG") or "false")
    if verbose:
        return run("microk8s-juju.wrapper", "--debug", *args, debug=True, **kwargs)
    return run("microk8s-juju.wrapper", *args, **kwargs)
def check_connectivity():
    """Checks connectivity to URLs from within and without the cluster.

    For each URL in `CONNECTIVITY_CHECKS`, checks that the URL is reachable from
    the host, then spins up a pod and checks from within MicroK8s. Exits the
    process with status 1 on the first failure.
    """
    for url in CONNECTIVITY_CHECKS:
        host = urlparse(url).netloc
        # Host-side check first: fail fast before touching the cluster.
        try:
            response = urlopen(url)
        except URLError:
            print("Couldn't contact %s" % host)
            print("Please check your network connectivity before enabling Kubeflow.")
            sys.exit(1)
        if response.status != 200:
            print("URL connectivity check failed with response %s" % response.status)
            print("Please check your network connectivity before enabling Kubeflow.")
            sys.exit(1)
        # In-cluster check: run a throwaway pod (--rm) that curls the same URL.
        try:
            run(
                'microk8s-kubectl.wrapper',
                'run',
                '--rm',
                '-i',
                '--restart=Never',
                '--image=ubuntu',
                'connectivity-check',
                '--',
                'bash',
                '-c',
                'apt update && apt install -y curl && curl %s' % url,
                die=False,
                stdout=False,
            )
        except subprocess.CalledProcessError:
            print("Couldn't contact %s from within the Kubernetes cluster" % host)
            print("Please check your network connectivity before enabling Kubeflow.")
            sys.exit(1)
def parse_hostname(hostname: str) -> ParseResult:
    """Normalize a user-supplied hostname into a ParseResult.

    Ensures a scheme (defaulting to http), exits when no hostname can be
    extracted, and strips path/params/query/fragment with a warning for
    each discarded component.
    """
    # urlparse only recognizes a netloc when it is preceded by '//'.
    if '//' in hostname:
        parsed = urlparse(hostname)
    else:
        parsed = urlparse('//' + hostname)
    if not parsed.scheme:
        parsed = parsed._replace(scheme='http')
    if not parsed.hostname:
        print("Manual hostname `%s` leaves hostname unspecified" % hostname)
        sys.exit(1)
    if not parsed.port:
        # Bug fix: the old expression `parsed.hostname or '' + (parsed.port or '')`
        # parenthesized as `hostname or ('' + ...)` and would have raised
        # TypeError on an int port. With no port, the netloc is just the host.
        parsed = parsed._replace(netloc=parsed.hostname)
    if parsed.path not in ('', '/'):
        print("WARNING: The path `%s` was set on the hostname, but was ignored." % parsed.path)
    if parsed.params:
        print(
            "WARNING: The params `%s` were set on the hostname, but were ignored." % parsed.params
        )
    if parsed.query:
        print("WARNING: The query `%s` was set on the hostname, but was ignored." % parsed.query)
    # Bug fix: this branch previously re-tested `parsed.params`, so fragments
    # were dropped silently while the params warning could print twice.
    if parsed.fragment:
        print(
            "WARNING: The fragment `%s` was set on the hostname, but was ignored." % parsed.fragment
        )
    return parsed._replace(path='', params='', query='', fragment='')
def get_hostname():
    """Gets the hostname that Kubeflow will respond to."""
    # See if we've set up metallb with a custom service; if so, expose the
    # load-balancer IP through the xip.io wildcard-DNS service.
    try:
        output = run(
            "microk8s-kubectl.wrapper",
            "get",
            "--namespace=kubeflow",
            "svc/istio-ingressgateway",
            "-ojson",
            stdout=False,
            die=False,
        )
        pub_ip = json.loads(output)["status"]["loadBalancer"]["ingress"][0]["ip"]
        return "%s.xip.io" % pub_ip
    except (KeyError, IndexError, TypeError, subprocess.CalledProcessError):
        # IndexError/TypeError cover an empty or missing `ingress` list, which
        # previously escaped this handler and crashed the enable script.
        print("WARNING: Unable to determine hostname, defaulting to localhost")
        return "localhost"
@click.command()
@click.option(
    '--bundle',
    default='cs:kubeflow-239',
    help='The Kubeflow bundle to deploy. Can be one of full, lite, edge, or a charm store URL.',
)
@click.option(
    '--channel',
    default='stable',
    type=click.Choice(['stable', 'candidate', 'beta', 'edge']),
    help='Which channel to deploy the bundle from. In most cases, this should be `stable`.',
)
@click.option(
    '--debug/--no-debug',
    default=False,
    help='If true, shows more verbose output when enabling Kubeflow.',
)
@click.option(
    '--hostname',
    help='If set, this hostname is used instead of a hostname generated by MetalLB.',
)
@click.option(
    '--ignore-min-mem/--no-ignore-min-mem',
    default=False,
    help='If set, overrides the minimum memory check.',
)
@click.option(
    '--no-proxy',
    help='Allows setting the juju-no-proxy configuration option.',
)
@click.password_option(
    envvar='KUBEFLOW_AUTH_PASSWORD',
    default=get_random_pass,
    help='The Kubeflow dashboard password.',
)
def kubeflow(bundle, channel, debug, hostname, ignore_min_mem, no_proxy, password):
    """Enable Kubeflow on MicroK8s.

    Bootstraps Juju onto the local MicroK8s cluster, enables the addons
    Kubeflow needs, deploys the requested bundle, waits for the pods to
    come up, and prints access credentials.
    """
    if os.geteuid() == 0:
        print("This command can't be run as root.")
        print("Try `microk8s enable kubeflow` instead.")
        sys.exit(1)

    # Juju state lives under $SNAP_DATA; a root group means a bad snap
    # refresh, so try to repair permissions before doing anything else.
    juju_path = Path(os.environ['SNAP_DATA']) / 'juju'
    if juju_path.stat().st_gid == 0:
        print("Found bad permissions on %s, fixing..." % juju_path)
        try:
            run('sudo', 'chgrp', '-R', 'microk8s', str(juju_path), die=False)
            run('sudo', 'chmod', '-R', '775', str(juju_path), die=False)
        except subprocess.CalledProcessError as err:
            print("Encountered error while attempting to fix permissions:")
            print(err)
            print("You can attempt to fix this yourself with:\n")
            print("sudo chgrp -R microk8s %s" % juju_path)
            print("sudo chmod -R 775 %s\n" % juju_path)
            sys.exit(1)

    with open("/proc/meminfo") as f:
        memtotal_lines = [line for line in f.readlines() if "MemTotal" in line]
    try:
        total_mem = int(memtotal_lines[0].split(" ")[-2])
    except IndexError:
        # Bug fix: total_mem was left unbound here, so the comparison below
        # crashed with NameError. Treat unknown memory as "enough" and warn.
        print("Couldn't determine total memory.")
        print("Kubeflow recommends at least %s GB of memory." % MIN_MEM_GB)
        total_mem = MIN_MEM_GB * 1024 * 1024
    if total_mem < MIN_MEM_GB * 1024 * 1024 and not ignore_min_mem:
        print("Kubeflow recommends at least %s GB of memory." % MIN_MEM_GB)
        print("Use `--ignore-min-mem` if you'd like to proceed anyways.")
        sys.exit(1)

    # A `uk8s` controller already existing means Kubeflow was enabled before.
    try:
        juju("show-controller", "uk8s", die=False, stdout=False)
    except subprocess.CalledProcessError:
        pass
    else:
        print("Kubeflow has already been enabled.")
        sys.exit(1)

    # Allow specifying the bundle as one of the main types of kubeflow bundles
    # that we create in the charm store, namely full, lite, or edge. The user
    # shouldn't have to specify a version for those bundles. However, allow the
    # user to specify a full charm store URL if they'd like, such as
    # `cs:kubeflow-lite-123`.
    bundle_aliases = {
        'full': 'cs:kubeflow-239',
        'lite': 'cs:kubeflow-lite-26',
        'edge': 'cs:kubeflow-edge-23',
    }
    bundle = bundle_aliases.get(bundle, bundle)

    run("microk8s-status.wrapper", "--wait-ready", debug=debug)
    run(
        'microk8s-kubectl.wrapper',
        '-nkube-system',
        'rollout',
        'status',
        'deployment.apps/calico-kube-controllers',
        debug=debug,
    )

    # Enable the addons Kubeflow depends on; MetalLB gets a fixed IP range.
    for service in [
        "dns",
        "storage",
        "dashboard",
        "ingress",
        "metallb:10.64.140.43-10.64.140.49",
    ]:
        print("Enabling %s..." % service)
        run("microk8s-enable.wrapper", service, debug=debug)

    run("microk8s-status.wrapper", "--wait-ready", debug=debug)
    run(
        'microk8s-kubectl.wrapper',
        '-nkube-system',
        'rollout',
        'status',
        'ds/calico-node',
        debug=debug,
    )

    print("Waiting for DNS and storage plugins to finish setting up")
    run(
        "microk8s-kubectl.wrapper",
        "wait",
        "--for=condition=available",
        "-nkube-system",
        "deployment/coredns",
        "deployment/hostpath-provisioner",
        "--timeout=10m",
        debug=debug,
    )
    print("DNS and storage setup complete. Checking connectivity...")
    check_connectivity()

    print("Bootstrapping...")
    if no_proxy is not None:
        juju("bootstrap", "microk8s", "uk8s", "--config=juju-no-proxy=%s" % no_proxy)
        juju("add-model", "kubeflow", "microk8s")
        juju("model-config", "-m", "kubeflow", "juju-no-proxy=%s" % no_proxy)
    else:
        juju("bootstrap", "microk8s", "uk8s")
        juju("add-model", "kubeflow", "microk8s")
    print("Bootstrap complete.")

    print("Successfully bootstrapped, deploying...")
    juju("deploy", bundle, "--channel", channel)
    print("Kubeflow deployed.")

    # Poll until no application reports a pending status message.
    print("Waiting for operator pods to become ready.")
    wait_seconds = 15
    for i in count():
        status = json.loads(juju("status", "-m", "uk8s:kubeflow", "--format=json", stdout=False))
        unready_apps = [
            name
            for name, app in status["applications"].items()
            if "message" in app["application-status"]
        ]
        if unready_apps:
            print(
                "Waited %ss for operator pods to come up, %s remaining."
                % (wait_seconds * i, len(unready_apps))
            )
            time.sleep(wait_seconds)
        else:
            break
    print("Operator pods ready.")
    print("Waiting for service pods to become ready.")

    if kubectl_exists('service/pipelines-api'):
        # Expose the pipelines API under the `ml-pipeline` name the Kubeflow
        # SDK expects, mirroring the juju-managed service's selector/ports.
        with tempfile.NamedTemporaryFile(mode='w+') as f:
            json.dump(
                {
                    'apiVersion': 'v1',
                    'kind': 'Service',
                    'metadata': {'labels': {'juju-app': 'pipelines-api'}, 'name': 'ml-pipeline'},
                    'spec': {
                        'ports': [
                            {'name': 'grpc', 'port': 8887, 'protocol': 'TCP', 'targetPort': 8887},
                            {'name': 'http', 'port': 8888, 'protocol': 'TCP', 'targetPort': 8888},
                        ],
                        'selector': {'juju-app': 'pipelines-api'},
                        'type': 'ClusterIP',
                    },
                },
                f,
            )
            f.flush()
            run('microk8s-kubectl.wrapper', 'apply', '-f', f.name)

    hostname = parse_hostname(hostname or get_hostname())
    if kubectl_exists('service/dex-auth'):
        juju("config", "dex-auth", "public-url=%s" % hostname.geturl())
    if kubectl_exists('service/oidc-gatekeeper'):
        juju("config", "oidc-gatekeeper", "public-url=%s" % hostname.geturl())

    retry_run(
        "microk8s-kubectl.wrapper",
        "wait",
        "--namespace=kubeflow",
        "--for=condition=Ready",
        "pod",
        "--timeout=30s",
        "--all",
        debug=debug,
        times=100,
    )

    print("Congratulations, Kubeflow is now available.")
    if kubectl_exists('service/istio-ingressgateway'):
        print(
            textwrap.dedent(
                """
                The dashboard is available at %s
                Username: admin
                Password: %s
                To see these values again, run:
                    microk8s juju config dex-auth static-username
                    microk8s juju config dex-auth static-password
                """
                % (hostname.geturl(), password)
            )
        )
    else:
        print("\nYou have deployed the edge bundle.")
        print("For more information on how to use Kubeflow, see https://www.kubeflow.org/docs/")
    print(
        textwrap.dedent(
            """
            To tear down Kubeflow and associated infrastructure, run:
                microk8s disable kubeflow
            """
        )
    )
if __name__ == "__main__":
    # Click handles argument parsing; env vars use the KUBEFLOW_ prefix.
    kubeflow(prog_name='microk8s enable kubeflow', auto_envvar_prefix='KUBEFLOW')
|
import wx
class DrawingApp(wx.Frame):
    """A minimal frame that draws filled rectangles in a selectable colour."""

    def __init__(self, parent, title):
        super(DrawingApp, self).__init__(parent, title=title, size=(400, 300))
        self.color = wx.Colour(0, 0, 0)  # current drawing colour (black)
        self.Bind(wx.EVT_PAINT, self.OnPaint)

    def _apply_color(self, dc):
        # Configure both the outline pen and the fill brush with the
        # current colour on the given device context.
        dc.SetPen(wx.Pen(wx.Colour(self.color)))
        dc.SetBrush(wx.Brush(wx.Colour(self.color)))

    def OnPaint(self, event):
        dc = wx.PaintDC(self)
        width, height = self.GetClientSize()
        self._apply_color(dc)

    def set_color(self, r, g, b):
        """Set the current drawing colour from RGB components."""
        self.color = wx.Colour(r, g, b)

    def draw_rectangle(self, x, y, width, height):
        """Draw a filled rectangle immediately on the client area."""
        dc = wx.ClientDC(self)
        self._apply_color(dc)
        dc.DrawRectangle(x, y, width, height)

    def clear_canvas(self):
        """Erase everything drawn on the client area."""
        dc = wx.ClientDC(self)
        dc.Clear()
#!/bin/sh
# -----------------------------------------------------------------------------
# Start/Stop Script for the CATALINA Server
#
# Environment Variable Prequisites
#
# CATALINA_HOME May point at your Catalina "build" directory.
#
# CATALINA_BASE (Optional) Base directory for resolving dynamic portions
# of a Catalina installation. If not present, resolves to
# the same directory that CATALINA_HOME points to.
#
# CATALINA_OPTS (Optional) Java runtime options used when the "start",
# "stop", or "run" command is executed.
#
# CATALINA_TMPDIR (Optional) Directory path location of temporary directory
# the JVM should use (java.io.tmpdir). Defaults to
# $CATALINA_BASE/temp.
#
# JAVA_HOME Must point at your Java Development Kit installation.
# Required to run the with the "debug" or "javac" argument.
#
# JRE_HOME Must point at your Java Development Kit installation.
# Defaults to JAVA_HOME if empty.
#
# JAVA_OPTS (Optional) Java runtime options used when the "start",
# "stop", or "run" command is executed.
#
# JPDA_TRANSPORT (Optional) JPDA transport used when the "jpda start"
# command is executed. The default is "dt_socket".
#
# JPDA_ADDRESS (Optional) Java runtime options used when the "jpda start"
# command is executed. The default is 8000.
#
# JPDA_SUSPEND (Optional) Java runtime options used when the "jpda start"
# command is executed. Specifies whether JVM should suspend
# execution immediately after startup. Default is "n".
#
# JPDA_OPTS (Optional) Java runtime options used when the "jpda start"
# command is executed. If used, JPDA_TRANSPORT, JPDA_ADDRESS,
# and JPDA_SUSPEND are ignored. Thus, all required jpda
# options MUST be specified. The default is:
#
# -Xdebug -Xrunjdwp:transport=$JPDA_TRANSPORT,
# address=$JPDA_ADDRESS,server=y,suspend=$JPDA_SUSPEND
#
# JSSE_HOME (Optional) May point at your Java Secure Sockets Extension
# (JSSE) installation, whose JAR files will be added to the
# system class path used to start Tomcat.
#
# CATALINA_PID (Optional) Path of the file which should contains the pid
# of catalina startup java process, when start (fork) is used
#
# $Id: catalina.sh 522797 2007-03-27 07:10:29Z fhanik $
# -----------------------------------------------------------------------------
# OS specific support. $var _must_ be set to either true or false.
cygwin=false
os400=false
darwin=false
case "`uname`" in
CYGWIN*) cygwin=true;;
OS400*) os400=true;;
Darwin*) darwin=true;;
esac

# resolve links - $0 may be a softlink; follow the chain so relative
# paths below resolve against the real installation directory.
PRG="$0"
while [ -h "$PRG" ]; do
  ls=`ls -ld "$PRG"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    PRG="$link"
  else
    PRG=`dirname "$PRG"`/"$link"
  fi
done

# Get standard environment variables
PRGDIR=`dirname "$PRG"`

# Only set CATALINA_HOME if not already set
[ -z "$CATALINA_HOME" ] && CATALINA_HOME=`cd "$PRGDIR/.." ; pwd`

# Per-installation hook: lets administrators override environment variables.
if [ -r "$CATALINA_HOME"/bin/setenv.sh ]; then
  . "$CATALINA_HOME"/bin/setenv.sh
fi

# For Cygwin, ensure paths are in UNIX format before anything is touched
if $cygwin; then
  [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
  [ -n "$JRE_HOME" ] && JRE_HOME=`cygpath --unix "$JRE_HOME"`
  [ -n "$CATALINA_HOME" ] && CATALINA_HOME=`cygpath --unix "$CATALINA_HOME"`
  [ -n "$CATALINA_BASE" ] && CATALINA_BASE=`cygpath --unix "$CATALINA_BASE"`
  [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
  [ -n "$JSSE_HOME" ] && JSSE_HOME=`cygpath --absolute --unix "$JSSE_HOME"`
fi
# For OS400
if $os400; then
# Set job priority to standard for interactive (interactive - 6) by using
# the interactive priority - 6, the helper threads that respond to requests
# will be running at the same priority as interactive jobs.
COMMAND='chgjob job('$JOBNAME') runpty(6)'
system $COMMAND
# Enable multi threading
export QIBM_MULTI_THREADED=Y
fi
# Get standard Java environment variables
if $os400; then
  # -r will Only work on the os400 if the files are:
  # 1. owned by the user
  # 2. owned by the PRIMARY group of the user
  # this will not work if the user belongs in secondary groups
  BASEDIR="$CATALINA_HOME"
  . "$CATALINA_HOME"/bin/setclasspath.sh
else
  if [ -r "$CATALINA_HOME"/bin/setclasspath.sh ]; then
    BASEDIR="$CATALINA_HOME"
    . "$CATALINA_HOME"/bin/setclasspath.sh
  else
    echo "Cannot find $CATALINA_HOME/bin/setclasspath.sh"
    echo "This file is needed to run this program"
    exit 1
  fi
fi
# Add on extra jar files to CLASSPATH
if [ -n "$JSSE_HOME" ]; then
  CLASSPATH="$CLASSPATH":"$JSSE_HOME"/lib/jcert.jar:"$JSSE_HOME"/lib/jnet.jar:"$JSSE_HOME"/lib/jsse.jar
fi
CLASSPATH="$CLASSPATH":"$CATALINA_HOME"/bin/bootstrap.jar:"$CATALINA_HOME"/bin/commons-logging-api.jar
# CATALINA_BASE defaults to the installation directory (single-instance setup)
if [ -z "$CATALINA_BASE" ] ; then
  CATALINA_BASE="$CATALINA_HOME"
fi
if [ -z "$CATALINA_TMPDIR" ] ; then
  # Define the java.io.tmpdir to use for Catalina
  CATALINA_TMPDIR="$CATALINA_BASE"/temp
fi
# Bugzilla 37848: When no TTY is available, don't output to console
have_tty=0
if [ "`tty`" != "not a tty" ]; then
  have_tty=1
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
  JAVA_HOME=`cygpath --absolute --windows "$JAVA_HOME"`
  JRE_HOME=`cygpath --absolute --windows "$JRE_HOME"`
  CATALINA_HOME=`cygpath --absolute --windows "$CATALINA_HOME"`
  CATALINA_BASE=`cygpath --absolute --windows "$CATALINA_BASE"`
  CATALINA_TMPDIR=`cygpath --absolute --windows "$CATALINA_TMPDIR"`
  CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
  [ -n "$JSSE_HOME" ] && JSSE_HOME=`cygpath --absolute --windows "$JSSE_HOME"`
  JAVA_ENDORSED_DIRS=`cygpath --path --windows "$JAVA_ENDORSED_DIRS"`
fi
# Set juli LogManager if it is present
if [ -r "$CATALINA_BASE"/conf/logging.properties ]; then
  # NOTE(review): the quoting below relies on the shell concatenating
  # adjacent quoted and unquoted segments; it works, but reads oddly.
  JAVA_OPTS="$JAVA_OPTS "-Djava.util.logging.manager=org.apache.juli.ClassLoaderLogManager" "-Djava.util.logging.config.file="$CATALINA_BASE/conf/logging.properties"
fi
# ----- Execute The Requested Command -----------------------------------------
# Bugzilla 37848: only output this if we have a TTY
if [ $have_tty -eq 1 ]; then
  echo "Using CATALINA_BASE: $CATALINA_BASE"
  echo "Using CATALINA_HOME: $CATALINA_HOME"
  echo "Using CATALINA_TMPDIR: $CATALINA_TMPDIR"
  if [ -n "$CATALINA_OPTS" ]; then
    echo "Using CATALINA_OPTS: $CATALINA_OPTS"
  fi
  # debug/javac need a full JDK (JAVA_HOME); everything else runs on a JRE
  if [ "$1" = "debug" -o "$1" = "javac" ] ; then
    echo "Using JAVA_HOME: $JAVA_HOME"
  else
    echo "Using JRE_HOME: $JRE_HOME"
  fi
  if [ -n "$JAVA_OPTS" ]; then
    echo "Using JAVA_OPTS: $JAVA_OPTS"
  fi
  if [ -n "$JAVA_ENDORSED_DIRS" ]; then
    echo "Using JAVA_ENDORSED_DIRS: $JAVA_ENDORSED_DIRS"
  fi
fi
# "jpda" prefix: enable the remote-debugging agent, then fall through to
# whatever command follows (e.g. "jpda start").
if [ "$1" = "jpda" ] ; then
  if [ -z "$JPDA_TRANSPORT" ]; then
    JPDA_TRANSPORT="dt_socket"
  fi
  if [ -z "$JPDA_ADDRESS" ]; then
    JPDA_ADDRESS="8000"
  fi
  if [ -z "$JPDA_SUSPEND" ]; then
    JPDA_SUSPEND="n"
  fi
  if [ -z "$JPDA_OPTS" ]; then
    JPDA_OPTS="-Xdebug -Xrunjdwp:transport=$JPDA_TRANSPORT,address=$JPDA_ADDRESS,server=y,suspend=$JPDA_SUSPEND"
  fi
  CATALINA_OPTS="$CATALINA_OPTS $JPDA_OPTS"
  shift
fi
# "debug": run Bootstrap under jdb in the foreground (JDK required).
if [ "$1" = "debug" ] ; then
  if $os400; then
    echo "Debug command not available on OS400"
    exit 1
  else
    shift
    if [ "$1" = "-security" ] ; then
      echo "Using Security Manager"
      shift
      exec "$_RUNJDB" $JAVA_OPTS $CATALINA_OPTS \
        -Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" -classpath "$CLASSPATH" \
        -sourcepath "$CATALINA_HOME"/../../java \
        -Djava.security.manager \
        -Djava.security.policy=="$CATALINA_BASE"/conf/catalina.policy \
        -Dcatalina.base="$CATALINA_BASE" \
        -Dcatalina.home="$CATALINA_HOME" \
        -Djava.io.tmpdir="$CATALINA_TMPDIR" \
        org.apache.catalina.startup.Bootstrap "$@" start
    else
      exec "$_RUNJDB" $JAVA_OPTS $CATALINA_OPTS \
        -Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" -classpath "$CLASSPATH" \
        -sourcepath "$CATALINA_HOME"/../../java \
        -Dcatalina.base="$CATALINA_BASE" \
        -Dcatalina.home="$CATALINA_HOME" \
        -Djava.io.tmpdir="$CATALINA_TMPDIR" \
        org.apache.catalina.startup.Bootstrap "$@" start
    fi
  fi
# "run": start in the current window, replacing this shell via exec.
elif [ "$1" = "run" ]; then
  shift
  if [ "$1" = "-security" ] ; then
    echo "Using Security Manager"
    shift
    exec "$_RUNJAVA" $JAVA_OPTS $CATALINA_OPTS \
      -Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" -classpath "$CLASSPATH" \
      -Djava.security.manager \
      -Djava.security.policy=="$CATALINA_BASE"/conf/catalina.policy \
      -Dcatalina.base="$CATALINA_BASE" \
      -Dcatalina.home="$CATALINA_HOME" \
      -Djava.io.tmpdir="$CATALINA_TMPDIR" \
      org.apache.catalina.startup.Bootstrap "$@" start
  else
    exec "$_RUNJAVA" $JAVA_OPTS $CATALINA_OPTS \
      -Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" -classpath "$CLASSPATH" \
      -Dcatalina.base="$CATALINA_BASE" \
      -Dcatalina.home="$CATALINA_HOME" \
      -Djava.io.tmpdir="$CATALINA_TMPDIR" \
      org.apache.catalina.startup.Bootstrap "$@" start
  fi
# "start": launch Catalina in the background, logging to catalina.out.
elif [ "$1" = "start" ] ; then
  shift
  touch "$CATALINA_BASE"/logs/catalina.out
  if [ "$1" = "-security" ] ; then
    echo "Using Security Manager"
    shift
    "$_RUNJAVA" $JAVA_OPTS $CATALINA_OPTS \
      -Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" -classpath "$CLASSPATH" \
      -Djava.security.manager \
      -Djava.security.policy=="$CATALINA_BASE"/conf/catalina.policy \
      -Dcatalina.base="$CATALINA_BASE" \
      -Dcatalina.home="$CATALINA_HOME" \
      -Djava.io.tmpdir="$CATALINA_TMPDIR" \
      org.apache.catalina.startup.Bootstrap "$@" start \
      >> "$CATALINA_BASE"/logs/catalina.out 2>&1 &
    # Record the background java PID. Quote $CATALINA_PID so a path
    # containing spaces does not break the redirection (the original
    # left it unquoted here and in the stop branch below).
    if [ ! -z "$CATALINA_PID" ]; then
      echo $! > "$CATALINA_PID"
    fi
  else
    "$_RUNJAVA" $JAVA_OPTS $CATALINA_OPTS \
      -Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" -classpath "$CLASSPATH" \
      -Dcatalina.base="$CATALINA_BASE" \
      -Dcatalina.home="$CATALINA_HOME" \
      -Djava.io.tmpdir="$CATALINA_TMPDIR" \
      org.apache.catalina.startup.Bootstrap "$@" start \
      >> "$CATALINA_BASE"/logs/catalina.out 2>&1 &
    if [ ! -z "$CATALINA_PID" ]; then
      echo $! > "$CATALINA_PID"
    fi
  fi
# "stop": ask Bootstrap to shut down; with -force also kill -9 the
# recorded PID afterwards.
elif [ "$1" = "stop" ] ; then
  shift
  FORCE=0
  if [ "$1" = "-force" ]; then
    shift
    FORCE=1
  fi
  "$_RUNJAVA" $JAVA_OPTS $CATALINA_OPTS \
    -Djava.endorsed.dirs="$JAVA_ENDORSED_DIRS" -classpath "$CLASSPATH" \
    -Dcatalina.base="$CATALINA_BASE" \
    -Dcatalina.home="$CATALINA_HOME" \
    -Djava.io.tmpdir="$CATALINA_TMPDIR" \
    org.apache.catalina.startup.Bootstrap "$@" stop
  if [ $FORCE -eq 1 ]; then
    if [ ! -z "$CATALINA_PID" ]; then
      echo "Killing: `cat "$CATALINA_PID"`"
      kill -9 `cat "$CATALINA_PID"`
    else
      echo "Kill failed: \$CATALINA_PID not set"
    fi
  fi
# "version": print Tomcat version info via ServerInfo and exit.
elif [ "$1" = "version" ] ; then
  "$_RUNJAVA" \
    -classpath "$CATALINA_HOME/lib/catalina.jar" \
    org.apache.catalina.util.ServerInfo
# Unknown command: print usage and fail.
else
  echo "Usage: catalina.sh ( commands ... )"
  echo "commands:"
  if $os400; then
    echo " debug Start Catalina in a debugger (not available on OS400)"
    echo " debug -security Debug Catalina with a security manager (not available on OS400)"
  else
    echo " debug Start Catalina in a debugger"
    echo " debug -security Debug Catalina with a security manager"
  fi
  echo " jpda start Start Catalina under JPDA debugger"
  echo " run Start Catalina in the current window"
  echo " run -security Start in the current window with security manager"
  echo " start Start Catalina in a separate window"
  echo " start -security Start in a separate window with security manager"
  echo " stop Stop Catalina"
  echo " stop -force Stop Catalina (followed by kill -KILL)"
  echo " version What version of tomcat are you running?"
  exit 1
fi
|
#!/bin/sh
#------------------------------------------------------------------------------
#
# Copyright (c) 2017 Dinesh Thirumurthy <dinesh.thirumurthy@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
#------------------------------------------------------------------------------
# Build and install an OpenBSD base system from /usr/src.
# Set SKIP=echo above to dry-run every command; MARK tags log lines.
PROGRAM="mkbase.sh"
SKIP=echo
SKIP=
MARK="##HAKR##"
P=$PROGRAM
ID=`id -u`
# Refuse to run unprivileged (the build chowns /usr/obj and runs sysmerge).
# NOTE: POSIX sh string comparison uses a single '='; the original '=='
# is a bashism that errors out under strict /bin/sh implementations.
if [ "${SKIP}" = "" ]; then
  if [ $ID -ne 0 ]; then
    echo "$P: error: run as root"
    exit 1
  fi
fi
SRCROOT=/usr/src
OBJROOT=/usr/obj
# Each step is announced before it runs; the && chain stops on first failure.
echo "\n$P: chown -R build $OBJROOT #$MARK" && ${SKIP} chown -R build ${OBJROOT} && \
echo "\n$P: chgrp -R wobj $OBJROOT #$MARK" && ${SKIP} chgrp -R wobj ${OBJROOT} && \
echo "\n$P: chmod 770 /usr/obj #$MARK" && ${SKIP} chmod 770 /usr/obj && \
echo "\n$P: cd $SRCROOT #$MARK" && ${SKIP} cd ${SRCROOT} && \
echo "\n$P: make obj #$MARK" && ${SKIP} make obj && \
echo "\n$P: make build #$MARK" && ${SKIP} make build && \
echo "\n$P: sysmerge #$MARK" && ${SKIP} sysmerge && \
echo "\n$P: cd /dev && ./MAKEDEV all #$MARK" && ${SKIP} cd /dev && ${SKIP} /dev/MAKEDEV all
exit 0
|
<filename>src/app/pages/queue-details/modal-send-link/modal-send-link.component.spec.ts<gh_stars>0
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { ModalSendLinkComponent } from './modal-send-link.component';
// Smoke test for ModalSendLinkComponent: compiles the component in a
// bare TestBed module and verifies it can be instantiated.
describe('ModalSendLinkComponent', () => {
  let component: ModalSendLinkComponent;
  let fixture: ComponentFixture<ModalSendLinkComponent>;

  // Compile templates once per spec (async because compilation is async).
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ModalSendLinkComponent ]
    })
    .compileComponents();
  }));

  // Fresh fixture + initial change detection before every test.
  beforeEach(() => {
    fixture = TestBed.createComponent(ModalSendLinkComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
package pass;
import java.util.Scanner;
/**
 * Interactive driver for the pass/fail checker: repeatedly reads a total
 * score and a final-exam score, printing whether the student passes.
 * A negative total score terminates the loop.
 */
public class ModularPassMain {
    public static void main(String[] args) {
        PassUtil.setScanner(new Scanner(System.in));
        while (true) {
            double total = PassUtil.inputDouble("Please input the total score");
            if (total < 0) {
                break; // sentinel: negative total ends the session
            }
            double finalExam = PassUtil.inputDouble("Please input the final score");
            String verdict = PassUtil.pass(total, finalExam) ? "true" : "false";
            System.out.println(String.format("isPass:%s", verdict));
        }
    }
}
|
package ru
import (
"testing"
"github.com/gammban/numtow/internal/triplet"
)
// Table of triplets with the declination class each should map to:
// trailing 1 -> singular, trailing 2..4 -> the 2-4 form, everything
// else (0, 5-9 and the 11 case) -> plural.
//nolint:gochecknoglobals
var testCaseDeclination = []struct {
	giveTriplet     triplet.Triplet
	wantDeclination Declination
}{
	{
		giveTriplet:     triplet.New(1, 2, 1),
		wantDeclination: DeclinationSingular,
	},
	{
		giveTriplet:     triplet.New(1, 2, 2),
		wantDeclination: Declination234,
	},
	{
		giveTriplet:     triplet.New(1, 2, 3),
		wantDeclination: Declination234,
	},
	{
		giveTriplet:     triplet.New(1, 2, 4),
		wantDeclination: Declination234,
	},
	{
		giveTriplet:     triplet.New(1, 2, 5),
		wantDeclination: DeclinationPlural,
	},
	{
		giveTriplet:     triplet.New(1, 2, 6),
		wantDeclination: DeclinationPlural,
	},
	{
		giveTriplet:     triplet.New(1, 2, 7),
		wantDeclination: DeclinationPlural,
	},
	{
		giveTriplet:     triplet.New(1, 2, 8),
		wantDeclination: DeclinationPlural,
	},
	{
		giveTriplet:     triplet.New(1, 2, 9),
		wantDeclination: DeclinationPlural,
	},
	{
		giveTriplet:     triplet.New(1, 2, 10),
		wantDeclination: DeclinationPlural,
	},
	{
		giveTriplet:     triplet.New(1, 2, 11),
		wantDeclination: DeclinationPlural,
	},
}
// TestGetTripletDeclination checks getTripletDeclination against the
// expectation table above.
func TestGetTripletDeclination(t *testing.T) {
	for _, tc := range testCaseDeclination {
		got := getTripletDeclination(tc.giveTriplet)
		if got != tc.wantDeclination {
			t.Errorf("expected %d, got %d", tc.wantDeclination, got)
		}
	}
}
|
#!/bin/bash
#SBATCH -J Act_lrelu001_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Launch the experiment; the positional arguments (activation, seed,
# optimizer, hyper-parameters) are consumed by PE-my.py.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py lrelu001 197 RMSprop 4 0.3319003641690469 0.0007438147022939031 he_normal 0.3
|
// producer-consumer
#include <iostream>
#include <thread>
#include <mutex>
#include <queue>
#include <condition_variable>
typedef int sample;

// Shared production counter: id of the next job to produce.
sample job = 0;
const int JOB_COUNT = 5;

// True while there are still jobs left to produce.
bool should_produce () { return job < JOB_COUNT; }

// Emit the next job id (side effect: advances the shared counter).
const sample produce() { std::cout << "produce " << job << std::endl; return job++; }

// Consume one sample (here: just log it).
void process(sample s) { std::cout << "process " << s << std::endl; }

// True when *s* is the final job.  The original ignored its parameter and
// tested the shared producer counter (job+1 >= JOB_COUNT), so once the
// producer finished, ANY sample looked "last" and the consumer could exit
// with jobs still queued.  Decide from the sample itself instead.
bool is_last(sample s) { return s + 1 >= JOB_COUNT; }
// Shared state between the two threads: the queue, its guarding mutex,
// and a condition variable signalling "data available".
std::mutex fifo_mutex;
std::queue<sample> data_fifo;
std::condition_variable data_rdy;

// Produces all jobs, pushing each into the FIFO under the lock and
// waking any waiting consumer.
void producer_thread ()
{
while ( should_produce() ) {
const sample s = produce();
std::lock_guard <std::mutex > lk (fifo_mutex);
data_fifo.push (s);
data_rdy.notify_all ();
}
}

// Pops samples as they arrive; the wait predicate guards against
// spurious wakeups.  Processing happens outside the lock.  Exits after
// processing the sample reported last by is_last().
void consumer_thread ()
{
while (true) {
std::unique_lock <std::mutex > lk ( fifo_mutex);
data_rdy.wait ( lk , []{return ! data_fifo.empty (); });
sample s = data_fifo.front ();
data_fifo.pop();
lk.unlock () ;
process(s);
if ( is_last (s)) break;
}
}
// Run producer and consumer concurrently and wait for both to finish.
int main(int argc, char**argv)
{
std::thread pt( producer_thread );
std::thread ct( consumer_thread );
pt.join();
ct.join();
}
|
def guess_number(min_num, max_num, secret_num=None):
    """Binary-search for ``secret_num`` within [min_num, max_num].

    The original relied on a module-level global ``secret_num``; that
    behaviour is kept as a backward-compatible fallback when the new
    optional parameter is omitted.

    Returns the number found; raises ValueError if the target is not
    inside the search range (the original recursed forever in that case).
    """
    if secret_num is None:
        # Backward compatibility: fall back to the module global.
        secret_num = globals()["secret_num"]
    if min_num > max_num:
        raise ValueError("secret_num is not within the search range")
    guess = (min_num + max_num) // 2
    if guess == secret_num:
        return guess
    if guess < secret_num:
        return guess_number(guess + 1, max_num, secret_num)
    return guess_number(min_num, guess - 1, secret_num)
import java.util.ArrayList;
/**
 * Prints the prime factorisation of a hard-coded integer using trial
 * division (2s first, then odd candidates up to sqrt of the remainder).
 */
public class PrimeFactorization {
    public static void main(String args[]) {
        int n = 24;
        // The loops below destroy n; remember the input so the summary
        // line reports the number we factored (the original printed the
        // mutated n, e.g. "The prime factors of 1 are: ...").
        final int original = n;
        // Stores the prime factors
        ArrayList<Integer> factors = new ArrayList<Integer>();
        // Pull out every factor of 2 first
        while (n % 2 == 0) {
            factors.add(2);
            n /= 2;
        }
        // n must be odd at this point, so only odd candidates remain
        for (int i = 3; i <= Math.sqrt(n); i += 2) {
            // While i divides n, add i to prime factor list and divide n
            while (n % i == 0) {
                factors.add(i);
                n /= i;
            }
        }
        // Whatever is left (> 2) is itself prime
        if (n > 2) {
            factors.add(n);
        }
        // Printing the prime factors
        System.out.print("The prime factors of " + original + " are: ");
        for (Integer factor: factors) {
            System.out.print(factor + " ");
        }
    }
}
import pandas as pd

# Three demo records; from_records builds the same DataFrame the
# original list-of-dicts constructor produced.
data = [
    {'Name': 'John', 'Age': 22},
    {'Name': 'Jane', 'Age': 23},
    {'Name': 'Jack', 'Age': 18},
]
df = pd.DataFrame.from_records(data)

# Print the DataFrame
print(df)
import pandas as pd
import matplotlib.pyplot as plt
def process_and_plot_data(filename: str) -> None:
    """Read a CSV file, square its 'Value' column, and plot the result.

    The CSV at *filename* must contain a 'Value' column.  Side effects:
    reads the file and opens a matplotlib window (plt.show() blocks in
    interactive backends).
    """
    # Step 1: Read the CSV file using pandas
    data = pd.read_csv(filename)
    # Step 2: Extract the 'Value' column and convert it to a NumPy array
    arr = data['Value'].to_numpy()
    # Step 3: Perform a mathematical operation by squaring each element
    squared_arr = arr ** 2
    # Step 4: Create a line plot to visualize the processed data
    plt.plot(squared_arr, label='Squared Values')
    plt.xlabel('Step')
    plt.ylabel('Squared Value')
    plt.title('Processed Data Visualization')
    plt.legend()
    plt.show()

# Example usage
process_and_plot_data('data.csv')
<reponame>marceloqueiros/backoffice-angular2<filename>backoffice_virtualmachine/Project/appAdmin/dashboard/criarpreco/criarpreco.component.ts<gh_stars>0
import { Component, OnInit } from '@angular/core';
import { CriarPrecoService} from './criarpreco.service';
import {Http,Response} from '@angular/http';
import 'rxjs/add/operator/map'
import { Headers, RequestOptions } from '@angular/http';
@Component({
  moduleId: module.id,
  selector: 'criarPreco',
  templateUrl: 'criarpreco.component.html',
  providers: [CriarPrecoService]
})
export class CriarPrecoComponent {
  constructor(private _CriarPrecoService: CriarPrecoService) {}

  // Build a price record from the form fields and persist it through the
  // service; on success, refresh the listing.
  criarPreco(preco, linguaOriginal, linguaTraducao) {
    let Preco = {preco: preco, linguaOriginal: linguaOriginal, linguaTraducao: linguaTraducao};
    console.log(Preco);
    this._CriarPrecoService.criarPreco(Preco).subscribe(
      data => {
        // refresh the list
        this.todos(event);
      }
    );
  }

  // Post-insert hook; currently only logs success.
  todos(event) {
    console.log("Inserido com sucesso!");
  }
}
// Lambda function code
exports.handler = async (event) => {
// Extract the request body from the event
const requestBody = JSON.parse(event.body);
// Process the request body and generate the response
const responseData = processRequest(requestBody);
// Prepare the HTTP response
const response = {
statusCode: 200,
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(responseData)
};
return response;
};
// Build the response payload for an already-parsed request body.
function processRequest(requestBody) {
    const message = "Received and processed the request successfully";
    return {
        message: message,
        requestData: requestBody
    };
}
/// <summary>
/// A named configuration option holding a value of type <typeparamref name="T"/>.
/// </summary>
public class Option<T>
{
    /// <summary>
    /// Key of the option
    /// </summary>
    public string Name { get; set; } = string.Empty;
    /// <summary>
    /// Value of the option.
    /// Note: default(T) is null for reference types, so callers must
    /// tolerate a null Value until one is assigned.
    /// </summary>
    public T Value { get; set; } = default(T);
}
#!/bin/bash
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script is invoked by run_tests.py to accommodate "test under docker"
# scenario. You should never need to call this script on your own.
# Fail fast and echo every command for CI logs.
set -ex

# Record the repo root (three levels up from this script), then return.
cd "$(dirname "$0")/../../.."
git_root=$(pwd)
cd -

# Ensure existence of ccache directory
mkdir -p /tmp/ccache

# Inputs
# DOCKERFILE_DIR - Directory in which Dockerfile file is located.
# DOCKER_RUN_SCRIPT - Script to run under docker (relative to grpc repo root)
# DOCKERHUB_ORGANIZATION - If set, pull a prebuilt image from given dockerhub org.

# Use image name based on Dockerfile location checksum
DOCKER_IMAGE_NAME=$(basename "$DOCKERFILE_DIR")_$(sha1sum "$DOCKERFILE_DIR/Dockerfile" | cut -f1 -d\ )

if [ "$DOCKERHUB_ORGANIZATION" != "" ]
then
  DOCKER_IMAGE_NAME=$DOCKERHUB_ORGANIZATION/$DOCKER_IMAGE_NAME
  time docker pull "$DOCKER_IMAGE_NAME"
else
  # Make sure docker image has been built. Should be instantaneous if so.
  docker build -t "$DOCKER_IMAGE_NAME" "$DOCKERFILE_DIR"
fi

# Choose random name for docker container
CONTAINER_NAME="run_tests_$(uuidgen)"

# Git root as seen by the docker instance
docker_instance_git_root=/var/local/jenkins/grpc

# Run tests inside docker
DOCKER_EXIT_CODE=0
# TODO: silence complaint about $TTY_FLAG expansion in some other way
# shellcheck disable=SC2086
docker run \
  -e "RUN_TESTS_COMMAND=$RUN_TESTS_COMMAND" \
  -e "config=$config" \
  -e "arch=$arch" \
  -e CCACHE_DIR=/tmp/ccache \
  -e THIS_IS_REALLY_NEEDED='see https://github.com/docker/docker/issues/14203 for why docker is awful' \
  -e HOST_GIT_ROOT="$git_root" \
  -e LOCAL_GIT_ROOT=$docker_instance_git_root \
  -e "BUILD_ID=$BUILD_ID" \
  -e "BUILD_URL=$BUILD_URL" \
  -e "JOB_BASE_NAME=$JOB_BASE_NAME" \
  -e "KOKORO_BUILD_ID=$KOKORO_BUILD_ID" \
  -e "KOKORO_BUILD_NUMBER=$KOKORO_BUILD_NUMBER" \
  -e "KOKORO_BUILD_URL=$KOKORO_BUILD_URL" \
  -e "KOKORO_JOB_NAME=$KOKORO_JOB_NAME" \
  -i \
  $TTY_FLAG \
  --sysctl net.ipv6.conf.all.disable_ipv6=0 \
  -v ~/.config/gcloud:/root/.config/gcloud \
  -v "$git_root:$docker_instance_git_root" \
  -v /tmp/ccache:/tmp/ccache \
  -v /tmp/npm-cache:/tmp/npm-cache \
  -w /var/local/git/grpc \
  --name="$CONTAINER_NAME" \
  "$DOCKER_IMAGE_NAME" \
  bash -l "/var/local/jenkins/grpc/$DOCKER_RUN_SCRIPT" || DOCKER_EXIT_CODE=$?

# use unique name for reports.zip to prevent clash between concurrent
# run_tests.py runs
TEMP_REPORTS_ZIP=$(mktemp)
# Best-effort copy of the test reports out of the container ('|| true'
# keeps cleanup running even if the run produced no reports).
docker cp "$CONTAINER_NAME:/var/local/git/grpc/reports.zip" "${TEMP_REPORTS_ZIP}" || true
unzip -o "${TEMP_REPORTS_ZIP}" -d "$git_root" || true
rm -f "${TEMP_REPORTS_ZIP}"

# remove the container, possibly killing it first
docker rm -f "$CONTAINER_NAME" || true

# Propagate the test run's exit status, not the cleanup's.
exit $DOCKER_EXIT_CODE
|
<gh_stars>10-100
module BrNfe
  module Product
    module Nfe
      module Transporte
        class Veiculo < BrNfe::ActiveModelBase
          # Vehicle license plate (NT2011/004)
          # Per the official documentation:
          # supply one of the formats XXX9999, XXX999, XX9999 or XXXX999.
          # Report the plate in the additional-information field when the
          # vehicle plate follows a different formation rule (NT 2011/005).
          # This gem additionally accepts the plate with a hyphen, e.g.
          # XXX-0000, XXX9999, XXX999, XX9999 or XXXX999:
          # the reader below keeps only letters and digits, always
          # upcasing the letters.
          #
          # <b>Type: </b> _String_
          # <b>Required: </b> _Yes_
          # <b>tag: </b> placa
          #
          attr_accessor :placa
          def placa
            "#{@placa}".gsub(/[^\d\w]/, '').upcase
          end

          # State (UF) abbreviation
          #
          # <b>Type: </b> _String_
          # <b>Required: </b> _Yes_
          # <b>Length: </b> _2_
          # <b>tag: </b> UF
          #
          attr_accessor :uf

          # National Cargo Carrier Registry (ANTT)
          #
          # <b>Type: </b> _String_
          # <b>Required: </b> _No_
          # <b>Length: </b> _max: 20_
          # <b>tag: </b> RNTC
          #
          attr_accessor :rntc

          validates :placa, :uf, presence: true
          validates :uf, inclusion: { in: BrNfe::Constants::SIGLAS_UF} , allow_blank: true
          validates :rntc, length: { maximum: 20 }
          validates :placa, length: { is: 7 }
        end
      end
    end
  end
end
#!/bin/bash
# Spin up ganache + mongod for the emblem test suite, then run it.
test_db="emblem_dev"
export EMBLEM_DATABASE="${test_db}"

# Tear everything down (ganache, test DB, mongod) even on abnormal exit.
function cleanup {
  kill -9 $ganache_pid
  mongo $test_db --eval "db.dropDatabase()"
  mongo admin --eval "db.shutdownServer()"
  sleep 1
}
trap cleanup EXIT

cd ../truffle
# Start ganache in the background and record its PID via $!.
# The original used command substitution (pid=`npm run ganache`), which
# blocks until ganache exits and captures its stdout -- never a PID.
npm run ganache &
ganache_pid=$!
echo "Started ganache, pid ${ganache_pid}"
mongod --fork --logpath ../mongod.log
npm run migrate
cd ../
yarn start
#!/usr/bin/bash
# Resolve the project root (parent of this script's directory).
# The original used a single '&' before pwd, which backgrounded the cd
# chain inside the substitution and made workdir the *current* directory
# rather than the script's parent.
workdir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && cd .. && pwd)
[[ ! -d ${workdir}/logs ]] && mkdir ${workdir}/logs
# Remove HuggingFace trainer checkpoint directories for one
# texttype/model pair after training finishes.
clean-checkpoints() {
  texttype=$1
  model=$2
  # NOTE: the original compared strings with '-eq' (integer comparison),
  # which errors on non-numeric operands -- so no branch ever matched and
  # checkpoints were never cleaned.  Use '=' for string equality.
  if [ "$model" = "bertje" ]; then
    rm -rf ${workdir}/experiments/${texttype}/GroNLP/bert-base-dutch-cased_256/checkpoint*
  elif [ "$model" = "mbert" ]; then
    rm -rf ${workdir}/experiments/${texttype}/bert-base-multilingual-cased_256/checkpoint*
  elif [ "$model" = "xlmr" ]; then
    rm -rf ${workdir}/experiments/${texttype}/xlm-roberta-base_256/checkpoint*
  elif [ "$model" = "robbert" ]; then
    rm -rf ${workdir}/experiments/${texttype}/pdelobelle/robbert-v2-base_256/checkpoint*
  fi
}
# Fine-tune one model on one texttype via the HF token-classification
# runner, logging to logs/, then drop its intermediate checkpoints.
train() {
  texttype=$1
  model=$2
  python /data/transformers/examples/pytorch/token-classification/run_ner.py \
    ${workdir}/resources/cfg/train_${texttype}_${model}_256.json \
    &> ${workdir}/logs/train_${texttype}_${model}_256.log
  clean-checkpoints $texttype $model
}

# Train every model on every texttype, sequentially.
train notes mbert
train notes bertje
train notes xlmr
train notes robbert
train all mbert
train all bertje
train all xlmr
train all robbert
train text mbert
train text bertje
train text xlmr
train text robbert

# run predictions on out-of-domain test data
bash ${workdir}/scripts/run-predict-configs.sh
|
import { MotionContextProps } from ".";
import { MotionProps } from "../../motion/types";
/** Builds the MotionContext value for a motion component from its props. */
export declare function useCreateMotionContext(props: MotionProps, isStatic: boolean): MotionContextProps;
|
<reponame>muddessir/framework<filename>machine/qemu/sources/u-boot/test/py/tests/test_pstore.py
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2020, Collabora
# Author: <NAME> <<EMAIL>>
import pytest
import u_boot_utils
import os
import tempfile
import shutil
# Memory region reserved as the PStore/ramoops backend under the sandbox.
PSTORE_ADDR=0x3000000
PSTORE_LENGTH=0x100000
# Sample ramoops records (hex dumps) checked into the test tree.
PSTORE_PANIC1='test/py/tests/test_pstore_data_panic1.hex'
PSTORE_PANIC2='test/py/tests/test_pstore_data_panic2.hex'
PSTORE_CONSOLE='test/py/tests/test_pstore_data_console.hex'
# Scratch load address for files read back during verification.
ADDR=0x01000008
def load_pstore(u_boot_console):
    """Load sample PStore records into the reserved memory region.

    Places two panic dumps and one console record at the offsets the
    pstore driver expects, then points the 'pstore' command at the
    region.  (The original bound the commands' output to an unused
    local; it is discarded here.)
    """
    u_boot_console.run_command_list([
        'host load hostfs - 0x%x %s' % (PSTORE_ADDR,
            os.path.join(u_boot_console.config.source_dir, PSTORE_PANIC1)),
        'host load hostfs - 0x%x %s' % (PSTORE_ADDR + 4096,
            os.path.join(u_boot_console.config.source_dir, PSTORE_PANIC2)),
        'host load hostfs - 0x%x %s' % (PSTORE_ADDR + 253 * 4096,
            os.path.join(u_boot_console.config.source_dir, PSTORE_CONSOLE)),
        'pstore set 0x%x 0x%x' % (PSTORE_ADDR, PSTORE_LENGTH)])
def checkfile(u_boot_console, path, filesize, checksum):
    """Check file against MD5 checksum.

    Loads *path* to the scratch address, asserts the reported filesize
    matches, then md5sums the loaded bytes and asserts *checksum* appears
    in the output.  Clears the 'filesize' env var afterwards.
    """
    output = u_boot_console.run_command_list([
        'load hostfs - %x %s' % (ADDR, path),
        'printenv filesize'])
    assert('filesize=%x' % (filesize) in ''.join(output))
    output = u_boot_console.run_command_list([
        'md5sum %x $filesize' % ADDR,
        'setenv filesize'])
    assert(checksum in ''.join(output))
@pytest.mark.buildconfigspec('cmd_pstore')
def test_pstore_display_all_records(u_boot_console):
    """Test that pstore displays all records."""
    # Empty command drains any pending console output first.
    u_boot_console.run_command('')
    load_pstore(u_boot_console)
    response = u_boot_console.run_command('pstore display')
    # Both dump (panic) and console records must be listed.
    assert('**** Dump' in response)
    assert('**** Console' in response)
@pytest.mark.buildconfigspec('cmd_pstore')
def test_pstore_display_one_record(u_boot_console):
    """Test that pstore displays only one record."""
    u_boot_console.run_command('')
    load_pstore(u_boot_console)
    # Select only dump record #1; console records must not appear.
    response = u_boot_console.run_command('pstore display dump 1')
    assert('Panic#2 Part1' in response)
    assert('**** Console' not in response)
@pytest.mark.buildconfigspec('cmd_pstore')
def test_pstore_save_records(u_boot_console):
    """Test that pstore saves all records."""
    outdir = tempfile.mkdtemp()
    u_boot_console.run_command('')
    load_pstore(u_boot_console)
    u_boot_console.run_command('pstore save hostfs - %s' % (outdir))
    # Each saved record must match its known size and MD5 checksum.
    checkfile(u_boot_console, '%s/dmesg-ramoops-0' % (outdir), 3798, '8059335ab4cfa62c77324c491659c503')
    checkfile(u_boot_console, '%s/dmesg-ramoops-1' % (outdir), 4035, '3ff30df3429d81939c75d0070b5187b9')
    checkfile(u_boot_console, '%s/console-ramoops-0' % (outdir), 4084, 'bb44de4a9b8ebd9b17ae98003287325b')
    shutil.rmtree(outdir)
|
/*=========================================================================
*
* Copyright NumFOCUS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef itkFrequencyShrinkImageFilter_hxx
#define itkFrequencyShrinkImageFilter_hxx
#include <itkFrequencyShrinkImageFilter.h>
#include <itkProgressReporter.h>
#include <numeric>
#include <functional>
#include "itkInd2Sub.h"
#include <itkPasteImageFilter.h>
#include <itkAddImageFilter.h>
#include <itkMultiplyImageFilter.h>
// #include <itkGaussianSpatialFunction.h>
// #include <itkFrequencyImageRegionIteratorWithIndex.h>
namespace itk
{
// Constructor: default to halving the image along every dimension and
// configure the internal band-pass filter that strips the frequencies
// which would alias after shrinking.
template <class TImageType>
FrequencyShrinkImageFilter<TImageType>::FrequencyShrinkImageFilter()
{
  // Default shrink factor of 2 per dimension.
  for (unsigned int j = 0; j < ImageDimension; j++)
  {
    m_ShrinkFactors[j] = 2;
  }
  this->m_FrequencyBandFilter = FrequencyBandFilterType::New();
  // The band filter only lets pass half of the frequencies ([0, pi/2]).
  this->m_FrequencyBandFilter->SetFrequencyThresholdsInRadians(0.0, Math::pi_over_2);
  bool lowFreqThresholdPassing = true;
  bool highFreqThresholdPassing = true;
  this->m_FrequencyBandFilter->SetPassBand(lowFreqThresholdPassing, highFreqThresholdPassing);
  // The band is not radial, but square like.
  this->m_FrequencyBandFilter->SetRadialBand(false);
  // Pass high positive freqs but stop negative high ones, to avoid overlaping.
  this->m_FrequencyBandFilter->SetPassNegativeHighFrequencyThreshold(false);
}
// Set the same shrink factor for every dimension, clamping to >= 1.
// The inner scan first checks whether any dimension actually differs
// before rewriting the array.
// NOTE(review): Modified() is invoked unconditionally, even when the
// factors are unchanged -- confirm whether that pipeline re-execution
// is intended.
template <class TImageType>
void
FrequencyShrinkImageFilter<TImageType>::SetShrinkFactors(unsigned int factor)
{
  unsigned int j;
  // Find the first dimension whose factor differs from the request.
  for (j = 0; j < ImageDimension; j++)
  {
    if (factor != m_ShrinkFactors[j])
    {
      break;
    }
  }
  // At least one dimension differs: rewrite them all, clamped to >= 1.
  if (j < ImageDimension)
  {
    for (j = 0; j < ImageDimension; j++)
    {
      m_ShrinkFactors[j] = factor;
      if (m_ShrinkFactors[j] < 1)
      {
        m_ShrinkFactors[j] = 1;
      }
    }
  }
  this->Modified();
}
// Set the shrink factor for a single dimension *i*; no-op (and no
// Modified()) when the value is unchanged.
template <class TImageType>
void
FrequencyShrinkImageFilter<TImageType>::SetShrinkFactor(unsigned int i, unsigned int factor)
{
  if (m_ShrinkFactors[i] == factor)
  {
    return;
  }
  m_ShrinkFactors[i] = factor;
  this->Modified();
}
/**
* Implementation Detail:
* The implementation calculate the number of different regions in an image,
* depending on the dimension:
* numberOfRegions = 2^dim (positive and negative frequencies per dim)
* then uses function to convert a linear array of regions [0, ..., numberOfRegions - 1]
* to binary subindices (only two options: positive or negative region)
* In 3D: numberOfRegions = Nr = 2^3 = 8
* sizeOfSubindices = [2,2,2]
* Region = 0 -----> Ind2Sub( 0, [2,2,2]) = [0,0,0]
* Region = 1 -----> Ind2Sub( 1, [2,2,2]) = [1,0,0]
* Region = Nr - 1 -----> Ind2Sub(Nr-1, [2,2,2]) = [1,1,1]
* So, if the result of Ind2Sub is 0 we paste the positive frequencies, if 1, negative freq
*/
template <class TImageType>
void
FrequencyShrinkImageFilter<TImageType>::GenerateData()
{
// Get the input and output pointers
const ImageType * inputPtr = this->GetInput();
typename ImageType::Pointer outputPtr = this->GetOutput();
this->AllocateOutputs();
// outputPtr->SetBufferedRegion(outputPtr->GetLargestPossibleRegion());
outputPtr->FillBuffer(0);
// Output is the sum of the four(2D) or eight(3D) quadrants.
// We can do it only because high freqs are removed.
// This filter will remove it by default a BandPass Filter.
if (this->m_ApplyBandFilter)
{
typename ImageType::SpacingType inputSpacing = this->GetInput()->GetSpacing();
const typename ImageType::SpacingType::ValueType spacingValue = inputSpacing[0];
// Check that the spacing is the same in all directions.
{
bool all_equal = true;
for (unsigned int i = 1; i < ImageDimension; ++i)
{
if (itk::Math::NotAlmostEquals(inputSpacing[i], spacingValue))
{
all_equal = false;
}
}
if (!all_equal)
{
itkExceptionMacro(<< "Spacing of input image is not the same in all directions " << inputSpacing);
}
}
this->m_FrequencyBandFilter->SetInput(this->GetInput());
this->m_FrequencyBandFilter->SetFrequencyThresholds(
this->m_FrequencyBandFilter->GetLowFrequencyThreshold() * spacingValue,
this->m_FrequencyBandFilter->GetHighFrequencyThreshold() * spacingValue);
this->m_FrequencyBandFilter->Update();
inputPtr = this->m_FrequencyBandFilter->GetOutput();
}
typename TImageType::SizeType inputSize = inputPtr->GetLargestPossibleRegion().GetSize();
typename TImageType::SizeType outputSize = outputPtr->GetLargestPossibleRegion().GetSize();
const typename TImageType::IndexType indexOrigOut = outputPtr->GetLargestPossibleRegion().GetIndex();
// Manage ImageDimension array linearly:{{{
FixedArray<unsigned int, ImageDimension> nsizes;
unsigned int numberOfRegions = 1;
for (unsigned int dim = 0; dim < ImageDimension; ++dim)
{
nsizes[dim] = 2;
numberOfRegions *= nsizes[dim];
}
FixedArray<unsigned int, ImageDimension> subIndices;
/// }}}
// Prepare filter to paste the different regions into output.
using PasteFilterType = itk::PasteImageFilter<ImageType>;
typename PasteFilterType::Pointer pasteFilter = PasteFilterType::New();
pasteFilter->SetSourceImage(inputPtr);
pasteFilter->SetDestinationImage(outputPtr);
// pasteFilter->InPlaceOn();
using RegionType = typename ImageType::RegionType;
ProgressReporter progress(this, 0, numberOfRegions);
for (unsigned int n = 0; n < numberOfRegions; ++n)
{
subIndices = itk::Ind2Sub<ImageDimension>(n, nsizes);
RegionType zoneRegion;
typename ImageType::SizeType zoneSize;
typename TImageType::IndexType inputIndex = indexOrigOut;
typename TImageType::IndexType outputIndex = indexOrigOut;
// Note that lowFreqsOfInput is inputSize/2 if outputSize is even, (outputSize - 1)/2 if odd.
for (unsigned int dim = 0; dim < ImageDimension; ++dim)
{
zoneSize[dim] = outputSize[dim];
outputIndex[dim] = indexOrigOut[dim];
if (subIndices[dim] == 0) // positive frequencies
{
inputIndex[dim] = indexOrigOut[dim];
}
else // negative frequencies
{
inputIndex[dim] = indexOrigOut[dim] + inputSize[dim] - zoneSize[dim];
}
}
zoneRegion.SetIndex(inputIndex);
zoneRegion.SetSize(zoneSize);
itkDebugMacro(<< "n:" << n << " region: " << zoneRegion);
pasteFilter->SetSourceRegion(zoneRegion);
pasteFilter->SetDestinationIndex(outputIndex);
pasteFilter->Update();
// Sum the quadrants.
using AddFilterType = itk::AddImageFilter<TImageType, TImageType>;
typename AddFilterType::Pointer addFilter = AddFilterType::New();
addFilter->SetInput1(outputPtr);
addFilter->SetInput2(pasteFilter->GetOutput());
addFilter->InPlaceOn();
if (n == numberOfRegions - 1) // Graft the output.
{
addFilter->Update();
outputPtr = addFilter->GetOutput();
using MultiplyFilterType = itk::MultiplyImageFilter<TImageType, TImageType, TImageType>;
typename MultiplyFilterType::Pointer multiplyFilter = MultiplyFilterType::New();
multiplyFilter->SetInput(outputPtr);
multiplyFilter->SetConstant(static_cast<typename TImageType::PixelType::value_type>(1.0 / numberOfRegions));
multiplyFilter->GraftOutput(outputPtr);
multiplyFilter->Update();
this->GraftOutput(multiplyFilter->GetOutput());
// addFilter->GraftOutput(outputPtr);
// addFilter->Update();
// this->GraftOutput(addFilter->GetOutput());
}
else // update
{
addFilter->Update();
outputPtr = addFilter->GetOutput();
}
progress.CompletedPixel();
}
/** Ensure image is hermitian in the Nyquist bands (even)
* Example: Image 2D size 8, index = [0,...,7]
* Each quadrant is a region pasted from the original image. The index refers to the input image of size 8. The input
* image is hermitian, so:
* 0
* 1 == 7
* 2 == 6 <- 6 is Nyq in new image.
* 3 == 5
* 4 <- Nyq original
* Hermitian table, using the equivalences above. (assuming imag part zero to avoid working with conjugates) . Note
* that index 6 is the new Nyquist. 0 1 | 6 7 0 0,0 1,0 | 2,0 1,0 1 0,1 1,1 | 2,1 1,1
* ---------------------
* 6 0,2 1,2 | 2,2 1,2
* 7 0,1 1,1 | 2,1 1,1
* /
*/
// Fix Nyquist band. Folding results for hermiticity.
// {
// typename TImageType::IndexType index = indexOrigOut + lowFreqsOfInput;
// typename TImageType::IndexType modIndex = indexOrigOut + lowFreqsOfInput;
// // typename TImageType::SizeType endSize = outputSize - lowFreqsOfInput;
// for(unsigned int dim = 0; dim < ImageDimension; ++dim)
// {
// for(int i = 1; i < (int)lowFreqsOfInput[dim]; ++i)
// {
// index = indexOrigOut + lowFreqsOfInput;
// modIndex = indexOrigOut + lowFreqsOfInput;
// index.SetElement(dim, indexOrigOut[dim] + i );
// modIndex.SetElement(dim, indexOrigOut[dim] + outputSize[dim] - i );
// typename TImageType::PixelType value =
// std::conj(outputPtr->GetPixel(index));
// outputPtr->SetPixel(modIndex, value);
// // The stored nyquiist value corresponds to the positive side.
// outputPtr->SetPixel(index, value);
// }
// // The stored nyquiist value corresponds to the positive side.
// index.SetElement(dim, indexOrigOut[dim]);
// outputPtr->SetPixel(index, std::conj(outputPtr->GetPixel(index)));
// }
// }
// // Apply a gaussian window of the size of the output.
// using FunctionType = itk::GaussianSpatialFunction<double, ImageDimension>;
// typename FunctionType::Pointer gaussian = FunctionType::New();
// using ArrayType = FixedArray< double, ImageDimension >;
// ArrayType m_Mean;
// ArrayType m_Sigma;
// double m_Scale = 1.0;
// bool m_Normalized = true;
// for (unsigned int i = 0; i<ImageDimension; ++i)
// {
// m_Mean[i] = indexOrigOut[i];
// // 6.0 is equivalent to 3sigmas (border values are close to zero)
// // m_Sigma[i] = outputSize[i]/(2*3.0);
// m_Sigma[i] = outputSize[i]/(2.0*2.35);
// }
// gaussian->SetSigma(m_Sigma);
// gaussian->SetMean(m_Mean);
// gaussian->SetScale(m_Scale);
// gaussian->SetNormalized(m_Normalized);
//
// // Create an iterator that will walk the output region
// using OutputIterator = itk::FrequencyImageRegionIteratorWithIndex< TImageType >;
// OutputIterator outIt = OutputIterator( outputPtr,
// outputPtr->GetRequestedRegion() );
// outIt.GoToBegin();
// while( !outIt.IsAtEnd() )
// {
// typename TImageType::IndexType ind = outIt.GetFrequencyBin();
// typename FunctionType::InputType f;
// for (unsigned int i = 0; i<ImageDimension; ++i)
// {
// f[i] = static_cast<typename FunctionType::InputType::ValueType>(ind[i]);
// }
// const double value = gaussian->Evaluate(f);
// // Set the pixel value to the function value
// outIt.Set( outIt.Get() * static_cast<typename TImageType::PixelType::value_type>(value) );
// ++outIt;
// }
}
template <class TImageType>
void
FrequencyShrinkImageFilter<TImageType>::GenerateInputRequestedRegion()
{
  // Let the superclass compute the default requested regions first.
  Superclass::GenerateInputRequestedRegion();

  // Chopping high frequencies [0 1...H,H-1 H-2...1] needs the whole input
  // spectrum, so request the largest possible region regardless of what the
  // downstream pipeline asked for.
  TImageType * nonConstInput = const_cast<TImageType *>(this->GetInput());
  itkAssertInDebugAndIgnoreInReleaseMacro(nonConstInput != nullptr);
  nonConstInput->SetRequestedRegion(nonConstInput->GetLargestPossibleRegion());
}
template <class TImageType>
void
FrequencyShrinkImageFilter<TImageType>::GenerateOutputInformation()
{
  // Superclass copies the bulk of the meta-data from input to output.
  Superclass::GenerateOutputInformation();

  const TImageType * input = this->GetInput();
  TImageType *       output = this->GetOutput();
  itkAssertInDebugAndIgnoreInReleaseMacro(input);
  itkAssertInDebugAndIgnoreInReleaseMacro(output != nullptr);

  const typename TImageType::SpacingType & inputSpacing = input->GetSpacing();
  const typename TImageType::SizeType &    inputSize = input->GetLargestPossibleRegion().GetSize();
  const typename TImageType::IndexType &   inputStartIndex = input->GetLargestPossibleRegion().GetIndex();

  typename TImageType::SpacingType outputSpacing(inputSpacing);
  typename TImageType::SizeType    outputSize;
  typename TImageType::IndexType   outputStartIndex;

  for (unsigned int dim = 0; dim < TImageType::ImageDimension; ++dim)
  {
    // Discarding high frequencies enlarges the effective spatial sampling
    // step; the frequency iterators' GetFrequency() honors this spacing.
    outputSpacing[dim] = inputSpacing[dim] * m_ShrinkFactors[dim];
    outputStartIndex[dim] = inputStartIndex[dim];
    outputSize[dim] =
      Math::Floor<SizeValueType>(static_cast<double>(inputSize[dim]) / static_cast<double>(m_ShrinkFactors[dim]));
    if (outputSize[dim] < 1)
    {
      itkExceptionMacro("InputImage is too small! An output pixel does not map to a whole input bin.");
    }
  }

  // Origin is carried over unchanged; only spacing and size shrink.
  output->SetSpacing(outputSpacing);
  output->SetOrigin(input->GetOrigin());

  typename TImageType::RegionType largestRegion;
  largestRegion.SetSize(outputSize);
  largestRegion.SetIndex(outputStartIndex);
  output->SetLargestPossibleRegion(largestRegion);
}
template <class TImageType>
void
FrequencyShrinkImageFilter<TImageType>::PrintSelf(std::ostream & os, Indent indent) const
{
  // Standard ITK self-printing: superclass state first, then our members.
  Superclass::PrintSelf(os, indent);

  os << indent << "Shrink Factor: ";
  for (unsigned int dim = 0; dim < ImageDimension; ++dim)
  {
    os << m_ShrinkFactors[dim] << " ";
  }
  os << std::endl;
  os << "ApplyBandFilter: " << this->m_ApplyBandFilter << std::endl;
  itkPrintSelfObjectMacro(FrequencyBandFilter);
}
} // end namespace itk
#endif
|
<reponame>April17/Mod5_Project
# JSON API for user accounts.
# NOTE(review): `user_atm` is presumably the authenticated username supplied
# by ApplicationController (token auth) -- confirm against that class.
class UsersController < ApplicationController
  # Signup must be reachable without an auth token.
  skip_before_action :authorized, only: :create

  # GET /users/:id -- render the user with all nested associations.
  def show
    user = User.find_by(id: params[:id])
    render json: user, include: "**"
  end

  # POST /users -- create an account from the whitelisted params.
  def create
    user = User.create(user_params)
    if user.valid?
      render json: { success: "Thank you for signup" }
    else
      render json: { errors: user.errors.full_messages }
    end
  end

  # PATCH/PUT -- update the authenticated user's record.
  def update
    user = User.find_by(username: user_atm)
    if user.update(user_params)
      render json: user, include: "**"
    else
      render json: { errors: user.errors.full_messages }
    end
  end

  # GET -- the authenticated user's own profile.
  def profile
    user = User.find_by(username: user_atm)
    render json: user, include: "**"
  end

  # DELETE -- remove the authenticated user's account.
  def destroy
    user = User.find_by(username: user_atm)
    if user.destroy
      render json: { success: "Delete Successful"}
    else
      render json: { errors: "Delete Fail"}
    end
  end

  private

  # Strong parameters for create/update.
  # BUG FIX: this was defined OUTSIDE the class (after its closing `end`),
  # which made it a private method on Object rather than a proper private
  # controller helper. Moved inside the class where it belongs.
  def user_params
    params.permit(:username, :name, :password, :password_confirmation)
  end
end
|
#!/bin/sh
# Install tmux and enable mouse support. Safe to re-run.
# Stop on the first failed command instead of continuing blindly.
set -e
sudo apt-get update
# -y: answer "yes" to the install prompt so the script works unattended
# (the original would hang waiting for confirmation).
sudo apt-get install -y tmux
# Append the mouse setting only if it is not already present, so repeated
# runs do not pile up duplicate lines in ~/.tmux.conf.
grep -qxF 'setw -g mouse on' ~/.tmux.conf 2>/dev/null || echo "setw -g mouse on" >> ~/.tmux.conf
|
#!/usr/bin/env bash
# Copyright (c) 2014 The Theoscoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test marking of spent outputs
# Create a transaction graph with four transactions,
# A/B/C/D
# C spends A
# D spends B and C
# Then simulate C being mutated, to create C'
# that is mined.
# A is still (correctly) considered spent.
# B should be treated as unspent
if [ $# -lt 1 ]; then
echo "Usage: $0 path_to_binaries"
echo "e.g. $0 ../../src"
echo "Env vars BITCOIND and BITCOINCLI may be used to specify the exact binaries used"
exit 1
fi
# set -f: disable globbing so argument strings pass through verbatim.
set -f
# Binaries default to the directory given as $1, overridable via env vars.
BITCOIND=${BITCOIND:-${1}/bitcoind}
CLI=${BITCOINCLI:-${1}/bitcoin-cli}
DIR="${BASH_SOURCE%/*}"
SENDANDWAIT="${DIR}/send.sh"
if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi
# util.sh presumably provides CreateDataDir, GetBlocks and CheckBalance
# used below -- verify against that file.
. "$DIR/util.sh"
# Scratch directory holding both node datadirs; removed on success.
D=$(mktemp -d test.XXXXX)
# Two nodes; one will play the part of merchant, the
# other an evil transaction-mutating miner.
D1=${D}/node1
CreateDataDir $D1 port=11000 rpcport=11001
B1ARGS="-datadir=$D1 -debug=mempool"
$BITCOIND $B1ARGS &
B1PID=$!
D2=${D}/node2
CreateDataDir $D2 port=11010 rpcport=11011
B2ARGS="-datadir=$D2 -debug=mempool"
$BITCOIND $B2ARGS &
B2PID=$!
# Block until both nodes report the same block height.
# (The original comment said "all four nodes"; only two nodes exist here.)
function WaitBlocks {
    declare -i HEIGHT1=-1
    declare -i HEIGHT2=-2
    until (( HEIGHT1 == HEIGHT2 ))
    do
        sleep 1
        HEIGHT1=$( GetBlocks $B1ARGS )
        HEIGHT2=$( GetBlocks $B2ARGS )
    done
}
# Block until the node identified by args "$1" reports exactly "$2" peers.
function WaitPeers {
    declare -i PEERS=$( $CLI $1 getconnectioncount )
    until (( PEERS == "$2" ))
    do
        sleep 1
        PEERS=$( $CLI $1 getconnectioncount )
    done
}
echo "Generating test blockchain..."
# Start with B2 connected to B1:
$CLI $B2ARGS addnode 127.0.0.1:11000 onetry
WaitPeers "$B1ARGS" 1
# 2 block, 50 XBT each == 100 XBT
# These will be transactions "A" and "B"
$CLI $B1ARGS setgenerate true 2
WaitBlocks
# 100 blocks, 0 mature == 0 XBT
$CLI $B2ARGS setgenerate true 100
WaitBlocks
CheckBalance "$B1ARGS" 100
CheckBalance "$B2ARGS" 0
# restart B2 with no connection, so it does not yet see C and D below.
$CLI $B2ARGS stop > /dev/null 2>&1
wait $B2PID
$BITCOIND $B2ARGS &
B2PID=$!
B1ADDRESS=$( $CLI $B1ARGS getnewaddress )
B2ADDRESS=$( $CLI $B2ARGS getnewaddress )
# Transaction C: send-to-self, spend A
TXID_C=$( $CLI $B1ARGS sendtoaddress $B1ADDRESS 50.0)
# Transaction D: spends B and C
TXID_D=$( $CLI $B1ARGS sendtoaddress $B2ADDRESS 100.0)
CheckBalance "$B1ARGS" 0
# Mutate TXID_C and add it to B2's memory pool:
RAWTX_C=$( $CLI $B1ARGS getrawtransaction $TXID_C )
# ... mutate C to create C'
# NOTE(review): the two hex chars at offset 82 appear to be a script length
# byte of the first input; it is incremented and a 4c (OP_PUSHDATA1) spliced
# in, changing the txid without changing semantics -- confirm the offset
# against the serialized tx layout.
L=${RAWTX_C:82:2}
NEWLEN=$( printf "%x" $(( 16#$L + 1 )) )
MUTATEDTX_C=${RAWTX_C:0:82}${NEWLEN}4c${RAWTX_C:84}
# ... give mutated tx1 to B2:
MUTATEDTXID=$( $CLI $B2ARGS sendrawtransaction $MUTATEDTX_C )
echo "TXID_C: " $TXID_C
echo "Mutated: " $MUTATEDTXID
# Re-connect nodes, and have both nodes mine some blocks:
$CLI $B2ARGS addnode 127.0.0.1:11000 onetry
WaitPeers "$B1ARGS" 1
# Having B2 mine the next block puts the mutated
# transaction C in the chain:
$CLI $B2ARGS setgenerate true 1
WaitBlocks
# B1 should still be able to spend 100, because D is conflicted
# so does not count as a spend of B
CheckBalance "$B1ARGS" 100
$CLI $B2ARGS stop > /dev/null 2>&1
wait $B2PID
$CLI $B1ARGS stop > /dev/null 2>&1
wait $B1PID
echo "Tests successful, cleaning up"
rm -rf $D
exit 0
<filename>generators/app/templates/app/scripts/component/Layout/Layout.js
import _Base from '../_Base';
import './Layout.css';
// Root layout: owns the content and modal regions of the page.
export default _Base.extend({
  // Named DOM mount points managed by this layout.
  regions: {
    content: '[data-view=content]',
    modal: '[data-view=modal]',
  },

  initialize() {},

  // Show `View` in the content region, then hand the resolved view
  // instance to `callback` (if one was provided).
  setContent(View, callback) {
    return this.r.content.show(View, () => {
      const shownView = this.r.content.getViewDI(View);
      return typeof callback === 'function' ? callback(shownView) : undefined;
    });
  },

  // Accessor for the modal region.
  getModalLayout() {
    return this.r.modal;
  },
});
|
<filename>applications/physbam/physbam-lib/Public_Library/PhysBAM_Dynamics/Coupled_Evolution/FLUID_TO_SOLID_INTERPOLATION_BASE.cpp<gh_stars>10-100
//#####################################################################
// Copyright 2010.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class FLUID_TO_SOLID_INTERPOLATION_BASE
//#####################################################################
#include <PhysBAM_Tools/Arrays/CONSTANT_ARRAY.h>
#include <PhysBAM_Tools/Arrays_Computations/DOT_PRODUCT.h>
#include <PhysBAM_Tools/Arrays_Computations/INNER_PRODUCT.h>
#include <PhysBAM_Tools/Random_Numbers/RANDOM_NUMBERS.h>
#include <PhysBAM_Tools/Read_Write/Octave/OCTAVE_OUTPUT.h>
#include <PhysBAM_Geometry/Basic_Geometry/BASIC_SIMPLEX_POLICY.h>
#include <PhysBAM_Geometry/Basic_Geometry/POINT_SIMPLEX_1D.h>
#include <PhysBAM_Geometry/Basic_Geometry/SEGMENT_2D.h>
#include <PhysBAM_Geometry/Basic_Geometry/TRIANGLE_3D.h>
#include <PhysBAM_Geometry/Collisions/COLLISION_GEOMETRY.h>
#include <PhysBAM_Geometry/Collisions/COLLISION_GEOMETRY_ID.h>
#include <PhysBAM_Geometry/Grids_Uniform_Collisions/GRID_BASED_COLLISION_GEOMETRY_UNIFORM.h>
#include <PhysBAM_Geometry/Solids_Geometry/RIGID_GEOMETRY.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/POINT_SIMPLICES_1D.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/SEGMENTED_CURVE_2D.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/TRIANGULATED_SURFACE.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Rigid_Bodies/RIGID_BODY.h>
#include <PhysBAM_Solids/PhysBAM_Solids/Solids_Evolution/GENERALIZED_VELOCITY.h>
#include <PhysBAM_Fluids/PhysBAM_Incompressible/Collisions_And_Interactions/DEFORMABLE_OBJECT_FLUID_COLLISIONS.h>
#include <PhysBAM_Dynamics/Coupled_Evolution/FLUID_TO_SOLID_INTERPOLATION_BASE.h>
#include <PhysBAM_Dynamics/Coupled_Evolution/UNIFORM_COLLISION_AWARE_ITERATOR_FACE_COUPLED.h>
#include <PhysBAM_Dynamics/Coupled_Evolution/UNIFORM_COLLISION_AWARE_ITERATOR_FACE_INFO.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
template<class TV> FLUID_TO_SOLID_INTERPOLATION_BASE<TV>::
FLUID_TO_SOLID_INTERPOLATION_BASE(const COLLISION_AWARE_INDEX_MAP<TV>& map)
    :index_map(map) // holds a reference to the caller's index map; not owned
{
}
//#####################################################################
// Constructor
//#####################################################################
// Virtual-base destructor; nothing to release (index_map is a reference).
template<class TV> FLUID_TO_SOLID_INTERPOLATION_BASE<TV>::
~FLUID_TO_SOLID_INTERPOLATION_BASE()
{
}
//#####################################################################
// Function Times
//#####################################################################
// Apply the interpolation operator J: solid_velocity = J * fluid_velocity.
// Overwrite semantics: zero the target, then delegate accumulation to the
// derived class' Times_Add.
template<class TV> void FLUID_TO_SOLID_INTERPOLATION_BASE<TV>::
Times(const VECTOR_ND<T>& fluid_velocity,GENERALIZED_VELOCITY<TV>& solid_velocity) const
{
    solid_velocity*=(T)0;
    Times_Add(fluid_velocity,solid_velocity);
}
//#####################################################################
// Function Transpose_Times
//#####################################################################
// Apply the transposed operator: fluid_force = J^T * solid_force.
// Same overwrite-then-accumulate pattern as Times().
template<class TV> void FLUID_TO_SOLID_INTERPOLATION_BASE<TV>::
Transpose_Times(const GENERALIZED_VELOCITY<TV>& solid_force,VECTOR_ND<T>& fluid_force) const
{
    // TODO: Careful to zero out enough of the solids state.
    fluid_force.Fill(0);
    Transpose_Times_Add(solid_force,fluid_force);
}
//#####################################################################
// Function Test_Matrix
//#####################################################################
// Adjoint-consistency self-test: for random U and solids state (V, twist),
// compare <solids, J U> against <U, J^T solids>. If Times/Transpose_Times
// implement a matrix and its true transpose the two inner products agree;
// the relative error is logged for inspection.
template<class TV> void FLUID_TO_SOLID_INTERPOLATION_BASE<TV>::
Test_Matrix(int number_fluid_faces,int number_particles,int number_rigid_particles) const
{
    RANDOM_NUMBERS<T> random;
    ARRAY<TV> V(number_particles),V2(number_particles);
    random.Fill_Uniform(V,-1,1);
    ARRAY<TWIST<TV> > twist(number_rigid_particles),twist2(number_rigid_particles);
    random.Fill_Uniform(twist,-1,1);
    VECTOR_ND<T> U(number_fluid_faces),U2(number_fluid_faces);
    random.Fill_Uniform(U,-1,1);
    ARRAY<int> empty;
    GENERALIZED_VELOCITY<TV> solids(V,empty,twist,empty,empty),solids2(V2,empty,twist2,empty,empty);
    Times(U,solids2);            // solids2 = J U
    Transpose_Times(solids,U2);  // U2 = J^T solids
    // Unit rigid-body mass so the twist inner product is a plain dot product.
    CONSTANT_ARRAY<RIGID_BODY_MASS<TV,true> > rigid_mass(twist.m,RIGID_BODY_MASS<TV,true>(1,typename RIGID_BODY_POLICY<TV>::INERTIA_TENSOR()+1));
    T inner_solids=ARRAYS_COMPUTATIONS::Dot_Product(V,V2)+ARRAYS_COMPUTATIONS::Inner_Product(rigid_mass,twist,twist2);
    T inner_fluids=U.Dot_Product(U,U2);
    std::stringstream ss;
    ss<<"FLUID_TO_SOLID_INTERPOLATION_BASE Test: "<<inner_solids<<" vs "<<inner_fluids<<" relative "<<
    abs(inner_solids-inner_fluids)/maxabs((T)1e-30,inner_solids,inner_fluids)<<std::endl;
    LOG::filecout(ss.str());
}
//#####################################################################
// Function Store_Maps
//#####################################################################
// Cache layout information from a generalized velocity: the size of the
// particle-velocity block and a pointer to its index list.
// NOTE(review): stores a pointer into G -- the caller must keep G alive for
// as long as these maps are used.
template<class TV> void FLUID_TO_SOLID_INTERPOLATION_BASE<TV>::
Store_Maps(const GENERALIZED_VELOCITY<TV>& G)
{
    V_size=G.V.array.Size();
    V_indices=&G.V.indices;
}
//#####################################################################
template class FLUID_TO_SOLID_INTERPOLATION_BASE<VECTOR<float,1> >;
template class FLUID_TO_SOLID_INTERPOLATION_BASE<VECTOR<float,2> >;
template class FLUID_TO_SOLID_INTERPOLATION_BASE<VECTOR<float,3> >;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class FLUID_TO_SOLID_INTERPOLATION_BASE<VECTOR<double,1> >;
template class FLUID_TO_SOLID_INTERPOLATION_BASE<VECTOR<double,2> >;
template class FLUID_TO_SOLID_INTERPOLATION_BASE<VECTOR<double,3> >;
#endif
|
<reponame>a186r/0x-monorepo
export { CoverageSubprovider } from './coverage_subprovider';
|
#!/bin/zsh
# Run the dotfiles Ansible playbook.
# NOTE(review): the banner says "ARCH LINUX" but the playbook path is
# osx/playbook.yml -- confirm which platform this is meant for.
print_info ">>> RUNNING ANSIBLE FOR ARCH LINUX..."
# BUG FIX: abort if the cd fails instead of silently running ansible-playbook
# from whatever directory the shell happened to be in.
cd $DOTFILES_DIR/00-INSTALLER/01-ANSIBLE || exit 1
ansible-playbook -i hosts osx/playbook.yml --become --ask-become-pass
|
#include <iostream>
using namespace std;
const int ROWS = 3;
const int COLS = 3;
// Print the transpose of `mat`: row r of the output is column r of the input
// (element (r, c) of the transpose is mat[c][r]).
// NOTE(review): indexing is valid only because ROWS == COLS; a rectangular
// matrix would need the transposed dimensions swapped.
void printTranspose(int mat[ROWS][COLS])
{
    for (int r = 0; r < ROWS; ++r)
    {
        for (int c = 0; c < COLS; ++c)
            cout << mat[c][r] << " ";
        cout << endl;
    }
}
// Demo driver: print the transpose of a fixed 3x3 matrix.
int main()
{
    int mat[ROWS][COLS] = {
        {1, 2, 3},
        {4, 5, 6},
        {7, 8, 9}
    };
    cout << "Transpose of the matrix: \n";
    printTranspose(mat);
    return 0;
}
# Archlinux Archive date.
# Pin package retrieval to this snapshot of the Arch Linux Archive.
AA_YEAR='2015'
AA_MONTH='09'
AA_DAY='01'
# Base URL of the archive mirror.
AA_ROOT='http://seblu.net/a/archive'
|
public int findMax(int number1, int number2) {
if (number1 > number2) {
return number1;
} else {
return number2;
}
} |
<gh_stars>100-1000
import Config from '@/config';
import { Cls } from '@/typings/ci';
import CircleCICls from './CircleCI';
import GitHubActionsCls from './GitHubActions';
import TravisCICls from './TravisCI';
// Provider name constants; they double as keys of the `tests` registry below.
export const CircleCI = 'CircleCI';
export const GitHubActions = 'GitHubActions';
export const TravisCI = 'TravisCI';
export type CIs = CircleCICls | GitHubActionsCls | TravisCICls;
// Module-level memoized provider instance, populated by whichCI().
let ci: CIs | void;
interface Tests {
  [label: string]: Cls;
}
interface CIConfig {
  provider: string;
}
// Provider name -> class; each class exposes a static is() detector.
const tests: Tests = {
  [CircleCI]: CircleCICls,
  [GitHubActions]: GitHubActionsCls,
  [TravisCI]: TravisCICls,
};
// Accept either a bare provider name or a full config object and return a
// uniform CIConfig; undefined when nothing is configured.
const normalizeConfiguredCI = (configuredCI?: string | CIConfig): CIConfig | void => {
  if (!configuredCI) {
    return undefined;
  }
  if (typeof configuredCI === 'string') {
    return { provider: configuredCI };
  }
  return configuredCI;
};
// Resolve the CI provider in use: explicit configuration wins, otherwise
// each registered provider's is() detector is tried in registry order.
// The result is memoized in the module-level `ci`, so detection runs at
// most once per process.
const whichCI = (): CIs | void => {
  if (ci) {
    return ci;
  }
  const configuredCI = normalizeConfiguredCI(Config.get('configs.ci'));
  const CI = configuredCI ? configuredCI.provider : Object.keys(tests).find((key: string): boolean => tests[key].is());
  switch (CI) {
    case CircleCI:
      ci = new CircleCICls();
      return ci;
    case GitHubActions:
      ci = new GitHubActionsCls();
      return ci;
    case TravisCI:
      ci = new TravisCICls();
      return ci;
    default:
      // Unknown or undetected provider: leave the cache unset.
      return undefined;
  }
};
export default whichCI;
|
<gh_stars>0
// Node's global object is `global`; its fundamental role is to host global variables.
// Print the absolute path of the file currently being executed,
// e.g. E:\work\Node\HttpRouter\src\let\let01.js
console.log(__filename);
// Directory containing the currently executing script,
// e.g. E:\work\Node\HttpRouter\src\let
console.log(__dirname);
// `process` is a global (a property of `global`) describing the current Node.js process.
// Path of the node executable, e.g. D:\Program Files\nodejs\node.exe
console.log(process.execPath);
// Platform the process runs on: 'darwin', 'freebsd', 'linux', 'sunos' or 'win32'.
// Example output: 运行平台:win32; 进程号:13428; 进程名:C:\Windows\System32\cmd.exe - node let01.js
console.log("运行平台:"+ process.platform + "; 进程号:"+ process.pid + "; 进程名:"+ process.title);
|
<reponame>ziedamami/VegaMall
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { ProductResponse } from 'app/model/ProductResponse';
import { Product } from 'app/model/Product';
@Injectable({
  providedIn: 'root'
})
/**
 * HTTP client for the product/catalog backend.
 *
 * CONSISTENCY FIX: the host+version prefix was hard-coded in all eight
 * methods; it is now a single private field, so the produced URLs are
 * byte-identical to before but defined in one place.
 * NOTE(review): this base URL should come from environment configuration.
 */
export class ProductService {
  private readonly baseUrl = 'http://localhost:8085/v1';

  constructor(private http: HttpClient) {}

  /** All product categories. */
  GetAllCategory() {
    return this.http.get(`${this.baseUrl}/api/provider/product/Findallcategory`);
  }

  /** Sub-categories of the given category. */
  GetSubCategoryByCategory(IdCategory: Number) {
    return this.http.get(`${this.baseUrl}/api/provider/product/category/GetSubCategoryByCategoryId/${IdCategory}`);
  }

  /** All brands. */
  GetAllBrand() {
    return this.http.get(`${this.baseUrl}/provider/GetAllBrands`);
  }

  /** Create a product bound to a category, brand and VAT rate. */
  AddNewProduct(product: Product, idcategorie: Number, brandId: Number, vatId: Number) {
    const url = `${this.baseUrl}/provider/product/addproduct?idcategorie=${idcategorie}&brandId=${brandId}&vatId=${vatId}`;
    return this.http.post<ProductResponse>(url, product).toPromise();
  }

  /** Characteristics declared for a category. */
  GetAllCharacteristicByCategory(IdCategory: Number) {
    return this.http.get(`${this.baseUrl}/Category/getAllCharacteristic/${IdCategory}`);
  }

  /** Allowed values of a characteristic. */
  GetAllValuesByCharacteristic(IdChar: Number) {
    return this.http.get(`${this.baseUrl}/Category/Characteristic/getAllValue/${IdChar}`);
  }

  /**
   * Attach a characteristic value to a product.
   * NOTE(review): `Value` is interpolated without URL-encoding (as in the
   * original); values containing '&' or spaces would need HttpParams.
   */
  AddNewProductCharacteristic(idProduct: Number, CharId: Number, Value: String) {
    const url = `${this.baseUrl}/provider/AddCharToProduct?idproduct=${idProduct}&charcteristiqueid=${CharId}&Value=${Value}`;
    return this.http.post<ProductResponse>(url, null).toPromise();
  }

  /** All VAT rates. */
  GetAllVat() {
    return this.http.get(`${this.baseUrl}/getAllVat`);
  }
}
|
# Restrict permissions (644) on the TLS certificate path named by the
# tls_cert directive in /etc/ldap.conf.
# BUG FIX: added xargs -r so that when no tls_cert line exists, chmod is not
# invoked at all (bare xargs would run `chmod -R 644` with no operand and fail).
grep -i '^tls_cert' /etc/ldap.conf | grep -v "#" | awk '{ print $2 }' | xargs -r chmod -R 644
<reponame>mohamedkhairy/dhis2-android-sdk
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.trackedentity;
import org.hisp.dhis.android.core.BaseRealIntegrationTest;
import org.hisp.dhis.android.core.D2;
import org.hisp.dhis.android.core.D2Factory;
import org.hisp.dhis.android.core.arch.call.factories.internal.QueryCallFactory;
import org.hisp.dhis.android.core.arch.db.stores.internal.IdentifiableObjectStore;
import org.hisp.dhis.android.core.arch.handlers.internal.Handler;
import org.hisp.dhis.android.core.arch.handlers.internal.ObjectWithoutUidHandlerImpl;
import org.hisp.dhis.android.core.category.CategoryCombo;
import org.hisp.dhis.android.core.category.CategoryComboTableInfo;
import org.hisp.dhis.android.core.common.Access;
import org.hisp.dhis.android.core.common.DataAccess;
import org.hisp.dhis.android.core.common.ObjectWithUid;
import org.hisp.dhis.android.core.maintenance.D2Error;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnit;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitProgramLink;
import org.hisp.dhis.android.core.organisationunit.internal.OrganisationUnitProgramLinkStore;
import org.hisp.dhis.android.core.organisationunit.internal.OrganisationUnitStore;
import org.hisp.dhis.android.core.program.Program;
import org.hisp.dhis.android.core.program.ProgramTrackedEntityAttribute;
import org.hisp.dhis.android.core.program.internal.ProgramStore;
import org.hisp.dhis.android.core.program.internal.ProgramTrackedEntityAttributeStore;
import org.hisp.dhis.android.core.trackedentity.internal.TrackedEntityAttributeReservedValueQuery;
import org.hisp.dhis.android.core.trackedentity.internal.TrackedEntityAttributeReservedValueStore;
import org.hisp.dhis.android.core.trackedentity.internal.TrackedEntityAttributeReservedValueStoreInterface;
import org.hisp.dhis.android.core.trackedentity.internal.TrackedEntityAttributeStore;
import org.junit.Before;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import static com.google.common.truth.Truth.assertThat;
import static org.hisp.dhis.android.core.data.utils.FillPropertiesTestUtils.CREATED;
import static org.hisp.dhis.android.core.data.utils.FillPropertiesTestUtils.FUTURE_DATE;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TrackedEntityAttributeReservedValueManagerRealIntegrationShould extends BaseRealIntegrationTest {
private TrackedEntityAttributeReservedValueStoreInterface store;
private String organisationUnitUid = "org_unit_uid";
private String programUid = "program_uid";
private String categoryComboUid = "category_combo_uid";
private String ownerUid = "xs8A6tQJY0s";
private D2 d2;
private OrganisationUnit organisationUnit;
private String pattern;
@Mock
QueryCallFactory<TrackedEntityAttributeReservedValue,
TrackedEntityAttributeReservedValueQuery> trackedEntityAttributeReservedValueQueryCallFactory;
@Mock
Callable<List<TrackedEntityAttributeReservedValue>> trackedEntityAttributeReservedValueCall;
@Captor
private ArgumentCaptor<TrackedEntityAttributeReservedValueQuery> trackedEntityAttributeReservedValueQueryCaptor;
private TrackedEntityAttributeReservedValueManager manager;
// Build an in-memory fixture: three reserved values for attribute `ownerUid`
// in one organisation unit, plus the org unit / program / category-combo /
// program-attribute link rows that the reserved-value manager joins against.
@Before
public void setUp() throws IOException {
    super.setUp();
    MockitoAnnotations.initMocks(this);
    d2 = D2Factory.forNewDatabase();
    login();
    store = TrackedEntityAttributeReservedValueStore.create(d2.databaseAdapter());
    IdentifiableObjectStore<OrganisationUnit> organisationUnitStore =
    OrganisationUnitStore.create(d2.databaseAdapter());
    IdentifiableObjectStore<TrackedEntityAttribute> trackedEntityAttributeStore =
    TrackedEntityAttributeStore.create(d2.databaseAdapter());
    manager = d2.trackedEntityModule().reservedValueManager();
    Handler<TrackedEntityAttributeReservedValue> handler = new ObjectWithoutUidHandlerImpl<>(store);
    List<TrackedEntityAttributeReservedValue> trackedEntityAttributeReservedValues = new ArrayList<>();
    // The three reserved values share owner/org-unit/expiry; only `value` differs.
    TrackedEntityAttributeReservedValue.Builder reservedValueBuilder =
    TrackedEntityAttributeReservedValue.builder()
    .ownerObject("owner_obj")
    .ownerUid(ownerUid)
    .key("key")
    .created(CREATED)
    .expiryDate(FUTURE_DATE)
    .organisationUnit(organisationUnitUid);
    TrackedEntityAttributeReservedValue reservedValue1 = reservedValueBuilder.value("value1").build();
    TrackedEntityAttributeReservedValue reservedValue2 = reservedValueBuilder.value("value2").build();
    TrackedEntityAttributeReservedValue reservedValue3 = reservedValueBuilder.value("value3").build();
    trackedEntityAttributeReservedValues.add(reservedValue1);
    trackedEntityAttributeReservedValues.add(reservedValue2);
    trackedEntityAttributeReservedValues.add(reservedValue3);
    organisationUnit = OrganisationUnit.builder().uid(organisationUnitUid).code("org_unit_code").build();
    organisationUnitStore.insert(organisationUnit);
    // The pattern's ORG_UNIT_CODE(...) segment is resolved from the org unit code above.
    pattern = "CURRENT_DATE(YYYYMM) + \"-\" + CURRENT_DATE(ww) + ORG_UNIT_CODE(...)";
    trackedEntityAttributeStore.updateOrInsert(TrackedEntityAttribute.builder().uid(ownerUid).pattern(pattern).build());
    CategoryCombo categoryCombo = CategoryCombo.builder().uid(categoryComboUid).build();
    d2.databaseAdapter().insert(CategoryComboTableInfo.TABLE_INFO.name(), null, categoryCombo.toContentValues());
    // Program grants read/write data access so reservation calls are permitted.
    Program program = Program.builder().uid(programUid).categoryCombo(ObjectWithUid.create(categoryCombo.uid()))
    .access(Access.create(null, null, DataAccess.create(true, true))).build();
    ProgramStore.create(d2.databaseAdapter()).insert(program);
    ProgramTrackedEntityAttribute programTrackedEntityAttribute =
    ProgramTrackedEntityAttribute.builder()
    .uid("ptea_uid")
    .trackedEntityAttribute(ObjectWithUid.create(ownerUid))
    .program(ObjectWithUid.create(programUid))
    .build();
    ProgramTrackedEntityAttributeStore.create(d2.databaseAdapter()).insert(programTrackedEntityAttribute);
    OrganisationUnitProgramLink organisationUnitProgramLink =
    OrganisationUnitProgramLink.builder().organisationUnit(organisationUnitUid).program(programUid).build();
    OrganisationUnitProgramLinkStore.create(d2.databaseAdapter()).insert(organisationUnitProgramLink);
    // Stub the download call factory so no network traffic occurs in setup.
    when(trackedEntityAttributeReservedValueQueryCallFactory.create(
    any(TrackedEntityAttributeReservedValueQuery.class)))
    .thenReturn(trackedEntityAttributeReservedValueCall);
    handler.handleMany(trackedEntityAttributeReservedValues);
}
// @Test
public void get_one_reserved_value() throws D2Error {
assertThat(selectAll().size()).isEqualTo(3);
String value1 = d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
assertThat(value1).isEqualTo("value1");
}
// @Test
public void get_more_than_one_reserved_value() throws D2Error {
String value1 = d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
String value2 = d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
String value3 = d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
assertThat(value1).isEqualTo("value1");
assertThat(value2).isEqualTo("value2");
assertThat(value3).isEqualTo("value3");
}
// @Test
public void sync_reserved_values_for_one_tracked_entity_attribute() {
d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, 100);
assertThat(selectAll().size()).isEqualTo(100);
}
// @Test
public void sync_20_reserved_values_for_one_tracked_entity_attribute() {
d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, 20);
assertThat(selectAll().size()).isEqualTo(20);
}
// @Test
// Disabled test: a null count falls back to the default fill target of 100 values.
public void sync_100_reserved_values_when_not_number_of_values_to_reserve_is_specified() {
    d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, null);
    assertThat(selectAll().size()).isEqualTo(100);
}
// @Test
// Disabled test: after popping one value (100 -> 99), a default re-sync does NOT top the
// store back up, because 99 is still above the refill threshold.
public void sync_pop_sync_again_and_have_99_reserved_values_when_not_number_of_values_to_reserve_is_specified()
        throws D2Error {
    d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, null);
    assertThat(selectAll().size()).isEqualTo(100);
    d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
    assertThat(selectAll().size()).isEqualTo(99);
    d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, null);
    assertThat(selectAll().size()).isEqualTo(99);
}
// @Test
// Disabled test: a default sync only refills once the stored count drops below 50
// (50 -> no refill; 49 -> refilled up to 100).
public void fill_up_to_100_values_if_db_does_not_have_at_least_50_values_when_not_number_of_values_to_reserve_is_specified()
        throws D2Error {
    d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, 50);
    assertThat(selectAll().size()).isEqualTo(50);
    // At exactly 50 values a null-count sync is a no-op.
    d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, null);
    assertThat(selectAll().size()).isEqualTo(50);
    d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
    assertThat(selectAll().size()).isEqualTo(49);
    // Below 50 values the null-count sync refills up to the 100 default.
    d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, null);
    assertThat(selectAll().size()).isEqualTo(100);
}
// @Test
// Disabled test: requesting fewer values (20) than are already stored (99) changes nothing.
public void sync_pop_sync_again_and_have_99_reserved_values_if_less_than_existing_values_are_requested()
        throws D2Error {
    d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, 100);
    assertThat(selectAll().size()).isEqualTo(100);
    d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
    assertThat(selectAll().size()).isEqualTo(99);
    d2.trackedEntityModule().reservedValueManager().blockingDownloadReservedValues(ownerUid, 20);
    assertThat(selectAll().size()).isEqualTo(99);
}
// @Test
// Disabled test: a bare get triggers an implicit reservation of 100, then consumes one -> 99.
public void reserve_100_new_values_and_take_one() throws D2Error {
    d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
    assertThat(selectAll().size()).isEqualTo(99);
}
// @Test
// Disabled test: two consecutive gets consume two of the implicitly reserved 100 -> 98 left.
public void have_98_values_after_sync_and_take_two() throws D2Error {
    d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
    d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, organisationUnitUid);
    assertThat(selectAll().size()).isEqualTo(98);
}
// @Test
// Disabled test: downloading all reserved values covers every generated attribute in metadata.
public void sync_all_tracked_entity_instances() throws Exception {
    assertThat(selectAll().size()).isEqualTo(3);
    d2.metadataModule().blockingDownload();
    d2.trackedEntityModule().reservedValueManager().blockingDownloadAllReservedValues(null);
    /* 100 Reserved values by default * 2 TEA with generated property true on server = 200 */
    assertThat(selectAll().size()).isEqualTo(200);
}
// @Test
// Disabled test: default download-all builds a query reserving 97 (100 target - 3 existing).
public void create_the_right_query_when_nothing_is_passed() {
    manager.blockingDownloadAllReservedValues(null);
    assertQueryIsCreatedRight(97);
}
// @Test
// Disabled test: attribute-only download still targets the 100 default (97 to reserve).
public void create_the_right_query_when_only_an_attribute_is_passed() {
    manager.blockingDownloadReservedValues(ownerUid, null);
    assertQueryIsCreatedRight(97);
}
// @Test
// Disabled test: org-unit-only variant; same default query as the no-argument case.
public void create_the_right_query_when_only_a_organisation_unit_is_passed() {
    manager.blockingDownloadAllReservedValues(null);
    assertQueryIsCreatedRight(97);
}
// @Test
// Disabled test: attribute + org-unit variant; still 97 values to reserve.
public void create_the_right_query_when_an_attribute_and_a_organisation_unit_is_passed() {
    manager.blockingDownloadReservedValues(ownerUid, null);
    assertQueryIsCreatedRight(97);
}
// @Test
// Disabled test: explicit fill-up target of 20 yields a query for 17 (20 - 3 existing).
public void create_the_right_query_when_a_number_of_values_to_fill_up_is_passed() {
    manager.blockingDownloadReservedValues(null, 20);
    assertQueryIsCreatedRight(17);
}
// @Test
// Disabled test: fill-up target 20 plus an attribute; query still asks for 17.
public void create_the_right_query_when_a_number_of_values_to_fill_up_and_an_attribute_is_passed() {
    manager.blockingDownloadReservedValues(ownerUid, 20);
    assertQueryIsCreatedRight(17);
}
// @Test
// Disabled test: fill-up target 20 through the download-all entry point; 17 to reserve.
public void create_the_right_query_when_a_number_of_values_to_fill_up_and_a_organisation_unit_is_passed() {
    manager.blockingDownloadAllReservedValues(20);
    assertQueryIsCreatedRight(17);
}
// @Test
// Disabled test: all arguments supplied; same 17-value query as the partial variants.
public void create_the_right_query_when_all_arguments_are_passed() {
    manager.blockingDownloadReservedValues(ownerUid, 20);
    assertQueryIsCreatedRight(17);
}
// @Test (expected = D2Error.class)
// Disabled test: requesting a value for an unknown organisation unit must raise D2Error.
public void return_d2_call_exception_if_no_valid_org_unit() throws D2Error {
    d2.trackedEntityModule().reservedValueManager().blockingGetValue(ownerUid, "not_stored_organisation_unit_uid");
}
// Convenience accessor: every reserved value currently persisted in the local store.
private List<TrackedEntityAttributeReservedValue> selectAll() {
    return store.selectAll();
}
// Best-effort login used by fixtures: logs in only when no session exists.
// NOTE(review): all failures are silently swallowed — presumably so repeated test
// setup tolerates an already-open session; consider at least logging the exception.
private void login() {
    try {
        if (!d2.userModule().isLogged().blockingGet()) {
            d2.userModule().logIn(username, password, url).blockingGet();
        }
    } catch (Exception ignored) {
    }
}
/*
 * This method stopped working because QueryCallFactory mock instance differs from Dagger's injected one,
 * so the code is calling .create() on Dagger's instance and .verify() is trying to catch the call from Mockito's instace.
 */
// Captures the query handed to the (mocked) call factory and checks every field:
// org unit, number to reserve, attribute pattern and attribute uid.
private void assertQueryIsCreatedRight(Integer numberOfValuesExpected) {
    verify(trackedEntityAttributeReservedValueQueryCallFactory).create(trackedEntityAttributeReservedValueQueryCaptor.capture());
    TrackedEntityAttributeReservedValueQuery query = trackedEntityAttributeReservedValueQueryCaptor.getValue();
    assertThat(query.organisationUnit().uid()).isEqualTo(organisationUnit.uid());
    assertThat(query.numberToReserve()).isEqualTo(numberOfValuesExpected); // values expected - 3 that it had before.
    assertThat(query.trackedEntityAttributePattern()).isEqualTo(pattern);
    assertThat(query.trackedEntityAttributeUid()).isEqualTo(ownerUid);
}
}
|
/*
* Copyright 2014 VMware, Inc. All rights reserved. Licensed under the Apache v2 License.
*/
package govcloudair
import (
. "gopkg.in/check.v1"
)
// Test_FindCatalogItem drives Catalog.FindCatalogItem against canned API
// responses: resolve the org, find the "Public Catalog", look up an existing
// item and verify its HREF/description, then confirm an unknown name errors.
func (s *S) Test_FindCatalogItem(c *C) {
	// Get the Org populated
	testServer.Response(200, nil, orgExample)
	org, err := s.vdc.GetVDCOrg()
	_ = testServer.WaitRequest()
	testServer.Flush()
	c.Assert(err, IsNil)

	// Populate Catalog
	testServer.Response(200, nil, catalogExample)
	cat, err := org.FindCatalog("Public Catalog")
	_ = testServer.WaitRequest()
	testServer.Flush()
	// Fix: this error was previously unchecked; a failed catalog lookup would
	// make the FindCatalogItem call below dereference an invalid catalog.
	c.Assert(err, IsNil)

	// Find Catalog Item
	testServer.Response(200, nil, catalogitemExample)
	catitem, err := cat.FindCatalogItem("CentOS64-32bit")
	_ = testServer.WaitRequest()
	testServer.Flush()
	c.Assert(err, IsNil)
	c.Assert(catitem.CatalogItem.HREF, Equals, "http://localhost:4444/api/catalogItem/1176e485-8858-4e15-94e5-ae4face605ae")
	c.Assert(catitem.CatalogItem.Description, Equals, "id: cts-6.4-32bit")

	// Test non-existent catalog item
	catitem, err = cat.FindCatalogItem("INVALID")
	c.Assert(err, NotNil)
}
// catalogExample is a canned vCloud v1.5 Catalog XML response served by the
// stub test server; Test_FindCatalogItem resolves items from this payload.
// The XML body is a runtime fixture and must be kept byte-for-byte intact.
var catalogExample = `
<?xml version="1.0" ?>
<Catalog href="http://localhost:4444/api/catalog/e8a20fdf-8a78-440c-ac71-0420db59f854" id="urn:vcloud:catalog:e8a20fdf-8a78-440c-ac71-0420db59f854" name="Public Catalog" type="application/vnd.vmware.vcloud.catalog+xml" xmlns="http://www.vmware.com/vcloud/v1.5" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.vmware.com/vcloud/v1.5 http://10.6.32.3/api/v1.5/schema/master.xsd">
	<Link href="http://localhost:4444/api/catalog/e8a20fdf-8a78-440c-ac71-0420db59f854/metadata" rel="down" type="application/vnd.vmware.vcloud.metadata+xml"/>
	<Description>vCHS service catalog</Description>
	<CatalogItems>
		<CatalogItem href="http://localhost:4444/api/catalogItem/013d1994-f009-4c40-ac48-517fe7d952a0" id="013d1994-f009-4c40-ac48-517fe7d952a0" name="W2K12-STD-64BIT" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/05384603-e07e-4f00-a95e-776b427f22d9" id="05384603-e07e-4f00-a95e-776b427f22d9" name="W2K12-STD-R2-SQL2K14-WEB" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/1176e485-8858-4e15-94e5-ae4face605ae" id="1176e485-8858-4e15-94e5-ae4face605ae" name="CentOS64-32bit" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/1a729040-71b6-412c-bda9-20b9085f9882" id="1a729040-71b6-412c-bda9-20b9085f9882" name="W2K8-STD-R2-64BIT-SQL2K8-STD-R2-SP2" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/222624b5-e62a-4f5b-a2af-b33a4664005e" id="222624b5-e62a-4f5b-a2af-b33a4664005e" name="W2K12-STD-64BIT-SQL2K12-STD-SP1" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/54cb2af1-4439-48fe-85b6-4c9524930ce6" id="54cb2af1-4439-48fe-85b6-4c9524930ce6" name="Ubuntu Server 12.04 LTS (amd64 20140619)" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/693f342b-d872-41d1-983b-fd5cc2c15f7c" id="693f342b-d872-41d1-983b-fd5cc2c15f7c" name="W2K8-STD-R2-64BIT" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/8d4edd11-393f-4cda-ace4-d5b8f1548928" id="8d4edd11-393f-4cda-ace4-d5b8f1548928" name="CentOS64-64bit" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/bfca201c-e8f3-49f8-a828-397e16fa6cfe" id="bfca201c-e8f3-49f8-a828-397e16fa6cfe" name="W2K12-STD-R2-64BIT" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/cb508cd9-664a-4fec-8eb1-ae5934aad6ad" id="cb508cd9-664a-4fec-8eb1-ae5934aad6ad" name="W2K12-STD-64BIT-SQL2K12-WEB-SP1" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/d0be59f3-ef80-4298-bd4c-f2258a3fec37" id="d0be59f3-ef80-4298-bd4c-f2258a3fec37" name="W2K8-STD-R2-64BIT-SQL2K8-WEB-R2-SP2" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/dbbf4633-64a3-4ac1-b9e0-7f923efa3f13" id="dbbf4633-64a3-4ac1-b9e0-7f923efa3f13" name="Ubuntu Server 12.04 LTS (i386 20140619)" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/ed996ae8-3081-4e16-a7b6-4bed1c462aa4" id="ed996ae8-3081-4e16-a7b6-4bed1c462aa4" name="CentOS63-64bit" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/f4dc0f92-74ae-413e-8e0f-25e6568a8195" id="f4dc0f92-74ae-413e-8e0f-25e6568a8195" name="W2K12-STD-R2-SQL2K14-STD" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
		<CatalogItem href="http://localhost:4444/api/catalogItem/ff9c9b63-ca3b-4e39-ab72-7eb9049f8b05" id="ff9c9b63-ca3b-4e39-ab72-7eb9049f8b05" name="CentOS63-32bit" type="application/vnd.vmware.vcloud.catalogItem+xml"/>
	</CatalogItems>
	<IsPublished>true</IsPublished>
	<DateCreated>2013-10-15T01:14:22.370Z</DateCreated>
	<VersionNumber>60</VersionNumber>
</Catalog>
`
|
import { Column } from './../models/column.interface';
import { Formatter } from './../models/formatter.interface';
/**
 * SlickGrid cell formatter for percent-complete columns.
 * Renders a dash for blank/null cells; values below 50 are shown as
 * at-risk (bold red), all other values in green.
 */
export const percentCompleteFormatter: Formatter = (row: number, cell: number, value: any, columnDef: Column, dataContext: any): string => {
  if (value === null || value === '') {
    return '-';
  }
  const spanStyle = value < 50 ? 'color:red;font-weight:bold;' : 'color:green';
  return `<span style='${spanStyle}'>${value}%</span>`;
};
|
<gh_stars>1-10
// Build plugins: PGP signing + Sonatype publishing for releases,
// Coursier for dependency resolution, and sbt-jmh for benchmarks.
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.0")
addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.2")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.3")
|
from __future__ import unicode_literals
import functools
from ..py27compat import text_type
from ..util import properties
from ..backend import KeyringBackend
from ..errors import PasswordDeleteError, ExceptionRaisedContext
try:
# prefer pywin32-ctypes
from win32ctypes.pywin32 import pywintypes
from win32ctypes.pywin32 import win32cred
# force demand import to raise ImportError
win32cred.__name__
except ImportError:
# fallback to pywin32
try:
import pywintypes
import win32cred
except ImportError:
pass
def has_pywin32():
    """
    Does this environment have pywin32?

    Should return False even when Mercurial's Demand Import allowed import of
    win32cred.
    """
    # Touching an attribute forces demand-import systems to really load the
    # module; ExceptionRaisedContext records whether that access raised.
    with ExceptionRaisedContext() as exc:
        win32cred.__name__
    raised = bool(exc)
    return not raised
class WinVaultKeyring(KeyringBackend):
    """
    WinVaultKeyring stores encrypted passwords using the Windows Credential
    Manager.

    Requires pywin32

    This backend does some gymnastics to simulate multi-user support,
    which WinVault doesn't support natively. See
    https://bitbucket.org/kang/python-keyring-lib/issue/47/winvaultkeyring-only-ever-returns-last#comment-731977
    for details on the implementation, but here's the gist:

    Passwords are stored under the service name unless there is a collision
    (another password with the same service name but different user name),
    in which case the previous password is moved into a compound name:
    {username}@{service}
    """

    @properties.ClassProperty
    @classmethod
    def priority(cls):
        """
        If available, the preferred backend on Windows.
        """
        if not has_pywin32():
            raise RuntimeError("Requires Windows and pywin32")
        return 5

    @staticmethod
    def _compound_name(username, service):
        # Disambiguating target name used when two users share one service.
        return '%(username)s@%(service)s' % vars()

    def get_password(self, service, username):
        """Return the password for (service, username), or None if absent."""
        # first attempt to get the password under the service name
        res = self._get_password(service)
        if not res or res['UserName'] != username:
            # It wasn't found so attempt to get it with the compound name
            res = self._get_password(self._compound_name(username, service))
        if not res:
            return None
        blob = res['CredentialBlob']
        # CredRead hands back the secret as a UTF-16 encoded blob.
        return blob.decode('utf-16')

    def _get_password(self, target):
        """Read a generic credential by target name; None when not found."""
        try:
            res = win32cred.CredRead(
                Type=win32cred.CRED_TYPE_GENERIC,
                TargetName=target,
            )
        except pywintypes.error as e:
            # Normalize old-style pywin32 errors before inspecting them.
            e = OldPywinError.wrap(e)
            if e.winerror == 1168 and e.funcname == 'CredRead':  # not found
                return None
            raise
        return res

    def set_password(self, service, username, password):
        """Store *password*; any pre-existing credential under the plain
        service name is first moved to its compound name to avoid clobbering."""
        existing_pw = self._get_password(service)
        if existing_pw:
            # resave the existing password using a compound target
            existing_username = existing_pw['UserName']
            target = self._compound_name(existing_username, service)
            self._set_password(target, existing_username,
                               existing_pw['CredentialBlob'].decode('utf-16'))
        self._set_password(service, username, text_type(password))

    def _set_password(self, target, username, password):
        # CRED_PERSIST_ENTERPRISE keeps the credential across sessions.
        credential = dict(Type=win32cred.CRED_TYPE_GENERIC,
                          TargetName=target,
                          UserName=username,
                          CredentialBlob=password,
                          Comment="Stored using python-keyring",
                          Persist=win32cred.CRED_PERSIST_ENTERPRISE)
        win32cred.CredWrite(credential, 0)

    def delete_password(self, service, username):
        """Delete credentials matching *username* under both the plain and
        compound target names; raise PasswordDeleteError if none matched."""
        compound = self._compound_name(username, service)
        deleted = False
        for target in service, compound:
            existing_pw = self._get_password(target)
            if existing_pw and existing_pw['UserName'] == username:
                deleted = True
                self._delete_password(target)
        if not deleted:
            raise PasswordDeleteError(service)

    def _delete_password(self, target):
        # Removes the generic credential; caller has verified ownership.
        win32cred.CredDelete(
            Type=win32cred.CRED_TYPE_GENERIC,
            TargetName=target,
        )
class OldPywinError(object):
    """
    A compatibility wrapper for old PyWin32 errors, such as reported in
    https://bitbucket.org/kang/python-keyring-lib/issue/140/

    Old-style errors expose their details positionally (err[0] is the
    Windows error code, err[1] the failing function name); this adapter
    exposes them under the modern attribute names.
    """

    def __init__(self, orig):
        self.orig = orig

    @property
    def funcname(self):
        # Old-style errors carry the API function name at index 1.
        return self.orig[1]

    @property
    def winerror(self):
        # Old-style errors carry the Windows error code at index 0.
        return self.orig[0]

    @classmethod
    def wrap(cls, orig_err):
        """Return *orig_err* unchanged if it already has the modern
        attributes, otherwise wrap it in this adapter."""
        if hasattr(orig_err, 'funcname') and hasattr(orig_err, 'winerror'):
            return orig_err
        return cls(orig_err)
|
const test = require('ava');
const { default: a31 } = require('@mmstudio/an000031');
const { default: a } = require('../dist/index');
// Happy path: logging in with a ticket token returns the matching user record.
// NOTE(review): '<PASSWORD>' is a redacted placeholder token — restore a real
// fixture value before re-enabling this against a live backend.
test('使用票根登陆', async (t) => {
	const userid = 'taoqiufeng';
	const token = '<PASSWORD>';
	const ip = '127.0.0.1';
	// a31 presumably issues/registers the ticket before a() validates it — confirm.
	const ra31 = await a31(userid, 'taoqf001', token, ip);
	t.is(ra31.userid, userid);
	const user = await a(userid, token, ip);
	t.is(user.userid, userid);
});
// Failure path: an invalid ticket token yields null instead of a user record.
test('使用票根登陆,失败', async (t) => {
	const userid = 'taoqiufeng';
	const token = 'mm';
	const ip = '127.0.0.1';
	const user = await a(userid, token, ip);
	t.is(user, null);
});
|
<reponame>foliveira/homebrew-cask<gh_stars>0
# Homebrew-Cask definition for Tor Browser (en-US, 64-bit OS X build).
cask 'torbrowser' do
  version '5.5.4'
  sha256 '7dd8ba199d08b725e74ef5af33ecb2600e9abe33d859b2d975a102d74aed7a63'

  # Official distribution mirror; the filename embeds the version twice.
  url "https://dist.torproject.org/torbrowser/#{version}/TorBrowser-#{version}-osx64_en-US.dmg"
  name 'Tor Browser'
  homepage 'https://www.torproject.org/projects/torbrowser.html'
  license :oss

  # Detached GPG signature published alongside the dmg.
  gpg "#{url}.asc",
      key_id: 'ef6e286dda85ea2a4ba7de684e2c6e8793298290'

  app 'TorBrowser.app'

  # Residue removed by `brew cask zap`: recent-documents list and prefs.
  zap delete: [
                '~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/org.mozilla.tor browser.sfl',
                '~/Library/Preferences/org.mozilla.tor browser.plist',
              ]
end
|
import random
def generate_resource_change(resources):
updated_resources = {}
for resource, (current_value, max_value) in resources.items():
change = random.randint(-5, 5)
new_value = current_value + change
new_value = min(new_value, max_value)
new_value = max(new_value, 0)
updated_resources[resource] = (new_value, max_value)
return updated_resources |
<gh_stars>10-100
package codecheck.github.operations
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import org.json4s.JArray
import org.json4s.JObject
import org.json4s.JString
import codecheck.github.api.GitHubAPI
import codecheck.github.models.PullRequestReviewInput
import codecheck.github.models.PullRequestReview
/**
 * GitHub pull-request review operations: list, get, create and dismiss.
 * Mixed into GitHubAPI, which supplies the `exec` request primitive.
 */
trait PullRequestReviewOp {
  self: GitHubAPI =>

  /** Lists every review on the given pull request. */
  def listPullRequestReviews(
    owner: String,
    repo: String,
    number: Long
  ): Future[List[PullRequestReview]] = {
    exec("GET", s"/repos/$owner/$repo/pulls/$number/reviews").map(
      _.body match {
        case JArray(arr) => arr.map(v => PullRequestReview(v))
        case _ => throw new IllegalStateException()
      }
    )
  }

  /** Fetches a single review; None when the review does not exist (404). */
  def getPullRequestReview(owner: String, repo: String, number: Long, id: Long): Future[Option[PullRequestReview]] = {
    val path = s"/repos/$owner/$repo/pulls/$number/reviews/$id"
    exec("GET", path, fail404=false).map(res =>
      res.statusCode match {
        case 404 => None
        case 200 => Some(PullRequestReview(res.body))
        // Fix: an unexpected status previously escaped as an opaque
        // scala.MatchError; fail explicitly, consistent with
        // listPullRequestReviews above.
        case _ => throw new IllegalStateException()
      }
    )
  }

  /** Creates a review on the pull request from the given input payload. */
  def createPullRequestReview(owner: String, repo: String, number: Long, input: PullRequestReviewInput): Future[PullRequestReview] = {
    val path = s"/repos/$owner/$repo/pulls/$number/reviews"
    exec("POST", path, input.value).map { result =>
      PullRequestReview(result.body)
    }
  }

  /** Dismisses a review, recording *message* as the dismissal reason. */
  def dismissPullRequestReview(owner: String, repo: String, number: Long, id: Long, message: String): Future[PullRequestReview] = {
    val path = s"/repos/$owner/$repo/pulls/$number/reviews/$id/dismissals"
    exec("PUT", path, JObject(List(
      "message" -> JString(message)
    ))).map { result =>
      // Consistency: use the companion apply like the other operations,
      // instead of the previous direct `new PullRequestReview(...)`.
      PullRequestReview(result.body)
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.