text stringlengths 1 1.05M |
|---|
$ v run recursion.v
5040
|
package api
import (
"encoding/json"
"fmt"
"net/http"
"github.com/ManuStoessel/wirvsvirus/backend/entity"
"github.com/gorilla/mux"
log "github.com/sirupsen/logrus"
)
// getUser handles a request for a single user. It resolves the "id" path
// variable, reads the matching user from storage, and writes it back as
// JSON. Responds 404 when no id was supplied or no user exists for it,
// and 500 when the stored user cannot be marshalled.
func getUser(w http.ResponseWriter, r *http.Request) {
	queries := mux.Vars(r)
	w.Header().Set("Content-Type", "application/json")

	if id, ok := queries["id"]; ok {
		user := &entity.User{}
		user = user.Read(id)
		if user != nil {
			payload, err := json.Marshal(user)
			if err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				w.Write([]byte(`{"error": "error marshalling data"}`))
				log.WithFields(log.Fields{
					"user": fmt.Sprintf("%+v", user),
				}).Error("Unable to marshal user data.")
				return
			}
			w.WriteHeader(http.StatusOK)
			w.Write(payload)
			log.WithFields(log.Fields{
				"id": id,
			}).Trace("User found.")
			return
		}
	}

	// Fallthrough: either the id variable was missing or Read returned nil.
	w.WriteHeader(http.StatusNotFound)
	w.Write([]byte(`{"error": "not found"}`))
	log.WithFields(log.Fields{
		"queries": fmt.Sprintf("%+v", queries),
	}).Error("Unable to find user.")
}
// updateUser handles an update request for a single user. It resolves the
// "id" path variable, verifies the user exists, decodes the request body
// into a user entity, forces the path id onto it, and persists the update.
// Responds 400 on an unparsable body, 404 when the id is missing or
// unknown, and 500 when the updated user cannot be marshalled back to JSON.
func updateUser(w http.ResponseWriter, r *http.Request) {
	queries := mux.Vars(r)
	w.Header().Set("Content-Type", "application/json")
	if id, ok := queries["id"]; ok {
		data := &entity.User{}
		data = data.Read(id)
		if data != nil {
			userToBeUpdated := entity.User{}
			err := json.NewDecoder(r.Body).Decode(&userToBeUpdated)
			if err != nil {
				w.WriteHeader(http.StatusBadRequest)
				w.Write([]byte(`{"error": "could not parse body as user"}`))
				log.WithFields(log.Fields{
					"body": fmt.Sprintf("%+v", r.Body),
				}).Error("Unable to unmarshal body as user.")
				return
			}
			// Force the path id onto the decoded payload so the request body
			// cannot redirect the update to a different user.
			userToBeUpdated.ID = id
			userToBeUpdated.Update()
			responseBody, err := json.Marshal(userToBeUpdated)
			if err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				w.Write([]byte(`{"error": "error marshalling data"}`))
				// BUGFIX: log the value that actually failed to marshal
				// (userToBeUpdated), not the previously read user.
				log.WithFields(log.Fields{
					"user": fmt.Sprintf("%+v", userToBeUpdated),
				}).Error("Unable to marshal user data.")
				return
			}
			w.WriteHeader(http.StatusOK)
			w.Write(responseBody)
			log.WithFields(log.Fields{
				"id": id,
			}).Trace("User updated.")
			return
		}
	}
	w.WriteHeader(http.StatusNotFound)
	w.Write([]byte(`{"error": "not found"}`))
	log.WithFields(log.Fields{
		"queries": fmt.Sprintf("%+v", queries),
	}).Error("Unable to find user.")
}
// deleteUser handles a delete request for a single user. It resolves the
// "id" path variable, verifies the user exists, deletes it, and echoes the
// deleted user back as JSON. Responds 404 when the id is missing or
// unknown, and 500 when the deleted user cannot be marshalled.
func deleteUser(w http.ResponseWriter, r *http.Request) {
	queries := mux.Vars(r)
	w.Header().Set("Content-Type", "application/json")

	if id, ok := queries["id"]; ok {
		user := &entity.User{}
		user = user.Read(id)
		if user != nil {
			user.Delete()
			payload, err := json.Marshal(user)
			if err != nil {
				w.WriteHeader(http.StatusInternalServerError)
				w.Write([]byte(`{"error": "error marshalling data"}`))
				log.WithFields(log.Fields{
					"user": fmt.Sprintf("%+v", user),
				}).Error("Unable to marshal user data.")
				return
			}
			w.WriteHeader(http.StatusOK)
			w.Write(payload)
			log.WithFields(log.Fields{
				"id": id,
			}).Debug("User deleted.")
			return
		}
	}

	// Fallthrough: either the id variable was missing or Read returned nil.
	w.WriteHeader(http.StatusNotFound)
	w.Write([]byte(`{"error": "not found"}`))
	log.WithFields(log.Fields{
		"queries": fmt.Sprintf("%+v", queries),
	}).Error("Unable to find user.")
}
// createUser handles a request creating a new user. It decodes the request
// body into a user entity, persists it, and echoes the created user back as
// JSON with status 201. Responds 400 on an unparsable body and 500 when the
// created user cannot be marshalled.
func createUser(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/json")
	// NOTE(review): ParseForm is unnecessary for a JSON body (it only parses
	// form-encoded content) and is kept for backward compatibility with the
	// existing 400 behavior on malformed form requests — TODO confirm it can
	// be dropped.
	err := r.ParseForm()
	if err != nil {
		w.WriteHeader(http.StatusBadRequest)
		w.Write([]byte(`{"error": "body not parsed"}`))
		log.WithFields(log.Fields{
			"body": fmt.Sprintf("%+v", r.Body),
		}).Error("Unable to parse body.")
		return
	}
	userToBeCreated := entity.User{}
	err = json.NewDecoder(r.Body).Decode(&userToBeCreated)
	if err != nil {
		w.WriteHeader(http.StatusBadRequest)
		w.Write([]byte(`{"error": "could not parse body as user"}`))
		log.WithFields(log.Fields{
			"body": fmt.Sprintf("%+v", r.Body),
		}).Error("Unable to unmarshal body as user.")
		return
	}
	userToBeCreated.Create()
	response, err := json.Marshal(userToBeCreated)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(`{"error": "could not marshal user"}`))
		log.WithFields(log.Fields{
			"user": fmt.Sprintf("%+v", userToBeCreated),
		}).Error("Unable to marshal user as body.")
		return
	}
	w.WriteHeader(http.StatusCreated)
	w.Write(response)
	log.WithFields(log.Fields{
		"user": fmt.Sprintf("%+v", userToBeCreated),
	}).Debug("User created.")
}
func listUsers(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
userList := UserList{}
user := entity.User{}
userList.Users = user.ListAll()
userList.Count = len(userList.Users)
responseBody, err := json.Marshal(userList)
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte(`{"error": "error marshalling data"}`))
log.WithFields(log.Fields{
"userlist": fmt.Sprintf("%+v", userList),
}).Error("Unable to marshal userlist data.")
return
}
w.WriteHeader(http.StatusOK)
w.Write(responseBody)
log.WithFields(log.Fields{
"listlength": fmt.Sprintf("%+v", userList.Count),
}).Trace("Userlist returned.")
return
}
|
import hashlib
import json
import six
from copy import copy
from datetime import datetime
from itertools import product
from logging import getLogger
from threading import Thread, Event
from time import time
from typing import List, Set, Union, Any, Sequence, Optional, Mapping, Callable
from .job import TrainsJob
from .parameters import Parameter
from ..backend_interface.util import get_or_create_project
from ..logger import Logger
from ..backend_api.services import workers as workers_service, tasks as tasks_service, events as events_service
from ..task import Task
# Module-level logger shared by all optimization strategy classes below.
logger = getLogger('clearml.automation.optimization')
class Objective(object):
    """
    Optimization ``Objective`` class to maximize / minimize over all experiments. This class will sample a specific
    scalar from all experiments, and maximize / minimize over single scalar (i.e., title and series combination).

    ``SearchStrategy`` and ``HyperParameterOptimizer`` use ``Objective`` in the strategy search algorithm.
    """

    def __init__(self, title, series, order='max', extremum=False):
        # type: (str, str, str, bool) -> ()
        """
        Construct ``Objective`` object that will return the scalar value for a specific task ID.

        :param str title: The scalar graph title to sample from.
        :param str series: The scalar series title to sample from.
        :param str order: The setting for maximizing or minimizing the objective scalar value.
            The values are:

            - ``max``
            - ``min``

        :param bool extremum: Return the global minimum / maximum reported metric value
            The values are:

            - ``True`` - Return the global minimum / maximum reported metric value.
            - ``False`` - Return the last value reported for a specific Task. (Default)
        """
        self.title = title
        self.series = series
        assert order in ('min', 'max',)
        # normalize value so we always look for the highest objective value
        self.sign = -1 if (isinstance(order, str) and order.lower().strip() == 'min') else +1
        self._metric = None  # lazily-built (md5(title), md5(series)) pair
        self.extremum = extremum

    def get_objective(self, task_id):
        # type: (Union[str, Task, TrainsJob]) -> Optional[float]
        """
        Return a specific task scalar value based on the objective settings (title/series).

        :param str task_id: The Task id to retrieve scalar from (or ``TrainsJob`` object).
        :return: The scalar value, or None if the task/metric could not be queried.
        """
        # create self._metric
        self._get_last_metrics_encode_field()

        if isinstance(task_id, Task):
            task_id = task_id.id
        elif isinstance(task_id, TrainsJob):
            task_id = task_id.task_id()

        # noinspection PyBroadException
        try:
            # noinspection PyProtectedMember
            task = Task._query_tasks(
                task_ids=[task_id], only_fields=['last_metrics.{}.{}'.format(self._metric[0], self._metric[1])])[0]
        except Exception:
            return None

        metrics = task.last_metrics
        if not metrics:
            return None

        # noinspection PyBroadException
        try:
            values = metrics[self._metric[0]][self._metric[1]]
            if not self.extremum:
                return values['value']
            return values['min_value'] if self.sign < 0 else values['max_value']
        except Exception:
            return None

    def get_current_raw_objective(self, task):
        # type: (Union[TrainsJob, Task]) -> (int, float)
        """
        Return the current raw value (without sign normalization) of the objective.

        :param str task: The Task or Job to retrieve scalar from (or ``TrainsJob`` object).
        :return: Tuple(iteration, value) if, and only if, the metric exists. None if the metric does not exist.
        """
        if isinstance(task, Task):
            task_id = task.id
        elif isinstance(task, TrainsJob):
            task_id = task.task_id()
        else:
            task_id = task

        if not task_id:
            raise ValueError("Task ID not provided")

        # send request
        # noinspection PyBroadException
        try:
            # noinspection PyProtectedMember
            res = Task._get_default_session().send(
                events_service.ScalarMetricsIterHistogramRequest(
                    task=task_id, key='iter', samples=None),
            )
        except Exception:
            res = None
        if not res:
            return None
        response = res.wait()
        if not response.ok() or not response.response_data:
            return None

        scalars = response.response_data
        # noinspection PyBroadException
        try:
            # last reported (iteration, value) for this title/series pair
            return scalars[self.title][self.series]['x'][-1], scalars[self.title][self.series]['y'][-1]
        except Exception:
            return None

    def get_objective_sign(self):
        # type: () -> float
        """
        Return the sign of the objective.

        - ``+1`` - If maximizing
        - ``-1`` - If minimizing

        :return: Objective function sign.
        """
        return self.sign

    def get_objective_metric(self):
        # type: () -> (str, str)
        """
        Return the metric title, series pair of the objective.

        :return: (title, series)
        """
        return self.title, self.series

    def get_normalized_objective(self, task_id):
        # type: (Union[str, Task, TrainsJob]) -> Optional[float]
        """
        Return a normalized task scalar value based on the objective settings (title/series).
        I.e. objective is always to maximize the returned value

        :param str task_id: The Task id to retrieve scalar from.
        :return: Normalized scalar value.
        """
        objective = self.get_objective(task_id=task_id)
        if objective is None:
            return None
        # normalize value so we always look for the highest objective value
        return self.sign * objective

    def get_top_tasks(self, top_k, optimizer_task_id=None):
        # type: (int, Optional[str]) -> Sequence[Task]
        """
        Return a list of Tasks of the top performing experiments, based on the title/series objective.

        :param int top_k: The number of Tasks (experiments) to return.
        :param str optimizer_task_id: Parent optimizer Task ID
        :return: A list of Task objects, ordered by performance, where index 0 is the best performing Task.
        """
        task_filter = {'page_size': int(top_k), 'page': 0}
        if optimizer_task_id:
            task_filter['parent'] = optimizer_task_id
        order_by = self._get_last_metrics_encode_field()
        if order_by and (order_by.startswith('last_metrics') or order_by.startswith('-last_metrics')):
            parts = order_by.split('.')
            if parts[-1] in ('min', 'max', 'last'):
                title = hashlib.md5(str(parts[1]).encode('utf-8')).hexdigest()
                series = hashlib.md5(str(parts[2]).encode('utf-8')).hexdigest()
                minmax = 'min_value' if 'min' in parts[3] else ('max_value' if 'max' in parts[3] else 'value')
                # BUGFIX: build the server-side sort field with str.format.
                # The previous `'{}last_metrics.'.join(...)` call used the
                # literal as a *separator*, producing a garbled field name.
                order_by = '{}last_metrics.{}.{}.{}'.format(
                    '-' if order_by and order_by[0] == '-' else '', title, series, minmax)
        if order_by:
            task_filter['order_by'] = [order_by]
        return Task.get_tasks(task_filter=task_filter)

    def _get_last_metrics_encode_field(self):
        # type: () -> str
        """
        Return encoded representation of the title/series metric.

        :return: The objective title/series sort field, '-' prefixed when maximizing.
        """
        if not self._metric:
            title = hashlib.md5(str(self.title).encode('utf-8')).hexdigest()
            series = hashlib.md5(str(self.series).encode('utf-8')).hexdigest()
            self._metric = title, series
        return '{}last_metrics.{}.{}.{}'.format(
            '-' if self.sign > 0 else '', self._metric[0], self._metric[1],
            ('min_value' if self.sign < 0 else 'max_value') if self.extremum else 'value')
class Budget(object):
    """Tracks resource consumption (jobs / iterations / compute time) against optional limits."""

    class Field(object):
        """A single budgeted quantity: an optional limit plus per-UID usage records."""

        def __init__(self, limit=None):
            # type: (Optional[float]) -> ()
            self.limit = limit
            self.current = {}

        def update(self, uid, value):
            # type: (Union[str, int], float) -> ()
            # silently ignore values that are missing or cannot convert to float
            if value is None:
                return
            try:
                self.current[uid] = float(value)
            except (TypeError, ValueError):
                pass

        @property
        def used(self):
            # type: () -> (Optional[float])
            # fraction of the limit consumed so far; None when unlimited or unused
            if self.limit is None or not self.current:
                return None
            return sum(self.current.values()) / float(self.limit)

    def __init__(self, jobs_limit, iterations_limit, compute_time_limit):
        # type: (Optional[int], Optional[int], Optional[float]) -> ()
        self.jobs = self.Field(jobs_limit)
        self.iterations = self.Field(iterations_limit)
        self.compute_time = self.Field(compute_time_limit)

    def to_dict(self):
        # type: () -> (Mapping[str, Mapping[str, float]])
        """Return {'jobs'|'iterations'|'compute_time': {'limit': ..., 'used': ...}} with used falling back to 0."""
        report = {}
        for key, field in (('jobs', self.jobs),
                           ('iterations', self.iterations),
                           ('compute_time', self.compute_time)):
            consumed = field.used
            report[key] = {'limit': field.limit, 'used': consumed if consumed else 0}
        return report
class SearchStrategy(object):
    """
    The base search strategy class. Inherit this class to implement your custom strategy.
    """

    _tag = 'optimization'
    _job_class = TrainsJob  # type: TrainsJob

    def __init__(
            self,
            base_task_id,  # type: str
            hyper_parameters,  # type: Sequence[Parameter]
            objective_metric,  # type: Objective
            execution_queue,  # type: str
            num_concurrent_workers,  # type: int
            pool_period_min=2.,  # type: float
            time_limit_per_job=None,  # type: Optional[float]
            compute_time_limit=None,  # type: Optional[float]
            min_iteration_per_job=None,  # type: Optional[int]
            max_iteration_per_job=None,  # type: Optional[int]
            total_max_jobs=None,  # type: Optional[int]
            **_  # type: Any
    ):
        # type: (...) -> ()
        """
        Initialize a search strategy optimizer.

        :param str base_task_id: The Task ID (str)
        :param list hyper_parameters: The list of parameter objects to optimize over.
        :param Objective objective_metric: The Objective metric to maximize / minimize.
        :param str execution_queue: The execution queue to use for launching Tasks (experiments).
        :param int num_concurrent_workers: The maximum number of concurrent running machines.
        :param float pool_period_min: The time between two consecutive pools (minutes).
        :param float time_limit_per_job: The maximum execution time per single job in minutes. When time limit is
            exceeded, the job is aborted. (Optional)
        :param float compute_time_limit: The maximum compute time in minutes. When time limit is exceeded,
            all jobs aborted. (Optional)
        :param int min_iteration_per_job: The minimum iterations (of the Objective metric) per single job (Optional)
        :param int max_iteration_per_job: The maximum iterations (of the Objective metric) per single job.
            When maximum iterations is exceeded, the job is aborted. (Optional)
        :param int total_max_jobs: The total maximum jobs for the optimization process. The default value is ``None``,
            for unlimited.
        """
        super(SearchStrategy, self).__init__()
        self._base_task_id = base_task_id
        self._hyper_parameters = hyper_parameters
        self._objective_metric = objective_metric
        self._execution_queue = execution_queue
        self._num_concurrent_workers = num_concurrent_workers
        self.pool_period_minutes = pool_period_min
        self.time_limit_per_job = time_limit_per_job
        self.compute_time_limit = compute_time_limit
        self.max_iteration_per_job = max_iteration_per_job
        self.min_iteration_per_job = min_iteration_per_job
        self.total_max_jobs = total_max_jobs
        self._stop_event = Event()
        self._current_jobs = []
        self._pending_jobs = []
        self._num_jobs = 0
        self._job_parent_id = None
        self._job_project_id = None
        self._created_jobs_ids = {}
        self._naming_function = None
        self._job_project = {}
        # the iterations budget is only bounded when both a per-job iteration
        # cap and a total job cap are configured
        self.budget = Budget(
            jobs_limit=self.total_max_jobs,
            compute_time_limit=self.compute_time_limit if self.compute_time_limit else None,
            iterations_limit=self.total_max_jobs * self.max_iteration_per_job if
            self.max_iteration_per_job and self.total_max_jobs else None
        )
        self._validate_base_task()
        self._optimizer_task = None

    def start(self):
        # type: () -> ()
        """
        Start the Optimizer controller function loop(). If the calling process is stopped, the controller will stop
        as well.

        .. important::
            This function returns only after the optimization is completed or :meth:`stop` was called.
        """
        counter = 0
        while True:
            logger.debug('optimization loop #{}'.format(counter))
            if not self.process_step():
                break
            if self._stop_event.wait(timeout=self.pool_period_minutes * 60.):
                break
            counter += 1

    def stop(self):
        # type: () -> ()
        """
        Stop the current running optimization loop. Called from a different thread than the :meth:`start`.
        """
        self._stop_event.set()

    def process_step(self):
        # type: () -> bool
        """
        Abstract helper function. Implementation is not required. Default use in start default implementation
        Main optimization loop, called from the daemon thread created by :meth:`start`.

        - Call monitor job on every ``TrainsJob`` in jobs:

          - Check the performance or elapsed time, and then decide whether to kill the jobs.

        - Call create_job:

          - Check if spare job slots exist, and if they do call create a new job based on previous tested experiments.

        :return: True, if continue the optimization. False, if immediately stop.
        """
        updated_jobs = []
        for job in self._current_jobs:
            if self.monitor_job(job):
                updated_jobs.append(job)
        self._current_jobs = updated_jobs

        pending_jobs = []
        for job in self._pending_jobs:
            if job.is_pending():
                pending_jobs.append(job)
            else:
                # the job left the queue and started running; count it
                self.budget.jobs.update(job.task_id(), 1)
        self._pending_jobs = pending_jobs

        free_workers = self._num_concurrent_workers - len(self._current_jobs)

        # do not create more jobs if we hit the limit
        if self.total_max_jobs and self._num_jobs >= self.total_max_jobs:
            return bool(self._current_jobs)

        # see how many free slots we have and create job
        for i in range(max(0, free_workers)):
            new_job = self.create_job()
            if not new_job:
                break
            self._num_jobs += 1
            new_job.launch(self._execution_queue)
            self._current_jobs.append(new_job)
            self._pending_jobs.append(new_job)

        return bool(self._current_jobs)

    def create_job(self):
        # type: () -> Optional[TrainsJob]
        """
        Abstract helper function. Implementation is not required. Default use in process_step default implementation
        Create a new job if needed. return the newly created job. If no job needs to be created, return ``None``.

        :return: A Newly created TrainsJob object, or None if no TrainsJob created.
        """
        return None

    def monitor_job(self, job):
        # type: (TrainsJob) -> bool
        """
        Helper function, Implementation is not required. Default use in process_step default implementation.
        Check if the job needs to be aborted or already completed.

        If returns ``False``, the job was aborted / completed, and should be taken off the current job list

        If there is a budget limitation, this call should update
        ``self.budget.compute_time.update`` / ``self.budget.iterations.update``

        :param TrainsJob job: A ``TrainsJob`` object to monitor.
        :return: False, if the job is no longer relevant.
        """
        abort_job = self.update_budget_per_job(job)
        if abort_job:
            job.abort()
            return False
        return not job.is_stopped()

    def update_budget_per_job(self, job):
        # type: (TrainsJob) -> bool
        """
        Sample a single job's elapsed time and iteration count into the budget counters.

        :param TrainsJob job: The job whose elapsed time and iterations are sampled.
        :return: True if the job exceeded its per-job time or iteration limit and should be aborted.
        """
        abort_job = False

        if self.time_limit_per_job:
            elapsed = job.elapsed() / 60.
            if elapsed > 0:
                self.budget.compute_time.update(job.task_id(), elapsed)
                if elapsed > self.time_limit_per_job:
                    abort_job = True

        if self.compute_time_limit:
            # avoid sampling elapsed time twice when time_limit_per_job already did
            if not self.time_limit_per_job:
                elapsed = job.elapsed() / 60.
                if elapsed > 0:
                    self.budget.compute_time.update(job.task_id(), elapsed)

        if self.max_iteration_per_job:
            iterations = self._get_job_iterations(job)
            if iterations > 0:
                self.budget.iterations.update(job.task_id(), iterations)
                if iterations > self.max_iteration_per_job:
                    abort_job = True

        return abort_job

    def get_running_jobs(self):
        # type: () -> Sequence[TrainsJob]
        """
        Return the current running TrainsJobs.

        :return: List of TrainsJob objects.
        """
        return self._current_jobs

    def get_created_jobs_ids(self):
        # type: () -> Mapping[str, dict]
        """
        Return a Task IDs dict created by this optimizer until now, including completed and running jobs.
        The values of the returned dict are the parameters used in the specific job

        :return: dict of task IDs (str) as keys, and their parameters dict as values.
        """
        return {job_id: job_val[1] for job_id, job_val in self._created_jobs_ids.items()}

    def get_created_jobs_tasks(self):
        # type: () -> Mapping[str, dict]
        """
        Return a Task IDs dict created by this optimizer until now.
        The values of the returned dict are the TrainsJob.

        :return: dict of task IDs (str) as keys, and their TrainsJob as values.
        """
        return {job_id: job_val[0] for job_id, job_val in self._created_jobs_ids.items()}

    def get_top_experiments(self, top_k):
        # type: (int) -> Sequence[Task]
        """
        Return a list of Tasks of the top performing experiments, based on the controller ``Objective`` object.

        :param int top_k: The number of Tasks (experiments) to return.
        :return: A list of Task objects, ordered by performance, where index 0 is the best performing Task.
        """
        # noinspection PyProtectedMember
        top_tasks = self._get_child_tasks(
            parent_task_id=self._job_parent_id or self._base_task_id,
            order_by=self._objective_metric._get_last_metrics_encode_field(),
            additional_filters={'page_size': int(top_k), 'page': 0})
        return top_tasks

    def get_objective_metric(self):
        # type: () -> (str, str)
        """
        Return the metric title, series pair of the objective.

        :return: (title, series)
        """
        return self._objective_metric.get_objective_metric()

    def helper_create_job(
            self,
            base_task_id,  # type: str
            parameter_override=None,  # type: Optional[Mapping[str, str]]
            task_overrides=None,  # type: Optional[Mapping[str, str]]
            tags=None,  # type: Optional[Sequence[str]]
            parent=None,  # type: Optional[str]
            **kwargs  # type: Any
    ):
        # type: (...) -> TrainsJob
        """
        Create a Job using the specified arguments, ``TrainsJob`` for details.

        :return: A newly created Job instance.
        """
        if parameter_override:
            param_str = ['{}={}'.format(k, parameter_override[k]) for k in sorted(parameter_override.keys())]
            if self._naming_function:
                name = self._naming_function(self._base_task_name, parameter_override)
            elif self._naming_function is False:
                name = None
            else:
                name = '{}: {}'.format(self._base_task_name, ' '.join(param_str))
            comment = '\n'.join(param_str)
        else:
            name = None
            comment = None
        tags = (tags or []) + [self._tag, 'opt' + (': {}'.format(self._job_parent_id) if self._job_parent_id else '')]
        new_job = self._job_class(
            base_task_id=base_task_id, parameter_override=parameter_override,
            task_overrides=task_overrides, tags=tags, parent=parent or self._job_parent_id,
            name=name, comment=comment,
            project=self._job_project_id or self._get_task_project(parent or self._job_parent_id),
            **kwargs)
        self._created_jobs_ids[new_job.task_id()] = (new_job, parameter_override)
        logger.info('Creating new Task: {}'.format(parameter_override))
        return new_job

    def set_job_class(self, job_class):
        # type: (TrainsJob) -> ()
        """
        Set the class to use for the :meth:`helper_create_job` function.

        :param TrainsJob job_class: The Job Class type.
        """
        self._job_class = job_class

    def set_job_default_parent(self, job_parent_task_id, project_name=None):
        # type: (Optional[str], Optional[str]) -> ()
        """
        Set the default parent for all Jobs created by the :meth:`helper_create_job` method.

        :param str job_parent_task_id: The parent Task ID.
        :param str project_name: If specified, create the jobs in the specified project
        """
        self._job_parent_id = job_parent_task_id
        # noinspection PyProtectedMember
        self._job_project_id = get_or_create_project(
            session=Task._get_default_session(), project_name=project_name, description='HPO process spawned Tasks') \
            if project_name else None

    def set_job_naming_scheme(self, naming_function):
        # type: (Optional[Callable[[str, dict], str]]) -> ()
        """
        Set the function used to name a newly created job.

        :param callable naming_function:

            .. code-block:: py

               naming_functor(base_task_name, argument_dict) -> str
        """
        self._naming_function = naming_function

    def set_optimizer_task(self, task):
        # type: (Task) -> ()
        """
        Set the optimizer task object to be used to store/generate reports on the optimization process.
        Usually this is the current task of this process.

        :param Task task: The optimizer`s current Task.
        """
        self._optimizer_task = task

    def _validate_base_task(self):
        # type: () -> ()
        """
        Check the base task exists and contains the requested Objective metric and hyper parameters.
        """
        # check if the task exists
        try:
            task = Task.get_task(task_id=self._base_task_id)
            self._base_task_name = task.name
        except ValueError:
            raise ValueError("Could not find base task id {}".format(self._base_task_id))
        # check if the hyper-parameters exist:
        task_parameters = task.get_parameters(backwards_compatibility=False)
        missing_params = [h.name for h in self._hyper_parameters if h.name not in task_parameters]
        if missing_params:
            logger.warning('Could not find requested hyper-parameters {} on base task {}'.format(
                missing_params, self._base_task_id))
        # check if the objective metric exists (i.e. no typos etc)
        if self._objective_metric.get_objective(self._base_task_id) is None:
            logger.warning('Could not find requested metric {} report on base task {}'.format(
                self._objective_metric.get_objective_metric(), self._base_task_id))

    def _get_task_project(self, parent_task_id):
        # type: (str) -> (Optional[str])
        # resolve (and memoize) the project of a parent task
        if not parent_task_id:
            return
        if parent_task_id not in self._job_project:
            task = Task.get_task(task_id=parent_task_id)
            self._job_project[parent_task_id] = task.project
        return self._job_project.get(parent_task_id)

    def _get_job_iterations(self, job):
        # type: (Union[TrainsJob, Task]) -> int
        # -1 signals "no iteration information available"
        iteration_value = self._objective_metric.get_current_raw_objective(job)
        return iteration_value[0] if iteration_value else -1

    @classmethod
    def _get_child_tasks_ids(
            cls,
            parent_task_id,  # type: str
            status=None,  # type: Optional[Union[Task.TaskStatusEnum, Sequence[Task.TaskStatusEnum]]]
            order_by=None,  # type: Optional[str]
            additional_filters=None  # type: Optional[dict]
    ):
        # type: (...) -> (Sequence[str])
        """
        Helper function. Return a list of tasks is tagged automl, with specific ``status``, ordered by ``sort_field``.

        :param str parent_task_id: The base Task ID (parent).
        :param status: The current status of requested tasks (for example, ``in_progress`` and ``completed``).
        :param str order_by: The field name to sort results.

            Examples:

            .. code-block:: py

               "-last_metrics.title.series.min"
               "last_metrics.title.series.max"
               "last_metrics.title.series.last"
               "execution.parameters.name"
               "updated"

        :param dict additional_filters: The additional task filters.
        :return: A list of Task IDs (str)
        """
        task_filter = {
            'parent': parent_task_id,
            # 'tags': [cls._tag],
            # since we have auto archive we do not want to filter out archived tasks
            # 'system_tags': ['-archived'],
        }
        task_filter.update(additional_filters or {})
        if status:
            task_filter['status'] = status if isinstance(status, (tuple, list)) else [status]
        if order_by and (order_by.startswith('last_metrics') or order_by.startswith('-last_metrics')):
            parts = order_by.split('.')
            if parts[-1] in ('min', 'max', 'last'):
                title = hashlib.md5(str(parts[1]).encode('utf-8')).hexdigest()
                series = hashlib.md5(str(parts[2]).encode('utf-8')).hexdigest()
                minmax = 'min_value' if 'min' in parts[3] else ('max_value' if 'max' in parts[3] else 'value')
                # BUGFIX: build the server-side sort field with str.format.
                # The previous `'{}last_metrics.'.join(...)` call used the
                # literal as a *separator*, producing a garbled field name.
                order_by = '{}last_metrics.{}.{}.{}'.format(
                    '-' if order_by and order_by[0] == '-' else '', title, series, minmax)
        if order_by:
            task_filter['order_by'] = [order_by]
        # noinspection PyProtectedMember
        task_objects = Task._query_tasks(**task_filter)
        return [t.id for t in task_objects]

    @classmethod
    def _get_child_tasks(
            cls,
            parent_task_id,  # type: str
            status=None,  # type: Optional[Union[Task.TaskStatusEnum, Sequence[Task.TaskStatusEnum]]]
            order_by=None,  # type: Optional[str]
            additional_filters=None  # type: Optional[dict]
    ):
        # type: (...) -> (Sequence[Task])
        """
        Helper function. Return a list of tasks tagged automl, with specific ``status``, ordered by ``sort_field``.

        :param str parent_task_id: The base Task ID (parent).
        :param status: The current status of requested tasks (for example, ``in_progress`` and ``completed``).
        :param str order_by: The field name to sort results.

            Examples:

            .. code-block:: py

               "-last_metrics.title.series.min"
               "last_metrics.title.series.max"
               "last_metrics.title.series.last"
               "execution.parameters.name"
               "updated"

        :param dict additional_filters: The additional task filters.
        :return: A list of Task objects
        """
        return [
            Task.get_task(task_id=t_id) for t_id in cls._get_child_tasks_ids(
                parent_task_id=parent_task_id,
                status=status,
                order_by=order_by,
                additional_filters=additional_filters)
        ]
class GridSearch(SearchStrategy):
    """
    Grid search strategy controller. Full grid sampling of every hyper-parameter combination.
    """

    def __init__(
            self,
            base_task_id,  # type: str
            hyper_parameters,  # type: Sequence[Parameter]
            objective_metric,  # type: Objective
            execution_queue,  # type: str
            num_concurrent_workers,  # type: int
            pool_period_min=2.,  # type: float
            time_limit_per_job=None,  # type: Optional[float]
            compute_time_limit=None,  # type: Optional[float]
            max_iteration_per_job=None,  # type: Optional[int]
            total_max_jobs=None,  # type: Optional[int]
            **_  # type: Any
    ):
        # type: (...) -> ()
        """
        Initialize a grid search optimizer

        :param str base_task_id: The Task ID.
        :param list hyper_parameters: The list of parameter objects to optimize over.
        :param Objective objective_metric: The Objective metric to maximize / minimize.
        :param str execution_queue: The execution queue to use for launching Tasks (experiments).
        :param int num_concurrent_workers: The maximum number of concurrent running machines.
        :param float pool_period_min: The time between two consecutive pools (minutes).
        :param float time_limit_per_job: The maximum execution time per single job in minutes. When the time limit is
            exceeded job is aborted. (Optional)
        :param float compute_time_limit: The maximum compute time in minutes. When time limit is exceeded,
            all jobs aborted. (Optional)
        :param int max_iteration_per_job: The maximum iterations (of the Objective metric)
            per single job, When exceeded, the job is aborted.
        :param int total_max_jobs: The total maximum jobs for the optimization process. The default is ``None``, for
            unlimited.
        """
        super(GridSearch, self).__init__(
            base_task_id=base_task_id, hyper_parameters=hyper_parameters, objective_metric=objective_metric,
            execution_queue=execution_queue, num_concurrent_workers=num_concurrent_workers,
            pool_period_min=pool_period_min, time_limit_per_job=time_limit_per_job,
            compute_time_limit=compute_time_limit, max_iteration_per_job=max_iteration_per_job,
            total_max_jobs=total_max_jobs, **_)
        self._param_iterator = None

    def create_job(self):
        # type: () -> Optional[TrainsJob]
        """
        Create a new job if needed. Return the newly created job. If no job needs to be created, return ``None``.

        :return: A newly created TrainsJob object, or None if no TrainsJob is created.
        """
        try:
            next_params = self._next_configuration()
        except StopIteration:
            # the full cartesian grid has been exhausted
            return None
        return self.helper_create_job(base_task_id=self._base_task_id, parameter_override=next_params)

    def _next_configuration(self):
        # type: () -> Mapping[str, str]
        # Lazily build a generator over the cartesian product of all
        # hyper-parameter value lists; each yielded item is one flat dict of
        # parameter-name -> value pairs. Raises StopIteration when exhausted.
        if not self._param_iterator:
            def _iterate_grid():
                value_lists = [param.to_list() for param in self._hyper_parameters]
                for combination in product(*value_lists):
                    merged = {}
                    for partial in combination:
                        merged.update(partial)
                    yield merged

            self._param_iterator = _iterate_grid()
        return next(self._param_iterator)
class RandomSearch(SearchStrategy):
"""
Random search strategy controller. Random uniform sampling of hyper-parameters.
"""
# Number of already chosen random samples before assuming we covered the entire hyper-parameter space
_hp_space_cover_samples = 42
def __init__(
self,
base_task_id, # type: str
hyper_parameters, # type: Sequence[Parameter]
objective_metric, # type: Objective
execution_queue, # type: str
num_concurrent_workers, # type: int
pool_period_min=2., # type: float
time_limit_per_job=None, # type: Optional[float]
compute_time_limit=None, # type: Optional[float]
max_iteration_per_job=None, # type: Optional[int]
total_max_jobs=None, # type: Optional[int]
**_ # type: Any
):
# type: (...) -> ()
"""
Initialize a random search optimizer.
:param str base_task_id: The Task ID.
:param list hyper_parameters: The list of Parameter objects to optimize over.
:param Objective objective_metric: The Objective metric to maximize / minimize.
:param str execution_queue: The execution queue to use for launching Tasks (experiments).
:param int num_concurrent_workers: The maximum umber of concurrent running machines.
:param float pool_period_min: The time between two consecutive pools (minutes).
:param float time_limit_per_job: The maximum execution time per single job in minutes,
when time limit is exceeded job is aborted. (Optional)
:param float compute_time_limit: The maximum compute time in minutes. When time limit is exceeded,
all jobs aborted. (Optional)
:param int max_iteration_per_job: The maximum iterations (of the Objective metric)
per single job. When exceeded, the job is aborted.
:param int total_max_jobs: The total maximum jobs for the optimization process. The default is ``None``, for
unlimited.
"""
super(RandomSearch, self).__init__(
base_task_id=base_task_id, hyper_parameters=hyper_parameters, objective_metric=objective_metric,
execution_queue=execution_queue, num_concurrent_workers=num_concurrent_workers,
pool_period_min=pool_period_min, time_limit_per_job=time_limit_per_job,
compute_time_limit=compute_time_limit, max_iteration_per_job=max_iteration_per_job,
total_max_jobs=total_max_jobs, **_)
self._hyper_parameters_collection = set()
def create_job(self):
    # type: () -> Optional[TrainsJob]
    """
    Sample a new, not-yet-tried random point in the hyper-parameter space and wrap it
    in a new job. Return None once the space is assumed exhausted, i.e. after
    ``_hp_space_cover_samples`` consecutive draws that were all seen before.

    :return: A newly created TrainsJob object, or None if no TrainsJob created.
    """
    for _ in range(self._hp_space_cover_samples):
        candidate = {}
        for hyper_param in self._hyper_parameters:
            candidate.update(hyper_param.get_value())
        # Fingerprint of the sampled point (hash of its canonical JSON, stable
        # within this process thanks to sort_keys).
        fingerprint = hash(json.dumps(candidate, sort_keys=True))
        if fingerprint in self._hyper_parameters_collection:
            # Already tried this point - draw again.
            continue
        self._hyper_parameters_collection.add(fingerprint)
        # An empty parameter set is treated as "nothing to run".
        if not candidate:
            return None
        return self.helper_create_job(base_task_id=self._base_task_id, parameter_override=candidate)
    # Could not find an unseen point - assume the whole space was covered.
    return None
class HyperParameterOptimizer(object):
    """
    Hyper-parameter search controller. Clones the base experiment, changes arguments and tries to maximize/minimize
    the defined objective.
    """
    # Tag added to the controller Task so optimization runs are easy to locate.
    _tag = 'optimization'
def __init__(
        self,
        base_task_id,  # type: str
        hyper_parameters,  # type: Sequence[Parameter]
        objective_metric_title,  # type: str
        objective_metric_series,  # type: str
        objective_metric_sign='min',  # type: str
        optimizer_class=RandomSearch,  # type: type(SearchStrategy)
        max_number_of_concurrent_tasks=10,  # type: int
        execution_queue='default',  # type: str
        optimization_time_limit=None,  # type: Optional[float]
        compute_time_limit=None,  # type: Optional[float]
        auto_connect_task=True,  # type: Union[bool, Task]
        always_create_task=False,  # type: bool
        spawn_task_project=None,  # type: Optional[str]
        save_top_k_tasks_only=None,  # type: Optional[int]
        **optimizer_kwargs  # type: Any
):
    # type: (...) -> ()
    """
    Create a new hyper-parameter controller. The newly created object will launch and monitor the new experiments.

    :param str base_task_id: The Task ID to be used as template experiment to optimize.
    :param list hyper_parameters: The list of Parameter objects to optimize over.
    :param str objective_metric_title: The Objective metric title to maximize / minimize (for example,
        ``validation``).
    :param str objective_metric_series: The Objective metric series to maximize / minimize (for example, ``loss``).
    :param str objective_metric_sign: The objective to maximize / minimize.

        The values are:

        - ``min`` - Minimize the last reported value for the specified title/series scalar.
        - ``max`` - Maximize the last reported value for the specified title/series scalar.
        - ``min_global`` - Minimize the min value of *all* reported values for the specific title/series scalar.
        - ``max_global`` - Maximize the max value of *all* reported values for the specific title/series scalar.

    :param class.SearchStrategy optimizer_class: The SearchStrategy optimizer to use for the hyper-parameter search
    :param int max_number_of_concurrent_tasks: The maximum number of concurrent Tasks (experiments) running at the
        same time.
    :param str execution_queue: The execution queue to use for launching Tasks (experiments).
    :param float optimization_time_limit: The maximum time (minutes) for the entire optimization process. The
        default is ``None``, indicating no time limit.
    :param float compute_time_limit: The maximum compute time in minutes. When time limit is exceeded,
        all jobs aborted. (Optional)
    :param bool auto_connect_task: Store optimization arguments and configuration in the Task

        The values are:

        - ``True`` - The optimization argument and configuration will be stored in the Task. All arguments will
          be under the hyper-parameter section ``opt``, and the optimization hyper_parameters space will
          stored in the Task configuration object section.
        - ``False`` - Do not store with Task.
        - ``Task`` - A specific Task object to connect the optimization process with.

    :param bool always_create_task: Always create a new Task

        The values are:

        - ``True`` - No current Task initialized. Create a new task named ``optimization`` in the ``base_task_id``
          project.
        - ``False`` - Use the :py:meth:`task.Task.current_task` (if exists) to report statistics.

    :param str spawn_task_project: If project name is specified, create all optimization Jobs (Tasks) in the
        specified project, instead of the original base_task_id project.
    :param int save_top_k_tasks_only: If specified and above 0, keep only the top_k performing Tasks,
        and archive the rest of the created Tasks. Default: -1 keep everything, nothing will be archived.
    :param ** optimizer_kwargs: Arguments passed directly to the optimizer constructor.

        Example:

        .. code-block:: py
           :linenos:
           :caption: Example

           from clearml import Task
           from clearml.automation import UniformParameterRange, DiscreteParameterRange
           from clearml.automation import GridSearch, RandomSearch, HyperParameterOptimizer

           task = Task.init('examples', 'HyperParameterOptimizer example')
           an_optimizer = HyperParameterOptimizer(
               base_task_id='fa30fa45d95d4927b87c323b5b04dc44',
               hyper_parameters=[
                   UniformParameterRange('lr', min_value=0.01, max_value=0.3, step_size=0.05),
                   DiscreteParameterRange('network', values=['ResNet18', 'ResNet50', 'ResNet101']),
               ],
               objective_metric_title='title',
               objective_metric_series='series',
               objective_metric_sign='min',
               max_number_of_concurrent_tasks=5,
               optimizer_class=RandomSearch,
               execution_queue='workers', time_limit_per_job=120, pool_period_min=0.2)

           # This will automatically create and print the optimizer new task id
           # for later use. if a Task was already created, it will use it.
           an_optimizer.set_time_limit(in_minutes=10.)
           an_optimizer.start()

           # we can create a pooling loop if we like
           while not an_optimizer.reached_time_limit():
               top_exp = an_optimizer.get_top_experiments(top_k=3)
               print(top_exp)
           # wait until optimization completed or timed-out
           an_optimizer.wait()
           # make sure we stop all jobs
           an_optimizer.stop()
    """
    # create a new Task, if we do not have one already
    self._task = auto_connect_task if isinstance(auto_connect_task, Task) else Task.current_task()
    if not self._task and always_create_task:
        # No Task in context: spawn a dedicated optimizer Task next to the base experiment.
        base_task = Task.get_task(task_id=base_task_id)
        self._task = Task.init(
            project_name=base_task.get_project_name(),
            task_name='Optimizing: {}'.format(base_task.name),
            task_type=Task.TaskTypes.optimizer,
        )

    # All plain (serializable) controller arguments, stored on / reloaded from the Task.
    opts = dict(
        base_task_id=base_task_id,
        objective_metric_title=objective_metric_title,
        objective_metric_series=objective_metric_series,
        objective_metric_sign=objective_metric_sign,
        max_number_of_concurrent_tasks=max_number_of_concurrent_tasks,
        execution_queue=execution_queue,
        optimization_time_limit=optimization_time_limit,
        compute_time_limit=compute_time_limit,
        optimizer_kwargs=optimizer_kwargs)
    # make sure all the created tasks are our children, as we are creating them
    if self._task:
        self._task.add_tags([self._tag])

    if auto_connect_task:
        # Connect the arguments to the Task; when executed remotely this also
        # overrides the local values with whatever is stored on the Task.
        optimizer_class, hyper_parameters, opts = self._connect_args(
            optimizer_class=optimizer_class, hyper_param_configuration=hyper_parameters, **opts)

    self.base_task_id = opts['base_task_id']
    self.hyper_parameters = hyper_parameters
    self.max_number_of_concurrent_tasks = opts['max_number_of_concurrent_tasks']
    self.execution_queue = opts['execution_queue']
    # 'min'/'min_global' minimize, anything else maximizes; the '*_global' variants
    # track the extremum of all reported values instead of the last reported value.
    self.objective_metric = Objective(
        title=opts['objective_metric_title'], series=opts['objective_metric_series'],
        order='min' if opts['objective_metric_sign'] in ('min', 'min_global') else 'max',
        extremum=opts['objective_metric_sign'].endswith('_global'))

    # if optimizer_class is an instance, use it as is.
    if type(optimizer_class) != type:
        self.optimizer = optimizer_class
    else:
        self.optimizer = optimizer_class(
            base_task_id=opts['base_task_id'], hyper_parameters=hyper_parameters,
            objective_metric=self.objective_metric, execution_queue=opts['execution_queue'],
            num_concurrent_workers=opts['max_number_of_concurrent_tasks'],
            compute_time_limit=opts['compute_time_limit'], **opts.get('optimizer_kwargs', {}))
    self.optimizer.set_optimizer_task(self._task)

    # Runtime state shared between the controller and reporter threads.
    self.optimization_timeout = None
    self.optimization_start_time = None
    self._thread = None
    self._stop_event = None
    self._report_period_min = 5.
    self._thread_reporter = None
    self._experiment_completed_cb = None
    # 0 disables auto-archiving of low-performing tasks.
    self._save_top_k_tasks_only = max(0, save_top_k_tasks_only or 0)
    # Spawned experiments become children of the controller Task (if any).
    self.optimizer.set_job_default_parent(
        self._task.id if self._task else None, project_name=spawn_task_project or None)
    self.set_time_limit(in_minutes=opts['optimization_time_limit'])
def get_num_active_experiments(self):
    # type: () -> int
    """
    Return how many experiments are currently running.

    :return: The number of active experiments.
    """
    return len(self.optimizer.get_running_jobs()) if self.optimizer else 0
def get_active_experiments(self):
    # type: () -> Sequence[Task]
    """
    Return the Task objects of all currently running experiments.

    :return: A list of Task objects, one per active experiment.
    """
    if not self.optimizer:
        return []
    return [job.task for job in self.optimizer.get_running_jobs()]
def start(self, job_complete_callback=None):
    # type: (Optional[Callable[[str, float, int, dict, str], None]]) -> bool
    """
    Launch the optimization threads. If the calling process is stopped, the
    controller stops as well.

    :param Callable job_complete_callback: Optional callback invoked when a job completes:

        .. code-block:: py

            def job_complete_callback(
                job_id,                 # type: str
                objective_value,        # type: float
                objective_iteration,    # type: int
                job_parameters,         # type: dict
                top_performance_job_id  # type: str
            ):
                pass

    :return: True if the controller started (or was already running), False otherwise.
    """
    # Cannot start without a configured optimizer.
    if not self.optimizer:
        return False
    # Already running - nothing to do.
    if self._thread:
        return True

    self.optimization_start_time = time()
    self._experiment_completed_cb = job_complete_callback
    self._stop_event = Event()

    # The optimization loop runs in a background daemon thread.
    self._thread = Thread(target=self._daemon)
    self._thread.daemon = True
    self._thread.start()

    # A second daemon thread periodically reports progress on the Task.
    self._thread_reporter = Thread(target=self._report_daemon)
    self._thread_reporter.daemon = True
    self._thread_reporter.start()
    return True
def stop(self, timeout=None, wait_for_reporter=True):
    # type: (Optional[float], Optional[bool]) -> ()
    """
    Stop the controller and the optimization thread, aborting any still-running jobs.

    :param float timeout: Minutes to wait for the optimization thread to exit.
        The default ``None`` means do not wait, terminate immediately.
    :param wait_for_reporter: If True, block until the reporter thread flushed its data.
    """
    if not self._thread or not self._stop_event or not self.optimizer:
        # Nothing is running; optionally let the reporter finish flushing.
        if self._thread_reporter and wait_for_reporter:
            self._thread_reporter.join()
        return

    optimization_thread = self._thread
    self._stop_event.set()
    self.optimizer.stop()

    # Give the optimizer thread a chance to exit cleanly.
    if timeout is not None:
        optimization_thread.join(timeout=timeout * 60.)

    # Abort every experiment that is still executing.
    for job in self.optimizer.get_running_jobs():
        job.abort()

    # Mark the controller as stopped.
    self._thread = None

    if wait_for_reporter:
        # Let the reporter emit its final summary before returning.
        self._thread_reporter.join()
def is_active(self):
    # type: () -> bool
    """
    Whether the optimization procedure is still considered active.

    - ``True`` - active (still running), or the daemon thread has not started yet.
    - ``False`` - fully stopped.

    .. note::
        Before the daemon thread starts, ``_stop_event`` is still None and this
        returns ``True``.

    :return: True if active, False if stopped.
    """
    return self._stop_event is None or self._thread is not None
def is_running(self):
    # type: () -> bool
    """
    Is the optimization controller running

    The values are:

    - ``True`` - The optimization procedure is running.
    - ``False`` - The optimization procedure is not running.

    :return: A boolean indicating whether the optimization procedure is active (still running) or stopped.
    """
    # _thread is cleared by _daemon()/stop() once the optimization ends.
    return self._thread is not None
def wait(self, timeout=None):
    # type: (Optional[float]) -> bool
    """
    Block until the optimizer finishes or the timeout expires.

    .. note::
        This method does not stop the optimizer; call :meth:`stop` to terminate it.

    :param float timeout: Minutes to wait for completion. ``None`` waits until the
        controller's own time limit (if any) or until the optimization completes.
    :return: True if the optimization finished, False if the wait timed out.
    """
    if not self.is_running():
        return True

    if timeout is not None:
        timeout *= 60.
    else:
        # NOTE(review): this waits for the full configured budget measured from the
        # start timestamp, not for the time remaining from "now" - confirm intended.
        timeout = max(0, self.optimization_timeout - self.optimization_start_time) \
            if self.optimization_timeout else None

    worker = self._thread
    worker.join(timeout=timeout)
    return not worker.is_alive()
def set_time_limit(self, in_minutes=None, specific_time=None):
    # type: (Optional[float], Optional[datetime]) -> ()
    """
    Set the absolute deadline for the optimization process; once reached, the
    process is stopped.

    :param float in_minutes: Maximum processing time counted from now (minutes).
    :param datetime specific_time: Explicit deadline; takes precedence over ``in_minutes``.
    """
    if specific_time:
        self.optimization_timeout = specific_time.timestamp()
    elif in_minutes:
        self.optimization_timeout = time() + float(in_minutes) * 60.
    else:
        # No limit requested.
        self.optimization_timeout = None
def get_time_limit(self):
    # type: () -> datetime
    """
    Return the controller optimization time limit.

    .. note::
        If no time limit was set, ``optimization_timeout`` is None and
        ``datetime.fromtimestamp(None)`` will raise - call :meth:`set_time_limit` first.

    :return: The absolute datetime limit of the controller optimization process.
    """
    return datetime.fromtimestamp(self.optimization_timeout)
def elapsed(self):
    # type: () -> float
    """
    Minutes elapsed since the controller started.

    :return: Minutes since the controller start time; a negative value (-1.0)
        means the process has not started yet.
    """
    started = self.optimization_start_time
    if started is None:
        return -1.0
    return (time() - started) / 60.
def reached_time_limit(self):
    # type: () -> bool
    """
    Did the optimizer reach its time limit

    The values are:

    - ``True`` - The time limit passed.
    - ``False`` - The time limit did not pass.

    This method returns immediately, it does not wait for the optimizer.

    :return: True if the optimizer is running and the time limit passed, otherwise False.
    """
    if self.optimization_start_time is None:
        return False
    if not self.is_running():
        return False
    # Bugfix: when no time limit is configured, optimization_timeout is None and
    # `time() > None` raises TypeError on Python 3 - treat "no limit" as not reached.
    if self.optimization_timeout is None:
        return False
    return time() > self.optimization_timeout
def get_top_experiments(self, top_k):
    # type: (int) -> Sequence[Task]
    """
    Return the best performing experiments according to the controller ``Objective``.

    :param int top_k: Number of Tasks (experiments) to return.
    :return: Task objects ordered by performance; index 0 is the best performing Task.
    """
    return self.optimizer.get_top_experiments(top_k=top_k) if self.optimizer else []
def get_optimizer(self):
    # type: () -> SearchStrategy
    """
    Return the currently used optimizer object.

    :return: The SearchStrategy object used.
    """
    # Direct accessor; the optimizer is constructed (or passed in) by __init__.
    return self.optimizer
def set_default_job_class(self, job_class):
    # type: (TrainsJob) -> ()
    """
    Set the Job class to use when the optimizer spawns new Jobs.

    :param TrainsJob job_class: The Job Class type.
    """
    # Forwarded to the underlying search strategy, which instantiates the jobs.
    self.optimizer.set_job_class(job_class)
def set_report_period(self, report_period_minutes):
    # type: (float) -> ()
    """
    Set reporting period for the accumulated objective report (minutes). This report is sent on the Optimizer Task,
    and collects the Objective metric from all running jobs.

    :param float report_period_minutes: The reporting period (minutes).
        The default is once every 5 minutes (set in ``__init__``).
    """
    self._report_period_min = float(report_period_minutes)
@classmethod
def get_optimizer_top_experiments(
        cls,
        objective_metric_title,  # type: str
        objective_metric_series,  # type: str
        objective_metric_sign,  # type: str
        optimizer_task_id,  # type: str
        top_k,  # type: int
):
    # type: (...) -> Sequence[Task]
    """
    Return the best experiments of a specific HyperParameter Optimization session
    (i.e. Task ID), ranked by the given title/series objective.

    :param str objective_metric_title: Objective metric title (for example, ``validation``).
    :param str objective_metric_series: Objective metric series (for example, ``loss``).
    :param str objective_metric_sign: The objective to maximize / minimize.

        The values are:

        - ``min`` / ``max`` - rank by the last reported value of the scalar.
        - ``min_global`` / ``max_global`` - rank by the min / max over *all* reported values.

    :param str optimizer_task_id: Parent optimizer Task ID.
    :param top_k: Number of Tasks (experiments) to return.
    :return: Task objects ordered by performance; index 0 is the best performing Task.
    """
    ranking_objective = Objective(
        title=objective_metric_title,
        series=objective_metric_series,
        order=objective_metric_sign,
    )
    return ranking_objective.get_top_tasks(top_k=top_k, optimizer_task_id=optimizer_task_id)
def _connect_args(self, optimizer_class=None, hyper_param_configuration=None, **kwargs):
    # type: (SearchStrategy, dict, Any) -> (SearchStrategy, list, dict)
    # Store the optimization arguments and search space on the Task and, when
    # executed remotely, reload them back - possibly overriding the local values.
    if not self._task:
        logger.warning('Auto Connect turned on but no Task was found, '
                       'hyper-parameter optimization argument logging disabled')
        return optimizer_class, hyper_param_configuration, kwargs

    # Serialize the search space into the Task configuration object ...
    configuration_dict = {'parameter_optimization_space': [c.to_dict() for c in hyper_param_configuration]}
    self._task.connect_configuration(configuration_dict)
    # this is the conversion back magic:
    # ... connect_configuration() may have replaced the dicts (remote run),
    # so rebuild Parameter objects from whatever is stored now.
    configuration_dict = {'parameter_optimization_space': [
        Parameter.from_dict(c) for c in configuration_dict['parameter_optimization_space']]}

    complex_optimizer_kwargs = None
    if 'optimizer_kwargs' in kwargs:
        # do not store complex optimizer kwargs:
        # only basic JSON-friendly values go on the Task; the rest are kept
        # aside and re-attached after connect().
        optimizer_kwargs = kwargs.pop('optimizer_kwargs', {})
        complex_optimizer_kwargs = {
            k: v for k, v in optimizer_kwargs.items()
            if not isinstance(v, six.string_types + six.integer_types +
                              (six.text_type, float, list, tuple, dict, type(None)))}
        kwargs['optimizer_kwargs'] = {
            k: v for k, v in optimizer_kwargs.items() if k not in complex_optimizer_kwargs}

    # skip non basic types:
    arguments = {'opt': kwargs}
    if type(optimizer_class) != type:
        # Already an instance - cannot round-trip the class name through the Task.
        logger.warning('Auto Connect optimizer_class disabled, {} is already instantiated'.format(optimizer_class))
        self._task.connect(arguments)
    else:
        # Store the class by name; resolve it back to a class after connect().
        arguments['opt']['optimizer_class'] = str(optimizer_class).split('.')[-1][:-2] \
            if not isinstance(optimizer_class, str) else optimizer_class
        self._task.connect(arguments)
        # this is the conversion back magic:
        original_class = optimizer_class
        optimizer_class = arguments['opt'].pop('optimizer_class', None)
        if optimizer_class == 'RandomSearch':
            optimizer_class = RandomSearch
        elif optimizer_class == 'GridSearch':
            optimizer_class = GridSearch
        elif optimizer_class == 'OptimizerBOHB':
            # Imported lazily: pulls in the optional hpbandster dependency.
            from .hpbandster import OptimizerBOHB
            optimizer_class = OptimizerBOHB
        elif optimizer_class == 'OptimizerOptuna':
            # Imported lazily: pulls in the optional optuna dependency.
            from .optuna import OptimizerOptuna
            optimizer_class = OptimizerOptuna
        else:
            logger.warning("Could not resolve optimizer_class {} reverting to original class {}".format(
                optimizer_class, original_class))
            optimizer_class = original_class

    # Re-attach the complex (non-serializable) kwargs withheld from the Task.
    if complex_optimizer_kwargs:
        if 'optimizer_kwargs' not in arguments['opt']:
            arguments['opt']['optimizer_kwargs'] = complex_optimizer_kwargs
        else:
            arguments['opt']['optimizer_kwargs'].update(complex_optimizer_kwargs)

    return optimizer_class, configuration_dict['parameter_optimization_space'], arguments['opt']
def _daemon(self):
    # type: () -> ()
    """
    Implement the main pooling thread, calling loop every ``self.pool_period_minutes`` minutes.
    """
    # Blocks until the optimization strategy finishes (or is stopped).
    self.optimizer.start()
    # Signal completion: is_running()/wait()/the reporter rely on _thread being None.
    self._thread = None
def _report_daemon(self):
    # type: () -> ()
    # Background reporter thread: periodically collects objective values from all
    # child jobs and publishes scalars/plots/tables on the controller Task until
    # the optimization stops or the time/compute budget runs out.
    title, series = self.objective_metric.get_objective_metric()
    title = '{}/{}'.format(title, series)
    counter = 0
    # completed_jobs maps task-id -> (objective, iteration, params dict).
    completed_jobs = dict()
    task_logger = None
    cur_completed_jobs = set()

    # Recover results of child tasks from a previous (resumed) controller run.
    cur_task = self._task or Task.current_task()
    if cur_task and self.optimizer:
        # noinspection PyProtectedMember
        child_tasks = self.optimizer._get_child_tasks(
            parent_task_id=cur_task.id, status=['completed', 'stopped'])
        hyper_parameters = [h.name for h in self.hyper_parameters]
        for task in child_tasks:
            params = {k: v for k, v in task.get_parameters().items() if k in hyper_parameters}
            params["status"] = str(task.status)
            # noinspection PyProtectedMember
            iteration_value = task.get_last_iteration()
            objective = self.objective_metric.get_objective(task)
            completed_jobs[task.id] = (
                objective if objective is not None else -1,
                iteration_value if iteration_value is not None else -1,
                params
            )

    while self._thread is not None:
        # Remaining optimization time (0. means "no deadline configured").
        timeout = self.optimization_timeout - time() if self.optimization_timeout else 0.

        if timeout >= 0:
            # Sleep for at most one report period (or the remaining time).
            timeout = min(self._report_period_min * 60., timeout if timeout else self._report_period_min * 60.)
            # make sure that we have the first report fired before we actually go to sleep, wait for 15 sec.
            if counter <= 0:
                timeout = 15
            print('Progress report #{} completed, sleeping for {} minutes'.format(counter, timeout / 60.))
            if self._stop_event.wait(timeout=timeout):
                # wait for one last report
                timeout = -1
        counter += 1

        # get task to report on.
        cur_task = self._task or Task.current_task()
        if cur_task:
            task_logger = cur_task.get_logger()

            # do some reporting
            self._report_remaining_budget(task_logger, counter)
            if self.optimizer.budget.compute_time.used and self.optimizer.budget.compute_time.used >= 1.0:
                # Reached compute time limit
                timeout = -1
            self._report_resources(task_logger, counter)
            # collect a summary of all the jobs and their final objective values
            cur_completed_jobs = set(self.optimizer.get_created_jobs_ids().keys()) - \
                {j.task_id() for j in self.optimizer.get_running_jobs()}
            self._report_completed_status(completed_jobs, cur_completed_jobs, task_logger, title)
            self._report_completed_tasks_best_results(set(completed_jobs.keys()), task_logger, title, counter)
            self._auto_archive_low_performance_tasks(completed_jobs)

        # if we should leave, stop everything now.
        if timeout < 0:
            # we should leave
            self.stop(wait_for_reporter=False)
            return

    # Final flush after the loop exits (the optimizer finished on its own).
    if task_logger and counter:
        counter += 1
        self._report_remaining_budget(task_logger, counter)
        self._report_resources(task_logger, counter)
        self._report_completed_status(completed_jobs, cur_completed_jobs, task_logger, title, force=True)
        self._report_completed_tasks_best_results(set(completed_jobs.keys()), task_logger, title, counter)
        self._auto_archive_low_performance_tasks(completed_jobs)
def _report_completed_status(self, completed_jobs, cur_completed_jobs, task_logger, title, force=False):
    # Refresh completed_jobs in place, fire the job-completed callback for newly
    # finished jobs, and publish the scatter plot + summary table on the Task.
    job_ids_sorted_by_objective = self.__sort_jobs_by_objective(completed_jobs)
    # best_experiment: (normalized objective, job id); -inf when nothing completed yet.
    best_experiment = \
        (self.objective_metric.get_normalized_objective(job_ids_sorted_by_objective[0]),
         job_ids_sorted_by_objective[0]) \
        if job_ids_sorted_by_objective else (float('-inf'), None)

    # Only rebuild the report when the completed set changed (or when forced).
    if force or cur_completed_jobs != set(completed_jobs.keys()):
        pairs = []
        labels = []
        created_jobs = copy(self.optimizer.get_created_jobs_ids())
        id_status = {j_id: j_run.status() for j_id, j_run in self.optimizer.get_created_jobs_tasks().items()}
        for i, (job_id, params) in enumerate(created_jobs.items()):
            value = self.objective_metric.get_objective(job_id)
            if job_id in completed_jobs:
                # Known job: update its objective/iteration/status if anything changed.
                if value != completed_jobs[job_id][0]:
                    iteration_value = self.objective_metric.get_current_raw_objective(job_id)
                    completed_jobs[job_id] = (
                        value,
                        iteration_value[0] if iteration_value else -1,
                        copy(dict(**params, **{"status": id_status.get(job_id)})))  # noqa
                elif completed_jobs.get(job_id):
                    completed_jobs[job_id] = (completed_jobs[job_id][0],
                                              completed_jobs[job_id][1],
                                              copy(dict(**params, **{"status": id_status.get(job_id)})))  # noqa
                pairs.append((i, completed_jobs[job_id][0]))
                labels.append(str(completed_jobs[job_id][2])[1:-1])
            elif value is not None:
                # Newly completed job: record it and notify the callback.
                pairs.append((i, value))
                labels.append(str(params)[1:-1])
                iteration_value = self.objective_metric.get_current_raw_objective(job_id)
                completed_jobs[job_id] = (
                    value,
                    iteration_value[0] if iteration_value else -1,
                    copy(dict(**params, **{"status": id_status.get(job_id)})))  # noqa
                # callback new experiment completed
                if self._experiment_completed_cb:
                    normalized_value = self.objective_metric.get_normalized_objective(job_id)
                    if normalized_value is not None and normalized_value > best_experiment[0]:
                        best_experiment = normalized_value, job_id
                    c = completed_jobs[job_id]
                    self._experiment_completed_cb(job_id, c[0], c[1], c[2], best_experiment[1])

        if pairs:
            print('Updating job performance summary plot/table')

            # update scatter plot
            task_logger.report_scatter2d(
                title='optimization', series=title,
                scatter=pairs, iteration=0, labels=labels,
                mode='markers', xaxis='job #', yaxis='objective')

            # update summary table
            job_ids = list(completed_jobs.keys())
            job_ids_sorted_by_objective = sorted(
                job_ids, key=lambda x: completed_jobs[x][0], reverse=bool(self.objective_metric.sign >= 0))
            # sort the columns except for 'objective', 'iteration'
            columns = list(sorted(set([c for k, v in completed_jobs.items() for c in v[2].keys()])))
            # add the index column (task id) and the first two columns 'objective', 'iteration' then the rest
            table_values = [['task id', 'objective', 'iteration'] + columns]
            table_values += \
                [([job, completed_jobs[job][0], completed_jobs[job][1]] +
                  [completed_jobs[job][2].get(c, '') for c in columns]) for job in job_ids_sorted_by_objective]
            task_logger.report_table(
                "summary", "job", 0, table_plot=table_values,
                extra_layout={"title": "objective: {}".format(title)})
            # upload summary as artifact
            if force:
                task = self._task or Task.current_task()
                if task:
                    task.upload_artifact(name='summary', artifact_object={'table': table_values})
def _report_remaining_budget(self, task_logger, counter):
    """Report the percentage of each optimizer budget (plus time) still available."""
    # noinspection PyBroadException
    try:
        budget = self.optimizer.budget.to_dict()
    except Exception:
        budget = {}

    # One scalar per tracked budget dimension.
    for budget_part, value in budget.items():
        remaining_pct = round(100 - value['used'] * 100., ndigits=1)
        task_logger.report_scalar(
            title='remaining budget', series='{} %'.format(budget_part),
            iteration=counter, value=remaining_pct)

    # The time budget is derived from the configured deadline, not the optimizer.
    if self.optimization_timeout and self.optimization_start_time:
        elapsed_fraction = (time() - self.optimization_start_time) / \
            (self.optimization_timeout - self.optimization_start_time)
        task_logger.report_scalar(
            title='remaining budget', series='time %',
            iteration=counter,
            value=round(100 - 100. * elapsed_fraction, ndigits=1))
def _report_completed_tasks_best_results(self, completed_jobs, task_logger, title, counter):
    # type: (Set[str], Logger, str, int) -> ()
    """Report the best objective over completed tasks and the most recently completed value."""
    if not completed_jobs:
        return

    # Objective sign decides whether "best" means the maximum or the minimum.
    if self.objective_metric.get_objective_sign() > 0:
        value_func, series_name = max, "max"
    else:
        value_func, series_name = min, "min"

    latest_completed, obj_values = self._get_latest_completed_task_value(completed_jobs, series_name)
    if not latest_completed:
        return

    task_logger.report_scalar(
        title=title,
        series=series_name,
        iteration=counter,
        value=value_func(obj_values))
    task_logger.report_scalar(
        title=title,
        series="last reported",
        iteration=counter,
        value=latest_completed)
def _report_resources(self, task_logger, iteration):
    # type: (Logger, int) -> ()
    # Aggregate resource reporting: worker availability and spawned-task states.
    self._report_active_workers(task_logger, iteration)
    self._report_tasks_status(task_logger, iteration)
def _report_active_workers(self, task_logger, iteration):
    # type: (Logger, int) -> ()
    """Report how many workers are currently listening on the execution queue."""
    res = self.__get_session().send(workers_service.GetAllRequest())
    response = res.wait()
    if not response.ok():
        return

    # Count (worker, queue) pairs whose queue matches our execution queue.
    queue_workers = 0
    for worker in response.response_data.get("workers"):
        for queue in worker.get("queues"):
            if queue.get("name") == self.execution_queue:
                queue_workers += 1

    task_logger.report_scalar(
        title="resources", series="queue workers",
        iteration=iteration, value=queue_workers)
def _report_tasks_status(self, task_logger, iteration):
    # type: (Logger, int) -> ()
    """Report how many spawned tasks are actively running vs. still pending."""
    running = 0
    pending = 0
    for job in self.optimizer.get_running_jobs():
        if job.is_running():
            running += 1
        else:
            pending += 1

    for series, val in (("running tasks", running), ("pending tasks", pending)):
        task_logger.report_scalar(
            title="resources", series=series,
            iteration=iteration, value=val)
def _get_latest_completed_task_value(self, cur_completed_jobs, series_name):
    # type: (Set[str], str) -> (float, List[float])
    """
    Collect the objective extremum value of every completed task, and return the
    objective value of the most recently completed one together with the collection.
    """
    completed_value = None
    latest_completed = None
    obj_values = []
    cur_task = self._task or Task.current_task()
    for job_id in cur_completed_jobs:
        res = cur_task.send(tasks_service.GetByIdRequest(task=job_id))
        response = res.wait()
        if not response.ok() or response.response_data["task"].get("status") != Task.TaskStatusEnum.completed:
            # Skip tasks that are unreachable or did not finish successfully.
            continue
        # "completed" is an ISO-like timestamp; strip the timezone suffix before parsing.
        completed_time = datetime.strptime(
            response.response_data["task"]["completed"].partition("+")[0],
            "%Y-%m-%dT%H:%M:%S.%f").timestamp()
        completed_values = self._get_last_value(response)
        obj_values.append(completed_values['max_value'] if series_name == "max" else completed_values['min_value'])
        if not latest_completed or completed_time > latest_completed:
            latest_completed = completed_time
            completed_value = completed_values['value']
    return completed_value, obj_values
def _get_last_value(self, response):
    """Extract the objective's last-metrics entry (value/min/max) from a task GetById response."""
    # Only the metric/variant hashes (title, series) are needed for the lookup.
    _, title, series, _ = TrainsJob.get_metric_req_params(
        self.objective_metric.title, self.objective_metric.series)
    return response.response_data["task"]['last_metrics'][title][series]
def _auto_archive_low_performance_tasks(self, completed_jobs):
    # Keep only the top-k performing Tasks visible and archive the rest;
    # no-op unless save_top_k_tasks_only was set to a positive value.
    if self._save_top_k_tasks_only <= 0:
        return
    # sort based on performance
    job_ids_sorted_by_objective = self.__sort_jobs_by_objective(completed_jobs)
    # query system_tags only
    res = self.__get_session().send(tasks_service.GetAllRequest(
        id=job_ids_sorted_by_objective, status=['completed', 'stopped'], only_fields=['id', 'system_tags']))
    response = res.wait()
    if not response.ok():
        return
    tasks_system_tags_lookup = {
        task.get("id"): task.get("system_tags") for task in response.response_data.get("tasks")}
    for i, task_id in enumerate(job_ids_sorted_by_objective):
        system_tags = tasks_system_tags_lookup.get(task_id, [])
        if i < self._save_top_k_tasks_only and Task.archived_tag in system_tags:
            # A task climbed back into the top-k: restore it from the archive.
            print('Restoring from archive Task id={} (#{} objective={})'.format(
                task_id, i, completed_jobs[task_id][0]))
            # top_k task and is archived, remove archive tag
            system_tags = list(set(system_tags) - {Task.archived_tag})
            res = self.__get_session().send(
                tasks_service.EditRequest(task=task_id, system_tags=system_tags, force=True))
            res.wait()
        elif i >= self._save_top_k_tasks_only and Task.archived_tag not in system_tags:
            print('Archiving Task id={} (#{} objective={})'.format(
                task_id, i, completed_jobs[task_id][0]))
            # Not in top_k task and not archived, add archive tag
            system_tags = list(set(system_tags) | {Task.archived_tag})
            res = self.__get_session().send(
                tasks_service.EditRequest(task=task_id, system_tags=system_tags, force=True))
            res.wait()
def __get_session(self):
    """Return the API session of the controller Task, falling back to the default session."""
    cur_task = self._task or Task.current_task()
    if cur_task:
        return cur_task.default_session
    # No task in context - use the process-wide default session.
    # noinspection PyProtectedMember
    return Task._get_default_session()
def __sort_jobs_by_objective(self, completed_jobs):
    """Return completed job IDs ordered best-first according to the objective sign."""
    if not completed_jobs:
        return []
    descending = bool(self.objective_metric.sign >= 0)
    return sorted(completed_jobs.keys(), key=lambda job_id: completed_jobs[job_id][0], reverse=descending)
|
package com.java.study.algorithm.zuo.dadvanced.advanced_class_01;
import java.util.Objects;
/**
* 给一个字符串str,代表一个整数,找到除了这个数之外,绝对值和这个数相差 最小的回文数。
* 例如:
* str = “123”
* 返回“121”
* 注意: 假设字符串str一定能变成long类型
*/
public class Code_07_Find_the_Closest_Palindrome4 {
/**
 * Return the palindrome (other than the number itself) with the smallest absolute
 * distance to the integer encoded in {@code str}. Candidates are the "mirrored"
 * version of the input plus the next bigger / next smaller palindromes around it.
 * On a tie, the smaller palindrome wins.
 */
public static String Find_the_Closest_Palindrome(String str) {
    long original = Long.parseLong(str);
    long mirrored = getRawCheck(str);
    System.out.println("ansmer:raw:----------------" + mirrored);
    // If the mirrored value already lies strictly above/below the input, it is
    // itself the candidate on that side; otherwise step to the next palindrome.
    long bigger = mirrored > original ? mirrored : getMaxValueCheck(mirrored);
    System.out.println("ansmer:big:----------------" + bigger);
    long smaller = mirrored < original ? mirrored : getMinValueCheck(mirrored);
    System.out.println("ansmer:small:----------------" + smaller);
    long answer = (bigger - original >= original - smaller) ? smaller : bigger;
    return String.valueOf(answer);
}
private static long getMinValueCheck(long raw) {
long minValue1 = getMinValue(raw);
long minValue2 = getSmallPalindrome(raw);
if (minValue1 != minValue2) {
minValue1 = getMinValue(raw);
minValue2 = getSmallPalindrome(raw);
}
return minValue1;
}
private static long getMaxValueCheck(long raw) {
long maxValue1 = getMaxValue(raw);
long maxValue2 = getBigPalindrome(raw);
if (maxValue1 != maxValue2) {
maxValue1 = getMaxValue(raw);
maxValue2 = getBigPalindrome(raw);
}
return maxValue1;
}
private static long getRawCheck(String str) {
return getRaw(str);
}
/**
* 获取大的值
* 增加,从中间位置偏上进行增加
* 0 1 2 3 4 从2位置增加
* 0 1 2 3 从1位置增加
*
* @param raw
* @return
*/
private static long getMaxValue(long raw) {
char[] array = String.valueOf(raw).toCharArray();
char[] help = new char[array.length + 1];
help[0] = '0';
for (int i = 0; i < array.length; i++) {
help[i + 1] = array[i];
}
int centerIndex = help.length / 2;
for (int i = centerIndex; i >= 0; i--) {
if (++help[i] > '9') {
help[i] = '0';
} else {
break;
}
}
int startIndex = help[0] == '0' ? 1 : 0;
int endIndex = help.length - 1;
while (startIndex < endIndex) {
help[endIndex--] = help[startIndex++];
}
return Long.parseLong(String.valueOf(help));
}
public static Long getBigPalindrome(Long raw) {
char[] chs = String.valueOf(raw).toCharArray();
char[] res = new char[chs.length + 1];
res[0] = '0';
for (int i = 0; i < chs.length; i++) {
res[i + 1] = chs[i];
}
int size = chs.length;
for (int j = (size - 1) / 2 + 1; j >= 0; j--) {
if (++res[j] > '9') {
res[j] = '0';
} else {
break;
}
}
int offset = res[0] == '1' ? 1 : 0;
size = res.length;
for (int i = size - 1; i >= (size + offset) / 2; i--) {
res[i] = res[size - i - offset];
}
return Long.valueOf(String.valueOf(res));
}
/**
* 奇数从中间位置减少
* 偶数从第二个中间位置减少
* 0 1 2 3 4 从2位置减少
* 0 1 2 3 从1位置减少
*
* @param raw
* @return
*/
private static long getMinValue(long raw) {
char[] array = String.valueOf(raw).toCharArray();
int centerIndex = array.length / 2;
for (int i = centerIndex; i >= 0; i--) {
if (--array[i] < '0') {
array[i] = '9';
} else {
break;
}
}
// 如果首位减到0,则后面都是 99 离他最近的最小回文数
if (array[0] == '0') {
// 只有一位,说明就是一个1,直接返回0即可
if (array.length == 1) {
return 0;
}
for (int i = 1; i < array.length; i++) {
array[i] = '9';
}
return Long.parseLong(String.valueOf(array));
} else {
int startIndex = 0;
int endIndex = array.length - 1;
while (startIndex < endIndex) {
array[startIndex++] = array[endIndex--];
}
return Long.parseLong(String.valueOf(array));
}
}
public static Long getSmallPalindrome(Long raw) {
char[] chs = String.valueOf(raw).toCharArray();
char[] res = new char[chs.length];
int size = res.length;
for (int i = 0; i < size; i++) {
res[i] = chs[i];
}
for (int j = (size - 1) / 2; j >= 0; j--) {
if (--res[j] < '0') {
res[j] = '9';
} else {
break;
}
}
if (res[0] == '0') {
res = new char[size - 1];
for (int i = 0; i < res.length; i++) {
res[i] = '9';
}
return size == 1 ? 0 : Long.parseLong(String.valueOf(res));
}
for (int k = 0; k < size / 2; k++) {
res[size - 1 - k] = res[k];
}
return Long.valueOf(String.valueOf(res));
}
/**
* 将前半部分直接拷贝到后半部分
*
* @param str
* @return
*/
private static long getRaw(String str) {
char[] array = str.toCharArray();
int centerIndex = (array.length) / 2;
int length = array.length - 1;
int index = 0;
while (index < centerIndex) {
array[length - index] = array[index];
index++;
}
return Long.parseLong(String.valueOf(array));
}
public static void main(String[] args) {
//
// long value = 289234;
//
// System.out.println(value);
// String value1 = String.valueOf(com.java.study.answer.zuo.dadvanced.advanced_class_01.Code_07_Find_the_Closest_Palindrome.nearestPalindromic(String.valueOf(value)));
// System.out.println(value1 + "-------" + (Long.parseLong(value1) - value));
//
// String value2 = Find_the_Closest_Palindrome(String.valueOf(value));
// System.out.println(value2 + "-------" + (Long.parseLong(value2) - value));
////
for (int i = 0; i < 1000000; i++) {
long value = (long) (Math.random() * 10000000L + 1);
// System.out.println(value);
String value1 = String.valueOf(com.java.study.answer.zuo.dadvanced.advanced_class_01.Code_07_Find_the_Closest_Palindrome.nearestPalindromic(String.valueOf(value)));
String value2 = Find_the_Closest_Palindrome(String.valueOf(value));
if (!Objects.equals(value1, value2)) {
System.out.println("fuck!" + value);
break;
}
System.out.println("ok");
}
}
//
// public static String mirroring(String s) {
// String x = s.substring(0, (s.length()) / 2);
// return x + (s.length() % 2 == 1 ? s.charAt(s.length() / 2) : "") + new StringBuilder(x).reverse().toString();
// }
//
// public static String nearestPalindromic(String n) {
// if (n.equals("1")) {
// return "0";
// }
//
//
// String a = mirroring(n);
// long diff1 = Long.MAX_VALUE;
// diff1 = Math.abs(Long.parseLong(n) - Long.parseLong(a));
// if (diff1 == 0) {
// diff1 = Long.MAX_VALUE;
// }
// StringBuilder s = new StringBuilder(n);
// int i = (s.length() - 1) / 2;
// while (i >= 0 && s.charAt(i) == '0') {
// s.replace(i, i + 1, "9");
// i--;
// }
// if (i == 0 && s.charAt(i) == '1') {
// s.delete(0, 1);
// int mid = (s.length() - 1) / 2;
// s.replace(mid, mid + 1, "9");
// } else {
// s.replace(i, i + 1, "" + (char) (s.charAt(i) - 1));
// }
//
// String b = mirroring(s.toString());
// long diff2 = Math.abs(Long.parseLong(n) - Long.parseLong(b));
//
//
// s = new StringBuilder(n);
// i = (s.length() - 1) / 2;
// while (i >= 0 && s.charAt(i) == '9') {
// s.replace(i, i + 1, "0");
// i--;
// }
// if (i < 0) {
// s.insert(0, "1");
// } else {
// s.replace(i, i + 1, "" + (char) (s.charAt(i) + 1));
// }
//
// String c = mirroring(s.toString());
// long diff3 = Math.abs(Long.parseLong(n) - Long.parseLong(c));
//
// if (diff2 <= diff1 && diff2 <= diff3) {
// return b;
// }
//
// if (diff1 <= diff3 && diff1 <= diff2) {
// return a;
// } else {
// return c;
// }
//
// }
} |
/*
* Copyright 2011 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ardverk.collection.spt;
import java.io.Serializable;
import java.util.AbstractCollection;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
/**
* A simple/lightweight implementation of a PATRICIA {@link Trie}.
*/
public class PatriciaTrie<K, V> extends AbstractTrie<K, V> implements Serializable {

    private static final long serialVersionUID = 7464215084236615537L;

    // Fallback KeyAnalyzer: delegates bit inspection to keys that implement
    // PatriciaKey themselves.
    // NOTE(review): not declared final although it is used as a constant.
    private static KeyAnalyzer<Object> DEFAULT = new KeyAnalyzer<Object>() {
        @Override
        public boolean isSet(Object key, int bitIndex) {
            return ((PatriciaKey<?>)key).isBitSet(bitIndex);
        }

        @SuppressWarnings({ "rawtypes", "unchecked" })
        @Override
        public int bitIndex(Object key, Object otherKey) {
            return ((PatriciaKey)key).bitIndex(otherKey);
        }
    };

    // Strategy used to read individual bits of a key.
    private final KeyAnalyzer<? super K> keyAnalyzer;

    // Sentinel root; it is also the only node allowed to hold a null key.
    private volatile RootNode<K, V> root = new RootNode<K, V>();

    private volatile int size = 0;

    // Cached snapshot of all entries; invalidated by every modification.
    private transient volatile Entry<? extends K, ? extends V>[] entries = null;

    // Lazily created collection views.
    private transient volatile EntrySet entrySet = null;
    private transient volatile KeySet keySet = null;
    private transient volatile Values values = null;

    // Bumped on every structural change; used for fail-fast iterators.
    private transient volatile int modCount = 0;

    public PatriciaTrie() {
        this(DEFAULT);
    }

    public PatriciaTrie(KeyAnalyzer<? super K> keyAnalyzer) {
        this.keyAnalyzer = keyAnalyzer;
    }

    public PatriciaTrie(Map<? extends K, ? extends V> m) {
        this(keyAnalyzer(m), m);
    }

    public PatriciaTrie(KeyAnalyzer<? super K> keyAnalyzer,
            Map<? extends K, ? extends V> m) {
        this.keyAnalyzer = keyAnalyzer;
        putAll(m);
    }

    /**
     * Returns the {@link KeyAnalyzer}.
     */
    public KeyAnalyzer<? super K> getKeyAnalyzer() {
        return keyAnalyzer;
    }

    /** Returns the entry whose key is bitwise closest to {@code key}, or null. */
    @Override
    public Entry<K, V> select(K key) {
        Node<K, V> entry = selectR(root.left, key, -1);
        if (!entry.isEmpty()) {
            return entry;
        }
        return null;
    }

    // Recursive descent; stops at the first back-edge (bitIndex no longer
    // strictly increasing), which is the PATRICIA termination condition.
    private Node<K, V> selectR(Node<K, V> h, K key, int bitIndex) {
        if (h.bitIndex <= bitIndex) {
            return h;
        }

        if (!isSet(key, h.bitIndex)) {
            return selectR(h.left, key, h.bitIndex);
        } else {
            return selectR(h.right, key, h.bitIndex);
        }
    }

    @Override
    public V put(K key, V value) {
        // This is a shortcut! The root is the only place to store null!
        if (key == null) {
            return putForNullKey(key, value);
        }

        Entry<K, V> entry = select(key);
        K existing = null;
        if (entry != null) {
            existing = entry.getKey();
            // Exact match: just replace the value in place.
            if (equals(key, existing)) {
                return entry.setValue(value);
            }
        }

        // First bit at which key differs from its closest existing key;
        // this determines the new node's depth.
        int bitIndex = bitIndex(key, existing);
        if (bitIndex == KeyAnalyzer.NULL_KEY) {
            return putForNullKey(key, value);
        }

        assert (bitIndex >= 0);
        root.left = putR(root.left, key, value, bitIndex, root);
        incrementSize();

        return null;
    }

    /**
     * Stores the given key-value at the {@link RootNode}.
     */
    private V putForNullKey(K key, V value) {
        if (root.isEmpty()) {
            incrementSize();
        }

        return root.setKeyValue(key, value);
    }

    // Inserts a new node at the position implied by bitIndex; the new node's
    // unused child pointer points back to itself (the PATRICIA back-edge).
    private Node<K, V> putR(Node<K, V> h, K key, V value, int bitIndex, Node<K, V> p) {
        if ((h.bitIndex >= bitIndex) || (h.bitIndex <= p.bitIndex)) {
            Node<K, V> t = new Node<K, V>(key, value, bitIndex);

            boolean isSet = isSet(key, t.bitIndex);
            t.left = isSet ? h : t;
            t.right = isSet ? t : h;
            return t;
        }

        if (!isSet(key, h.bitIndex)) {
            h.left = putR(h.left, key, value, bitIndex, h);
        } else {
            h.right = putR(h.right, key, value, bitIndex, h);
        }
        return h;
    }

    @Override
    public V remove(Object key) {
        @SuppressWarnings("unchecked")
        Entry<K, V> entry = entry((K)key);
        if (entry != null) {
            return removeEntry(entry);
        }
        return null;
    }

    /**
     * Removes the given {@link Entry} from the Trie.
     *
     * NOTE(review): this is O(n) — it rebuilds the whole Trie without the
     * removed entry instead of unlinking a single node.
     */
    private V removeEntry(final Entry<? extends K, ? extends V> entry) {
        // We're traversing the old Trie and adding elements to the new Trie!
        RootNode<K, V> old = clear0();
        traverseR(old.left, (e) -> {
            if (!entry.equals(e)) {
                put(e.getKey(), e.getValue());
            }
            return true;
        }, -1);
        return entry.getValue();
    }

    @Override
    public void select(K key, Cursor<? super K, ? super V> cursor) {
        selectR(root.left, key, cursor, -1);
    }

    // Visits entries in proximity order relative to key; a false return from
    // the cursor aborts the walk.
    private boolean selectR(Node<K, V> h, K key,
            Cursor<? super K, ? super V> cursor, int bitIndex) {
        if (h.bitIndex <= bitIndex) {
            if (!h.isEmpty()) {
                return cursor.select(h);
            }
            return true;
        }

        if (!isSet(key, h.bitIndex)) {
            if (selectR(h.left, key, cursor, h.bitIndex)) {
                return selectR(h.right, key, cursor, h.bitIndex);
            }
        } else {
            if (selectR(h.right, key, cursor, h.bitIndex)) {
                return selectR(h.left, key, cursor, h.bitIndex);
            }
        }
        return false;
    }

    @Override
    public void traverse(Cursor<? super K, ? super V> cursor) {
        traverseR(root.left, cursor, -1);
    }

    // Left-to-right traversal; a false return from the cursor aborts the walk.
    private static <K, V> boolean traverseR(Node<K, V> h,
            Cursor<? super K, ? super V> cursor, int bitIndex) {
        if (h.bitIndex <= bitIndex) {
            if (!h.isEmpty()) {
                return cursor.select(h);
            }
            return true;
        }

        if (traverseR(h.left, cursor, h.bitIndex)) {
            return traverseR(h.right, cursor, h.bitIndex);
        }
        return false;
    }

    @Override
    public void clear() {
        clear0();
    }

    @Override
    public int size() {
        return size;
    }

    @Override
    public Set<Entry<K, V>> entrySet() {
        if (entrySet == null) {
            entrySet = new EntrySet();
        }
        return entrySet;
    }

    @Override
    public Set<K> keySet() {
        if (keySet == null) {
            keySet = new KeySet();
        }
        return keySet;
    }

    @Override
    public Collection<V> values() {
        if (values == null) {
            values = new Values();
        }
        return values;
    }

    /** Returns the left-most (logically first) entry, or null when empty. */
    @Override
    public Entry<K, V> firstEntry() {
        Node<K, V> entry = followLeft(root.left, -1, root);
        if (!entry.isEmpty()) {
            return entry;
        }
        return null;
    }

    /** Returns the right-most (logically last) entry, or null when empty. */
    @Override
    public Entry<K, V> lastEntry() {
        Node<K, V> entry = followRight(root.left, -1);
        if (!entry.isEmpty()) {
            return entry;
        }
        return null;
    }

    private Node<K, V> followLeft(Node<K, V> h, int bitIndex, Node<K, V> p) {
        if (h.bitIndex <= bitIndex) {
            if (!h.isEmpty()) {
                return h;
            }
            // Empty node (the sentinel root): fall back to the parent.
            return p;
        }

        return followLeft(h.left, h.bitIndex, h);
    }

    private Node<K, V> followRight(Node<K, V> h, int bitIndex) {
        if (h.bitIndex <= bitIndex) {
            return h;
        }

        return followRight(h.right, h.bitIndex);
    }

    /**
     * Increments the {@link #size} counter and calls {@link #clearEntriesArray()}.
     */
    private void incrementSize() {
        ++size;
        clearEntriesArray();
    }

    /**
     * Clears the {@link PatriciaTrie} and returns the old {@link RootNode}.
     * The {@link RootNode} may be used to {@link #traverse(RootNode, Cursor)}
     * the old {@link PatriciaTrie}.
     *
     * @see #remove(Object)
     */
    private RootNode<K, V> clear0() {
        RootNode<K, V> previous = root;

        root = new RootNode<K, V>();
        size = 0;
        clearEntriesArray();

        return previous;
    }

    /**
     * Clears the {@link #entries} array.
     */
    private void clearEntriesArray() {
        entries = null;
        ++modCount;
    }

    /**
     * @see KeyAnalyzer#isSet(Object, int)
     */
    private boolean isSet(K key, int bitIndex) {
        return keyAnalyzer.isSet(key, bitIndex);
    }

    /**
     * @see KeyAnalyzer#bitIndex(Object, Object)
     */
    private int bitIndex(K key, K otherKey) {
        return keyAnalyzer.bitIndex(key, otherKey);
    }

    /**
     * Turns the {@link PatriciaTrie} into an {@link Entry[]}. The array
     * is being cached for as long as the {@link PatriciaTrie} isn't being
     * modified.
     *
     * @see ViewIterator
     */
    private Entry<? extends K, ? extends V>[] toArray() {
        if (entries == null) {
            @SuppressWarnings("unchecked")
            final Entry<? extends K, ? extends V>[] dst
                = new Entry[size()];

            traverse(new Cursor<K, V>() {
                private int index = 0;

                @Override
                public boolean select(Entry<? extends K, ? extends V> entry) {
                    dst[index++] = entry;
                    return true;
                }
            });

            entries = dst;
        }

        return entries;
    }

    /**
     * Returns a {@link KeyAnalyzer} for the given {@link Map}.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private static KeyAnalyzer<Object> keyAnalyzer(Map<?, ?> m) {
        if (m instanceof PatriciaTrie<?, ?>) {
            return ((PatriciaTrie)m).getKeyAnalyzer();
        }

        return DEFAULT;
    }

    /**
     * An {@link Iterator} for {@link Entry}s.
     *
     * Iterates over the cached snapshot produced by {@link PatriciaTrie#toArray()}
     * and fails fast (via {@link #modCount}) if the Trie is modified.
     *
     * @see PatriciaTrie#toArray()
     */
    private abstract class ViewIterator<E> implements Iterator<E> {

        private final Entry<? extends K, ? extends V>[] entries = toArray();

        private int expectedModCount = PatriciaTrie.this.modCount;

        private int index = 0;

        private Entry<? extends K, ? extends V> current = null;

        @Override
        public boolean hasNext() {
            return index < entries.length;
        }

        @Override
        public E next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }

            if (expectedModCount != PatriciaTrie.this.modCount) {
                throw new ConcurrentModificationException();
            }

            current = entries[index++];
            return next(current);
        }

        /**
         * Called for each {@link Entry}.
         *
         * @see #next()
         */
        protected abstract E next(Entry<? extends K, ? extends V> entry);

        @Override
        public void remove() {
            if (current == null) {
                throw new IllegalStateException();
            }

            removeEntry(current);
            // Re-sync so the iterator keeps working over its snapshot.
            expectedModCount = PatriciaTrie.this.modCount;
            current = null;
        }
    }

    /**
     * An abstract base class for the various views.
     */
    private abstract class AbstractView<E> extends AbstractCollection<E> {

        @Override
        public void clear() {
            PatriciaTrie.this.clear();
        }

        @Override
        public int size() {
            return PatriciaTrie.this.size();
        }
    }

    /**
     * @see PatriciaTrie#entrySet()
     */
    private class EntrySet extends AbstractView<Entry<K, V>> implements Set<Entry<K, V>> {

        // Resolves the Trie's own entry matching the given key-value pair.
        private Entry<K, V> entry(Entry<K, V> entry) {
            Entry<K, V> other = PatriciaTrie.this.entry(entry.getKey());
            if (other != null && other.equals(entry)) {
                return other;
            }
            return null;
        }

        @SuppressWarnings("unchecked")
        @Override
        public boolean contains(Object o) {
            if (o instanceof Entry<?, ?>) {
                return entry((Entry<K, V>)o) != null;
            }
            return false;
        }

        @Override
        public boolean remove(Object o) {
            if (o instanceof Entry<?, ?>) {
                @SuppressWarnings("unchecked")
                Entry<K, V> entry = entry((Entry<K, V>)o);
                if (entry != null) {
                    int size = size();
                    PatriciaTrie.this.removeEntry(entry);
                    return size != size();
                }
            }
            return false;
        }

        @Override
        public Iterator<Entry<K, V>> iterator() {
            return new ViewIterator<Entry<K, V>>() {
                @SuppressWarnings("unchecked")
                @Override
                protected Entry<K, V> next(Entry<? extends K, ? extends V> entry) {
                    return (Entry<K, V>)entry;
                }
            };
        }
    }

    /**
     * @see PatriciaTrie#keySet()
     */
    private class KeySet extends AbstractView<K> implements Set<K> {

        @Override
        public boolean remove(Object key) {
            int size = size();
            PatriciaTrie.this.remove(key);
            return size != size();
        }

        @Override
        public boolean contains(Object o) {
            return PatriciaTrie.this.containsKey(o);
        }

        @Override
        public Iterator<K> iterator() {
            return new ViewIterator<K>() {
                @Override
                protected K next(Entry<? extends K, ? extends V> entry) {
                    return entry.getKey();
                }
            };
        }
    }

    /**
     * @see PatriciaTrie#values()
     */
    private class Values extends AbstractView<V> {

        // Removes the first entry holding the given value (linear scan).
        @Override
        public boolean remove(Object value) {
            for (Entry<K, V> entry : entrySet()) {
                if (AbstractTrie.equals(value, entry.getValue())) {
                    int size = size();
                    PatriciaTrie.this.removeEntry(entry);
                    return size != size();
                }
            }
            return false;
        }

        @Override
        public Iterator<V> iterator() {
            return new ViewIterator<V>() {
                @Override
                protected V next(Entry<? extends K, ? extends V> entry) {
                    return entry.getValue();
                }
            };
        }
    }

    /**
     * The root node of the {@link Trie}.
     */
    private static class RootNode<K, V> extends Node<K, V> {

        private static final long serialVersionUID = -8857149853096688620L;

        // The root starts out holding no key-value ("empty" sentinel).
        private boolean empty = true;

        public RootNode() {
            super(null, null, -1);
            // Self-loop so traversals terminate on the back-edge check.
            this.left = this;
        }

        /**
         * Sets the key and value of the root node.
         */
        public V setKeyValue(K key, V value) {
            this.key = key;
            this.empty = false;
            return setValue(value);
        }

        @Override
        public boolean isEmpty() {
            return empty;
        }
    }

    /**
     * A node in the {@link Trie}.
     */
    private static class Node<K, V> implements Entry<K, V>, Serializable {

        private static final long serialVersionUID = -2409938371345117780L;

        // Bit position this node discriminates on; -1 for the root sentinel.
        private final int bitIndex;

        protected K key;

        protected V value;

        protected Node<K, V> left;

        protected Node<K, V> right;

        private Node(K key, V value, int bitIndex) {
            this.bitIndex = bitIndex;
            this.key = key;
            this.value = value;
        }

        /**
         * Returns {@code true} if the {@link Node} has no key-value.
         */
        public boolean isEmpty() {
            return false;
        }

        @Override
        public K getKey() {
            return key;
        }

        @Override
        public V getValue() {
            return value;
        }

        @Override
        public V setValue(V value) {
            V existing = this.value;
            this.value = value;
            return existing;
        }

        @Override
        public int hashCode() {
            return 31 * (key != null ? key.hashCode() : 0)
                + (value != null ? value.hashCode() : 0);
        }

        @Override
        public boolean equals(Object o) {
            if (o == this) {
                return true;
            } else if (!(o instanceof Entry<?, ?>)) {
                return false;
            }

            Entry<?, ?> other = (Entry<?, ?>)o;
            return AbstractTrie.equals(key, other.getKey())
                && AbstractTrie.equals(value, other.getValue());
        }

        @Override
        public String toString() {
            return key + " (" + bitIndex + ") -> " + value;
        }
    }
}
|
/*!
* \file
* \author <NAME>
* \date 20.10.2012
*/
#ifndef _MESSAGE_
#define _MESSAGE_
#include <iostream>
/*! for the simplified construction of a Message use this Macro*/
#define _ping_ __FILE__, __LINE__
namespace spectral
{
/*! @brief class intended for the use in throw statements
*
* @ingroup exceptions
* The objects of this class store a message (that describes the error when thrown)
* that can then be displayed in a catch block
* \code
* try{ throw Message("This is an error!\n", _ping_);}
* catch( Message& m) {m.display();}
* \endcode
*/
class Message
{
private:
    const char* m; //!< the stored message text
    const char* f; //!< file in which the exception was thrown
    const int l;   //!< line at which the exception was thrown
public:
    /*! @brief Constructor
     *
     * @param message A character string containing the message
     * @param file The file in which the exception is thrown (contained in the predefined Macro __FILE__)
     * @param line The line in which the exception is thrown (contained in the predefined Macro __LINE__)
     * \note The Macro _ping_ combines __FILE__, __LINE__ in one.
     * \note The message/file strings are not copied; they must outlive this object
     *       (string literals and __FILE__ satisfy this).
     */
    Message(const char* message, const char* file, const int line): m(message), f(file), l(line){}

    /*! @brief virtual destructor
     *
     * The class is designed as a polymorphic base (display() is virtual and
     * derived exception types are expected); without a virtual destructor,
     * deleting a derived object through a Message* would be undefined behaviour.
     */
    virtual ~Message() {}

    /*! @brief prints file, line and message to std::cerr
     *
     * It is virtual so that derived classes, that store more Information can display these too.
     */
    virtual void display() const
    {
        std::cerr << "Message from file "<<f<<" in line " <<l<<": "<<std::endl
                  << m<<std::endl;
    }
};
} //namespace spectral
#endif // _MESSAGE_
|
<reponame>isandlaTech/cohorte-runtime
/**
* File: AbstractExtensibleSCAElement.java
* Author: <NAME>
* Date: 6 janv. 2012
*/
package org.psem2m.sca.converter.model;
/**
* Basic class for extensible SCA elements
*
* @author <NAME>
*/
public abstract class AbstractExtensibleSCAElement extends AbstractSCAElement
        implements IExtensible {

    /**
     * Returns the kind of extension, i.e. the local (tag) name of the
     * underlying XML element.
     *
     * @return the kind of extension
     */
    @Override
    public String getKind() {
        return pXmlElement.getLocalName();
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * org.psem2m.sca.converter.model.AbstractSCAElement#toString(java.lang.
     * StringBuilder, java.lang.String)
     */
    @Override
    public void toString(final StringBuilder aBuilder, final String aPrefix) {
        // Render as: <prefix><SimpleClassName>(kind=<kind>)
        aBuilder.append(aPrefix)
                .append(getClass().getSimpleName())
                .append("(kind=")
                .append(getKind())
                .append(")");
    }
}
|
<filename>app/routes/posts.server.routes.js
'use strict';
module.exports = function(app) {
var users = require('../../app/controllers/users.server.controller');
var posts = require('../../app/controllers/posts.server.controller');
// Posts Routes
app.route('/posts')
.get(posts.list)
.post(users.requiresLogin, posts.create);
app.route('/posts/:postId')
.get(posts.read)
.put(users.requiresLogin, posts.hasAuthorization, posts.update)
.delete(users.requiresLogin, posts.hasAuthorization, posts.delete);
// Finish by binding the Post middleware
app.param('postId', posts.postByID);
};
|
// Test-suite setup hook. Intentionally left empty: no per-suite setup is
// required yet. The examples below show the kind of work that belongs here.
void initTestCase()
{
    // Implement the initialization of the test case environment here
    // This could include setting up mock objects, initializing variables, opening files, etc.
    // For example:
    // MockObject::initialize();
    // TestEnvironment::setup();
    // FileHandler::openFile("testfile.txt");
}
<gh_stars>1-10
import { Nullable } from "@babylonjs/core/types";
import { Matrix, Vector2 } from "@babylonjs/core/Maths/math.vector";
import { Color3 } from "@babylonjs/core/Maths/math.color";
import { IAnimatable } from '@babylonjs/core/Animations/animatable.interface';
import { SmartArray } from "@babylonjs/core/Misc/smartArray";
import { BaseTexture } from "@babylonjs/core/Materials/Textures/baseTexture";
import { RenderTargetTexture } from "@babylonjs/core/Materials/Textures/renderTargetTexture";
import { PushMaterial } from "@babylonjs/core/Materials/pushMaterial";
import { AbstractMesh } from "@babylonjs/core/Meshes/abstractMesh";
import { SubMesh } from "@babylonjs/core/Meshes/subMesh";
import { Mesh } from "@babylonjs/core/Meshes/mesh";
import { Scene } from "@babylonjs/core/scene";
import "./water.fragment";
import "./water.vertex";
export declare class WaterMaterial extends PushMaterial {
    /** Size of the reflection/refraction render targets. */
    renderTargetSize: Vector2;
    private _bumpTexture;
    /** Normal-map texture used to perturb the water surface. */
    bumpTexture: BaseTexture;
    diffuseColor: Color3;
    specularColor: Color3;
    specularPower: number;
    private _disableLighting;
    disableLighting: boolean;
    private _maxSimultaneousLights;
    maxSimultaneousLights: number;
    /**
     * Defines the wind force.
     */
    windForce: number;
    /**
     * Defines the direction of the wind in the plane (X, Z).
     */
    windDirection: Vector2;
    /**
     * Defines the height of the waves.
     */
    waveHeight: number;
    /**
     * Defines the bump height related to the bump map.
     */
    bumpHeight: number;
    /**
     * Defines whether or not to add a smaller moving bump to less steady waves.
     */
    private _bumpSuperimpose;
    bumpSuperimpose: boolean;
    /**
     * Defines whether or not to color refraction and reflection differently with .waterColor2 and .colorBlendFactor2. Non-linear (physically correct) fresnel.
     */
    private _fresnelSeparate;
    fresnelSeparate: boolean;
    /**
     * Defines whether or not bump waves modify the reflection.
     */
    private _bumpAffectsReflection;
    bumpAffectsReflection: boolean;
    /**
     * Defines the water color blended with the refraction (near).
     */
    waterColor: Color3;
    /**
     * Defines the blend factor related to the water color.
     */
    colorBlendFactor: number;
    /**
     * Defines the water color blended with the reflection (far).
     */
    waterColor2: Color3;
    /**
     * Defines the blend factor related to the water color (reflection, far).
     */
    colorBlendFactor2: number;
    /**
     * Defines the maximum length of a wave.
     */
    waveLength: number;
    /**
     * Defines the waves speed.
     */
    waveSpeed: number;
    /**
     * Defines the number of times waves are repeated. This is typically used to adjust waves count according to the ground's size where the material is applied on.
     */
    waveCount: number;
    /**
     * Sets or gets whether or not automatic clipping should be enabled or not. Setting to true will save performances and
     * will avoid calculating useless pixels in the pixel shader of the water material.
     */
    disableClipPlane: boolean;
    protected _renderTargets: SmartArray<RenderTargetTexture>;
    private _mesh;
    private _refractionRTT;
    private _reflectionRTT;
    private _reflectionTransform;
    private _lastTime;
    private _lastDeltaTime;
    private _useLogarithmicDepth;
    private _waitingRenderList;
    private _imageProcessingConfiguration;
    private _imageProcessingObserver;
    /**
     * Gets a boolean indicating that current material needs to register RTT
     */
    get hasRenderTargetTextures(): boolean;
    /**
     * Constructor
     */
    constructor(name: string, scene: Scene, renderTargetSize?: Vector2);
    get useLogarithmicDepth(): boolean;
    set useLogarithmicDepth(value: boolean);
    get refractionTexture(): Nullable<RenderTargetTexture>;
    get reflectionTexture(): Nullable<RenderTargetTexture>;
    /** Adds a mesh (or any render node) to the reflection/refraction render lists. */
    addToRenderList(node: any): void;
    /** Enables or disables rendering into the reflection/refraction targets. */
    enableRenderTargets(enable: boolean): void;
    getRenderList(): Nullable<AbstractMesh[]>;
    get renderTargetsEnabled(): boolean;
    needAlphaBlending(): boolean;
    needAlphaTesting(): boolean;
    getAlphaTestTexture(): Nullable<BaseTexture>;
    isReadyForSubMesh(mesh: AbstractMesh, subMesh: SubMesh, useInstances?: boolean): boolean;
    bindForSubMesh(world: Matrix, mesh: Mesh, subMesh: SubMesh): void;
    private _createRenderTargets;
    getAnimatables(): IAnimatable[];
    getActiveTextures(): BaseTexture[];
    hasTexture(texture: BaseTexture): boolean;
    dispose(forceDisposeEffect?: boolean): void;
    clone(name: string): WaterMaterial;
    serialize(): any;
    getClassName(): string;
    static Parse(source: any, scene: Scene, rootUrl: string): WaterMaterial;
    /** Creates a default ground mesh suitable for this material. */
    static CreateDefaultMesh(name: string, scene: Scene): Mesh;
}
|
#!/usr/bin/env bash
set -eu -o pipefail
# -e: exits if a command fails
# -u: errors if a variable is referenced before being set
# -o pipefail: causes a pipeline to produce a failure return code if any command errors

# Fail fast when no package names are given. FIX: store the arguments as a
# real array — the original assigned "$@" to a scalar and then expanded it
# with ${PACKAGES[@]}, which word-split package names containing spaces.
readonly PACKAGES=("${@:?No package names specified}")
readonly RULES_NODEJS_DIR=$(cd "$(dirname "$0")/.."; pwd)
readonly PACKAGES_DIR="${RULES_NODEJS_DIR}/packages"

# Print a command before running it (keeps CI logs traceable).
echo_and_run() { echo "+ $@" ; "$@" ; }

for package in "${PACKAGES[@]}" ; do
  (
    # Clean package
    cd "${PACKAGES_DIR}/${package}"
    # FIX: pass the package name as a printf argument instead of interpolating
    # it into the format string (a '%' in the name would break printf).
    printf "\n\nCleaning package %s\n" "${package}"
    "${RULES_NODEJS_DIR}/scripts/unlink_deps.sh"
    echo_and_run bazel clean --expunge
    echo_and_run rm -rf $(find . -type d -name node_modules -prune)
  )
done
|
/*:
* @plugindesc <RS_MultiTouch>
* @author biud436
* @help
 * This plugin lets you handle interactions with three or more fingers on
 * touch-screen devices. Keep in mind that this plugin is still under
 * development, so it may contain many bugs.
*
* Here is a list of available functions.
*
* RS.MultiTouch.isTriggered(index);
* RS.MultiTouch.isReleased(index);
* RS.MultiTouch.isPressed(index);
* RS.MultiTouch.isRepeated(index);
* RS.MultiTouch.isLongPressed(index);
*
* Do pass just one parameter named index.
* The index parameter is a number value for finger.
*
 * Note that these Multi-Touch functions will be deprecated in the future;
 * they will be integrated into the static touch input object.
 *
 * I do not recommend relying on this plugin yet,
 * because its development is not finished!
*
*/
// Global registry flag so other plugins can detect that this one is loaded.
var Imported = Imported || {};
Imported.RS_MultiTouch = true;
// Shared root namespace for RS.* plugins; created only if missing.
var RS = RS || {};
RS.MultiTouch = RS.MultiTouch || {};
(function($) {
var parameters = $plugins.filter(function (i) {
return i.description.contains('<RS_MultiTouch>');
});
parameters = (parameters.length > 0) && parameters[0].parameters;
$.Params = RS.MultiTouch.Params || {};
$.Params.MAX_TOUCH = 4;
function duplicate(x) {
var _x = x;
return _x;
};
//=======================================================
// TouchInput (New)
//=======================================================
// Initializes the per-finger touch state.
// BUG FIX: the original copied the array with `slice(0)`, a SHALLOW copy, so
// this._touches[i] and this._eventTouches[i] referenced the SAME objects and
// the field-by-field copy in _updateTouch was a no-op. Each side now gets its
// own state objects. The started/moved/clicked fields (read by _updateTouch)
// are also initialized instead of starting out undefined.
TouchInput._initTouch = function() {
    this._eventTouches = [];
    this._touches = [];
    var max = RS.MultiTouch.Params.MAX_TOUCH;
    var makeTouchState = function() {
        return {
            x: 0,
            y: 0,
            phase: 'none',
            touched: false,
            screenPressed: false,
            pressedTime: 0,
            started: false,
            moved: false,
            clicked: false
        };
    };
    for (var i = 0; i < max; i++) {
        this._eventTouches[i] = makeTouchState();
        this._touches[i] = makeTouchState();
    }
};
// Publishes the event-side state (written by the DOM touch handlers) to the
// frame-side state that game code reads through TouchInput.getTouch().
TouchInput._updateTouch = function() {
    var max = RS.MultiTouch.Params.MAX_TOUCH;
    for (var i = 0; i < max; i++) {
        this._touches[i].x = duplicate(this._eventTouches[i].x);
        this._touches[i].y = duplicate(this._eventTouches[i].y);
        this._touches[i].phase = duplicate(this._eventTouches[i].phase);
        this._touches[i].touched = duplicate(this._eventTouches[i].touched);
        this._touches[i].screenPressed = duplicate(this._eventTouches[i].screenPressed);
        this._touches[i].pressedTime = duplicate(this._eventTouches[i].pressedTime);
        this._touches[i].started = duplicate(this._eventTouches[i].started);
        this._touches[i].moved = duplicate(this._eventTouches[i].moved);
        this._touches[i].clicked = duplicate(this._eventTouches[i].clicked);
        if(this._eventTouches[i].screenPressed) {
            // BUG FIX: the original statement was `pressedTime >= 0;` — a
            // comparison whose result was discarded, so the hold counter never
            // advanced and isRepeated()/isLongPressed() could never fire.
            // Count frames held instead (mirrors core TouchInput._pressedTime++).
            this._eventTouches[i].pressedTime += 1;
        }
    }
};
/**
 * Clears the transient per-frame touch variables after update and rendering.
 * (Translated from the original Korean comment.)
 */
TouchInput._clearTouch = function() {
    var count = RS.MultiTouch.Params.MAX_TOUCH;
    for (var i = 0; i < count; i++) {
        var state = this._eventTouches[i];
        state.x = 0;
        state.y = 0;
        state.phase = 'none';
        state.touched = false;
    }
};
// After each engine tick (update + render), drop the per-frame touch flags.
var _RS_SceneManager_tickEnd = SceneManager.tickEnd;
SceneManager.tickEnd = function() {
    _RS_SceneManager_tickEnd.call(this);
    TouchInput._clearTouch();
};
/**
 * Returns the frame-side state object for the given finger index, or
 * undefined when the index is out of range.
 */
TouchInput.getTouch = function(index) {
    var limit = RS.MultiTouch.Params.MAX_TOUCH;
    if (index >= 0 && index <= limit) {
        return this._touches[index];
    }
};
// True while the finger is held and the hold duration hits the key-repeat
// cadence (fires on the initial touch, then periodically after keyRepeatWait).
TouchInput.__touch__isRepeated = function(index) {
    var max = RS.MultiTouch.Params.MAX_TOUCH;
    // BUG FIX: use >= — valid slots are 0 .. MAX_TOUCH-1. The original
    // `index > max` guard let index === max through and then threw a
    // TypeError reading `.screenPressed` of the undefined slot.
    if (index >= max || index < 0) return false;
    var touch = this._touches[index];
    return (touch.screenPressed &&
            (touch.touched ||
             (touch.pressedTime >= this.keyRepeatWait &&
              touch.pressedTime % this.keyRepeatInterval === 0)));
};
// True once the finger has been held for at least keyRepeatWait frames.
TouchInput.__touch__isLongPressed = function(index) {
    var max = RS.MultiTouch.Params.MAX_TOUCH;
    // BUG FIX: >= — same off-by-one guard as __touch__isRepeated; index ===
    // MAX_TOUCH used to crash on the undefined slot.
    if (index >= max || index < 0) return false;
    var touch = this._touches[index];
    return touch.screenPressed && touch.pressedTime >= this.keyRepeatWait;
};
// Writes position/phase into the event-side slot for the given finger index.
TouchInput.__touch__setTouch = function(index, x, y, phase, touched) {
    var max = RS.MultiTouch.Params.MAX_TOUCH;
    // BUG FIX: >= — valid slots are 0 .. MAX_TOUCH-1; the original `index >
    // max` guard let index === max through and the field writes below then
    // threw a TypeError on the undefined array element.
    if (index >= max || index < 0) return;
    this._eventTouches[index].x = x;
    this._eventTouches[index].y = y;
    this._eventTouches[index].phase = phase;
    this._eventTouches[index].touched = touched;
};
// Records the start of a touch in the given finger slot.
TouchInput.__touch__touched = function(index, x, y) {
    // BUG FIX: reject out-of-range finger indices up front — the original
    // wrote to `this._eventTouches[index]` directly, which throws when index
    // >= MAX_TOUCH (Touch.identifier values are only guaranteed unique, not
    // small — TODO confirm identifier ranges on target devices).
    if (index >= RS.MultiTouch.Params.MAX_TOUCH || index < 0) return;
    this.__touch__setTouch(index, x, y, 'touched', true);
    this._eventTouches[index].screenPressed = true;
    this._eventTouches[index].started = true;
};
// Records a move of an active touch in the given finger slot.
TouchInput.__touch__moved = function(index, x, y) {
    // BUG FIX: guard out-of-range indices (see __touch__touched).
    if (index >= RS.MultiTouch.Params.MAX_TOUCH || index < 0) return;
    this.__touch__setTouch(index, x, y, 'moved', true);
    // A move only counts once the touch actually started on the canvas.
    if(this._eventTouches[index].started) {
        this._eventTouches[index].moved = true;
    }
}
// Records the release of a touch in the given finger slot.
TouchInput.__touch__released = function(index, x, y) {
    // BUG FIX: guard out-of-range indices (see __touch__touched).
    if (index >= RS.MultiTouch.Params.MAX_TOUCH || index < 0) return;
    this.__touch__setTouch(index, x, y, 'released', false);
    this._eventTouches[index].screenPressed = false;
    // NOTE(review): only a tap-without-move clears started/moved here; a
    // touch that moved keeps both flags set after release — verify this is
    // intentional.
    if(this._eventTouches[index].started && !this._eventTouches[index].moved) {
        this._eventTouches[index].started = false;
        this._eventTouches[index].moved = false;
    }
};
//=======================================================
// TouchInput (Override)
//=======================================================
// Extend TouchInput.clear so the multi-touch state is (re)initialized
// whenever the core input state is cleared.
var _RS_TouchInput_clear = TouchInput.clear;
TouchInput.clear = function() {
    _RS_TouchInput_clear.call(this);
    this._initTouch();
};
var _alias_TouchInput_update = TouchInput.update;
TouchInput.update = function() {
_alias_TouchInput_update.call(this);
this._updateTouch();
};
/**
 * Replaces the engine's touchstart handler. The first loop reproduces the
 * stock single-touch behavior (trigger, or cancel when 2+ fingers are
 * down); the second additionally records touches into their
 * per-identifier multi-touch slots.
 */
TouchInput._onTouchStart = function(event) {
    var isValid = false;
    for (var i = 0; i < event.changedTouches.length; i++) {
        var touch = event.changedTouches[i];
        var x = Graphics.pageToCanvasX(touch.pageX);
        var y = Graphics.pageToCanvasY(touch.pageY);
        if (Graphics.isInsideCanvas(x, y)) {
            this._screenPressed = true;
            this._pressedTime = 0;
            if (event.touches.length >= 2) {
                this._onCancel(x, y);
            } else {
                this._onTrigger(x, y);
            }
            isValid = true;
        }
    }
    // Cordova / iOS standalone web apps always consume the event.
    if (window.cordova || window.navigator.standalone) {
        isValid = true;
    }
    // NOTE(review): this iterates event.touches (ALL active touches), so
    // already-started touches get re-marked 'touched' on every new
    // touchstart; confirm whether event.changedTouches was intended.
    for (var i = 0; i < event.touches.length; i++) {
        var touch = event.touches[i];
        var x = Graphics.pageToCanvasX(touch.pageX);
        var y = Graphics.pageToCanvasY(touch.pageY);
        if (Graphics.isInsideCanvas(x, y)) {
            this.__touch__touched(touch.identifier, x, y);
            isValid = true;
        }
    }
    // Consuming the event stops scrolling/zooming gestures on mobile.
    if(isValid) {
        event.preventDefault();
    }
};
/**
 * Replaces the engine's touchmove handler: forwards moves to the stock
 * single-touch path, then updates each active touch's multi-touch slot.
 */
TouchInput._onTouchMove = function(event) {
    var isValid = false;
    for (var i = 0; i < event.changedTouches.length; i++) {
        var touch = event.changedTouches[i];
        var x = Graphics.pageToCanvasX(touch.pageX);
        var y = Graphics.pageToCanvasY(touch.pageY);
        this._onMove(x, y);
        isValid = true;
    }
    // Per-slot bookkeeping for every touch still on the canvas.
    for (var i = 0; i < event.touches.length; i++) {
        var touch = event.touches[i];
        var x = Graphics.pageToCanvasX(touch.pageX);
        var y = Graphics.pageToCanvasY(touch.pageY);
        if (Graphics.isInsideCanvas(x, y)) {
            this.__touch__moved(touch.identifier, x, y);
            isValid = true;
        }
    }
    if(isValid) {
        event.preventDefault();
    }
};
/**
 * Replaces the engine's touchend handler: forwards releases to the stock
 * single-touch path, then marks each just-ended touch's slot as released.
 */
TouchInput._onTouchEnd = function(event) {
    for (var i = 0; i < event.changedTouches.length; i++) {
        var touch = event.changedTouches[i];
        var x = Graphics.pageToCanvasX(touch.pageX);
        var y = Graphics.pageToCanvasY(touch.pageY);
        this._screenPressed = false;
        this._onRelease(x, y);
    }
    // BUG FIX: on 'touchend' the touches that just ended are reported in
    // event.changedTouches — event.touches lists only the touches STILL
    // on the screen. The original second loop iterated event.touches, so
    // it marked the remaining active touches as released and never the
    // ones that actually ended.
    for (var i = 0; i < event.changedTouches.length; i++) {
        var touch = event.changedTouches[i];
        var x = Graphics.pageToCanvasX(touch.pageX);
        var y = Graphics.pageToCanvasY(touch.pageY);
        if (Graphics.isInsideCanvas(x, y)) {
            this.__touch__released(touch.identifier, x, y);
        }
    }
};
//=======================================================
// RS.MultiTouch
//=======================================================
$.isTriggered = function(index) {
var touch = TouchInput.getTouch(index);
if(index === 0) {
return this._triggered;
} else {
return touch.touched || touch.started;
}
};
$.isReleased = function(index) {
var touch = TouchInput.getTouch(index);
return touch.phase === "released";
};
$.isPressed = function(index) {
var touch = TouchInput.getTouch(index);
return touch.touched && touch.screenPressed;
};
$.isRepeated = function(index) {
return TouchInput.__touch__isRepeated(index);
};
$.isLongPressed = function(index) {
return TouchInput.__touch__isLongPressed(index);
};
})(RS.MultiTouch); |
# Optimized Fibonacci using bottom-up dynamic programming.
def fibonacci(n):
    """Return the n-th Fibonacci number (0-indexed), computed iteratively.

    Args:
        n: non-negative index into the Fibonacci sequence.

    Returns:
        The n-th Fibonacci number (F(0) = 0, F(1) = 1).

    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    # BUG FIX: the original unconditionally assigned memo[1] = 1, which
    # raised IndexError for n == 0 (memo had length 1). Handle the base
    # cases before allocating the table.
    if n < 2:
        return n
    memo = [0] * (n + 1)
    memo[1] = 1
    # Fill the table bottom-up; each entry is the sum of the previous two.
    for i in range(2, n + 1):
        memo[i] = memo[i - 1] + memo[i - 2]
    return memo[n]
# Compute the nth Fibonacci number for a demonstration value.
# BUG FIX: 'n' was never defined, so this script raised NameError at
# runtime; give it a concrete sample value.
n = 10
num = fibonacci(n)
print("The", n, "th Fibonacci number is", num)
package flect
import "unicode"
// Capitalize upper-cases the first letter of the given string and returns
// the result:
//
//	user      = User
//	widget_id = Widget_id
func Capitalize(s string) string {
	return New(s).Capitalize().String()
}
// Capitalize title-cases the first rune of the identifier's original
// string and returns the result as a new Ident:
//
//	user      = User
//	widget_id = Widget_id
func (i Ident) Capitalize() Ident {
	if len(i.Parts) == 0 {
		return New("")
	}
	runes := []rune(i.Original)
	// Robustness: guard against an empty Original even when Parts is
	// non-empty — indexing runes[0] would otherwise panic. (Parts is
	// presumably derived from Original, so this may be unreachable in
	// practice; the guard costs nothing.)
	if len(runes) == 0 {
		return New("")
	}
	runes[0] = unicode.ToTitle(runes[0])
	return New(string(runes))
}
|
import React from 'react';
import { useDispatch } from 'react-redux';
import { deleteContact } from '../redux/contact/contact';
/**
 * Renders one contact row (name and number) with a delete button that
 * dispatches the deleteContact action for this contact.
 */
const ContactItem = (props: ContactItemProps) => {
  const { contact } = props;
  const dispatch = useDispatch();

  const handleDelete = (target: Contact) => {
    dispatch(deleteContact(target));
  };

  return (
    <li className="list__item" id={contact.id}>
      <div className="list__info">{contact.name}</div>
      <div className="list__info">{contact.number}</div>
      <button
        className="list__btn"
        type="button"
        onClick={() => handleDelete(contact)}
      >
        ✕
      </button>
    </li>
  );
};

export default ContactItem;
|
import simple_test


def run_test():
    """Run the 'test29' case with the -h flag and print whatever it returns."""
    output = simple_test.test("test29", ["-h"])
    print(output)


run_test()
import { newIndex } from './constants.js';
//REGION LINE GRAPH
/**
 * Draws the daily-confirmed-cases line chart for a single region into the
 * canvas with id 'graph_active_dailyR' (Chart.js v2-style options:
 * scales.yAxes/xAxes). `Chart` and `newIndex` (the x-axis labels) are
 * expected to be in scope from elsewhere.
 */
export default class LineGraph {
    /**
     * @param {Array} regionCount - region records; only regionCount[0] is
     *     used. Must expose .name and .dailyConfirmedCount (array).
     * NOTE(review): Hokkaido's counts are halved before plotting — the
     * reason is not visible here (double counting in the source data?);
     * confirm and document the factor of 2.
     */
    constructor(regionCount) {
        if(regionCount[0].name === "Hokkaido") {
            const array_1 = regionCount[0].dailyConfirmedCount;
            const straw5 = array_1.map(i => i / 2);
            this._reg = straw5;
        } else {
            this._reg = regionCount[0].dailyConfirmedCount;
        }
    }

    /** Builds the gradients and renders the chart (side effect on the DOM). */
    plot() {
        const ctx = document.getElementById('graph_active_dailyR').getContext('2d');
        // Horizontal gray gradient for the line stroke.
        const gradientStroke = ctx.createLinearGradient(0,0,400,0);
        // gradientStroke.addColorStop(0, 'rgba(255,0,0,0.1)');
        // gradientStroke.addColorStop(1, 'rgba(255,0,0,0.5)');
        gradientStroke.addColorStop(0, 'rgba(153,153,153,0.1)');
        gradientStroke.addColorStop(1, 'rgba(153,153,153,0.9)');
        // Vertical gray gradient for the area fill.
        const gradientFill = ctx.createLinearGradient(0,175,0,0);
        // gradientFill.addColorStop(0, 'rgba(255,0,0,0.0)');
        // gradientFill.addColorStop(1, 'rgba(255,0,0,0.5)');
        gradientFill.addColorStop(0, 'rgba(153,153,153,0.1)');
        gradientFill.addColorStop(1, 'rgba(153,153,153,0.5)');
        const myLineChart = new Chart(ctx, {
            type: 'line',
            data: {
                labels: newIndex,
                datasets: [
                    {
                        label: 'Confirmed',
                        // backgroundColor: 'rgba(50, 50, 50, 0.99)',
                        backgroundColor: gradientFill,
                        // borderColor: 'rgb(100, 100, 100)',
                        // borderColor: 'rgba(255,0,0,0.9)',
                        borderColor: gradientStroke,
                        borderWidth: 1,
                        pointBorderWidth: 0,
                        pointRadius: 0,
                        lineTension: 0,
                        // pointBackgroundColor: 'rgb(100, 100, 100)',
                        // pointBackgroundColor: 'rgba(255,0,0,0.9)',
                        data: this._reg,
                    },
                    // {
                    //     label: 'Deaths',
                    //     // backgroundColor: 'rgba(153, 153, 153, 0.2)',
                    //     borderColor: 'rgb(107, 107, 107)',
                    //     borderWidth: 0.1,
                    //     pointRadius: 1,
                    //     pointBorderWidth: 0,
                    //     lineTension: 0,
                    //     z: 10,
                    //     pointBackgroundColor: 'rgb(107, 107, 107)',
                    //     data: kiwidead
                    // },
                ],
            },
            options: {
                legend: {
                    display: false
                },
                // title: {
                //     display: true,
                //     text: 'KANTO'
                // },
                // Disable hover highlighting; tooltips below still work.
                hover: {
                    mode: null
                },
                layout: {
                    padding: {
                        top: 20,
                        left: 0,
                        right: 0
                    },
                },
                scales: {
                    yAxes: [{
                        display: true,
                        position: 'right',
                        ticks: {
                            fontColor: 'rgba(255,255,255,0.75)',
                            beginAtZero: true,
                            // max: 1500,
                            // min: -100,
                            maxTicksLimit: 8,
                            // stepSize: 100,
                            padding: 10
                        },
                        gridLines: {
                            // color: 'rgb(28, 28, 28)',
                            color: 'rgba(238, 238, 238, 0.1)',
                            // zeroLineColor: 'rgb(28, 28, 28)',
                            zeroLineColor: 'rgba(238, 238, 238, 0.1)',
                        },
                        scaleLabel: {
                            display: true,
                            labelString: 'Daily Cases' ,
                            fontColor: 'rgba(255,255,255,0.75)',
                        },
                    }],
                    xAxes: [{
                        display: true,
                        ticks: {
                            fontColor: 'rgba(255,255,255,0.75)',
                            padding: 10,
                            maxTicksLimit: 15
                        },
                        time: {
                            displayFormats: {
                                quarter: 'MMM D YYYY'
                            }
                        },
                        gridLines: {
                            // color: 'rgb(28, 28, 28)',
                            color: 'rgba(238, 238, 238, 0.1)',
                            // zeroLineColor: 'rgb(28, 28, 28)',
                            zeroLineColor: 'rgba(238, 238, 238, 0.1)',
                        },
                    }]
                },
                animation: {
                    animateScale: true,
                    duration: 2000
                },
                responsive: true,
                maintainAspectRatio: true,
                tooltips: {
                    intersect: false,
                    mode: 'index',
                    backgroundColor: 'rgba(58, 58, 58, 0.5)',
                    borderColor: 'rgba(250, 250, 250, 0.9)',
                    borderWidth: 1,
                    titleAlign: 'center',
                    titleFontFamily: "roboto",
                    titleFontColor: 'rgb(245, 245, 245)',
                    titleFontSize: 14,
                    bodyFontFamily: "roboto",
                    bodyFontColor: 'rgb(239, 239, 239)',
                    bodyFontSize: 14,
                    bodyAlign: 'center',
                    caretSize: 8,
                    displayColors: false,
                }
            }
        });
    }
}
|
/*
* @Descripttion: 地图
* @version: 1.0.0
* @Author: LSC
* @Date: 2021-06-10 10:06:31
* @LastEditors: LSC
* @LastEditTime: 2021-06-10 10:13:53
*/
import view from '@/components/view.vue'

// Route table for the map-distribution section. Each child lazy-loads its
// view component via the async-require pattern.
export default {
  title: '地图分布',
  path: 'mapDistribution',
  name: 'mapDistribution',
  component: view,
  children: [
    {
      title: '设备分布',
      path: '/mapDistribution/deviceDist',
      name: 'deviceDist',
      component: (resolve) =>
        require(['@/views/smartElectricity/mapDistribution/deviceDist.vue'], resolve)
    },
    {
      // BUG FIX: the titles for the project/gateway entries were swapped —
      // path/name 'projectDist' was labeled 网关分布 (gateway) and
      // 'gatewayDist' was labeled 项目分布 (project). Titles now match
      // their route names; paths/names are unchanged so navigation code
      // referencing them keeps working.
      title: '项目分布',
      path: '/mapDistribution/projectDist',
      name: 'projectDist',
      component: (resolve) =>
        require(['@/views/smartElectricity/mapDistribution/projectDist.vue'], resolve)
    },
    {
      title: '网关分布',
      path: '/mapDistribution/gatewayDist',
      name: 'gatewayDist',
      component: (resolve) =>
        require(['@/views/smartElectricity/mapDistribution/gatewayDist.vue'], resolve)
    }
  ]
}
|
#!/bin/bash
# Runs every *.json test file through run.jq (with the jbol library on the
# jq module search path) and prints only the non-empty output lines.
declare -r JBOL='/usr/local/share/jbol'

for t in *.json
do
    #echo $t 1>&2
    # BUG FIX: $t and $JBOL were unquoted, so filenames containing spaces
    # or glob characters would be word-split by the shell.
    jq -L "$JBOL" \
        --arg TEST "$t" \
        --from-file run.jq \
        --raw-output \
        "$t"
    echo
done | grep .
# vim:syntax=sh:ai:sw=4:ts=4:et
<reponame>rafaeltorquato/javaee7-template
//package study.client.jaxws;
//
//public class PersonJaxWsClient {
//
// /**
// * @param args the command line arguments
// */
// public static void main(String[] args) {
// listAllPersons();
// }
//
// private static void listAllPersons() {
// //TODO Fix, create a full javaee client
// final study.client.jaxws.PersonJaxWsEndpoint port = new study.client.jaxws.PersonJaxWsEndpointService().getPersonJaxWsEndpointPort();
// System.out.println(port.list());
// }
//
//}
|
import Phaser from 'phaser'
/**
 * Demo scene: plays the 'diamond' animation and nudges a marker sprite up
 * by 32px every time the animation repeats (via ANIMATION_REPEAT).
 */
export default class CallbackParameters extends Phaser.Scene
{
    preload()
    {
        // Gem sprite atlas shared by all animations in this scene.
        this.load.atlas('gems', '/assets/tests/columns/gems.png', '/assets/tests/columns/gems.json')
    }

    create()
    {
        // Static marker that climbs on every repeat of the animation.
        const marker = this.add.sprite(400, 400, 'gems', 'ruby_0000')

        const diamondConfig: Phaser.Types.Animations.Animation = {
            key: 'diamond',
            frames: this.anims.generateFrameNames('gems', { prefix: 'diamond_', end: 15, zeroPad: 4 }),
            repeat: 6
        }

        const diamondAnim = this.anims.create(diamondConfig) as Phaser.Animations.Animation

        // Each repeat fires ANIMATION_REPEAT; move the marker up 32px.
        diamondAnim.on(Phaser.Animations.Events.ANIMATION_REPEAT, (animation: Phaser.Animations.Animation, frame: Phaser.Animations.AnimationFrame) => {
            marker.y -= 32
        })

        // Play the animation on a second sprite.
        this.add.sprite(200, 400, 'gems').play('diamond')
    }
}
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 - 2019 Karlsruhe Institute of Technology - Steinbuch Centre for Computing
# This code is distributed under the MIT License
# Please, see the LICENSE file
#
# Created on Thu Feb 28 09:18:17 2019
# @author: valentin.kozlov
#
# 1. (done) Set number of requests
# 2. (done) Read how many images are in the 'testdata' directory -> file_array
# 3. (done) Append file_array, such that the size of array > number of requests
# 4. (done) submit the number of requests, output is rerdirected in smthg
# DateNow-predict_multi-number.log
# 5. (ToDo) In a while loop open files, check content (size?) until all is done or timeout(?)
# 6. (ToDo) Report number of successful requests
### 1. Default parameters (each can be overridden by the flags parsed below)
NumRequests=3
RemoteURL="http://147.213.75.181:10017/"
Model="Dogs_Breed"
TestDir=$PWD/testdata

##### USAGEMESSAGE #####
USAGEMESSAGE="Usage: $0 <options> ; where <options> are: \n
--num_requests=number \t \t Number of requests to call \n
--remote_url=url \t \t http://WebAddress:Port, e.g. http://147.213.75.181:10017 \n
--model=string \t \t Name of the deployed user's Model \n
--test_dir \t \t \t Directory with test data (default 'testdata') \n"

##### PARSE SCRIPT FLAGS #####
arr=("$@")
if [ $# -eq 0 ]; then
    # no flags given: keep the defaults above
    echo "Using default values ..."
elif [ $1 == "-h" ] || [ $1 == "--help" ]; then
    # print the usage message and quit (xpg_echo makes echo honor \n \t)
    shopt -s xpg_echo
    echo $USAGEMESSAGE
    exit 1
elif [ $# -ge 1 ] && [ $# -le 4 ]; then
    # up to four --key=value flags; substring match, value after the '='
    for i in "${arr[@]}"; do
        [[ $i = *"--num_requests"* ]] && NumRequests=${i#*=}
        [[ $i = *"--remote_url"* ]] && RemoteURL=${i#*=}
        [[ $i = *"--model"* ]] && Model=${i#*=}
        [[ $i = *"--test_dir"* ]] && TestDir=${i#*=}
    done
else
    # Too many arguments were given (>4)
    echo "ERROR! Too many arguments provided!"
    shopt -s xpg_echo
    echo $USAGEMESSAGE
    exit 2
fi
# Normalize the URL: drop a trailing slash so "${RemoteURL}/..." below is clean.
RemoteURL=${RemoteURL%/}

### 2. Check how many files are in TestDir
# Only .jpeg/.jpg files are considered.
# FIXES: quote "$TestDir" (paths with spaces), use 'grep -E' (egrep is
# deprecated), escape the dots and anchor at end of name so e.g. "xjpgy"
# cannot match.
FileList=($(ls -1 "$TestDir" | grep -E "(\.jpeg|\.jpg)$"))
# Start from a copy of the list so it can be extended below.
FileListLong=("${FileList[@]}")

### 3. Append FileList to FileListLong until it holds >= NumRequests entries
while [ ${#FileListLong[@]} -lt $NumRequests ]
do
    FileListLong=( "${FileListLong[@]}" "${FileList[@]}" )
done
echo ""
echo "${FileListLong[@]}"
echo ${#FileListLong[@]}
### 4. Submit the number of requests (NumRequests)
DateNow=$(date +%y%m%d_%H%M%S)
counter=0
for j in $(seq 1 $NumRequests)
do
    test_file=${FileListLong[counter]}
    log_file="$DateNow-predict_multi-${counter}.log"
    # BUG FIX: the logged command previously showed 'data=@${test_file}'
    # without the ${TestDir}/ prefix, so it did not match the curl call
    # actually executed below; the log now reproduces the real command.
    echo "curl -X POST '${RemoteURL}/models/${Model}/predict' \
    -H 'accept: application/json' -H 'Content-Type: multipart/form-data' \
    -F 'data=@${TestDir}/${test_file};type=image/jpeg'" > $log_file
    echo "" >> $log_file
    curl -X POST "${RemoteURL}/models/${Model}/predict" \
    -H "accept: application/json" -H "Content-Type: multipart/form-data" \
    -F "data=@${TestDir}/${test_file};type=image/jpeg" >> $log_file &
    # wait: waits for the upload of an image to finish, then goes to next call
    # without it first call is fine, others "Internal Server Error"
    wait
    let counter=counter+1
done
# Invoke the 'close' action on the 'blockcoined' contract with host
# 'eoszhiminzou' and challenger 'bob', signed with eoszhiminzou's active key.
cleos push action blockcoined close '{"host":"eoszhiminzou", "challenger":"bob"}' -p eoszhiminzou@active
|
require 'rails_helper'

# Renders ProviderInterface::ConditionsComponent for application choices in
# several states and checks the conditions list plus the Met/Pending tag,
# including the deferred-offer states that read status_before_deferral.
RSpec.describe ProviderInterface::ConditionsComponent do
  describe 'rendered component' do
    let(:conditions) { ['Fitness to teach check'] }

    it 'renders the conditions' do
      application_with_conditions_met = build(:application_choice, status: 'recruited', offer: { 'conditions' => conditions })
      result = render_inline(described_class.new(application_choice: application_with_conditions_met))
      expect(result.to_html).to include('Fitness to teach check')
    end

    it 'indicates whether conditions are met' do
      # 'recruited' implies the offer conditions were met.
      application_with_conditions_met = build(:application_choice, status: 'recruited', offer: { 'conditions' => conditions })
      result = render_inline(described_class.new(application_choice: application_with_conditions_met))
      expect(result.css('.app-tag').text).to eq('Met')
    end

    it 'indicates whether conditions are pending' do
      # NOTE(review): 'awaiting_provider_decision' is used as the
      # not-yet-met state here even though such applications would not
      # normally carry an offer; 'pending_conditions' (as in the deferred
      # spec below) may have been intended -- confirm.
      application_with_pending_conditions = build(:application_choice, status: 'awaiting_provider_decision', offer: { 'conditions' => conditions })
      result = render_inline(described_class.new(application_choice: application_with_pending_conditions))
      expect(result.css('.app-tag').text).to eq('Pending')
    end

    it 'indicates whether conditions are met for deferred offers' do
      # Deferred offers fall back to the pre-deferral status for the tag.
      application_with_conditions_met = build(:application_choice,
                                              status: 'offer_deferred',
                                              status_before_deferral: 'recruited',
                                              offer: { 'conditions' => conditions })
      result = render_inline(described_class.new(application_choice: application_with_conditions_met))
      expect(result.css('.app-tag').text).to eq('Met')
    end

    it 'indicates whether conditions are pending for deferred offers' do
      application_with_pending_conditions = build(:application_choice,
                                                  status: 'offer_deferred',
                                                  status_before_deferral: 'pending_conditions',
                                                  offer: { 'conditions' => conditions })
      result = render_inline(described_class.new(application_choice: application_with_pending_conditions))
      expect(result.css('.app-tag').text).to eq('Pending')
    end
  end
end
|
#!/bin/bash
# Configure macOS Finder preferences, then restart the affected services.

# Don't show items on Desktop
defaults write com.apple.finder CreateDesktop -bool false
# Open a new Finder window in home directory
defaults write com.apple.finder NewWindowTarget -string "PfHm"
defaults write com.apple.finder NewWindowTargetPath -string "file://${HOME}/"
# Show all files (hidden files included)
# BUG FIX: every other boolean in this script uses the typed '-bool true'
# form; the bare 'YES' stored an untyped string value for
# AppleShowAllFiles. Normalize to the typed form.
defaults write com.apple.finder AppleShowAllFiles -bool true
# Show Path bar
defaults write com.apple.finder ShowPathbar -bool true
# Show Status Bar
defaults write com.apple.finder ShowStatusBar -bool true
# Show Tab View
defaults write com.apple.finder ShowTabView -bool true
# Show all extensions
defaults write NSGlobalDomain AppleShowAllExtensions -bool true
# Open save panel with expanded view by default
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode -bool true

# Restart Finder and the menu bar so the settings take effect.
killall Finder
killall -1 SystemUIServer
echo "Please reboot"
<reponame>Josephat-n/worthit
from django.test import TestCase
from .models import Profile, Project
from django.contrib.auth.models import User
# Create your tests here.
class ProfileTestClass(TestCase):
    """Tests for the Profile model's custom save/delete helper methods."""

    # Setup Method
    def setUp(self):
        # Build an (unsaved) profile fixture.
        # NOTE(review): name_id=1 assumes a User row with pk 1 exists when
        # the profile is saved -- confirm, or create that user here.
        # self.name=User(id = 1)
        self.profile_one=Profile(bio= '<PASSWORD>', name_id=1, contact = '0731-633511')

    # Testing Instance
    def test_instance(self):
        self.assertTrue(isinstance(self.profile_one,Profile))

    # Testing Save Method
    def test_save_method(self):
        # save_profile() should persist the row.
        self.profile_one.save_profile()
        prof = Profile.objects.all()
        self.assertTrue(len(prof) > 0)

    # Teardown Method
    def tearDown(self):
        Profile.objects.all().delete()

    #Delete Method
    def test_delete(self):
        # delete_profile() should remove the previously saved row.
        self.profile_one.save_profile()
        self.profile_one.delete_profile()
        prof = Profile.objects.all()
        self.assertTrue(len(prof)<1)
class ProjectTestClass(TestCase):
    """Tests for the Project model's custom save/delete helper methods."""

    # Setup Method
    def setUp(self):
        # NOTE(review): self.profile is created but never attached to the
        # project below -- confirm whether Project requires an owner.
        self.profile=Profile(id = 1)
        self.project_one=Project(title='git-search',description='Consumes the github API and allows serches in github',live_link= 'httpsa://gitthub.com/Josephat-n/gitSearch')

    # Testing Instance
    def test_instance(self):
        self.assertTrue(isinstance(self.project_one,Project))

    # Testing Save Method
    def test_save_method(self):
        # save_project() should persist the row.
        self.project_one.save_project()
        projs = Project.objects.all()
        self.assertTrue(len(projs) > 0)

    # Teardown Method
    def tearDown(self):
        Project.objects.all().delete()

    #Delete Method
    def test_delete(self):
        # delete_project() should remove the previously saved row.
        self.project_one.save_project()
        self.project_one.delete_project()
        projs = Project.objects.all()
        self.assertTrue(len(projs)<1)
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/time.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#define SERVER_PORT 1500        // server port (standardized)
#define MAX_MSG 1024            // maximum buffer size
#define TEMPO_PADRAO 1          // default timer interval (seconds)
#define IP_LOCAL "192.168.1.11" // client's IPv4 address (MAY BE CHANGED)
#define LIMITE_DADOS 50         // index where the payload section of a packet starts

// globals shared between main and the helpers below
int cliente_socket, recebido, tam_server;
char mensagem[MAX_MSG+1]="";

/* function prototypes */
void inicializar_sockaddr_in(struct sockaddr_in *estrutura, int port);
void inicializar_host(struct sockaddr_in *estrutura, char* ip, int port);
int digitos_numero(int num);
void temporizador_de_dados(float tempo_aceitavel);
int retorne_quantidade(char* men, int num);
bool checksum_correto(char* checksum, char* mensagem);
/* inicio main */
/* UDP client of a distributed file-download protocol:
 *   1. create a UDP socket and bind it on SERVER_PORT
 *   2. ask the coordinator (argv[1]) for file argv[2], sending our IP
 *   3. receive either "NGC ..." (refused) or the ip/port of the peer
 *      ("client2") that holds the file
 *   4. download the file packet by packet, validating sequence number and
 *      checksum, ACKing good packets and replying "NGC <n>" on errors,
 *      until a packet with the 'finished' flag set to 1 arrives. */
int main(int argc, char *argv[]) {
    int bind_cliente, i=2;
    struct sockaddr_in cliente, server, cliente_servidor;
    char ip_de_envio[15]="", porta_de_envio[5]="", nome_arquivo[30]="", extensao[5]="";
    int porta_de_envio_int;
    /* create the client's UDP socket */
    cliente_socket = socket(AF_INET,SOCK_DGRAM,0);
    if(cliente_socket<0) {
        printf("%s: Problema no socket \n",argv[0]);
        exit(1);
    }
    if(argc!=3) {
        // usage: ./program server_ip file.extension
        printf("Parametros: %s <ip_server> <arquivo.extensao>\n", argv[0]);
        exit(1);
    }
    /* configure the coordinator's host entry and our local address */
    printf("%s: Configurando servidor chefe...\n", argv[0]);
    inicializar_host(&server, argv[1], SERVER_PORT);
    inicializar_sockaddr_in(&cliente, SERVER_PORT);
    /* bind the client socket */
    bind_cliente = bind(cliente_socket, (struct sockaddr *) &cliente, sizeof(cliente));
    if(bind_cliente<0) {
        printf("%s: Problema no bind\n", argv[0]);
        exit(1);
    }
    strcpy(nome_arquivo, argv[2]);
    printf("NOME ARQUIVO: %s\n", nome_arquivo);
    int passou=0, j=0;
    /* extract the file extension (text after the first '.')
     * NOTE(review): 'extensao' is only 5 bytes and later dots are copied
     * too, so names like "a.tar.gz" overflow it -- confirm input limits. */
    for(int i=0; i<strlen(nome_arquivo); i++){
        if(passou==1){
            extensao[j]=nome_arquivo[i];
            j++;
        }
        if(nome_arquivo[i]=='.'){
            passou=1;
        }
    }
    printf("EXTENSAO: %s\n",extensao);
    /* request the file download */
    for(i=2;i<argc;i++) {
        // send the request "<file> <our ip>" to the coordinator, retrying
        // with a back-off on send failure
        sprintf(mensagem, "%s %s", argv[i], IP_LOCAL);
        while((bind_cliente = sendto(cliente_socket, mensagem, sizeof(mensagem)+1, 0,
            (struct sockaddr *) &server, sizeof(server)))<0){
            temporizador_de_dados(TEMPO_PADRAO);
        }
        printf("Para Server: %s\n", mensagem);
        int tam_cliente = sizeof(cliente);
        tam_server = sizeof(server);
        // receive the coordinator's answer
        while((recebido = recvfrom(cliente_socket, mensagem, MAX_MSG, 0, (struct sockaddr *) &cliente, &tam_cliente))<0){
            temporizador_de_dados(TEMPO_PADRAO);
        }
        printf("Server: %s\n", mensagem);
    }
    // negative answer: the file does not exist or was already copied
    if(strncmp(mensagem, "NGC",3)==0){
        printf("Arquivo nao existe ou ja copiado!\n");
        exit(1);
    }
    int tam_ip=retorne_quantidade(mensagem,2);
    int tam_porta=retorne_quantidade(mensagem,3);
    // copy the sender's ip and port out of the reply buffer
    memcpy(ip_de_envio,&mensagem[4],tam_ip);
    memcpy(porta_de_envio,&mensagem[4+tam_ip+1],tam_porta);
    porta_de_envio_int = atoi(porta_de_envio);
    // configure the host of client2 (the peer that will send the file)
    printf("%s: Configurando servidor do arquivo...\n", argv[0]);
    inicializar_host(&cliente_servidor,ip_de_envio,porta_de_envio_int);
    int tam_cliente = sizeof(cliente_servidor);
    // open the output file for binary writing
    FILE* arquivo = fopen(argv[2], "wb");
    char cabecalho_recebido[MAX_MSG]="", cabecalho_envio[MAX_MSG]="", dados[MAX_MSG-(LIMITE_DADOS-1)]="";
    char checksum[11]="";
    int num_pacote = 0, num_pacote_recebido=0, termino_recebido=0;
    /* receive the file until the 'finished' flag arrives */
    while(termino_recebido!=1){
        // reset the scratch buffers
        memset(mensagem,0x0,MAX_MSG);
        memset(cabecalho_recebido,0x0, MAX_MSG);
        memset(dados,0x0, MAX_MSG-(LIMITE_DADOS-1));
        memset(checksum, 0x0, sizeof(checksum));
        // receive one packet from client2
        while((recebido = recvfrom(cliente_socket, mensagem, MAX_MSG+1, 0, (struct sockaddr *) &cliente_servidor, &tam_cliente))<0){
            temporizador_de_dados(TEMPO_PADRAO);
        }
        // parse the packet sequence number
        memcpy(cabecalho_recebido, &mensagem[4], sizeof(mensagem)-4);
        num_pacote_recebido = atoi(cabecalho_recebido);
        // parse the 'finished' flag
        memcpy(cabecalho_recebido, &cabecalho_recebido[digitos_numero(num_pacote_recebido)+1], sizeof(cabecalho_recebido)-(digitos_numero(num_pacote_recebido)+1));
        termino_recebido = atoi(cabecalho_recebido);
        // parse the checksum
        memcpy(checksum, &mensagem[4+retorne_quantidade(mensagem,2)+1+1+1], sizeof(checksum)-1);
        // finished flag set: leave the loop
        if(termino_recebido==1){
            break;
        }
        // copy the payload bytes out of the buffer
        memcpy(dados, &mensagem[LIMITE_DADOS], sizeof(mensagem)-(LIMITE_DADOS-1));
        // sequence number and checksum both correct
        if(num_pacote_recebido == num_pacote && checksum_correto(checksum,mensagem)==1){
            // acknowledge the packet to client2
            memset(cabecalho_envio,0x0,MAX_MSG);
            sprintf(cabecalho_envio,"ACK %d",num_pacote_recebido);
            while((bind_cliente = sendto(cliente_socket, cabecalho_envio, sizeof(cabecalho_envio), 0,
                (struct sockaddr *) &cliente_servidor, sizeof(cliente_servidor)))<0){
                temporizador_de_dados(TEMPO_PADRAO);
            }
            printf("Para Cliente2: %s\n", cabecalho_envio);
            // append the payload to the file: non-text files keep the full
            // fixed-size payload; text files stop at the first NUL so the
            // zero padding is not written
            if(strcmp(extensao,"txt")!=0){
                fwrite(&dados, 1, sizeof(dados), arquivo);
            }else{
                fwrite(&dados, 1, strlen(dados), arquivo);
            }
            num_pacote++;
        }// wrong sequence number or bad checksum
        else{
            // tell client2 the packet was rejected (expecting num_pacote)
            sprintf(cabecalho_envio,"NGC %d", num_pacote);
            while((bind_cliente = sendto(cliente_socket, cabecalho_envio, sizeof(cabecalho_envio), 0,
                (struct sockaddr *) &cliente_servidor, sizeof(cliente_servidor)))<0){
                temporizador_de_dados(TEMPO_PADRAO);
            }
        }
    }
    fclose(arquivo); // close the output file
    printf("\n%s: Arquivo recebido com sucesso!\n", argv[0]);
    return 0;
}
/* fim main */
/* Fills an IPv4 sockaddr_in with the wildcard address (INADDR_ANY) and
 * the given port, both converted to network byte order. */
void inicializar_sockaddr_in(struct sockaddr_in *estrutura, int port){
    estrutura->sin_port = htons(port);
    estrutura->sin_addr.s_addr = htonl(INADDR_ANY);
    estrutura->sin_family = AF_INET;
}
/* Fills 'estrutura' with the address of host 'ip' (hostname or dotted
 * quad, resolved via gethostbyname) and the given port; exits the whole
 * program when the host cannot be resolved. */
void inicializar_host(struct sockaddr_in *estrutura, char* ip, int port){
    struct hostent *host;
    host = gethostbyname(ip);
    // unknown host: abort the program
    if(host==NULL) {
        printf("-> Host desconhecido '%s' \n", ip);
        exit(1);
    }
    estrutura->sin_family = host->h_addrtype;
    memcpy((char *) &estrutura->sin_addr.s_addr,
        host->h_addr_list[0], host->h_length);
    estrutura->sin_port = htons(port);
    printf("-> Configurando host '%s' (IP : %s) \n", host->h_name,
        inet_ntoa(*(struct in_addr *)host->h_addr_list[0]));
}
/* Returns the number of decimal digits of an integer; 0 has one digit,
 * and the sign of a negative number is not counted. */
int digitos_numero(int num){
    int digitos = 1;
    while (num / 10 != 0) {
        num /= 10;
        digitos++;
    }
    return digitos;
}
/* Back-off timer used between datagram send/receive retries.
 *
 * tempo_aceitavel: interval to wait, in seconds (fractions allowed).
 *
 * BUG FIX: the original busy-waited comparing clock() samples, pinning a
 * CPU core for the whole interval (clock() measures consumed CPU time,
 * so the loop only advanced while burning cycles). nanosleep() waits on
 * wall time and yields the CPU; if a signal interrupts the sleep, the
 * remaining interval (written back into 'restante') is slept again. */
void temporizador_de_dados(float tempo_aceitavel){
    struct timespec restante;
    restante.tv_sec = (time_t)tempo_aceitavel;
    restante.tv_nsec = (long)((tempo_aceitavel - (float)restante.tv_sec) * 1000000000.0f);
    while (nanosleep(&restante, &restante) == -1 && errno == EINTR) {
        /* interrupted by a signal: sleep the remaining time */
    }
}
/* Returns the length of the token (run of non-space characters) that
 * immediately precedes the num-th space in 'mensagem'. If fewer than
 * 'num' spaces exist, the length of the final token is returned. */
int retorne_quantidade(char* mensagem, int num){
    int tamanho = 0;
    size_t len = strlen(mensagem);
    for (size_t i = 0; i < len; i++) {
        if (mensagem[i] == ' ') {
            // reached a separator: stop if it is the requested one,
            // otherwise start counting the next token from zero
            if (--num == 0) {
                break;
            }
            tamanho = 0;
        } else {
            tamanho++;
        }
    }
    return tamanho;
}
/* Checks whether the received checksum matches one recomputed from the
 * payload of the global 'mensagem' buffer (the 'mensagem' parameter
 * shadows it but main passes the global in).
 *
 * Scheme: ten counters initialized to '0'; payload bytes (indices
 * LIMITE_DADOS..MAX_MSG-1) are added round-robin onto them.
 * NOTE(review): the per-position sums live in plain chars and can
 * overflow/wrap -- this only works if the sender computes the checksum
 * with identical wrap-around arithmetic; confirm against the peer. */
bool checksum_correto(char* checksum, char* mensagem){
    int k=0, j=0;
    // local checksum to compare against
    char checksum_confere[11]="";
    // initialize every position to '0'
    for(k=0;k<10; k++){
        checksum_confere[k]='0';
    }
    // accumulate the received payload into the ten positions
    for(k=LIMITE_DADOS;k<MAX_MSG;k++){
        if(j>=10){
            j=0;
        }
        // add this byte onto the current checksum position
        checksum_confere[j]+=mensagem[k];
        j++;
    }
    // compare computed vs received
    if(strcmp(checksum,checksum_confere)==0){
        printf("--- Checksum correto\n");
        return true;
    }
    printf("--- Checksum ERROR\n");
    return false;
}
|
package net.anatolich.subscriptions.security.domain.model;
import javax.persistence.Embeddable;
import lombok.AccessLevel;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
/**
 * Value class to hold an identifier of the current user.
 * Usernames are validated (non-null, non-blank), immutable once set, and
 * stored in a lower-cased canonical form.
 */
@Embeddable
@NoArgsConstructor(access = AccessLevel.PROTECTED)
@Getter
@EqualsAndHashCode
@ToString
public class UserId {

    private String username;

    private UserId(String username) {
        setUsername(username);
    }

    /**
     * Creates a UserId from the given username.
     *
     * @param username non-null, non-blank username
     * @return a new UserId holding the lower-cased username
     */
    public static UserId of(String username) {
        return new UserId(username);
    }

    private void setUsername(String username) {
        if (username == null) {
            throw new IllegalArgumentException("username must not be null");
        }
        if (username.isBlank()) {
            throw new IllegalArgumentException("username must not be empty");
        }
        if (this.username != null) {
            throw new IllegalStateException("username must not be changed");
        }
        // BUG FIX: the default-locale toLowerCase() is locale-sensitive
        // (e.g. Turkish dotless-i maps 'I' -> 'ı'), so the same input
        // could canonicalize differently on differently-configured JVMs.
        // Pin a fixed locale for a stable identifier.
        this.username = username.toLowerCase(java.util.Locale.ROOT);
    }
}
|
from typing import List, Tuple, Dict
def find_highest_versions(file_list: List[Tuple[str, int]]) -> Dict[str, int]:
    """Map each file name to the largest version number seen for it.

    Args:
        file_list: (name, version) pairs; a name may appear many times.

    Returns:
        Dict from file name to its highest version.
    """
    highest: Dict[str, int] = {}
    for name, version in file_list:
        previous = highest.get(name)
        highest[name] = version if previous is None else max(previous, version)
    return highest
#!/bin/bash
# Provision an OpenStack compute/block-storage node.
# Usage: script.sh -c|--config <configfile>

# Parse command-line flags (each flag consumes its value).
# BUG FIX: '[[ $# > 1 ]]' performs a lexicographic STRING comparison
# inside [[ ]], not a numeric one; use the arithmetic operator -gt.
while [[ $# -gt 1 ]]
do
    key="$1"
    case $key in
        -c|--config)
            CONFIGFILE="$2"
            shift
            ;;
        *)
            # unknown option
            ;;
    esac
    shift
done
# Load the variables referenced below (VERSION, CONTROLLERPRIVATEIP, ...).
source "${CONFIGFILE}"

echo "Setting up swap space..."
fallocate -l 8G /swapfile
chmod 600 /swapfile
mkswap /swapfile
swapon /swapfile

echo "Setting up the Compute Node..."
echo "Setting up /etc/hosts..."
cat /vagrant/files/hosts >> /etc/hosts

echo "Setting up Openstack Repository for version ${VERSION}..."
if [ "${VERSION}" == "kilo" ]; then
    apt-get -qq -y --force-yes install ubuntu-cloud-keyring
    echo "deb http://ubuntu-cloud.archive.canonical.com/ubuntu" "trusty-updates/kilo main" > /etc/apt/sources.list.d/cloudarchive-kilo.list
elif [ "${VERSION}" == "liberty" ]; then
    apt-get -qq -y --force-yes install software-properties-common
    add-apt-repository -y cloud-archive:liberty
fi
apt-get -qq update && apt-get -qq -y --force-yes dist-upgrade

# Install ntp, synced against the controller node.
echo "Installing and configuring NTP..."
apt-get -qq -y --force-yes install ntp
sed -e "s/CTRLIP/${CONTROLLERPRIVATEIP}/" /vagrant/files/other_ntp.conf.orig > /etc/ntp.conf

# Prepare /dev/sdb as the LVM volume group backing Cinder volumes.
echo "Configuring sdb..."
apt-get -qq -y --force-yes install parted lvm2 qemu
parted /dev/sdb mklabel msdos
parted /dev/sdb mkpart primary 512 100%
pvcreate /dev/sdb1
vgcreate cinder-volumes /dev/sdb1
#cp /vagrant/files/cinder/lvm.conf /etc/lvm/lvm.conf

echo "Installing and Configuring Cinder..."
apt-get -qq -y --force-yes install cinder-volume python-mysqldb
sed -e "s/CTRLIP/${CONTROLLERPRIVATEIP}/" -e "s/RABBIT_PASS/${RABBITMQPWD}/" -e "s/CINDER_PASS/${CINDER_PASS}/" \
    -e "s/BLOCKPRIVIP/${PRIVATEIP}/" /vagrant/files/cinder/cinder.conf.block.orig > /etc/cinder/cinder.conf
service tgt restart
service cinder-volume restart
rm -f /var/lib/cinder/cinder.sqlite
package org.multibit.hd.ui.models;
/**
 * <p>Interface to provide the following to UI:</p>
 * <ul>
 * <li>Identification of generic Model</li>
 * </ul>
 *
 * @param <M> the type of the value held by this model
 *
 * @since 0.0.1
 *
 */
public interface Model<M> {

  /**
   * @return The value of the model (usually user data)
   */
  M getValue();

  /**
   * @param value The value of the model
   */
  void setValue(M value);

}
|
<filename>src/pages/Experience7/index.js<gh_stars>0
/**
* @module Experiences/Experience0
*/
import React, { Profiler } from 'react'
import { Observable, Subject } from 'rxjs'
const onRender = (id, phase, actualDuration) => {
console.log(id, phase, actualDuration)
}
const subject = new Subject()
subject.subscribe({
next: (v) => console.log(`observerA: ${v}`)
})
subject.subscribe({
next: (v) => console.log(`observerB: ${v}`)
})
const observable = new Observable((subscriber) => {
subscriber.next(1)
setTimeout(() => {
subscriber.next(2)
subscriber.complete()
}, 1000)
})
/**
* @function Experience
* @return {Object} Return the dom of the Experience
*/
const Experience = () => {
const handleClick = () => {
observable.subscribe(subject)
}
return (
<Profiler id="Experience" onRender={onRender}>
<button onClick={handleClick}>
Call the subject next and then look at the console
</button>
</Profiler>
)
}
export default Experience
|
#!/bin/bash
# This script will generate dummy PNG icons from a set of SVG files.
# Only run this once after cloning.
cd -P "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"

# Check that the required utilities exist.
for i in inkscape convert; do
    type $i &>/dev/null
    [ $? -ne 0 ] && echo "ERROR: \`$i\` not found." >&2 && exit 1
done

# Ask before clobbering an existing generated file.
# BUG FIX: the original ignored its argument and read the loop variable
# $i from the enclosing scope (which only worked by accident); use the
# positional parameter $1 instead. -r stops read mangling backslashes.
_confirm_overwrite()
{
    echo "You already have './css/$1' in your system."
    echo -ne "Proceed anyway? [Y/n]: "
    read -r YN
    [ "$YN" != Y ] && exit 0
}

for i in arrows.png favicon.png icons.png navicon.png; do
    [ -f ./css/$i ] && _confirm_overwrite $i
done

# Render each icon SVG at 24x24 and stack them into one sprite sheet.
for i in ./svg/ico-*.svg; do
    j=${i%%svg}png
    inkscape --export-png=$j --export-dpi=200 --export-background-opacity=0 \
        --without-gui -w 24 -h 24 $i
done
convert ./svg/ico-*.png -append ./css/icons.png

# favicon, taken from about icon
mv ./svg/ico-01-about.png ./css/favicon.png

# navicon
inkscape --export-png=./css/navicon.png --export-dpi=200 --export-background-opacity=0 \
    --without-gui -w 24 -h 24 ./svg/navicon.svg

# Arrows sprite sheet.
for i in ./svg/arr-*.svg; do
    j=${i%%svg}png
    inkscape --export-png=$j --export-dpi=200 --export-background-opacity=0 \
        --without-gui -w 24 -h 24 $i
done
convert ./svg/arr-*.png -append ./css/arrows.png

# cleanup
rm -fr ./svg/*.png
exit 0
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { GooseGameEditorComponentModule } from './goose-game/components/goose-game-editor/goose-game-editor.module';
import { MemoryGameEditorPageModule } from './memory-game/components/memory-game-editor/memory-game-editor.module';
import { QuizEditorPageModule } from './quiz/quiz-editor/quiz-editor.module';
/**
 * Aggregator module that bundles all game-editor feature modules
 * (goose game, memory game, quiz) and re-exports them, so consumers
 * only need to import this single module.
 */
@NgModule({
  // Nothing declared directly here; this module only re-exports feature modules.
  declarations: [],
  imports: [
    CommonModule,
    GooseGameEditorComponentModule,
    MemoryGameEditorPageModule,
    QuizEditorPageModule
  ],
  // Re-export the editor modules so importing GamesComponentsModule exposes them all.
  exports: [GooseGameEditorComponentModule, MemoryGameEditorPageModule, QuizEditorPageModule]
})
export class GamesComponentsModule { }
|
# Gemfile -- add the Devise authentication gem
gem 'devise'
# command line -- install the gem, generate Devise's initializer/locales,
# generate a Devise-backed User model, then run the migration it creates
bundle install
rails generate devise:install
rails generate devise User
rake db:migrate
# routes.rb -- mount Devise's sign-in/sign-up/password routes for User
Rails.application.routes.draw do
  devise_for :users
  # other routes
end
# controller -- require a signed-in user for every action by default
class ApplicationController < ActionController::Base
  before_action :authenticate_user!
end
import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split

# Load and clean the data: drop rows with missing values, since
# RandomForestClassifier cannot handle NaNs.
data = pd.read_csv('patient_data.csv')
data.dropna(inplace=True)
# Define the X (features) and y (label) datasets
X = data.drop(columns=['has_heart_attack'])
y = data['has_heart_attack']
# Hold out a test set. The original script evaluated on the same data it
# trained on, which grossly overstates a random forest's accuracy (it can
# nearly memorize the training set). Stratify to keep class balance.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42, stratify=y
)
# Instantiate the model (fixed seed for reproducible results)
model = RandomForestClassifier(random_state=42)
# Fit the model on the training split only
model.fit(X_train, y_train)
# Use the model to predict the risk of heart attack on unseen data
predictions = model.predict(X_test)
# Calculate the accuracy of the predictions on the held-out set
accuracy = np.mean(y_test == predictions)
print("Model Accuracy: %.2f" % accuracy)
/* micropolisJS. Adapted by <NAME> from Micropolis.
*
* This code is released under the GNU GPL v3, with some additional terms.
* Please see the files LICENSE and COPYING for details. Alternatively,
* consult http://micropolisjs.graememcc.co.uk/LICENSE and
* http://micropolisjs.graememcc.co.uk/COPYING
*
*/
// Manages the lifecycle of every map sprite (trains, ships, monsters, copters,
// tornadoes, explosions) for one simulation instance.
Micro.SpriteManager = function (map, SIM) {
    this.sim = SIM;          // owning simulation; supplies messageManager/disasterManager/blockMaps
    this.spriteList = [];    // all sprites; a sprite with frame === 0 is considered dead
    this.map = map;
    this.spriteCycle = 0;    // tick counter, incremented in moveObjects() and passed to sprite.move()
}
Micro.SpriteManager.prototype = {
    constructor: Micro.SpriteManager,
    // Returns the first live sprite of the given type, or null if none exists.
    // Sprites with frame === 0 are dead and ignored.
    getSprite : function(type) {
        var filteredList = this.spriteList.filter(function (s) {
            return s.frame !== 0 && s.type === type;
        });
        if (filteredList.length === 0) return null;
        return filteredList[0];
    },
    // Returns a shallow copy of the sprite list so callers cannot mutate it.
    getSpriteList : function() {
        return this.spriteList.slice();
    },
    // Returns the sprites inside the given world-coordinate rectangle.
    // NOTE(review): the exclusion test only rejects sprites past BOTH lastX
    // and lastY; a sprite past just one edge is still returned -- confirm
    // this matches the intended view culling.
    getSpritesInView : function(startX, startY, lastX, lastY) {
        var sprites = [];
        startX = Micro.worldToPix(startX);
        startY = Micro.worldToPix(startY);
        lastX = Micro.worldToPix(lastX);
        lastY = Micro.worldToPix(lastY);
        return this.spriteList.filter(function(s) {
            return (s.x + s.xOffset >= startX && s.y + s.yOffset >= startY) &&
                !(s.x + s.xOffset >= lastX && s.y + s.yOffset >= lastY);
        });
    },
    // Advances every live sprite by one tick, then drops dead sprites.
    moveObjects : function() {
        var messageManager = this.sim.messageManager;
        var disasterManager = this.sim.disasterManager;
        var blockMaps = this.sim.blockMaps;
        this.spriteCycle += 1;
        // Iterate over a copy: sprite.move() may add or kill sprites.
        var list = this.spriteList.slice();
        var i = list.length;
        while(i--){
        //for (var i = 0, l = list.length; i < l; i++) {
            var sprite = list[i];
            if (sprite.frame === 0) continue;
            sprite.move(this.spriteCycle, messageManager, disasterManager, blockMaps);
        }
        this.pruneDeadSprites();
    },
    /*moveObjects : function(simData) {
        var messageManager = simData.messageManager;
        var disasterManager = simData.disasterManager;
        var blockMaps = simData.blockMaps;
        this.spriteCycle += 1;
        var list = this.spriteList.slice();
        for (var i = 0, l = list.length; i < l; i++) {
            var sprite = list[i];
            if (sprite.frame === 0)
                continue;
            sprite.move(this.spriteCycle, messageManager, disasterManager, blockMaps);
        }
        this.pruneDeadSprites();
    },*/
    // Instantiates a sprite of the given type (see the `constructors` map
    // below) at pixel coordinates (x, y) and registers it.
    makeSprite : function(type, x, y) {
        this.spriteList.push(new constructors[type](this.map, this, x, y));
    },
    // Spawns a tornado, or re-energises an existing one instead.
    makeTornado : function(messageManager) {
        var sprite = this.getSprite(Micro.SPRITE_TORNADO);
        if (sprite !== null) {
            sprite.count = 200;
            return;
        }
        // Random position, kept away from the map edges.
        var x = Random.getRandom(Micro.worldToPix(this.map.width) - 800) + 400;
        var y = Random.getRandom(Micro.worldToPix(this.map.height) - 200) + 100;
        this.makeSprite(Micro.SPRITE_TORNADO, x, y);
        messageManager.sendMessage(Messages.TORNADO_SIGHTED, {x: Micro.pixToWorld(x), y: Micro.pixToWorld(y)});
    },
    // Explosion at world coordinates (bounds-checked).
    makeExplosion : function(x, y) {
        if (this.map.testBounds(x, y)) this.makeExplosionAt(Micro.worldToPix(x), Micro.worldToPix(y));
    },
    // Explosion at pixel coordinates (no bounds check).
    makeExplosionAt : function(x, y) {
        this.makeSprite(Micro.SPRITE_EXPLOSION, x, y);
    },
    // Spawns a plane at the given world coordinates; at most one may exist.
    generatePlane : function(x, y) {
        if (this.getSprite(Micro.SPRITE_AIRPLANE) !== null) return;
        this.makeSprite(Micro.SPRITE_AIRPLANE, Micro.worldToPix(x), Micro.worldToPix(y));
    },
    // Occasionally spawns a train once the city has some population;
    // at most one train may exist at a time.
    generateTrain : function(census, x, y) {
        if (census.totalPop > 20 && this.getSprite(Micro.SPRITE_TRAIN) === null && Random.getRandom(25) === 0) this.makeSprite(Micro.SPRITE_TRAIN,Micro.worldToPix(x) + 8, Micro.worldToPix(y) + 8);
    },
    // Tries each map edge in turn (top, left, bottom, right) for a channel
    // tile and launches a ship there with probability 1/4 per edge.
    generateShip : function() {
        // XXX This code is borked. The map generator will never
        // place a channel tile on the edges of the map
        var x,y;
        if (Random.getChance(3)) {
            for (x = 4; x < this.map.width - 2; x++) {
                if (this.map.getTileValue(x, 0) === Tile.CHANNEL) {
                    this.makeShipHere(x, 0);
                    return;
                }
            }
        }
        if (Random.getChance(3)) {
            for (y = 1; y < this.map.height - 2; y++) {
                if (this.map.getTileValue(0, y) === Tile.CHANNEL) {
                    this.makeShipHere(0, y);
                    return;
                }
            }
        }
        if (Random.getChance(3)) {
            for (x = 4; x < this.map.width - 2; x++) {
                if (this.map.getTileValue(x, this.map.height - 1) === Tile.CHANNEL) {
                    this.makeShipHere(x, this.map.height - 1);
                    return;
                }
            }
        }
        if (Random.getChance(3)) {
            for (y = 1; y < this.map.height - 2; y++) {
                if (this.map.getTileValue(this.map.width - 1, y) === Tile.CHANNEL) {
                    this.makeShipHere(this.map.width - 1, y);
                    return;
                }
            }
        }
    },
    // Manhattan distance (in pixels) from world tile (x, y) to the nearest
    // live ship; returns 99999 when no ship exists.
    getBoatDistance : function(x, y) {
        var dist = 99999;
        var pixelX = Micro.worldToPix(x) + 8;
        var pixelY = Micro.worldToPix(y) + 8;
        var sprite;
        for (var i = 0, l = this.spriteList.length; i < l; i++) {
            sprite = this.spriteList[i];
            if (sprite.type === Micro.SPRITE_SHIP && sprite.frame !== 0) {
                //var sprDist = Micro.absoluteValue(sprite.x - pixelX) + Micro.absoluteValue(sprite.y - pixelY);
                var sprDist = Math.abs(sprite.x - pixelX) + Math.abs(sprite.y - pixelY);
                dist = Math.min(dist, sprDist);
            }
        }
        return dist;
    },
    // Spawns a ship at the given world coordinates.
    makeShipHere : function(x, y) {
        this.makeSprite(Micro.SPRITE_SHIP,Micro.worldToPix(x),Micro.worldToPix(y));
    },
    // Spawns a helicopter at the given world coordinates; at most one may exist.
    generateCopter : function(x, y) {
        if (this.getSprite(Micro.SPRITE_HELICOPTER) !== null) return;
        this.makeSprite(Micro.SPRITE_HELICOPTER,Micro.worldToPix(x),Micro.worldToPix(y));
    },
    // Spawns a monster at world coordinates (x, y) and broadcasts the sighting.
    makeMonsterAt : function(messageManager, x, y) {
        this.makeSprite(Micro.SPRITE_MONSTER,
            Micro.worldToPix(x),
            Micro.worldToPix(y));
        messageManager.sendMessage(Messages.MONSTER_SIGHTED, {x: x, y: y});
    },
    // Activates the monster disaster. An existing monster is re-energised and
    // redirected at the pollution maximum; otherwise up to 300 random river
    // tiles are tried, falling back to a fixed location.
    // NOTE(review): there is no early return after reactivating an existing
    // monster, so the loop below may spawn a second one -- confirm against
    // the upstream Micropolis behaviour.
    makeMonster : function(messageManager) {
        var sprite = this.getSprite(Micro.SPRITE_MONSTER);
        if (sprite !== null) {
            sprite.soundCount = 1;
            sprite.count = 1000;
            sprite.destX = Micro.worldToPix(this.map.pollutionMaxX);
            sprite.destY = Micro.worldToPix(this.map.pollutionMaxY);
        }
        var done = 0;
        for (var i = 0; i < 300; i++) {
            var x = Random.getRandom(this.map.width - 20) + 10;
            var y = Random.getRandom(this.map.height - 10) + 5;
            var tile = this.map.getTile(x, y);
            if (tile.getValue() === Tile.RIVER) {
                this.makeMonsterAt(messageManager, x, y);
                done = 1;
                break;
            }
        }
        if (done === 0) this.makeMonsterAt(messageManager, 60, 50);
    },
    // Drops all dead (frame === 0) sprites from the list.
    // NOTE(review): the `type` parameter is unused.
    pruneDeadSprites : function(type) {
        this.spriteList = this.spriteList.filter(function (s) { return s.frame !== 0; });
    }
}
// Maps sprite type constants to their constructor functions; consumed by
// SpriteManager.makeSprite to instantiate the right sprite class.
var constructors = {};
constructors[Micro.SPRITE_TRAIN] = Micro.TrainSprite;
constructors[Micro.SPRITE_SHIP] = Micro.BoatSprite;
constructors[Micro.SPRITE_MONSTER] = Micro.MonsterSprite;
constructors[Micro.SPRITE_HELICOPTER] = Micro.CopterSprite;
constructors[Micro.SPRITE_AIRPLANE] = Micro.AirplaneSprite;
constructors[Micro.SPRITE_TORNADO] = Micro.TornadoSprite;
constructors[Micro.SPRITE_EXPLOSION] = Micro.ExplosionSprite;
// return SpriteManager;
//});
|
import { Address } from '@graphprotocol/graph-ts'
import {
DistributedReward,
RemovedFundManager,
Whitelisted,
} from '../../generated/RewardsDistributor/RewardsDistributor'
import {
StakingRewards as StakingRewardsTemplate,
StakingRewardsWithPlatformToken as StakingRewardsWithPlatformTokenTemplate,
} from '../../generated/templates'
import { StakingRewards } from '../../generated/RewardsDistributor/StakingRewards'
import { StakingRewardsWithPlatformToken } from '../../generated/RewardsDistributor/StakingRewardsWithPlatformToken'
import {
RewardsDistributor,
StakingRewardsContract,
} from '../../generated/schema'
import { getOrCreateToken } from '../models/Token'
import { getOrCreateStakingRewardsContract } from '../models/StakingRewardsContract'
import { StakingRewardsContractType } from '../enums'
/**
 * Loads the RewardsDistributor entity for the given address, creating and
 * persisting an empty one on first sight.
 */
function getOrCreateRewardsDistributor(address: Address): RewardsDistributor {
  let id = address.toHexString()

  let rewardsDistributor = RewardsDistributor.load(id)
  if (rewardsDistributor == null) {
    rewardsDistributor = new RewardsDistributor(id)
    rewardsDistributor.fundManagers = []
    rewardsDistributor.save()
  }

  return rewardsDistributor as RewardsDistributor
}
/**
 * Removes a fund manager from the distributor's `fundManagers` list.
 */
export function handleRemovedFundManager(event: RemovedFundManager): void {
  let rewardsDistributor = getOrCreateRewardsDistributor(event.address)

  // BUG FIX: use `!=` (graph-ts value comparison) instead of `!==`. In
  // AssemblyScript the strict operators compare references and cannot be
  // overloaded, so the previous filter compared object identity and never
  // actually removed the manager.
  rewardsDistributor.fundManagers = rewardsDistributor.fundManagers.filter(
    (_managerId) => _managerId != event.params._address,
  )

  rewardsDistributor.save()
}
/**
 * Adds a newly whitelisted fund manager to the distributor's list.
 */
export function handleWhitelisted(event: Whitelisted): void {
  let rewardsDistributor = getOrCreateRewardsDistributor(event.address)

  // Append and reassign: entity array fields must be written back as a whole.
  let managers = rewardsDistributor.fundManagers
  managers.push(event.params._address)
  rewardsDistributor.fundManagers = managers

  rewardsDistributor.save()
}
/**
 * On reward distribution, starts tracking the recipient when it is one of the
 * known Earn pools and turns out to be a StakingRewards or
 * StakingRewardsWithPlatformToken contract. Contract type is probed by
 * calling type-specific view functions and checking for reverts.
 */
export function handleDistributedReward(event: DistributedReward): void {
  // The recipient may be a StakingRewards or StakingRewardsWithPlatformToken
  // contract, which should be tracked here.
  // Skip recipients that are already tracked.
  if (
    StakingRewardsContract.load(event.params.recipient.toHexString()) == null
  ) {
    let addr = event.address.toHexString()
    // Hard-coded allow-list of known Earn pool distributor addresses.
    // NOTE(review): the first address appears twice -- one entry is likely
    // redundant, or a different address was intended.
    let isEarnPool: boolean =
      addr == '0x0d4cd2c24a4c9cd31fcf0d3c4682d234d9f94be4' ||
      addr == '0x0d4cd2c24a4c9cd31fcf0d3c4682d234d9f94be4' ||
      addr == '0x881c72d1e6317f10a1cdcbe05040e7564e790c80' ||
      addr == '0x9b4aba35b35eee7481775ccb4055ce4e176c9a6f' ||
      addr == '0xe6e6e25efda5f69687aa9914f8d750c523a1d261' ||
      addr == '0xf7575d4d4db78f6ba43c734616c51e9fd4baa7fb'
    // Ignore non-Earn pools
    if (!isEarnPool) {
      return
    }
    // Try a function that only exists on the `StakingRewardsWithPlatformToken` contract
    {
      let contract = StakingRewardsWithPlatformToken.bind(
        event.params.recipient,
      )
      // A non-reverting platformToken() call identifies the contract type.
      if (!contract.try_platformToken().reverted) {
        // Track the contract and create the entity
        {
          StakingRewardsWithPlatformTokenTemplate.create(event.params.recipient)
          getOrCreateStakingRewardsContract(
            event.params.recipient,
            StakingRewardsContractType.STAKING_REWARDS_WITH_PLATFORM_TOKEN,
          )
        }
        // Create the staking token entity, but do not track it
        {
          let address = contract.stakingToken()
          getOrCreateToken(address)
        }
        // Create the platform token entity, but do not track it
        {
          let address = contract.platformToken()
          getOrCreateToken(address)
        }
        // Create the rewards token entity, but do not track it
        {
          let address = contract.rewardsToken()
          getOrCreateToken(address)
        }
        return
      }
    }
    // Try a function that exists on the `StakingRewards` contract
    let contract = StakingRewards.bind(event.params.recipient)
    if (!contract.try_rewardsToken().reverted) {
      // Track the contract and create the entity
      StakingRewardsTemplate.create(event.params.recipient)
      getOrCreateStakingRewardsContract(
        event.params.recipient,
        StakingRewardsContractType.STAKING_REWARDS,
      )
      // Create the staking token entity, but do not track it
      {
        let address = contract.stakingToken()
        getOrCreateToken(address)
      }
      // Create the rewards token entity, but do not track it
      {
        let address = contract.rewardsToken()
        getOrCreateToken(address)
      }
      return
    }
  }
}
|
// Authorization gate: a user may read a work only when they own it.
$gate->define('read-work', function ($user, $work) {
    $isOwner = $user->id === $work->user_id;

    return $isOwner;
});
<reponame>df-service-e2e-test/x_khu2_9th_stress_test_5<gh_stars>1-10
/*
* Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.nio;
import com.hazelcast.internal.serialization.InternalSerializationService;
import com.hazelcast.internal.serialization.SerializationServiceBuilder;
import com.hazelcast.internal.serialization.impl.DefaultSerializationServiceBuilder;
import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.nio.serialization.PortableFactory;
import com.hazelcast.nio.serialization.SerializationConcurrencyTest;
import com.hazelcast.spi.serialization.SerializationService;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import static com.hazelcast.nio.serialization.SerializationConcurrencyTest.FACTORY_ID;
import static com.hazelcast.nio.serialization.SerializationConcurrencyTest.Person;
import static com.hazelcast.nio.serialization.SerializationConcurrencyTest.PortableAddress;
import static com.hazelcast.nio.serialization.SerializationConcurrencyTest.PortablePerson;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Unit test that verifies that a packet can safely be stored in a byte-buffer and converted back
* again into a packet.
*/
@RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class PacketIOHelperTest extends HazelcastTestSupport {
    // Writer/reader pair under test. Both are stateful across partial
    // write/read calls, so fresh instances are created for each test.
    private PacketIOHelper packetWriter;
    private PacketIOHelper packetReader;
    // Fixture serialized via plain serialization.
    private final Person person
            = new Person(111, 123L, 89.56d, "test-person", new SerializationConcurrencyTest.Address("street", 987));
    // Fixture serialized via Portable serialization.
    private final PortablePerson portablePerson = new PortablePerson(222, 456L, "portable-person",
            new PortableAddress("street", 567));
    @Before
    public void before() {
        packetWriter = new PacketIOHelper();
        packetReader = new PacketIOHelper();
    }
    // Builds a serialization service that can create the Portable fixture
    // classes (class id 1 -> PortablePerson, 2 -> PortableAddress).
    private SerializationServiceBuilder createSerializationServiceBuilder() {
        PortableFactory portableFactory = new PortableFactory() {
            @Override
            public Portable create(int classId) {
                switch (classId) {
                    case 1:
                        return new PortablePerson();
                    case 2:
                        return new PortableAddress();
                    default:
                        throw new IllegalArgumentException();
                }
            }
        };
        return new DefaultSerializationServiceBuilder().addPortableFactory(FACTORY_ID, portableFactory);
    }
    @Test
    public void testPacketWriteRead() throws IOException {
        testPacketWriteRead(person);
    }
    @Test
    public void testPacketWriteRead_usingPortable() throws IOException {
        testPacketWriteRead(portablePerson);
    }
    // Round-trips `originalObject` through packet write/read and verifies the
    // deserialized object (via a second, independent serialization service)
    // equals the original.
    private void testPacketWriteRead(Object originalObject) throws IOException {
        InternalSerializationService ss = createSerializationServiceBuilder().build();
        byte[] originalPayload = ss.toBytes(originalObject);
        // Buffer is twice the payload so the write completes in one call.
        ByteBuffer buffer = ByteBuffer.allocate(originalPayload.length * 2);
        Packet originalPacket = new Packet(originalPayload);
        assertTrue(packetWriter.writeTo(originalPacket, buffer));
        buffer.flip();
        SerializationService ss2 = createSerializationServiceBuilder().build();
        Packet clonedPacket = packetReader.readFrom(buffer);
        assertNotNull(clonedPacket);
        Object clonedObject = ss2.toObject(clonedPacket);
        assertEquals(originalPacket, clonedPacket);
        assertEquals(originalObject, clonedObject);
    }
    /**
     * Checks if the packet can deal with a buffer that is very small, but the data is very large, which
     * needs repeated calls to {@link PacketIOHelper#writeTo(Packet, ByteBuffer)} and
     * {@link PacketIOHelper#readFrom(ByteBuffer)}.
     */
    @Test
    public void largeValue() {
        Packet originalPacket = new Packet(generateRandomString(100000).getBytes());
        Packet clonedPacket;
        // Deliberately tiny buffer: forces many partial write/read iterations.
        ByteBuffer bb = ByteBuffer.allocate(20);
        boolean writeCompleted;
        do {
            writeCompleted = packetWriter.writeTo(originalPacket, bb);
            bb.flip();
            clonedPacket = packetReader.readFrom(bb);
            bb.clear();
        } while (!writeCompleted);
        assertNotNull(clonedPacket);
        assertPacketEquals(originalPacket, clonedPacket);
    }
    // Streams 1000 random-sized packets through the same tiny buffer,
    // verifying each one survives the partial write/read cycle.
    @Test
    public void lotsOfPackets() {
        List<Packet> originalPackets = new LinkedList<Packet>();
        Random random = new Random();
        for (int k = 0; k < 1000; k++) {
            byte[] bytes = generateRandomString(random.nextInt(1000) + 8).getBytes();
            Packet originalPacket = new Packet(bytes);
            originalPackets.add(originalPacket);
        }
        ByteBuffer bb = ByteBuffer.allocate(20);
        for (Packet originalPacket : originalPackets) {
            Packet clonedPacket;
            boolean writeCompleted;
            do {
                writeCompleted = packetWriter.writeTo(originalPacket, bb);
                bb.flip();
                clonedPacket = packetReader.readFrom(bb);
                bb.clear();
            } while (!writeCompleted);
            assertNotNull(clonedPacket);
            assertPacketEquals(originalPacket, clonedPacket);
        }
    }
    /**
     * Verifies that writing a Packet to a ByteBuffer and then reading it from the ByteBuffer, gives the same Packet (content).
     */
    @Test
    public void cloningOfPacket() {
        Packet originalPacket = new Packet("foobarbaz".getBytes());
        ByteBuffer bb = ByteBuffer.allocate(100);
        boolean written = packetWriter.writeTo(originalPacket, bb);
        assertTrue(written);
        bb.flip();
        Packet clonedPacket = packetReader.readFrom(bb);
        assertNotNull(clonedPacket);
        assertPacketEquals(originalPacket, clonedPacket);
    }
    // Asserts two packets carry the same flags and payload bytes.
    private static void assertPacketEquals(Packet originalPacket, Packet clonedPacket) {
        assertEquals(originalPacket.getFlags(), clonedPacket.getFlags());
        assertArrayEquals(originalPacket.toByteArray(), clonedPacket.toByteArray());
    }
}
|
class ForemanSubnetModule(ParametersMixin, ForemanTaxonomicEntityAnsibleModule):
    """Ansible module wrapper for managing Foreman subnets.

    NOTE(review): the methods below are placeholders (`pass` bodies) and the
    `except` blocks silently discard every exception -- confirm this is
    intentional scaffolding before relying on this class.
    """

    def __init__(self, parameters):
        """Initialize the subnet module, delegating to the mixin/base classes."""
        super().__init__(parameters)
        # Initialize the subnet module with the given parameters
        # Additional initialization code as per requirements
    def configure_subnet(self, subnet_id, subnet_config):
        """Update the configuration of an existing subnet (not yet implemented)."""
        try:
            # Perform subnet configuration update in the network management system
            # Use subnet_id and subnet_config to update the subnet configuration
            # Handle any potential errors or exceptions
            pass
        except Exception as e:
            # Handle and log the exception
            # NOTE(review): the exception is currently discarded -- add logging here.
            pass
    def delete_subnet(self, subnet_id):
        """Remove a subnet from the network management system (not yet implemented)."""
        try:
            # Delete the subnet configuration from the network management system using subnet_id
            # Handle any potential errors or exceptions
            pass
        except Exception as e:
            # Handle and log the exception
            # NOTE(review): the exception is currently discarded -- add logging here.
            pass
package com.goranzuri.anime.anidb.resolve.service;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
/**
* Created by GZuri on 11/20/2016.
*/
public class AnidbService {

    /**
     * Queries the anisearch.outrance.pl AniDB title-search API for the given
     * anime name and returns the raw XML response stream.
     *
     * @param animeName search term; URL-encoded before being sent
     * @return the open response body stream (caller reads and closes it)
     * @throws IOException if the connection cannot be opened
     */
    private InputStream getResultsFromApi(String animeName) throws IOException {
        // (Removed an unused StringBuilder that was never written to or read.)
        URL url = new URL("http://anisearch.outrance.pl/index.php?task=search&langs=en&query=" + URLEncoder.encode(animeName, "UTF-8"));
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        return conn.getInputStream();
    }

    /**
     * Parses the XML search results into a map of anime title -> AniDB id.
     * Only English titles of type "official", "main" or "syn" are kept;
     * the first id seen for a given title wins.
     *
     * @param xmlResult XML stream as returned by {@link #getResultsFromApi}
     * @return map from title text to its AniDB anime id
     */
    private Map<String, Integer> parseXmlResults(InputStream xmlResult) throws ParserConfigurationException, IOException, SAXException {
        Map<String, Integer> animeList = new HashMap<String, Integer>();
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        //Get the DOM Builder
        DocumentBuilder builder = factory.newDocumentBuilder();
        //Load and Parse the XML document
        //document contains the complete XML as a Tree.
        Document document = builder.parse(xmlResult);
        //Iterating through the nodes and extracting the data.
        NodeList nodeList = document.getDocumentElement().getChildNodes();
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);
            if (node instanceof Element) {
                NodeList childNodes = node.getChildNodes();
                for (int j = 0; j < childNodes.getLength(); j++) {
                    Node childNode = childNodes.item(j);
                    // BUG FIX: skip text/whitespace nodes. getAttributes()
                    // returns null for non-element nodes, so the original code
                    // threw a NullPointerException on any indented XML.
                    if (!(childNode instanceof Element)) {
                        continue;
                    }
                    // Guard against titles missing the expected attributes.
                    Node lang = childNode.getAttributes().getNamedItem("lang");
                    Node type = childNode.getAttributes().getNamedItem("type");
                    if (lang == null || type == null) {
                        continue;
                    }
                    if (lang.getNodeValue().equals("en")
                            && (type.getNodeValue().equals("official")
                            || type.getNodeValue().equals("main")
                            || type.getNodeValue().equals("syn"))) {
                        Integer aid = Integer.parseInt(node.getAttributes().getNamedItem("aid").getNodeValue());
                        // First occurrence of a title wins.
                        if (!animeList.containsKey(childNode.getTextContent()))
                            animeList.put(childNode.getTextContent(), aid);
                    }
                }
            }
        }
        return animeList;
    }

    /**
     * Returns candidate anime for the given name as a map of title -> AniDB id.
     */
    public Map<String, Integer> getAnimeCandidates(String animeName) throws IOException, ParserConfigurationException, SAXException {
        InputStream serviceResult = getResultsFromApi(animeName);
        return parseXmlResults(serviceResult);
    }
}
|
// Page bootstrap: bind the service-request form, load the first page of
// existing requests, and hook the pagination widget to the loader.
$(document).ready(function () {
    // .off() clears any previously-bound submit handlers before re-binding.
    $('#serviceForm').off().submit(e => {
        e.preventDefault();
        createServiceRequest();
    });
    getServiceRequest();
    pagination.getFn(getServiceRequest);
});
// Search keyword (not used anywhere in this file's visible code).
let keyword = '';
// Submits the service-request form (including its file input) to the server
// and redirects to the list view on success.
async function createServiceRequest() {
    const formData = new FormData($('#serviceForm')[0]);
    const requestUrl = base_url + 'Welcome/AddServiceRequest';

    let response = await fetch(requestUrl, {
        method: 'POST',
        body: formData
    });
    response = await response.json();
    console.log(response);

    if (response.status == 200) {
        alert('Service Request Created');
        window.location.href = base_url + 'Welcome/ServiceRequestView';
    } else {
        alert('Server Error');
    }
}
// Fetches the current page of service requests and renders them into the
// #serviceRequest table, updating the pagination widget from the response.
async function getServiceRequest() {
    pagination.paginationFn();
    const URL = base_url + 'Welcome/GetServiceRequest/' + pagination.activeIndex;
    let response = await fetch(URL);
    response = await response.json();
    if (response.status == 200 || response.status == 404) {
        pagination.total = response.total;
        pagination.limitPerPage = response.limit;
        pagination.activeIndex = response.page;
        pagination.paginationFn();
    }
    if (response.status == 200) {
        let tableHTML = '';
        // BUG FIX: `value` was assigned without a declaration, creating an
        // implicit global (and a ReferenceError in strict mode).
        for (const value of response.data) {
            const { ServiceRequestID, createdAt, description, requestType, servicePhoto, status, subject } = value;
            const isVerified = status == 1 ? '<span>Pending</span>' : '<span>Completed</span>';
            // NOTE(review): these fields are interpolated into HTML without
            // escaping; if any are user-supplied this is an XSS vector.
            tableHTML += '<tr>' +
                '<td>' + ServiceRequestID + '</td>' +
                '<td>' + requestType + '</td>' +
                '<td>' + subject + '</td>' +
                '<td>' + description + '</td>' +
                '<td><img src="' + servicePhoto + '" height="50px"></td>' +
                '<td>' + fDate(createdAt) + '</td>' +
                '<td><button class="btn btn-primary">' + isVerified + '</button></td>' +
                '</tr>';
        }
        $('#serviceRequest').html(tableHTML);
    }
}
<gh_stars>0
"""NVIDIA Autonomous Driving Dataset.
Written by <NAME>
BoxParser written by <NAME>
Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
This is v2, removing to rgb because the data is rgb
"""
from copy import deepcopy
import os
from pathlib import Path
from PIL import Image
# import sys
# sys.path.append("/home/shuxuang/debug/ai-infra/moduluspy")
# sys.path.append("/home/shuxuang/nvcode/ai-infra/")
from modulus.multi_task_loader.dataset import (
LabelDataType,
SqliteDataset,
)
# from modulus.multi_task_loader.dataset.sqlite_dataset import (
# LabelDataType,
# SqliteDataset,
# )
from modulus.multi_task_loader.image_io import read_image
from modulus.multi_task_loader.task_parsers import apply_single_stm
import numpy as np
import torch
import torch.utils.data as data
import random
# import matplotlib.pyplot as plt ## add from Ismail
# import sys ## add from Ismail
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import sandbox.williamz.detr.datasets.transforms as T
from sandbox.williamz.detr.datasets.nvidia_utils import ConvertCocoPolysToMask
import IPython
"""
NVIDIA_CLASSES = (
"rider",
"road_sign",
"traffic_light",
"automobile",
"heavy_truck",
"person",
"hazard",
"vehicle_group",
"bicycle",
"motorcycle",
"stroller",
"person_group",
"misc_vehicle",
"unclassifiable_vehicle",
"headlight",
"other_animal",
"cycle_group",
"taillight",
)"""
# Maps raw label names from the NVIDIA dataset (including spelling variants
# with spaces vs. underscores) onto the reduced canonical class set below.
NVIDIA_CLASSES_MAPPED = {
    "automobile": "car",
    "bicycle": "bicycle",
    "heavy_truck": "car",
    "heavy truck": "car",
    "motorcycle": "bicycle",
    "person": "person",
    "person group": "person",
    "person_group": "person",
    "rider": "person",
    "road sign": "road_sign",
    "road_sign": "road_sign",
    "traffic light": "traffic_light",
    "traffic_light": "traffic_light",
    "unclassifiable vehicle": "car",
    "unclassifiable_vehicle": "car",
    "vehicle_group": "car",
    "stroller": "stroller",
    "hazard": "hazard",
    "headlight": "headlight",
    "misc_vehicle": "car",
    "other_animal": "other_animal",
    "cycle_group": "bicycle",
    "taillight": "taillight",
}
# Canonical class names; the index in this tuple is the numeric class id.
NVIDIA_CLASSES = (
    "car", # 0
    "bicycle", # 1
    "person", # 2
    "road_sign", # 3
    "traffic_light", # 4
    "stroller", # 5
    "hazard", # 6
    "headlight", # 7
    "other_animal", # 8
    "taillight", # 9
)
# 5 classes: 0, 1, 2, 3, 4
# Lookup table: class name -> integer class index.
class_to_ind = dict(zip(NVIDIA_CLASSES, range(len(NVIDIA_CLASSES))))
class BoxParser:
    """Parses 2D bounding-box features out of SqliteDataset rows."""
    def __init__(self):
        """Constructor."""
        # We will filter out rows that are not of this type, just to be sure,
        # Note that something similar could also be achieved by supplying
        # the appropriate SQL clause to `feature_conditions` when instantiating
        # the `SqliteDataset`.
        self._label_data_type = LabelDataType.from_string("SHAPE2D:BOX2D")
    def __call__(self, rows, frame, *args, **kwargs):
        """Call method.
        This is what `SqliteDataset.__getitem__` will be calling.
        Arguments:
            rows (list): List of `Feature` tuples.
            frame (Frame): `Frame` tuple.
        Returns:
            image (np.array): CHW image.
            boxes (np.array): `np.float32` array containing the vertices of the
                bounding boxes. The order is (left, top, right, bottom).
            classes (list): List of class names (str).
        """
        # First, let's weed out potential uninteresting labels (as far as this parser
        # is concerned).
        rows = [
            row for row in rows if row.label_data_type == self._label_data_type
        ]
        # Flatten every feature's vertex list into one float32 array.
        # NOTE(review): downstream (NVIDIADetection.pull_image_and_anno) reads
        # this as 2-D with two (x, y) rows per box -- presumably each vertex is
        # an (x, y) pair; confirm against the Feature schema.
        boxes = np.array(
            [coord for feature in rows for coord in feature.data["vertices"]],
            dtype=np.float32,
        )
        # Normalize label names for lookup in NVIDIA_CLASSES_MAPPED.
        class_names = [feature.label_name.strip().lower() for feature in rows]
        # This is because labels are in the original labeling space (typically 1920 x 1208),
        # but the export is typically a half res export (960 x 604) or a (960 x 604) center
        # crop.
        boxes = apply_single_stm(vertices=boxes, stm=frame.label_stm)
        image = read_image(
            image_path=frame.path, height=frame.original_height, width=frame.original_width
        )
        return image, boxes, class_names
class NVIDIADetection(data.Dataset):
    """NVIDIA Detection Dataset Object.
    Input is image, target is annotation.
    Arguments:
        image_sets (str): Directory containing the sqlite dataset file.
        transform (Transform): The augmentation applied to (image, target).
        name (str): Dataset name, default "NVIDIA".
        image_sets2 (str): Directory containing the exported frames (train mode).
        mode (str): 'train' or 'test'; selects which sqlite file is opened.
        camera (str): 'full' for every camera, 'forward_center' to restrict
            frames to the forward-center camera.
    """
    def __init__(
        self,
        image_sets=None,
        transform=None,
        name="NVIDIA",
        image_sets2=None,
        mode='train',
        camera='full'
    ):
        """Initialize the class."""
        parser = BoxParser()
        # Open the appropriate sqlite export; the four branches differ only in
        # the sqlite filename (test vs. train) and an optional camera filter.
        if mode == 'test':
            if camera == 'full':
                print("camera view full")
                self.sqlite_dataset = SqliteDataset(
                    filename=os.path.join(image_sets, "dataset.sqlite"),
                    # when change to dataset-v9.sqlite, get the error: TypeError: 'InterchangeLabelData' object is not subscriptable
                    export_format_name="rgb_half-xavierisp",
                    export_path=os.path.join(image_sets, "frames"),
                    feature_parser=parser,
                    exclude_frames="UNLABELED",
                    feature_conditions=["features.label_data_type = 'BOX2D'"],
                )
            elif camera== 'forward_center':
                print("camera view forward_center")
                self.sqlite_dataset = SqliteDataset(
                    filename=os.path.join(image_sets, "dataset.sqlite"),
                    export_format_name="rgb_half-xavierisp",
                    export_path=os.path.join(image_sets, "frames"),
                    feature_parser=parser,
                    exclude_frames="UNLABELED",
                    feature_conditions=["features.label_data_type = 'BOX2D'"],
                    frame_conditions=["sequences.camera_location='forward center'"]
                )
        else:
            if camera == 'full':
                print("camera view full")
                self.sqlite_dataset = SqliteDataset(
                    # filename=os.path.join(image_sets, "export.sqlite"),
                    filename=os.path.join(image_sets, "dataset_300k.sqlite"),
                    export_format_name="rgb_half-xavierisp",
                    export_path=os.path.join(image_sets2, "frames"),
                    feature_parser=parser,
                    exclude_frames="UNLABELED",
                    feature_conditions=["features.label_data_type = 'BOX2D'"],
                )
            elif camera== 'forward_center':
                print("camera view forward_center")
                self.sqlite_dataset = SqliteDataset(
                    # filename=os.path.join(image_sets, "export.sqlite"),
                    filename=os.path.join(image_sets, "dataset_300k.sqlite"),
                    export_format_name="rgb_half-xavierisp",
                    export_path=os.path.join(image_sets2, "frames"),
                    feature_parser=parser,
                    exclude_frames="UNLABELED",
                    feature_conditions=["features.label_data_type = 'BOX2D'"],
                    frame_conditions=["sequences.camera_location='forward center'"]
                )
        """Constructor."""
        self.transform = transform
        self.prepare = ConvertCocoPolysToMask()
        self.name = name
        self.class_to_ind = dict(zip(NVIDIA_CLASSES, range(len(NVIDIA_CLASSES))))
    def __getitem__(self, index):
        """Get an item in format image, label, semi (supervised, pseudo_labeled or unsupervised."""
        # Always uses pull_item's default mode='train' (two return values).
        img, target = self.pull_item(index)
        return img, target
    def __len__(self):
        """Return the number of items in the dataset."""
        return len(self.sqlite_dataset)
    def pull_item(self, index, mode='train'):
        """Pull item.

        Returns (img, target) in 'train' mode; 'test' and 'vis' modes
        additionally return the raw (pre-COCO-conversion) annotations.
        """
        img, ori_target = self.pull_image_and_anno(index)
        # print(img.shape) # (3, 604, 960)
        # print(target)
        # turn img from np.ndarray to PIL
        img = img.transpose(1,2,0) # (604, 960, 3)
        orig_img = Image.fromarray(np.uint8(img*255), mode='RGB') # is rgb already
        # to rgb # don't have to
        # img = img[:, :, (2, 1, 0)]
        # img = Image.fromarray(np.uint8(img*255), mode='RGB')
        # img = Image.fromarray(np.uint8(img*255)).convert('RGB') # <PIL.Image.Image image mode=RGB size=960x604 at 0x7FB0479B4FD0>
        # print(img)
        # IPython.embed()
        # Keep only the 5 main classes (ids 0..4: car, bicycle, person,
        # road_sign, traffic_light); column 4 holds the class id.
        select_bbox = ori_target[:, 4] < 5
        anno = {'bbox': ori_target[select_bbox, :4],
                'category_id': ori_target[select_bbox, 4]} # change the before 3 target to ori_target
        ori_target_ = ori_target[select_bbox]
        image_id = index
        target = {'image_id': image_id, 'annotations': anno}
        # print(target)
        img, target = self.prepare(orig_img, target) #orig_img
        # print(img)
        # print(target)
        # img = img.transpose(1, 2, 0)
        # height, width, _ = img.shape
        if self.transform is not None:
            img, target = self.transform(img, target)
        if mode == 'train':
            # print(img.size())
            return img, target
        elif mode =='test':
            # print(img.size())
            return img, ori_target_, target
        elif mode =='vis':
            print(img.size())
            return orig_img, img, ori_target_, target
    def pull_image_and_anno(self, index):
        """Pull image and its annotation.

        Returns the CHW image and an (n_boxes, 5) array of
        (x1, y1, x2, y2, class_id) rows with corners sorted so that
        (x1, y1) is the min corner and (x2, y2) the max corner.
        """
        # Get an image, and its accompanying raster.
        image, boxes, class_names = self.sqlite_dataset[index]
        assert boxes.size // 4 == len(class_names)
        # insert the boxes into an array of boxes with coordinates (x1, y1, x2, y2, class)
        # `boxes` carries two (x, y) vertex rows per box; pair them up.
        len_boxes = len(boxes)
        new_boxes = np.zeros((len_boxes//2, 5))
        for i in range(0, len_boxes, 2):
            new_boxes[i//2, 0] = int(boxes[i, 0])
            new_boxes[i//2, 1] = int(boxes[i, 1])
            new_boxes[i//2, 2] = int(boxes[i+1, 0])
            new_boxes[i//2, 3] = int(boxes[i+1, 1])
        # adjusted_boxes = np.zeros_like(new_boxes)
        # Sort the two vertices into (min, max) corner order per box.
        adjusted_boxes = deepcopy(new_boxes)
        adjusted_boxes[:, 0] = np.minimum(new_boxes[:, 0], new_boxes[:, 2])
        adjusted_boxes[:, 1] = np.minimum(new_boxes[:, 1], new_boxes[:, 3])
        adjusted_boxes[:, 2] = np.maximum(new_boxes[:, 0], new_boxes[:, 2])
        adjusted_boxes[:, 3] = np.maximum(new_boxes[:, 1], new_boxes[:, 3])
        # Map raw label names to canonical class ids.
        for i, cl_name in enumerate(class_names):
            # adjusted_boxes[i, 4] = float(class_to_ind[cl_name])
            adjusted_boxes[i, 4] = float(class_to_ind[NVIDIA_CLASSES_MAPPED[cl_name]])
        return image, adjusted_boxes
    def target_transform(self, target, width, height):
        """Divide the coordinates by width and height."""
        target[:, 0] /= width
        target[:, 2] /= width
        target[:, 1] /= height
        target[:, 3] /= height
        return target
def make_coco_transforms(image_set):
    """Build the transform pipeline for the NV dataset.

    'train' adds flip / multi-scale resize / crop augmentation followed by
    normalization; 'test' only resizes to a single scale; any other value
    raises ValueError.  `T` is the project's transforms module (defined
    elsewhere in this file's imports).
    """
    # MEANS = (104, 117, 123) as bgr
    normalize = T.Compose([
        T.ToTensor(),
        # T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        # mean=(104, 117, 123) ==> (0.40784313725490196 0.4588235294117647 0.4823529411764706) transpose(2,1,0) = [0.482, 0.459, 0.408]
        # T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) # coco mean and std this shuold be in bgr
        ## before for 150 epochs
        # T.Normalize([0.482, 0.459, 0.408], [1., 1., 1.]) # mean=(104, 117, 123) for not for rgb, bgr instead, after transpose [0.482, 0.459, 0.408]
        # New implement from 12.11
        # This is rgb already # mean=(104, 117, 123) ==> (0.40784313725490196 0.4588235294117647 0.4823529411764706)
        T.Normalize([0.408, 0.459, 0.482], [1., 1., 1.])
    ])
    print('NVdata Norm rgb: [0.408, 0.459, 0.482], [1., 1., 1.]')
    scales = [480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800]
    # scales = [480, 512, 544, 576, 604, 640, 672, 704, 736, 768, 800]
    if image_set == 'train':
        return T.Compose([
            T.RandomHorizontalFlip(),
            # Either a plain multi-scale resize, or resize+crop+resize.
            T.RandomSelect(
                T.RandomResize(scales, max_size=1333),
                T.Compose([
                    T.RandomResize([400, 500, 600]),
                    T.RandomSizeCrop(384, 600),
                    T.RandomResize(scales, max_size=1333),
                ])
            ),
            normalize,
        ])
    if image_set == 'test':
        # print("604 960")
        print("608 966")
        # print("800 1333")
        return T.Compose([
            T.RandomResize([608], max_size=980), #800 1333, 604 960, not604, should be 608*966 604*960
            # T.RandomResize([604], max_size=960),
            # T.RandomResize([800], max_size=1333),
            normalize,
        ])
    raise ValueError(f'unknown {image_set}')
def make_coco_transforms_v2(image_set):
    """Variant of make_coco_transforms that pins a single target size.

    Intended to keep one scale (advertised as 608x960).
    NOTE(review): the 'test' branch actually resizes to (604, 960), not
    (608, 960) as the comment above suggests -- confirm which is intended.
    """
    ### keep only one scale as 960*608
    normalize = T.Compose([
        T.ToTensor(),
        T.Normalize([0.408, 0.459, 0.482], [1., 1., 1.])
    ])
    print('NVdata Norm rgb: [0.408, 0.459, 0.482], [1., 1., 1.]')
    # scales = [608]
    re_size = (608, 960)
    if image_set == 'train':
        print(re_size)
        return T.Compose([
            T.RandomHorizontalFlip(),
            T.RandomSelect(
                T.RandomResize(re_size),
                T.Compose([
                    T.RandomResize([400, 500, 600]),
                    T.RandomSizeCrop(384, 600),
                    T.RandomResize(re_size),
                ])
            ),
            normalize,
        ])
    if image_set == 'test':
        # print("608 960")
        # print("800 1333")
        return T.Compose([
            # T.RandomResize([608], max_size=980), #800 1333, 604 960, not604, should be 608*966 604*960
            T.RandomResize((604, 960)),
            # T.RandomResize([800], max_size=1333),
            normalize,
        ])
    raise ValueError(f'unknown {image_set}')
def build_nvdataset(dataset_root, mode, camera):
    """Construct an NVIDIADetection dataset.

    `dataset_root` is a pair of image-set roots, `mode` selects both the
    transform pipeline and the dataset split, `camera` is forwarded to the
    dataset unchanged.
    """
    # root0 = Path(dataset_root[0])
    # assert root0.exists(), f'provided NVData path {root0} does not exist'
    # root1 = Path(dataset_root[1])
    # assert root1.exists(), f'provided NVData path {root1} does not exist'
    dataset = NVIDIADetection(
        # supervised_indices=None,
        image_sets=dataset_root[0],
        # transform=SSDAugmentation(data_config["min_dim"], MEANS),
        transform=make_coco_transforms(mode),
        # transform=make_coco_transforms_v2(mode),
        image_sets2=dataset_root[1],
        # mode=mode,
        # change mode to test when using the new dataset since it has same folder as test
        mode=mode,
        camera=camera
    )
    return dataset
def build_nvdataset_large(dataset_root, mode, camera):
    """Construct an NVIDIADetection dataset for the 'large' data layout.

    Identical to build_nvdataset except that the dataset split is pinned
    to 'test' (the large dataset shares the test folder layout -- see the
    inline comment), while the transform pipeline still follows `mode`.
    """
    # root0 = Path(dataset_root[0])
    # assert root0.exists(), f'provided NVData path {root0} does not exist'
    # root1 = Path(dataset_root[1])
    # assert root1.exists(), f'provided NVData path {root1} does not exist'
    dataset = NVIDIADetection(
        # supervised_indices=None,
        image_sets=dataset_root[0],
        # transform=SSDAugmentation(data_config["min_dim"], MEANS),
        transform=make_coco_transforms(mode),
        # transform=make_coco_transforms_v2(mode),
        image_sets2=dataset_root[1],
        # mode=mode,
        # change mode to test when using the new dataset since it has same folder as test
        mode='test',
        camera=camera
    )
    return dataset
#!/usr/bin/env bash
# Run the OCCAM "slash" specializer over the h264ref bitcode, producing a
# specialized binary `h264ref_slashed`. Specialization policies can be
# tuned via the flags documented in usage().
# Make sure we exit if there is a failure
set -e
function usage() {
    echo "Usage: $0 [--disable-inlining] [--ipdse] [--ai-dce] [--devirt VAL1] [--inter-spec VAL2] [--intra-spec VAL2] [--help]"
    echo "  VAL1=none|dsa|cha_dsa"
    echo "  VAL2=none|aggressive|nonrec-aggressive"
}
#default values
INTER_SPEC="none"
INTRA_SPEC="none"
DEVIRT="dsa"
OPT_OPTIONS=""
# Parse flags; unrecognized arguments are saved and restored as
# positional parameters after the loop.
POSITIONAL=()
while [[ $# -gt 0 ]]
do
    key="$1"
    case $key in
        -inter-spec|--inter-spec)
        INTER_SPEC="$2"
        shift # past argument
        shift # past value
        ;;
        -intra-spec|--intra-spec)
        INTRA_SPEC="$2"
        shift # past argument
        shift # past value
        ;;
        -disable-inlining|--disable-inlining)
        OPT_OPTIONS="${OPT_OPTIONS} --disable-inlining"
        shift # past argument
        ;;
        -ipdse|--ipdse)
        OPT_OPTIONS="${OPT_OPTIONS} --ipdse"
        shift # past argument
        ;;
        -ai-dce|--ai-dce)
        OPT_OPTIONS="${OPT_OPTIONS} --ai-dce"
        shift # past argument
        ;;
        -devirt|--devirt)
        DEVIRT="$2"
        shift # past argument
        shift # past value
        ;;
        -help|--help)
        usage
        exit 0
        ;;
        *) # unknown option
        POSITIONAL+=("$1") # save it in an array for later
        shift # past argument
        ;;
    esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
#check that the require dependencies are built
declare -a bitcode=("h264ref.bc")
for bc in "${bitcode[@]}"
do
    # -a tests for file existence (archaic synonym of -e).
    if [ -a "$bc" ]
    then
        echo "Found $bc"
    else
        echo "Error: $bc not found. Try \"make\"."
        exit 1
    fi
done
export OCCAM_LOGLEVEL=INFO
export OCCAM_LOGFILE=${PWD}/slash/occam.log
# Start from a clean work directory and output binary.
rm -rf slash h264ref_slashed
# Build the manifest file
cat > h264ref.manifest <<EOF
{ "main" : "h264ref.bc"
, "binary" : "h264ref_slashed"
, "modules" : []
, "native_libs" : []
, "name" : "h264ref"
}
EOF
# Run OCCAM
# Keep a copy of the unspecialized binary for comparison.
cp ./h264ref ./h264ref_orig
SLASH_OPTS="--inter-spec-policy=${INTER_SPEC} --intra-spec-policy=${INTRA_SPEC} --devirt=${DEVIRT} --no-strip --stats $OPT_OPTIONS"
echo "============================================================"
echo "Running with options ${SLASH_OPTS}"
echo "============================================================"
slash ${SLASH_OPTS} --work-dir=slash h264ref.manifest
cp ./slash/h264ref_slashed .
import 'materialize-css/dist/js/materialize';
import '../scss/dashboard.scss';
import './components';
|
!function() {
  // Test shim for navigator.mozIccManager: exposes two fake ICC cards
  // (ids 111 and 222). The second card is only registered when
  // window._shimDualSim is set before this script runs.

  // No-op logger; uncomment the dump() call to trace shim activity.
  function debug(str) {
    //dump('mozIccManager: ' + str + '\n');
  }

  var iccs = {
    111: {
      _retryCount: 3,
      cardState: 'ready',
      iccInfo: {
        iccid: true,
        msisdn: '5555555555'
      },
      setCardLock: function() {
        debug('setCardLock');
      },
      getCardLock: FFOS_RUNTIME.domRequest({ enabled: false }),
      // Resolves asynchronously with the remaining unlock attempts.
      getCardLockRetryCount: function(type) {
        var request = {};
        setTimeout(function() {
          request.result = {
            retryCount: this._retryCount
          };
          request.onsuccess && request.onsuccess();
        }.bind(this));
        return request;
      },
      unlockCardLock: function() {
        // simulate invalid input
        if (this._retryCount > 1) {
          var request = {};
          setTimeout(function() {
            request.error = {
              retryCount: --this._retryCount,
              lockType: 'pin'
            };
            request.onerror && request.onerror();
          }.bind(this), 200);
          return request;
        }
        this.cardState = 'ready';
        return FFOS_RUNTIME.domRequest()();
      },
      addEventListener: function() {
        debug('addEventListener');
      },
      removeEventListener: function() {
        debug('removeEventListener');
      },
    },
    222: {
      _retryCount: 3,
      cardState: 'ready',
      iccInfo: {
        iccid: true
      },
      setCardLock: function() {
        debug('setCardLock');
      },
      getCardLock: FFOS_RUNTIME.domRequest({ enabled: false }),
      getCardLockRetryCount: function(type) {
        var request = {};
        setTimeout(function() {
          request.result = {
            retryCount: this._retryCount
          };
          request.onsuccess && request.onsuccess();
        }.bind(this));
        return request;
      },
      // Second card always unlocks immediately (no retry simulation).
      unlockCardLock: function() {
        this.cardState = 'ready';
        return FFOS_RUNTIME.domRequest()();
      },
      addEventListener: function() {
        debug('addEventListener');
      },
      removeEventListener: function() {
        debug('removeEventListener');
      },
    }
  };

  var iccManager = {
    addEventListener: function() {
      debug('addEventListener');
    },
    removeEventListener: function() {
      debug('removeEventListener');
    },
    getIccById: function(iccId) {
      return iccs[iccId];
    },
    iccIds: [111]
  };
  if (window._shimDualSim) {
    // BUG FIX: was `iccManager.iccId.push(222)` -- `iccId` is undefined on
    // this object (the array is named `iccIds`), so the dual-SIM path
    // threw a TypeError instead of registering the second card.
    iccManager.iccIds.push(222);
  }
  FFOS_RUNTIME.makeNavigatorShim('mozIccManager', iccManager, true);
}();
|
# Package a macOS release of Onivim2: produce a .dmg and a .tar.gz in
# _publish/, both named after the current short commit id.
SHORT_COMMIT_ID=$(git rev-parse --short HEAD)
npm install -g appdmg
mkdir -p _publish
appdmg _release/appdmg.json _publish/Onivim2-$SHORT_COMMIT_ID.dmg
# -C: archive Onivim2.app relative to _release so the tarball root is the app bundle.
tar -C _release -cvzf _publish/Onivim2-$SHORT_COMMIT_ID-darwin.tar.gz Onivim2.app
package dijkstra;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import model.Grafo;
import model.Vertice;
/**
 * Singleton implementation of Dijkstra's shortest-path algorithm over a
 * {@link Grafo}. Typical use:
 * {@code Dijkstra.instance(g).de("A").ate("B").retornaCaminho()}.
 */
public class Dijkstra {

    /** Shared instance; its table is re-initialized on every instance() call. */
    private static Dijkstra dijkstra;

    private Grafo grafo;
    /** Algorithm table: one column per vertex, keyed by vertex label. */
    private Map<String, Coluna> colunas;
    /** Columns not yet finalized ("open"). */
    private Collection<Coluna> abertas;
    private Coluna inicio;
    private Coluna destino;

    /**
     * Returns the shared instance bound to {@code grafo}, with a freshly
     * initialized column table (previous runs' state is discarded).
     */
    public static Dijkstra instance(Grafo grafo) {
        if (dijkstra == null)
            dijkstra = new Dijkstra(grafo);
        dijkstra.grafo = grafo;
        dijkstra.inicializaColunas();
        return dijkstra;
    }

    private Dijkstra(Grafo grafo) {
        // Fix: keep the graph received at construction time. Previously the
        // argument was silently discarded; the field was only usable because
        // instance() happened to re-assign it right after construction.
        this.grafo = grafo;
    }

    /** Creates one open column per vertex of the graph. */
    private void inicializaColunas() {
        colunas = new HashMap<>();
        abertas = new LinkedList<Coluna>();
        for (Vertice vertice : grafo.getVertices()) {
            Coluna coluna = new Coluna(vertice);
            abertas.add(coluna);
            colunas.put(vertice.getRotulo(), coluna);
        }
    }

    /** Sets the start vertex (estimate 0) by its label. */
    public Dijkstra de(String inicio) {
        this.inicio = colunas.get(inicio);
        if (this.inicio == null)
            throw new NullPointerException("Origem não pertencente ao grafo.");
        this.inicio.estimativa = 0;
        return this;
    }

    /** Sets the destination vertex by its label. */
    public Dijkstra ate(String destino) {
        this.destino = colunas.get(destino);
        return this;
    }

    /** Runs the algorithm and returns the path from origin to destination. */
    public List<Vertice> retornaCaminho() {
        validaOrigemDestino();
        constroiCaminhoEnquantoVerticeAberto();
        return constroiCaminhoDeVertices();
    }

    private void validaOrigemDestino() {
        validaOrigem();
        validaDestino();
    }

    private void validaDestino() {
        if (destino == null)
            throw new IllegalStateException("Falta informar o fim.");
    }

    private void validaOrigem() {
        if (inicio == null)
            throw new IllegalStateException("Falta informar o início.");
    }

    /** Main loop: process open columns until all are finalized. */
    private void constroiCaminhoEnquantoVerticeAberto() {
        while (temVerticeAberto()) {
            constroiCaminho();
        }
    }

    private boolean temVerticeAberto() {
        return !abertas.isEmpty();
    }

    /** One relaxation step: close the best open column, relax its successors. */
    private void constroiCaminho() {
        Coluna atual = comMenorEstimativa();
        fechar(atual);
        for (Coluna sucessor : atual.getSucessoresAbertos()) {
            trocaPrecedenteParaAtualSeMenorEstimativa(atual, sucessor);
        }
    }

    private void trocaPrecedenteParaAtualSeMenorEstimativa(Coluna atual,
            Coluna sucessor) {
        double estimativaAnterior = sucessor.getEstimativa();
        double novaEstimativa = atual.somaEstimativa(sucessor);
        if (novaEstimativa < estimativaAnterior) {
            sucessor.setEstimativa(novaEstimativa);
            sucessor.setPrecedente(atual);
        }
    }

    private void fechar(Coluna k) {
        k.fechar();
        abertas.remove(k);
    }

    /** Linear scan of the open columns for the smallest estimate. */
    private Coluna comMenorEstimativa() {
        // Sentinel with estimate +infinity; <= guarantees it is replaced by
        // the first open column even when all estimates are infinite.
        Coluna c = new Coluna(Vertice.NULO);
        for (Coluna atual : abertas) {
            if (atual.estimativa <= c.estimativa)
                c = atual;
        }
        return c;
    }

    /** Walks precedente links back from the destination, then reverses. */
    private List<Vertice> constroiCaminhoDeVertices() {
        List<Vertice> caminho = new ArrayList<>();
        Coluna coluna = destino;
        while (coluna.temPrecedente()) {
            caminho.add(coluna.getVertice());
            coluna = coluna.getPrecedente();
        }
        caminho.add(coluna.getVertice());
        Collections.reverse(caminho);
        return caminho;
    }

    /** Runs the algorithm from the origin and returns every column (distances). */
    public Collection<Coluna> retornaDistanciasParaDemaisVertices() {
        validaOrigem();
        constroiCaminhoEnquantoVerticeAberto();
        return colunas.values();
    }

    /** A row of the algorithm table: vertex, best estimate, predecessor, state. */
    public class Coluna {

        private Vertice vertice;
        private double estimativa = Double.POSITIVE_INFINITY;
        private Coluna precedente;
        private boolean aberto = true;

        private Coluna(Vertice vertice) {
            this.setVertice(vertice);
        }

        /** Estimate of this column plus the edge weight to column k. */
        private double somaEstimativa(Coluna k) {
            return estimativa
                    + getVertice().getArestaCom(k.getVertice().getRotulo())
                            .getPeso();
        }

        // NOTE(review): despite its name, this returns ALL adjacent columns,
        // including closed ones. With non-negative edge weights a closed
        // column can never improve, so relaxing it is a no-op -- confirm
        // before adding an `aberto` filter.
        private Collection<Coluna> getSucessoresAbertos() {
            Collection<Coluna> sucessoresAbertos = new ArrayList<>();
            getVertice().getAdjacentes().forEach(
                    v -> sucessoresAbertos.add(colunas.get(v.getRotulo())));
            return sucessoresAbertos;
        }

        public Vertice getVertice() {
            return vertice;
        }

        private void setVertice(Vertice vertice) {
            this.vertice = vertice;
        }

        /** True while this column's chain has not reached the NULO sentinel. */
        public boolean temPrecedente() {
            return getPrecedente().getVertice() != Vertice.NULO;
        }

        /** Lazily substitutes a NULO-sentinel column when no predecessor is set. */
        public Coluna getPrecedente() {
            if (precedente == null)
                precedente = new Coluna(Vertice.NULO);
            return precedente;
        }

        public void setPrecedente(Coluna precedente) {
            this.precedente = precedente;
        }

        private void fechar() {
            this.aberto = false;
        }

        public double getEstimativa() {
            return estimativa;
        }

        public void setEstimativa(double estimativa) {
            this.estimativa = estimativa;
        }

        @Override
        public String toString() {
            return "(" + getVertice().getRotulo() + ", " + estimativa + ", "
                    + getPrecedente().getVertice() + ", "
                    + (aberto ? "aberto" : "fechado") + ")";
        }
    }
}
|
<reponame>DawChihLiou/ci-boilerplate<filename>app/js/home/index.spec.js
import React from 'react';
import Home from './index';
import { shallow } from 'enzyme';
// Unit tests for the <Home /> view. One shallow render is shared by both
// specs (the component is render-only, so reuse is safe here).
describe('<Home />', () => {
  const home = shallow(<Home />);
  it('should have one header', () => {
    expect(home.find('h1').length).toBe(1);
  });
  it('should display "This is Home View"', () => {
    expect(home.find('h1').text()).toEqual('This is Home View.')
  })
});
|
<reponame>snowwayne1231/WerewolfHelper<gh_stars>0
// Import F7
import Framework7 from 'framework7/framework7.esm.bundle.js';
// Import F7 Styles
import 'framework7/css/framework7.bundle.css';
// Import Icons and App Custom Styles
import './css/icons.css';
import './css/app.css';
import './stylus/app.styl';
// Import Routes
import routes from './routes.js';
// Init Framework7
// Construct the Framework7 application instance for the Werewolf helper.
const app = new Framework7({
  root: '#app',
  id: 'snow.werewolf.f7', // App bundle ID
  name: 'Werewolf f7', // App name
  theme: 'auto', // Automatic theme detection
  // App routes
  routes: routes,
  view: {
    // Keep navigation state in the browser history.
    pushState: true,
  },
});
// Expose the app globally for debugging from the browser console.
window.app = app;
|
# -*- coding: utf-8 -*-
# @Time : 2022/3/7 19:18
# @Author : hyx
# @File : page.py
# @desc : web page implement
import json
import time
from urllib.parse import urlparse
import flybirds.core.global_resource as global_resource
import flybirds.core.global_resource as gr
import flybirds.utils.flybirds_log as log
import flybirds.utils.verify_helper as verify_helper
from flybirds.core.plugin.plugins.default.web.interception import \
get_case_response_body
from flybirds.utils import dsl_helper
from flybirds.utils.dsl_helper import is_number
__open__ = ["Page"]
class Page:
    """Web Page Class.

    Wraps a Playwright page plus its browser context and exposes the DSL
    operations (navigate, back, sleep, URL verification) used by flybirds
    steps.  Globals are read through ``gr`` (global_resource).
    """
    name = "web_page"
    # Instantiated by the plugin framework at plugin-load time.
    instantiation_timing = "plugin"

    def __init__(self):
        page, context = self.init_page()
        self.page = page
        self.context = context

    @staticmethod
    def init_page():
        """Create a page in a fresh browser context and configure timeouts.

        When 'request_interception' is enabled (default True), all routes
        are funneled through handle_route and every request is mirrored to
        handle_request for caching.
        """
        context = Page.new_browser_context()
        page = context.new_page()
        request_interception = gr.get_web_info_value("request_interception",
                                                     True)
        if request_interception:
            page.route("**/*", handle_route)
        # request listening events
        page.on("request", handle_request)
        # Timeouts are configured in seconds; Playwright expects ms.
        ele_wait_time = gr.get_frame_config_value("wait_ele_timeout", 30)
        page_render_timeout = gr.get_frame_config_value("page_render_timeout",
                                                        30)
        page.set_default_timeout(float(ele_wait_time) * 1000)
        page.set_default_navigation_timeout(float(page_render_timeout) * 1000)
        return page, context

    @staticmethod
    def new_browser_context():
        """Return a BrowserContext, preferring the project's custom factory.

        Falls back to a default context (video recording on, HTTPS errors
        ignored) when the project's create_browser_context returns None.
        """
        browser = gr.get_value('browser')
        operation_module = gr.get_value("projectScript").custom_operation
        create_browser_context = getattr(operation_module,
                                         "create_browser_context")
        context = create_browser_context(browser)
        if context is not None:
            log.info('[new_browser_context] successfully get BrowserContext '
                     'from custom operation')
            return context
        context = browser.new_context(record_video_dir="videos",
                                      ignore_https_errors=True)
        return context

    def navigate(self, context, param):
        """Open the schema URL referenced by the 'urlKey' entry of param."""
        param_dict = dsl_helper.params_to_dic(param, "urlKey")
        url_key = param_dict["urlKey"]
        schema_url_value = gr.get_page_schema_url(url_key)
        self.page.goto(schema_url_value)

    def return_pre_page(self, context):
        """Navigate back in the page history."""
        self.page.go_back()

    def sleep(self, context, param):
        """Wait `param` seconds; fall back to 3 seconds on a non-number."""
        if is_number(param):
            self.page.wait_for_timeout(float(param) * 1000)
        else:
            log.warn("default wait for timeout!")
            self.page.wait_for_timeout(3 * 1000)

    def cur_page_equal(self, context, param):
        """Assert the current URL (query string ignored) matches param.

        `param` is either a literal http(s) URL or a schema-url key.
        Note: the "https" entry in the startswith tuple is redundant, as
        "http" already matches any https:// prefix.
        """
        cur_url = self.page.url.split('?')[0]
        if param.startswith(("http", "https")):
            target_url = param.split('?')[0]
        else:
            schema_url = global_resource.get_page_schema_url(param)
            target_url = schema_url
        verify_helper.text_equal(target_url, cur_url)
def handle_request(request):
    """Request listener: cache the most recent request per service.

    The last path segment of the URL names the service operation.  If that
    operation is already registered in the global 'interceptionRequest'
    map, its entry is replaced with this request's payload, URL and a
    millisecond timestamp.
    """
    # interception request handle
    parsed_uri = urlparse(request.url)
    operation = parsed_uri.path.split('/')[-1]
    # NOTE(review): str.split never returns None, so this guard is always
    # true; it presumably meant to skip empty segments -- confirm.
    if operation is not None:
        interception_request = gr.get_value('interceptionRequest')
        request_body = interception_request.get(operation)
        if request_body is not None:
            log.info(
                f'[handle_request] start cache service:{operation}')
            current_request_info = {'postData': request.post_data,
                                    'url': request.url,
                                    'updateTimeStamp': int(
                                        round(time.time() * 1000))}
            interception_request[operation] = current_request_info
            gr.set_value("interceptionRequest", interception_request)
def handle_route(route):
    """Playwright route handler: abort black-listed domains and serve mock
    bodies for fetch/xhr operations that have a configured mock case id.
    """
    abort_domain_list = gr.get_web_info_value("abort_domain_list", [])
    parsed_uri = urlparse(route.request.url)
    domain = parsed_uri.hostname
    # Block any request to an explicitly configured domain.
    if abort_domain_list and domain in abort_domain_list:
        route.abort()
        return
    # Only fetch/xhr calls are candidates for mocking; let documents,
    # images, scripts etc. through untouched.
    resource_type = route.request.resource_type
    if resource_type != 'fetch' and resource_type != 'xhr':
        route.continue_()
        return
    # mock response data
    # The last path segment identifies the service operation.
    operation = parsed_uri.path.split('/')[-1]
    mock_case_id = None
    if operation is not None:
        interception_values = gr.get_value('interceptionValues')
        mock_case_id = interception_values.get(operation)
    if mock_case_id:
        mock_body = get_case_response_body(mock_case_id)
        if mock_body:
            if not isinstance(mock_body, str):
                mock_body = json.dumps(mock_body)
            route.fulfill(status=200,
                          content_type="application/json;charset=utf-8",
                          body=mock_body)
        else:
            route.continue_()
    else:
        route.continue_()
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.core.reachedset;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Collection;
import java.util.Set;
import org.sosy_lab.cpachecker.cfa.model.CFANode;
import org.sosy_lab.cpachecker.core.interfaces.AbstractState;
import org.sosy_lab.cpachecker.core.waitlist.Waitlist.WaitlistFactory;
import org.sosy_lab.cpachecker.util.AbstractStates;
/**
* Advanced implementation of ReachedSet.
* It groups states by location and allows fast access to all states with
* the same location as a given one.
*/
public class LocationMappedReachedSet extends PartitionedReachedSet {

  private static final long serialVersionUID = 1L;

  public LocationMappedReachedSet(WaitlistFactory waitlistFactory) {
    super(waitlistFactory);
  }

  /** Returns all reached states at the given CFA location. */
  @Override
  public Collection<AbstractState> getReached(CFANode location) {
    checkNotNull(location);
    return getReachedForKey(location);
  }

  /** Partitions states by CFA location; fails fast if a state has none. */
  @Override
  protected Object getPartitionKey(AbstractState pState) {
    CFANode location = AbstractStates.extractLocation(pState);
    checkNotNull(location, "Location information necessary for LocationMappedReachedSet");
    return location;
  }

  /** Returns all locations that currently have at least one reached state. */
  @SuppressWarnings("unchecked")
  public Set<CFANode> getLocations() {
    // generic cast is safe because we only put CFANodes into it
    return (Set<CFANode>)super.getKeySet();
  }
}
|
<gh_stars>10-100
import { Component } from '@angular/core';
import { ResponsibleParty } from '../../../model/iso';
import { ContactList } from '../../../model/sml';
import { ConfigurationService } from '../../../services/ConfigurationService';
import { VocabularyType } from '../../../services/vocabulary/model';
import { ChildMetadata, ChildMetadataOptions } from '../base/ChildMetadata';
import { TypedModelComponent } from '../base/TypedModelComponent';
import { ResponsiblePartyComponent } from '../iso/gmd/ResponsiblePartyComponent';
@Component({
    selector: 'sml-contact-list',
    templateUrl: './ContactListComponent.html',
    styleUrls: ['../styles/editor-component.scss']
})
/**
 * Editor component for a SensorML ContactList: supports adding/removing
 * ResponsibleParty entries and opening each entry in a child editor.
 */
export class ContactListComponent extends TypedModelComponent<ContactList> {

    constructor(
        private configuration: ConfigurationService
    ) {
        super();
    }

    protected createModel() {
        return new ContactList();
    }

    /** Removes the contact at the given index from the model. */
    protected onRemove(index: number): void {
        this.model.contacts.splice(index, 1);
    }

    /** Appends a fresh, empty ResponsibleParty to the model. */
    protected onAdd() {
        this.model.contacts.push(new ResponsibleParty());
    }

    /**
     * Opens a child editor for the given contact, enabling the contact
     * vocabulary selector when the app configuration asks for it.
     */
    protected openNewResponsiblePartyItem(item: ResponsibleParty) {
        const newLocal = this.config.getConfigFor('sml:contact').getConfigFor('gmd:CI_ResponsibleParty');
        this.configuration.getConfig().subscribe(smleConfig => {
            let options: ChildMetadataOptions;
            if (smleConfig.showContactVocabularySelection) {
                options = { vocabularyType: VocabularyType.Contact };
            }
            this.openNewChild(new ChildMetadata(ResponsiblePartyComponent, item, newLocal, options));
        });
    }
}
|
<reponame>newave986/highlighter
import React, { useEffect, useState } from "react";
import { Route, Link, useHistory, useLocation } from "react-router-dom";
import './showResult.css';
import axios from "axios";
import Loading from './components/loading';
import logoImg from "./images/logo.png";
import Facebook_logo from "./images/facebook-logo.png";
import Youtube_logo from "./images/youtube-logo.png";
import Twitter_logo from "./images/twitter-logo.png";
import Instagram_logo from "./images/instagram-logo.png";
const EMOTIONS = ["Angry","Disgusting","Fearful", "Happy", "Sad", "Surpring", "Neutral"]; //0~6
// page 3
const ShowResult = (props) => {
const location = useLocation();
const videoIndex = location.state.videoIndex;
const checkedEmo = location.state.checkedEmo;
const [loader, setLoader] = useState(true);
const [showB, setShowB] = useState(false);
const [thumb, setThumb] = useState(0);
const emotionTags = checkedEmo.map((e, index) => (<li key={index}>#{EMOTIONS[e]}</li>));
const [Video, setVideo] = useState([]);
var fileDownload = require('js-file-download');
useEffect(() => {
axios.get("/api/getMainImg/", {
params: {
video_index: videoIndex,
}
})
.then(response => {
if (response.data == {}){
alert("Failed to show thumbnail");
}
else {
setLoader(false);
let thumbnail = response.data;
setThumb(thumbnail);
setShowB(true);
}
})
.catch(error => {
console.log(error)
})
}, [])
const handleDownload = (e) => {
e.preventDefault();
axios.get("/api/getVideo/",{
params: {
video_index: videoIndex,
},
responseType:'blob'
}).then(res => {
fileDownload(res.data,'highlight.mp4')
}).catch(error=>{
console.log(error)
})
}
return (
<>
<div id="mainbox">
<hr id="line1"></hr>
<hr id="line2"></hr>
<img id="logo" src={logoImg}/>
<div class="leftbar">
<div class="folder"><span>Upload & Share</span></div>
<br></br>
<div><img id = "snsLogoImg" src={ Facebook_logo }/><a class="goSns" target="_blank" href="https://facebook.com">Facebook</a></div>
<br></br>
<div><img id = "snsLogoImg" src={ Twitter_logo }/><a class="goSns" target="_blank" href="https://twitter.com">Twitter</a></div>
<br></br>
<div><img id = "snsLogoImg" src={ Instagram_logo }/><a class="goSns" target="_blank" href="https://instagram.com">Instagram</a></div>
<br></br>
<div><img id = "snsLogoImg" src={ Youtube_logo }/><a class="goSns" target="_blank" href="https://youtube.com">Youtube</a></div>
</div>
{/* 완성 영상 재생시키기
<video id="show_video" width="2600" height="2000" src={}} controls></video>
controls이 존재하면, 소리 조절(volume), 동영상 탐색(seek), 일시 정지(pause)/재시작(resume)을 할 수 있는 컨트롤러를 제공합니다.*/}
<div id="videoBox">
{thumb ? <img id="thumbnail" src={"data:image/png;base64,"+ thumb }></img> : ""}
</div>
<div class="emotionTags">
{ emotionTags }
</div>
<div id="loader">{loader ? <Loading/> : ""}</div>
<form onSubmit = { handleDownload }>
<button type="submit" id="downloadButton" class="btn"
style = {showB ? { visibility : "visible" } : { visibility: "hidden" }}>
Download</button>
</form>
</div>
</>
)
}
export default ShowResult; |
const router = require('express').Router();
const teamController = require('../controller/teamController')
/**
* @swagger
* /team/employs/ :
* get:
* tags:
* - "team"
* summary : Get all players of a team
* responses :
 *          200:
 *              description : Successfully
 *          500:
 *              description : Error occurred
*/
router.get("/employs", teamController.getTeamEmployee);
/**
* @swagger
* /team/{TName}/employs/ :
* get:
* tags:
* - "team"
* summary : Get a specified team's players
* parameters :
* - name : TName
* in : path
* schema :
* type : string
* example : "TSM"
*
* responses :
* 200:
* description : Succesfully
* 500:
* description : Error occured
*/
router.get("/:TName/employs", teamController.getTeamEmployeeWithTName);
/**
* @swagger
* /team/{TName}/employs :
* post:
* tags:
* - "team"
* security:
* - ApiKeyAuth: []
* summary : Insert a new player into a team
* parameters :
* - name : TName
* in : path
* schema :
* type : string
* example : "TSM"
* requestBody:
* content:
* application/json:
* schema :
* type : object
* properties :
* playername :
* type : string
* year :
* type : number
* month :
* type : number
* day :
* type : number
* example :
* playername : "Faker"
* year : 2021
 *                          month : 12
 *                          day : 22
* responses :
* 200:
* description : Succesfully
* 500:
* description : Error occured
* 401:
* description : Error occured
* responseBody :
* content :
* application/json:
* schema :
* type : object
* properties :
* message :
* type : string
* example :
* message : "Authentication failed"
*/
router.post("/:TName/employs", teamController.postTeamEmploy);
/**
* @swagger
* /team/{TName}/employs :
* delete:
* tags:
* - "team"
* security:
* - ApiKeyAuth: []
* summary : Delete a player inside a team
* parameters :
* - name : TName
* in : path
* schema :
* type : string
* example : "TSM"
*
* requestBody:
* content:
* application/json:
* schema :
* type : object
* properties :
* playername :
* type : string
* example :
* playername : "Faker"
* responses :
* 200:
* description : Succesfully
* 500:
* description : Error occured
* 401:
* description : Error occured
* responseBody :
* content :
* application/json:
* schema :
* type : object
* properties :
* message :
* type : string
* example :
* message : "Authentication failed"
*/
router.delete("/:TName/employs", teamController.deleteTeamEmploy);
/**
* @swagger
* /team/{tname}/ :
* get:
* tags:
* - "team"
* summary : Get a specified team
* parameters :
* - name : tname
* in : path
* schema :
* type : string
* example : "TSM"
* responses :
* 200:
* description : Succesfully
* 500:
* description : Error occured
*/
router.get("/:tname", teamController.getTeam);
/**
* @swagger
* /team/ :
* get:
* tags:
* - "team"
* summary : Get all teams
* responses :
* 200:
* description : Succesfully
* 500:
* description : Error occured
*/
router.get("/", teamController.getAllTeam);
/**
* @swagger
* /team/ :
* post:
* tags:
* - "team"
* security:
* - ApiKeyAuth: []
* summary : Post a team
* requestBody:
* content:
* application/json:
* schema :
* type : object
* properties :
* tname :
* type : string
* description :
* type : string
* year :
* type : number
* month :
* type : number
* day :
* type : number
* example :
* tname : "TSM"
 *                          description : "TSM is a NA team"
* year : 2021
* month : 22
* day : 22
* responses :
* 200:
* description : Succesfully
* 500:
* description : Error occured
* 401:
* description : Error occured
* responseBody :
* content :
* application/json:
* schema :
* type : object
* properties :
* message :
* type : string
* example :
* message : "Authentication failed"
*/
router.post("/", teamController.postTeam);
/**
* @swagger
* /team/{tname} :
* delete:
* tags:
* - "team"
* security:
* - ApiKeyAuth: []
* summary : Delete a team
* parameters :
* - name : tname
* in : path
* schema :
* type : string
* example : "TSM"
* responses :
* 200:
* description : Succesfully
* 500:
* description : Error occured
* 401:
* description : Error occured
* responseBody :
* content :
* application/json:
* schema :
* type : object
* properties :
* message :
* type : string
* example :
* message : "Authentication failed"
*/
router.delete("/:tname", teamController.deleteTeam);
module.exports = router;
|
-- Product catalog table (PostgreSQL: SERIAL is an auto-incrementing integer).
CREATE TABLE products(
    id SERIAL PRIMARY KEY,
    name VARCHAR(255) NOT NULL,
    description TEXT NOT NULL,
    price DECIMAL(7,2) NOT NULL, -- up to 99999.99, two decimal places
    discount FLOAT NOT NULL      -- NOTE(review): unit (fraction vs. percent) not defined here
);
<filename>server/src/main/java/com/decathlon/ara/postman/bean/Info.java
package com.decathlon.ara.postman.bean;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Wither;
/** The "info" object of a Postman collection export: collection metadata. */
@Data
@Wither
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class Info {

    /**
     * Name of the Postman collection.
     */
    private String name;

}
|
<filename>pinax/apps/blog/management.py
from django.conf import settings
from django.db.models import signals
from django.utils.translation import ugettext_noop as _
# Register the blog's notice types after syncdb when the notification app
# is installed; otherwise report that registration was skipped.
if "notification" in settings.INSTALLED_APPS:
    from notification import models as notification

    def create_notice_types(app, created_models, verbosity, **kwargs):
        """post_syncdb hook that creates the blog's notification types."""
        notification.create_notice_type("blog_friend_post", _("Friend Posted to Blog"), _("a friend of yours posted to their blog"), default=2)
        notification.create_notice_type("blog_post_comment", _("New Comment on Blog Post"), _("a comment was made on one of your blog posts"), default=2)

    signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
    # Parenthesized so the statement is valid syntax under both Python 2
    # and Python 3 (the bare `print "..."` form is a py3 SyntaxError that
    # would break even importing this module under py3).
    print("Skipping creation of NoticeTypes as notification app not found")
|
// Autogenerated animation regression test: samples the computed CSS
// transform of the '.anim' elements at fixed times and asserts the exact
// interpolated matrices (element 1 translates, element 2 rotates,
// elements 3 and 4 combine rotation with translation).
timing_test(function() {
  at(0, function() {
    assert_styles(
        '.anim',
        [{'transform':'matrix(1, 0, 0, 1, 0, 0)'},
         {'transform':'matrix(1, 0, 0, 1, 0, 0)'},
         {'transform':'matrix(1, 0, 0, 1, 0, 0)'},
         {'transform':'matrix(1, 0, 0, 1, 0, 0)'}]);
  }, "Autogenerated");
  at(0.4, function() {
    assert_styles(
        '.anim',
        [{'transform':'matrix(1, 0, 0, 1, 40, 0)'},
         {'transform':'matrix(0.9510565162951535, 0.3090169943749474, -0.3090169943749474, 0.9510565162951535, 0, 0)'},
         {'transform':'matrix(0.9510565162951535, 0.3090169943749474, -0.3090169943749474, 0.9510565162951535, 40, 0)'},
         {'transform':'matrix(0.9510565162951535, 0.3090169943749474, -0.3090169943749474, 0.9510565162951535, 38.04226065180614, 12.360679774997896)'}]);
  }, "Autogenerated");
  at(0.8, function() {
    assert_styles(
        '.anim',
        [{'transform':'matrix(1, 0, 0, 1, 80, 0)'},
         {'transform':'matrix(0.8090169943749475, 0.5877852522924731, -0.5877852522924731, 0.8090169943749475, 0, 0)'},
         {'transform':'matrix(0.8090169943749475, 0.5877852522924731, -0.5877852522924731, 0.8090169943749475, 80, 0)'},
         {'transform':'matrix(0.8090169943749475, 0.5877852522924731, -0.5877852522924731, 0.8090169943749475, 64.7213595499958, 47.022820183397855)'}]);
  }, "Autogenerated");
  at(1.2000000000000002, function() {
    assert_styles(
        '.anim',
        [{'transform':'matrix(1, 0, 0, 1, 120, 0)'},
         {'transform':'matrix(0.587785252292473, 0.8090169943749475, -0.8090169943749475, 0.587785252292473, 0, 0)'},
         {'transform':'matrix(0.587785252292473, 0.8090169943749475, -0.8090169943749475, 0.587785252292473, 120, 0)'},
         {'transform':'matrix(0.587785252292473, 0.8090169943749475, -0.8090169943749475, 0.587785252292473, 70.53423027509676, 97.0820393249937)'}]);
  }, "Autogenerated");
  at(1.6, function() {
    assert_styles(
        '.anim',
        [{'transform':'matrix(1, 0, 0, 1, 160, 0)'},
         {'transform':'matrix(0.30901699437494745, 0.9510565162951535, -0.9510565162951535, 0.30901699437494745, 0, 0)'},
         {'transform':'matrix(0.30901699437494745, 0.9510565162951535, -0.9510565162951535, 0.30901699437494745, 160, 0)'},
         {'transform':'matrix(0.30901699437494745, 0.9510565162951535, -0.9510565162951535, 0.30901699437494745, 49.44271909999159, 152.16904260722455)'}]);
  }, "Autogenerated");
  at(2, function() {
    assert_styles(
        '.anim',
        [{'transform':'matrix(1, 0, 0, 1, 200, 0)'},
         {'transform':'matrix(0.00000000000000006123031769111886, 1, -1, 0.00000000000000006123031769111886, 0, 0)'},
         {'transform':'matrix(0.00000000000000006123031769111886, 1, -1, 0.00000000000000006123031769111886, 200, 0)'},
         {'transform':'matrix(0.00000000000000006123031769111886, 1, -1, 0.00000000000000006123031769111886, 0.000000000000012246063538223773, 200)'}]);
  }, "Autogenerated");
}, "Autogenerated checks.");
|
// https://github.com/jekyll/github-metadata/blob/master/docs/site.github.md
// https://octokit.github.io/rest.js/v18#repos-get-latest-release
const fs = require('fs');
const path = require('path');
const { Octokit } = require('@octokit/rest');
const NodeCache = require('node-cache');
// Does the cache persist over several runs? 🤔
const myCache = new NodeCache( { stdTTL: 600 } );
module.exports = (options = {}, context) => ({
async extendPageData ($page) {
const { owner, repo } = options;
$page.githubMetadata = await getGithubMetadata({ owner, repo });
}
});
// Resolves release metadata for owner/repo, caching each resolver's result
// for the cache's stdTTL to avoid hammering the GitHub API on every page.
//
// Auth resolution: a local ./github-token file takes precedence over the
// GITHUB_TOKEN environment variable; with neither, Octokit runs
// unauthenticated (lower rate limits).
const getGithubMetadata = async ({ owner = '', repo = '' }) => {
  let githubToken = process.env.GITHUB_TOKEN || '';
  try {
    // BUG FIX: trim() strips the trailing newline editors append to the token
    // file, which would otherwise yield a malformed Authorization header.
    githubToken = fs.readFileSync(path.resolve(__dirname, './github-token'), 'utf-8').trim();
  } catch (error) {
    // The file is optional — only warn when the env var is also absent.
    if (!githubToken) {
      console.log('no github token found');
    }
  }
  const octokit = new Octokit({
    auth: githubToken || undefined,
  });
  const repoUrl = `https://github.com/${owner}/${repo}`;
  // Each resolver produces one key of the returned metadata object.
  const resolvers = {
    latest_release: async () => {
      const fromCache = myCache.get('latest_release');
      if (fromCache) {
        return fromCache;
      }
      const { data } = await octokit.rest.repos.getLatestRelease({ owner, repo });
      if (data) {
        myCache.set('latest_release', data);
      }
      return data;
    },
    releases: async () => {
      const fromCache = myCache.get('releases');
      if (fromCache) {
        return fromCache;
      }
      const { data } = await octokit.rest.repos.listReleases({ owner, repo });
      if (data) {
        myCache.set('releases', data);
      }
      return data;
    },
    releases_url: async () => `${repoUrl}/releases`,
  }
  const keys = Object.keys(resolvers);
  // allSettled: one failing API call must not sink the other resolvers;
  // a rejected entry is logged and its key ends up undefined.
  const vls = await Promise.allSettled(keys.map(_ => resolvers[_]()));
  return vls.reduce((acc, cur, i) => {
    if (cur.status === 'rejected') {
      console.log(cur);
    }
    return {
      ...acc,
      [keys[i]]: cur.value,
    }
  }, {});
};
|
export * from './grid';
export * from './make-theme';
export * from './media';
export * from './mixins';
export * from './native';
export * from './pagenav';
export * from './tokens/palette';
export * from './type';
|
<reponame>chayakornwc/Admin
import React, { Component } from 'react'
import ReportFilter from '../../../components/Report/ReportFilter';
import ReportTable from '../../../components/Report/ReportTable';
import {loadOrders} from '../../../redux/actions/courseorderActions';
import {loadCourse} from '../../../redux/actions/courseActions'
import Loader from '../../../components/Utils/loader';
import {connect} from 'react-redux';
import PropTypes from 'prop-types';
// Report screen: loads course orders and courses on mount, lets the user
// narrow the orders via ReportFilter, and renders them in ReportTable.
class Report extends Component {
    constructor(props){
        super(props)
        // NOTE(review): handleRage ("range"?) is bound here but never used in
        // render nor passed to a child — looks like dead code or a missing
        // prop hookup; confirm before removing.
        this.handleRage = this.handleRage.bind(this)
        this.renderToRedirect = this.renderToRedirect.bind(this)
        this.handleSearchtermChange = this.handleSearchtermChange.bind(this)
    }
    // Router pulled from legacy context (pre-v4 react-router style).
    static contextTypes = {
        router: PropTypes.object
    }
    // Navigates to the per-person attendance report for the given id.
    renderToRedirect(id){
        this.context.router.history.push(`/report/personal_attends/${id}`);
    }
    // Reloads orders restricted to a start/end date pair (both required).
    handleRage(start, end){
        if(start && end){
            this.props.dispatch(loadOrders(start,end))
        }
    }
    // Reloads orders with the full filter set coming from ReportFilter.
    handleSearchtermChange(start, end, affiliation, course){
        this.props.dispatch(loadOrders(start,end,affiliation,course));
    }
    componentDidMount(){
        // Initial unfiltered load of both data sets.
        this.props.dispatch(loadOrders())
        this.props.dispatch(loadCourse())
    }
    render() {
        const {courseOrders, courses} = this.props
        return (
            <div>
                <ReportFilter
                    course={courses.data}
                    onSearchTermChange={this.handleSearchtermChange}/>
                {courseOrders.isLoading && <div style={{display:'flex', justifyContent:'center'}}><Loader/></div>}
                <ReportTable renderToRedirect={this.renderToRedirect} data={courseOrders.data} />
            </div>
        )
    }
}
// Maps the two redux store slices this screen reads onto component props.
function mapStateToProps(state) {
  const { courseOrderReducers, courseReducer } = state;
  return {
    courseOrders: courseOrderReducers.courseOrders,
    courses: courseReducer.courses,
  };
}
export default connect(mapStateToProps)(Report); |
package kata.java;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import java.util.Optional;
import java.util.stream.IntStream;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
/**
 * Unit tests for LinkedArrayDeque: empty-deque behaviour, front insertion
 * with back removal (FIFO view) and front removal (LIFO view), and growth
 * past a single internal segment.
 */
public class LinkedArrayDequeTest {

    private LinkedArrayDeque deque;

    @Before
    public void setUp() throws Exception {
        deque = new LinkedArrayDeque();
    }

    // A fresh deque yields Optional.empty() on removal.
    @Test
    public void createDeque() throws Exception {
        assertThat(deque.removeBack(), is(Optional.empty()));
    }

    // A single element added at the front comes out at the back.
    @Test
    public void enqueueItemInFront() throws Exception {
        deque.addFront(10);
        assertThat(deque.removeBack(), is(Optional.of(10)));
        assertThat(deque.removeBack(), is(Optional.empty()));
    }

    // addFront + removeBack is FIFO: the oldest insertion comes out first.
    @Test
    public void addManyItemsInFront() throws Exception {
        deque.addFront(10);
        deque.addFront(20);
        deque.addFront(30);
        assertThat(deque.removeBack(), is(Optional.of(10)));
        assertThat(deque.removeBack(), is(Optional.of(20)));
        assertThat(deque.removeBack(), is(Optional.of(30)));
        assertThat(deque.removeBack(), is(Optional.empty()));
    }

    // 40 elements exceeds one backing segment (assumes segment size < 40 —
    // TODO confirm against the LinkedArrayDeque implementation).
    @Test
    public void addItemsMoreThanSegmentSize() throws Exception {
        IntStream.range(0, 40).forEach(deque::addFront);
        IntStream.range(0, 40).forEach(i -> assertThat(deque.removeBack(), is(Optional.of(i))));
        assertThat(deque.removeBack(), is(Optional.empty()));
    }

    // addFront + removeFront is LIFO: the newest insertion comes out first.
    @Test
    public void removeItemsFront() throws Exception {
        deque.addFront(10);
        deque.addFront(20);
        deque.addFront(30);
        assertThat(deque.removeFront(), is(Optional.of(30)));
        assertThat(deque.removeFront(), is(Optional.of(20)));
        assertThat(deque.removeFront(), is(Optional.of(10)));
        assertThat(deque.removeFront(), is(Optional.empty()));
    }
}
|
<gh_stars>10-100
#!/usr/bin/python3
# Note: Always use unittest.sh to run the tests!
import unittest
from helpers.chordInterval import *
from helpers.storage import Storage
import datetime
import imp
class TestStorage(unittest.TestCase):
    """Exercises Storage: multi-value puts per key, expiry cleanup,
    merging another storage's data, and range-based bulk get/delete."""

    def test_property_get(self):
        storage = Storage()
        # insert some keys a and b
        storage.put("a", "a")
        storage.put("b", "b")
        storage.put("a", "abc")
        self.assertEqual(storage.get("a")[0], "a")  # check if item value is correct
        self.assertEqual(storage.get("a")[1], "abc")
        self.assertEqual(len(storage.get("a")), 2)  # two elements for key a
        self.assertEqual(len(storage.data), 2)  # 2 keys in total (a and b)
        longTimeAgo = datetime.datetime.today() - datetime.timedelta(2)  # insert item from two days ago
        storage.put("a", "long", timeOfInsert=longTimeAgo.isoformat())  # insert one more item for a which is expired
        self.assertEqual(storage.get("a")[2], "long")  # check if expired item was inserted
        self.assertEqual(len(storage.get("a")), 3)  # check total items for key a
        storage.clean_old()  # after a cleanup the expired item should be removed
        self.assertEqual(len(storage.get("a")), 2)  # check total items for key a after expired item was removed
        self.assertEqual(storage.get("b")[0], "b")
        # Merge: storage3 holds three duplicate values for key 1; after
        # merging, storage2's key 1 has its own value plus those three.
        storage2 = Storage()
        storage2.put(1, 1)
        storage2.put(2, 2)
        storage2.put(3, 3)
        storage2.put(4, 4)
        storage2.put(5, 5)
        storage2.put(6, 6)
        storage3 = Storage()
        storage3.put(1, 1)
        storage3.put(1, 1)
        storage3.put(1, 1)
        storage2.merge(storage3.data)
        self.assertEqual(len(storage2.get(1)), 4)
        # Range helpers over (1, 4) select/delete 3 of the 6 keys.
        # NOTE(review): the exact interval semantics (open/closed, chord
        # wrap-around) come from helpers.chordInterval — confirm there.
        self.assertEqual(len(storage2.get_storage_data_between(1, 4)), 3)
        self.assertEqual(len(storage2.data), 6)
        storage2.delete_storage_data_between(1, 4)
        self.assertEqual(len(storage2.data), 3)
if __name__ == '__main__':
unittest.main()
|
package main
import (
"bytes"
"fmt"
"html/template"
"io"
"net/http"
"github.com/murphybytes/saml/examples/svcprovider/generated"
"github.com/pkg/errors"
)
// homepageHandler serves the embedded home page asset.
type homepageHandler struct{}

// newHomepageHandler returns the handler for the site's home page.
func newHomepageHandler() http.Handler {
	return &homepageHandler{}
}
// ServeHTTP writes the embedded home page asset to the response.
// Bug fix: the original did not return after a failed Asset lookup, so it
// fell through, set headers after WriteHeader, and streamed a nil page —
// potentially triggering a second (superfluous) WriteHeader.
func (h *homepageHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	page, err := generated.Asset(homePagePath)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	contentTypeHeader(w)
	if _, err = io.Copy(w, bytes.NewReader(page)); err != nil {
		// The body write has already begun, so this status is best-effort
		// (kept for parity with the original intent).
		w.WriteHeader(http.StatusInternalServerError)
	}
}
// writeServerError logs err annotated with details and replies 500.
func writeServerError(w http.ResponseWriter, err error, details string) {
	wrapped := errors.Wrap(err, details)
	fmt.Println(wrapped)
	w.WriteHeader(http.StatusInternalServerError)
}
// handleCallbackError renders the embedded error page with errorText as its
// template data; any failure along the way is delegated to writeServerError.
func handleCallbackError(w http.ResponseWriter, errorText string) {
	raw, err := generated.Asset(errorPagePath)
	if err != nil {
		writeServerError(w, err, "reading error template")
		return
	}
	tmpl, err := template.New("").Parse(string(raw))
	if err != nil {
		writeServerError(w, err, "parsing error page template")
		return
	}
	if err := tmpl.Execute(w, errorText); err != nil {
		writeServerError(w, err, "writing error page template")
	}
}
// contentTypeHeader marks the response body as UTF-8 HTML.
func contentTypeHeader(w http.ResponseWriter) {
	w.Header().Set("Content-Type", "text/html; charset=UTF-8")
}
|
SELECT state, COUNT(*)
FROM orders
GROUP BY state; |
<gh_stars>0
module DataImport
  class MyDramaList
    module Extractor
      module Helpers
        extend ActiveSupport::Concern

        private

        # Rewrites an image URL's size-variant suffix ("_<variant>.") to
        # "_f." — presumably the full-size original on MyDramaList's CDN;
        # TODO confirm against their URL scheme.
        def original_for(src)
          src.sub(/_[a-z0-9]+\./, '_f.')
        end
      end
    end
  end
end
|
<gh_stars>1-10
import { Colleague } from '../dist'
// Minimal Colleague subclass used to exercise the mediator wiring:
// calling test() emits a 'log' event with a fixed payload.
class Tester extends Colleague {
  test() {
    this.emit('log', 'test emitted!')
  }
}
export default Tester
|
<gh_stars>1-10
//
// NSObject+DefaultValue.h
// Example
//
// Created by zhangferry on 2021/3/14.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Bitmask of property kinds that should receive a default value.
/// NOTE(review): `None` is `1 << 0` (value 1) rather than 0, so it occupies
/// a real bit in the mask — confirm that is intentional before testing with
/// `type & YYPropertyTypeNone`.
/// NOTE(review): `NSInterger` is a typo of `NSInteger`; kept as-is because
/// renaming the enumerator would break existing callers.
typedef NS_OPTIONS(NSUInteger, YYPropertyType) {
    YYPropertyTypeNone = 1 << 0,
    YYPropertyTypeNSString = 1 << 1,
    YYPropertyTypeNSNumber = 1 << 2,
    YYPropertyTypeNSArray = 1 << 3,
    YYPropertyTypeNSInterger = 1 << 4,
    YYPropertyTypeAll = 0xFF
};
/// Category hook: override to declare (as a YYPropertyType mask) which
/// property kinds should be auto-populated with default values.
@interface NSObject (DefaultValue)
- (YYPropertyType)provideDefaultValueType;
@end
NS_ASSUME_NONNULL_END
|
#!/bin/bash
# Pushes docker images for all ringface components at a given semver tag.
# Usage: ./dockerPushImages.sh <semver>   e.g. ./dockerPushImages.sh 1.0.1
if [ -z ${1+x} ]
then
    echo "Please define semver to release. eg: ./dockerPushImages.sh 1.0.1"
    exit 1
else
    echo "pushing versions to dockerhub '$1'"
    # Quote "$1" so a tag containing spaces or glob characters is passed to
    # each sub-script as a single argument (was unquoted before).
    ../ringface-gui/dockerImagesPush.sh "$1"
    ../ringface-classifier/dockerImagePush.sh "$1"
    ../ringface-connector/dockerImagesPush.sh "$1"
fi
class MessageFormatter {
    // Raw message body (inserted into the output markup as-is).
    private $message;
    // Creation timestamp in any format strtotime() understands.
    private $dateCreated;
    // Display name of the message author.
    private $posterName;

    public function setMessage($message) {
        $this->message = $message;
    }

    public function setDateCreated($dateCreated) {
        $this->dateCreated = $dateCreated;
    }

    // Renders the message followed by a "g:i:s A" timestamp span and a
    // "from <poster>" span, in the markup the chat view expects.
    // NOTE(review): $message and $posterName are interpolated without HTML
    // escaping — confirm inputs are sanitized upstream to avoid XSS.
    public function setPosterName($posterName) {
        $this->posterName = $posterName;
    }

    public function getFormattedMessage() {
        $formattedMessage = $this->message . "<br /><span class='timestamp'>" . date("g:i:s A", strtotime($this->dateCreated)) . "</span><span class='stamp'>from " . $this->posterName . "</span>";
        return $formattedMessage;
    }
}
// Example usage
$messageFormatter = new MessageFormatter();
$messageFormatter->setMessage("Hello, world!");
$messageFormatter->setDateCreated("2022-01-01 12:00:00");
$messageFormatter->setPosterName("John Doe");
echo $messageFormatter->getFormattedMessage(); |
import { helper } from '@ember/component/helper';
import { htmlSafe } from '@ember/string';
// Template helper: marks a string as trusted HTML so Handlebars renders it
// unescaped. htmlSafe does NOT sanitize — only pass trusted markup.
export function eeoHtmlSafe([str]/*, hash*/) {
  return htmlSafe(str);
}
export default helper(eeoHtmlSafe);
|
<reponame>maztohir/sample-sql-translator
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from dataclasses import dataclass
from typing import List
from typing import Optional
from rfmt.blocks import ChoiceBlock as CB
from rfmt.blocks import IndentBlock as IB
from rfmt.blocks import LineBlock as LB
from rfmt.blocks import StackBlock as SB
from rfmt.blocks import TextBlock as TB
from rfmt.blocks import WrapBlock as WB
from .utils import with_commas
from .query import SQLQuery
from .query_impl import SQLOrderLimitOffset
from .expr import SQLExpr
from .const import SQLConstant
from .const import SQLNumber
from .ident import SQLIdentifierPath
from .node import SQLNode
from .node import SQLNodeList
from .expr_funcs import SQLFuncExpr, SQLCustomFuncs
@dataclass(frozen=True)
class SQLArrayLiteral(SQLExpr):
    """An array literal expression: `[expr, expr, ...]`."""

    args: SQLNodeList

    def sqlf(self, compact):
        """Render as rfmt layout blocks: a single-line form, or (when not
        compact) a choice between the single-line and wrapped forms."""
        compact_sql = LB([
            TB('['),
            LB(with_commas(True, self.args)),
            TB(']')
        ])
        if compact:
            return compact_sql
        return CB([
            compact_sql,
            LB([
                TB('['),
                WB(with_commas(compact, self.args, tail=']'))
            ]),
        ])

    @staticmethod
    def consume(lex) -> 'Optional[SQLArrayLiteral]':
        """Parse `[e1, e2, ...]`, or return None if the next token is not
        `[`. Note: at least one expression is required — `[]` is rejected."""
        if not lex.consume('['):
            return None
        exprs: List[SQLExpr] = []
        while True:
            exprs.append(SQLExpr.parse(lex))
            if not lex.consume(','):
                break
        lex.expect(']')
        return SQLArrayLiteral(SQLNodeList(exprs))
@dataclass(frozen=True)
class SQLArraySelect(SQLExpr):
    """An `ARRAY(<subquery>)` expression."""

    query: SQLQuery

    def sqlf(self, compact):
        """Render compactly on one line, or offer a stacked/indented
        alternative when pretty-printing."""
        compact_sql = LB([
            TB('ARRAY('), self.query.sqlf(True), TB(')')
        ])
        if compact:
            return compact_sql
        return CB([
            compact_sql,
            SB([
                TB('ARRAY('),
                IB(self.query.sqlf(compact)),
                TB(')')
            ])
        ])

    @staticmethod
    def consume(lex) -> 'Optional[SQLArraySelect]':
        """Parse `ARRAY( <query> )`, or return None if ARRAY is absent."""
        if not lex.consume('ARRAY'):
            return None
        lex.expect('(')
        query = SQLQuery.parse(lex)
        lex.expect(')')
        return SQLArraySelect(query)
@dataclass(frozen=True)
class SQLArrayAgg(SQLExpr):
    """An `ARRAY_AGG(...)` aggregate: optional DISTINCT, IGNORE/RESPECT
    NULLS, ORDER/LIMIT/OFFSET, an OVER(...) analytic clause, and an
    optional trailing `[OFFSET(n)]` array subscript."""

    is_distinct: bool
    expr: SQLNode
    # 'IGNORE' or 'RESPECT' when a NULLS clause was present, else None.
    nulls: Optional[str]
    order_limit_offset: Optional[SQLOrderLimitOffset]
    analytic: Optional[SQLNode]
    # NOTE(review): consume() stores an SQLNumber node here and sqlf()
    # renders it via .sqlf(), so the original `Optional[int]` annotation
    # was inaccurate.
    offset: Optional[SQLNode]

    def sqlf(self, compact):
        """Render as layout blocks; compact mode returns the one-line form,
        otherwise a choice between one-line and stacked/indented forms."""
        lines = [TB('ARRAY_AGG(')]
        if self.is_distinct:
            lines.append(TB('DISTINCT '))
        lines.append(self.expr.sqlf(True))
        if self.nulls:
            lines.append(TB(self.nulls) + ' NULLS')
        if self.order_limit_offset:
            lines.append(self.order_limit_offset.sqlf(True))
        if self.analytic:
            lines.append(self.analytic.sqlf(True))
        lines.append(TB(')'))
        if self.offset:
            lines.append(TB('[OFFSET('))
            lines.append(TB(' '))
            lines.append(self.offset.sqlf(compact))
            lines.append(TB(')]'))
        compact_sql = LB(lines)
        if compact:
            return compact_sql
        # Pretty form: the aggregate's pieces stacked inside an indent block.
        stack = [TB('ARRAY_AGG(')]
        indent = []
        if self.is_distinct:
            indent.append(
                LB([TB('DISTINCT '), self.expr.sqlf(compact)]))
        else:
            indent.append(self.expr.sqlf(compact))
        if self.nulls:
            indent.append(TB(self.nulls) + ' NULLS')
        if self.order_limit_offset:
            indent.append(self.order_limit_offset.sqlf(compact))
        if self.analytic:
            indent.append(self.analytic.sqlf(compact))
        stack.append(IB(SB(indent)))
        stack.append(TB(')'))
        if self.offset:
            stack.append(TB('[OFFSET('))
            stack.append(TB(' '))
            stack.append(self.offset.sqlf(compact))
            stack.append(TB(')]'))
        return CB([
            compact_sql,
            SB(stack)
        ])

    @staticmethod
    def consume(lex) -> 'Optional[SQLArrayAgg]':
        """Parse `ARRAY_AGG(...)` (with its optional clauses and trailing
        `[OFFSET(n)]` subscript), or return None if the keyword is absent."""
        if not lex.consume('ARRAY_AGG'):
            return None
        lex.expect('(')
        is_distinct = bool(lex.consume('DISTINCT'))
        expr = SQLExpr.parse(lex)
        nulls = None
        if lex.consume('IGNORE'):
            nulls = 'IGNORE'
            lex.expect('NULLS')
        elif lex.consume('RESPECT'):
            nulls = 'RESPECT'
            lex.expect('NULLS')
        order_limit_offset = SQLOrderLimitOffset.consume(lex)
        analytic = SQLAnalytic.consume(lex)
        lex.expect(')')
        offset = None
        if lex.consume('['):
            lex.expect('OFFSET')
            lex.consume('(')
            offset = SQLNumber.parse(lex)
            lex.consume(')')
            lex.expect(']')
        return SQLArrayAgg(is_distinct, expr, nulls,
                           order_limit_offset, analytic, offset)
@dataclass(frozen=True)
class SQLStringAgg(SQLExpr):
    """A `STRING_AGG(...)` or `SPLIT(...)` expression: optional DISTINCT,
    delimiter, NULLS handling, ORDER/LIMIT/OFFSET, OVER(...) clause, and an
    optional trailing subscript (`[n]` or `[NAME(n)]`)."""

    # The consumed keyword: 'STRING_AGG' or 'SPLIT'.
    name: str
    is_distinct: bool
    expr: SQLNode
    # NOTE(review): consume() stores the SQLConstant node here, not a plain
    # string as originally annotated.
    delimiter: Optional[SQLConstant]
    # 'IGNORE' or 'RESPECT' when a NULLS clause was present, else None.
    nulls: Optional[str]
    order_limit_offset: Optional[SQLOrderLimitOffset]
    analytic: Optional[SQLNode]
    # NOTE(review): SQLConstant node (the subscript's function name), not str.
    analytic_name: Optional[SQLConstant]
    # NOTE(review): SQLNumber node, not int.
    number: Optional[SQLNumber]

    def sqlf(self, compact):
        """Render as layout blocks; compact mode returns the one-line form,
        otherwise a choice between one-line and stacked/indented forms."""
        lines = [TB('{}('.format(self.name))]
        if self.is_distinct:
            lines.append(TB('DISTINCT '))
        lines.append(self.expr.sqlf(True))
        if self.delimiter:
            # NOTE(review): every other node renders via .sqlf(); confirm
            # SQLConstant really exposes as_sql() — possible latent bug.
            lines.append(TB(', ' + self.delimiter.as_sql(compact)))
        if self.nulls:
            lines.append(TB(self.nulls) + ' NULLS')
        if self.order_limit_offset:
            lines.append(self.order_limit_offset.sqlf(True))
        if self.analytic:
            lines.append(self.analytic.sqlf(True))
        lines.append(TB(')'))
        if self.number:
            if self.analytic_name:
                lines.append(TB('[{}('.format(self.analytic_name)))
                lines.append(TB(' '))
                lines.append(self.number.sqlf(compact))
                lines.append(TB(')]'))
            else:
                lines.append(TB('['))
                lines.append(self.number.sqlf(compact))
                lines.append(TB(']'))
        compact_sql = LB(lines)
        if compact:
            return compact_sql
        # Pretty form: the aggregate's pieces stacked inside an indent block.
        stack = [TB('{}('.format(self.name))]
        indent = []
        if self.is_distinct:
            args = [TB('DISTINCT '), self.expr.sqlf(compact)]
        else:
            args = [self.expr.sqlf(compact)]
        if self.delimiter:
            args.append(TB(', ' + self.delimiter.as_sql(compact)))
        indent.append(LB(args))
        if self.nulls:
            indent.append(TB(self.nulls) + ' NULLS')
        if self.order_limit_offset:
            indent.append(self.order_limit_offset.sqlf(compact))
        if self.analytic:
            indent.append(self.analytic.sqlf(compact))
        stack.append(IB(SB(indent)))
        stack.append(TB(')'))
        if self.number:
            if self.analytic_name:
                stack.append(TB('[{}('.format(self.analytic_name)))
                stack.append(TB(' '))
                stack.append(self.number.sqlf(compact))
                stack.append(TB(')]'))
            else:
                stack.append(TB('['))
                stack.append(self.number.sqlf(compact))
                stack.append(TB(']'))
        return CB([
            compact_sql,
            SB(stack)
        ])

    @staticmethod
    def consume(lex) -> 'Optional[SQLStringAgg]':
        """Parse `STRING_AGG(...)` / `SPLIT(...)` with optional clauses and
        trailing subscript, or return None if neither keyword is present."""
        name = lex.consume('STRING_AGG') or lex.consume('SPLIT')
        if not name:
            return None
        lex.expect('(')
        is_distinct = bool(lex.consume('DISTINCT'))
        expr = SQLExpr.parse(lex)
        delimiter = None
        if lex.consume(','):
            delimiter = SQLConstant.consume(lex)
        nulls = None
        if lex.consume('IGNORE'):
            nulls = 'IGNORE'
            lex.expect('NULLS')
        elif lex.consume('RESPECT'):
            nulls = 'RESPECT'
            lex.expect('NULLS')
        order_limit_offset = SQLOrderLimitOffset.consume(lex)
        analytic = SQLAnalytic.consume(lex)
        lex.expect(')')
        analytic_name = None
        number = None
        if lex.consume('['):
            analytic_name = SQLConstant.consume(lex)
            lex.consume('(')
            number = SQLNumber.parse(lex)
            lex.consume(')')
            lex.expect(']')
        return SQLStringAgg(name, is_distinct, expr, delimiter, nulls,
                            order_limit_offset, analytic, analytic_name, number)
@dataclass(frozen=True)
class SQLExprWithAnalytic(SQLExpr):
    """A function-style expression paired with an OVER(...) analytic
    clause (e.g. `SUM(x) OVER (PARTITION BY y)`)."""

    function: SQLExpr
    analytic: SQLNode

    def sqlf(self, compact):
        compact_sqlf = LB([self.function.sqlf(compact), TB(' '),
                           self.analytic.sqlf(compact)])
        if compact:
            return compact_sqlf
        return CB([
            compact_sqlf,
            SB([self.function.sqlf(compact),
                self.analytic.sqlf(compact)])
        ])

    @staticmethod
    def parse(lex) -> 'SQLExpr':
        """Parse a base expression — a constant, one of the special
        aggregate/array forms, or an identifier path — optionally followed
        by a call-argument list and/or an OVER(...) clause. Returns a plain
        SQLExpr when no analytic clause follows."""
        # Try alternatives first
        expr: SQLExpr = (SQLConstant.consume(lex) or
                         SQLArrayLiteral.consume(lex) or
                         SQLArrayAgg.consume(lex) or
                         SQLStringAgg.consume(lex) or
                         SQLArraySelect.consume(lex) or
                         SQLCustomFuncs.consume(lex) or
                         SQLIdentifierPath.parse(lex))
        # If it is a SQLIdentifierPath, it may be
        # a normal function call.
        if isinstance(expr, SQLIdentifierPath) and lex.consume('('):
            # Parse as a function
            func_args: List[SQLExpr] = []
            if not lex.consume(')'):
                while True:
                    func_args.append(SQLExpr.parse(lex))
                    if not lex.consume(','):
                        break
                lex.expect(')')
            # Turn it into a function
            expr = SQLFuncExpr(expr.names, SQLNodeList(func_args))
        window = SQLAnalytic.consume(lex)
        if window:
            expr = SQLExprWithAnalytic(expr, window)
        return expr
@dataclass(frozen=True)
class SQLAnalytic(SQLExpr):
    """An `OVER (...)` analytic clause: PARTITION BY list, ORDER BY list,
    and an optional ROWS/RANGE window-frame description."""

    partition_by: SQLNodeList
    order_by: SQLNodeList
    # Textual frame spec (e.g. "ROWS BETWEEN ... AND ..."). consume() passes
    # None when no ROWS/RANGE clause is present, so the original plain `str`
    # annotation was inaccurate.
    range_desc: Optional[str]

    def sqlf(self, compact):
        lines = []
        lines.append(TB('OVER ('))
        if self.partition_by:
            lines.append(TB('PARTITION BY '))
            lines.extend(with_commas(True, self.partition_by, ','))
        if self.order_by:
            lines.append(TB(' ORDER BY '))
            lines.extend(with_commas(True, self.order_by, ','))
        if self.range_desc:
            lines.append(TB(self.range_desc))
        lines.append(TB(')'))
        if compact:
            return LB(lines)
        # Pretty form: each sub-clause indented on its own stacked block.
        full_sql = [
            TB('OVER ('),
        ]
        if self.partition_by:
            full_sql.append(
                IB(
                    SB([
                        TB('PARTITION BY'), IB(
                            WB(with_commas(True, self.partition_by)))
                    ])))
        if self.order_by:
            full_sql.append(
                IB(
                    SB([
                        TB('ORDER BY'), IB(
                            WB(with_commas(True, self.order_by)))
                    ])))
        if self.range_desc:
            full_sql.append(IB(TB(self.range_desc)))
        full_sql.append(TB(')'))
        r = CB([LB(lines), SB(full_sql)])
        return r

    @staticmethod
    def consume(lex) -> 'Optional[SQLAnalytic]':
        """Parse `OVER ( ... )`, or return None when OVER is absent."""
        if not lex.consume('OVER'):
            return None
        lex.expect('(')
        partition_by = []
        if lex.consume('PARTITION'):
            lex.expect('BY')
            while True:
                partition_by.append(SQLExpr.parse(lex))
                if not lex.consume(','):
                    break
        order_by = []
        if lex.consume('ORDER'):
            lex.expect('BY')
            while True:
                oby = SQLExpr.parse(lex)
                # ASC/DESC is consumed but currently dropped — each key is
                # stored without its direction.
                order = None
                if lex.consume('ASC'):
                    order = 'ASC'
                elif lex.consume('DESC'):
                    order = 'DESC'
                # TODO(scannell): Capture ASC/DESC
                order_by.append(oby)
                if not lex.consume(','):
                    break
        win_spec = (lex.consume('ROWS') or lex.consume('RANGE'))
        # If window specified, parse it out
        if win_spec:
            if lex.consume('BETWEEN'):
                win_spec += (' BETWEEN ' +
                             SQLAnalytic._parse_frame_boundary(lex))
                lex.expect('AND')
                win_spec += ' AND ' + SQLAnalytic._parse_frame_boundary(lex)
            else:
                win_spec += ' ' + SQLAnalytic._parse_frame_boundary(lex)
        lex.expect(')')
        # Return analytics function
        return SQLAnalytic(SQLNodeList(partition_by),
                           SQLNodeList(order_by),
                           win_spec)

    @staticmethod
    def _parse_frame_boundary(lex):
        """Parse one frame edge — UNBOUNDED PRECEDING/FOLLOWING,
        CURRENT ROW, or `<n> PRECEDING/FOLLOWING` — returning its SQL text."""
        if lex.consume('UNBOUNDED'):
            typ = lex.consume('PRECEDING') or lex.consume('FOLLOWING') or lex.error('Expected PRECEDING or FOLLOWING')
            return 'UNBOUNDED {}'.format(typ)
        if lex.consume('CURRENT'):
            lex.expect('ROW')
            return 'CURRENT ROW'
        num = SQLNumber.consume(lex)
        num_typ = (lex.consume('PRECEDING') or lex.consume('FOLLOWING') or
                   lex.error('Expected PRECEDING or FOLLOWING'))
        return '{} {}'.format(num, num_typ)
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_phonelink_off_outline = void 0;
var ic_phonelink_off_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0zm0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M22 6V4H7.39l2 2zm2 13V9c0-.55-.45-1-1-1h-6c-.55 0-1 .45-1 1v3.61l2 2V10h4v7h-1.61l2.93 2.93c.39-.13.68-.49.68-.93zM2.06 1.51L.65 2.92l1.82 1.82C2.18 5.08 2 5.52 2 6v11H0v3h17.73l2.35 2.35 1.41-1.41L2.06 1.51zM4 17V6.27L14.73 17H4z"
},
"children": []
}]
};
exports.ic_phonelink_off_outline = ic_phonelink_off_outline; |
<filename>pecado-ims/pecado-ims-web/src/main/java/me/batizhao/ims/service/UserRoleService.java
package me.batizhao.ims.service;
import com.baomidou.mybatisplus.extension.service.IService;
import me.batizhao.ims.api.domain.UserRole;
import java.util.List;
/**
 * Service contract for user-role assignments.
 *
 * @author batizhao
 * @since 2020-09-14
 */
public interface UserRoleService extends IService<UserRole> {

    /**
     * Update a user's role assignments with the given user-role links.
     *
     * @param userRoles the user-role links to persist
     * @return presumably {@code true} on success — confirm against the
     *         implementing class
     */
    Boolean updateUserRoles(List<UserRole> userRoles);
}
|
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Ads
module GoogleAds
module V7
module Resources
# A currency constant.
# @!attribute [r] resource_name
# @return [::String]
# Output only. The resource name of the currency constant.
# Currency constant resource names have the form:
#
# `currencyConstants/{code}`
# @!attribute [r] code
# @return [::String]
# Output only. ISO 4217 three-letter currency code, e.g. "USD"
# @!attribute [r] name
# @return [::String]
# Output only. Full English name of the currency.
# @!attribute [r] symbol
# @return [::String]
# Output only. Standard symbol for describing this currency, e.g. '$' for US Dollars.
# @!attribute [r] billable_unit_micros
# @return [::Integer]
# Output only. The billable unit for this currency. Billed amounts should be multiples of
# this value.
class CurrencyConstant
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
end
end
end
end
|
// Inline two-step confirmation control: the main button reveals a yes/no
// prompt; "Yes" runs the action (currently a stub) and both choices hide
// the prompt again.
@Component({
  selector: 'app-confirmation-button',
  template: `
    <button (click)="showConfirmation()">Confirm</button>
    <div *ngIf="showDialog">
      Are you sure you want to proceed?
      <button (click)="confirm()">Yes</button>
      <button (click)="cancel()">No</button>
    </div>
  `
})
export class ConfirmationButtonComponent {
  // Whether the confirmation prompt is currently visible.
  showDialog = false;

  // Reveals the yes/no prompt.
  showConfirmation() {
    this.showDialog = true;
  }

  // Runs the confirmed action, then hides the prompt.
  confirm() {
    // perform the action
    this.showDialog = false;
  }

  // Dismisses the prompt without acting.
  cancel() {
    this.showDialog = false;
  }
}
<gh_stars>1-10
def linha():
    """Print a divider of 80 '=' characters padded by blank lines."""
    print('\n' + '=' * 80 + '\n')
from datetime import date

# Military-enlistment age check: reads the birth year and reports whether
# the user is past, at, or before enlistment age (18).
linha()
ano = int(input('Ano de nascimento: '))
# Bug fix: the current year was hard-coded as 2021, so the program went
# stale after that year; derive it from the system clock instead.
ano_atual = date.today().year
idade = ano_atual - ano
print()
print(f'Você tem {idade} anos de idade.')
if idade > 18:
    print('Já passou do tempo de se alistar')
    p = ano_atual - (ano + 18)
    print(f'Faz {p} anos que você se alistou ou que deveria ter se alistado!')
elif idade == 18:
    print('Você deve se alistar esse ano!')
else:
    print('Você ainda é muito novo para se alistar!')
    p = (ano + 18) - ano_atual
    print(f'Você poderá se alistar daqui {p} anos.')
linha()
|
<reponame>biril/backbone-proxy
/*jshint browser:true, devel:true */
/*global define:false */
define(['backbone', 'backbone-proxy'], function (Backbone, BackboneProxy) {
  'use strict';
  return {
    // Demo: setting attributes through a BackboneProxy-derived model
    // triggers change listeners registered on the underlying model.
    run: function () {
      var User, user, UserProxy, userProxy;
      User = Backbone.Model.extend({
        defaults: {
          name: 'Anna',
          age: 23
        }
      });
      user = new User();
      // UserProxy wraps the concrete `user` instance.
      UserProxy = BackboneProxy.extend(user);
      userProxy = new UserProxy();
      // Listen on the real model...
      user.on('change', function () {
        alert('ok');
      });
      // ...and mutate through the proxy: the alert should fire.
      userProxy.set({ name: 'Betty' });
    }
  };
});
|
import { createStore, combineReducers, applyMiddleware } from 'redux';
import thunk from 'redux-thunk';
import messageReducer from './pf-lib/message/messageReducer';
import modalReducer from './pf-lib/modal/modalReducer';
import movieReducer from './movieRating/movieReducers';
import addRatingReducer from './addRating/addRatingReducer';
import { createForms } from 'react-redux-form';
import { initialFormUserState } from './addRating/addRatingReducer.js'
import { initialRatingsFormUserState } from './movieRating/movieReducers.js'
// App-wide redux store: UI reducers (modal/message), domain reducers
// (movies, ratings), plus react-redux-form state for the `display` and
// `ratings` forms. Thunk middleware enables async action creators.
export default createStore(
  combineReducers({
    modalReducer,
    messageReducer,
    movieReducer,
    addRatingReducer,
    ...createForms({
      display: initialFormUserState,
      ratings: initialRatingsFormUserState,
    })
  }),
  applyMiddleware(thunk)
);
|
// Vue CLI config: serve and build the app under the /fiks-validator/
// subpath (must match the hosting mount point, e.g. GitHub Pages).
module.exports = {
  publicPath: "/fiks-validator/"
};
|
#!/bin/sh
# https://docs.celeryproject.org/en/latest/userguide/workers.html
# Starts a Celery worker for the orm_blog task app. Log level and worker
# concurrency are overridable via CELERY_LEVEL (default INFO) and
# CELERY_CONCURRENCY (default 2).
set -o errexit
set -o nounset
celery -A orm_blog.taskapp worker \
    --loglevel=${CELERY_LEVEL:-INFO} \
    --concurrency=${CELERY_CONCURRENCY:-2}
|
class Params:
    """Provides default parameter presets for preprocessor types."""

    def __init__(self):
        # Scratch parameter storage; kept for interface parity.
        self._params = {}

    def load_preprocessor_params(self, preprocessor_type):
        """Return the preset dict for `preprocessor_type`, or an empty
        dict when the type is unknown."""
        presets = {
            "standard": {"mean": 0, "std": 1},
            "min_max": {"min_val": 0, "max_val": 1},
        }
        return presets.get(preprocessor_type, {})
# Example usage
params = Params()
preprocessor_type = "standard"
params_data = params.load_preprocessor_params(preprocessor_type)
print(params_data) |
<reponame>mvakili/ngx-magic
import { Directive, Input, Renderer2, ElementRef } from '@angular/core';
import {OrderDirection} from './../models/enum';
// Attribute directive: reflects a sort direction on the host element by
// swapping ionicons arrow classes (Descending -> up arrow, Ascending ->
// down arrow, null -> no arrow).
@Directive({
  selector: '[setDirection]'
})
export class DirectionDirective {
  constructor(private renderer: Renderer2, private el: ElementRef) { }

  // Last direction value applied to the host.
  _direction: number;

  @Input('setDirection')
  set direction(direction: number) {
    this._direction = direction;
    // Clear both arrow classes first so only one (or none) remains.
    this.renderer.removeClass(this.el.nativeElement, 'ion-arrow-down-b');
    this.renderer.removeClass(this.el.nativeElement, 'ion-arrow-up-b');
    if (this._direction != null) {
      if (this._direction === OrderDirection.Descending) {
        this.renderer.addClass(this.el.nativeElement, 'ion-arrow-up-b');
      }
      if (this._direction === OrderDirection.Ascending) {
        this.renderer.addClass(this.el.nativeElement, 'ion-arrow-down-b');
      }
    }
  }
}
|
import React, { useState } from 'react';
// Renders an input for n and, on submit, the first n Fibonacci numbers.
function Fibonacci() {
  const [input, setInput] = useState('');
  const [result, setResult] = useState('');

  function handleChange(e) {
    setInput(e.target.value);
  }

  function handleSubmit(e) {
    e.preventDefault();
    // Bug fix: the previous code compared the raw string `input` against
    // numeric loop indices and always seeded two terms, so n = 1 (and even
    // empty input) displayed "0, 1". Parse explicitly and clamp the seed.
    const n = parseInt(input, 10);
    if (Number.isNaN(n) || n <= 0) {
      setResult('');
      return;
    }
    const arr = [0, 1].slice(0, Math.min(n, 2));
    for (let i = 2; i < n; i++) {
      arr.push(arr[i - 2] + arr[i - 1]);
    }
    setResult(arr.join(", "));
  }

  return (
    <div>
      <form onSubmit={handleSubmit}>
        <input type="number" value={input} onChange={handleChange} />
        <button type="submit">Generate</button>
      </form>
      <h2>Fibonacci Series: {result}</h2>
    </div>
  );
}
export default Fibonacci; |
<reponame>seek-oss/scoobie
import 'braid-design-system/reset';
import 'loki/configure-react';
import React from 'react';
import { ReactNode } from 'react';
import {
BraidArgs,
MdxArgs,
defaultArgTypes,
defaultArgs,
} from '../storybook/controls';
import {
BraidStorybookProvider,
MdxStorybookProvider,
withRouter,
} from '../storybook/decorators';
import BlockquoteMarkdown from '../storybook/markdown/blockquote.mdx';
import CodeMarkdown from '../storybook/markdown/code.mdx';
import HeadingMarkdown from '../storybook/markdown/heading.mdx';
import ImageExternalMarkdown from '../storybook/markdown/image-external.mdx';
import ImageInternalMarkdown from '../storybook/markdown/image-internal.mdx';
import InlineReactMarkdown from '../storybook/markdown/inline-react.mdx';
import ListMarkdown from '../storybook/markdown/list.mdx';
import MermaidErMarkdown from '../storybook/markdown/mermaid-er.mdx';
import MermaidFlowchartMarkdown from '../storybook/markdown/mermaid-flowchart.mdx';
import MermaidSequenceMarkdown from '../storybook/markdown/mermaid-sequence.mdx';
import TableMarkdown from '../storybook/markdown/table.mdx';
import { MdxProvider } from './MdxProvider';
// Storybook meta for MdxProvider: shared controls for the Braid theme and
// MDX text size; every story is wrapped in a memory router.
export default {
  args: {
    braidThemeName: defaultArgs.braidThemeName,
    mdxSize: defaultArgs.mdxSize,
  },
  argTypes: {
    braidThemeName: defaultArgTypes.braidThemeName,
    mdxSize: defaultArgTypes.mdxSize,
  },
  component: MdxProvider,
  decorators: [withRouter],
  title: 'MDX/MdxProvider',
};
// Story args: the markdown fixture to render plus Braid/MDX control values.
type Args = { children: ReactNode } & BraidArgs & MdxArgs;

// Shared wrapper giving each story its Braid theme and MDX sizing context.
const Provider = ({ braidThemeName, children, mdxSize }: Args) => (
  <BraidStorybookProvider braidThemeName={braidThemeName}>
    <MdxStorybookProvider mdxSize={mdxSize}>{children}</MdxStorybookProvider>
  </BraidStorybookProvider>
);
// One story per markdown fixture; each simply renders its fixture through
// the shared Provider so the controls apply uniformly.
export const Blockquote = (args: Args) => (
  <Provider {...args}>
    <BlockquoteMarkdown />
  </Provider>
);
export const Code = (args: Args) => (
  <Provider {...args}>
    <CodeMarkdown />
  </Provider>
);
export const Heading = (args: Args) => (
  <Provider {...args}>
    <HeadingMarkdown />
  </Provider>
);
export const ImageExternal = (args: Args) => (
  <Provider {...args}>
    <ImageExternalMarkdown />
  </Provider>
);
// Skipped in loki visual tests — presumably because the external image is
// not deterministic; confirm before relying on snapshot coverage here.
ImageExternal.parameters = { loki: { skip: true } };
ImageExternal.storyName = 'Image › External';
export const ImageInternal = (args: Args) => (
  <Provider {...args}>
    <ImageInternalMarkdown />
  </Provider>
);
ImageInternal.storyName = 'Image › Internal';
export const InlineReact = (args: Args) => (
  <Provider {...args}>
    <InlineReactMarkdown />
  </Provider>
);
export const Lists = (args: Args) => (
  <Provider {...args}>
    <ListMarkdown />
  </Provider>
);
export const MermaidEr = (args: Args) => (
  <Provider {...args}>
    <MermaidErMarkdown />
  </Provider>
);
MermaidEr.storyName = 'Mermaid › ER';
export const MermaidFlowchart = (args: Args) => (
  <Provider {...args}>
    <MermaidFlowchartMarkdown />
  </Provider>
);
MermaidFlowchart.storyName = 'Mermaid › Flowchart';
export const MermaidSequence = (args: Args) => (
  <Provider {...args}>
    <MermaidSequenceMarkdown />
  </Provider>
);
MermaidSequence.storyName = 'Mermaid › Sequence';
export const Table = (args: Args) => (
  <Provider {...args}>
    <TableMarkdown />
  </Provider>
);
|
#! /bin/bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# A script that updates the bindgen library manually.
#
# Please refer to the file README.md
#
# Requirements:
# - bash
# - awk
# - tr
# - llvm-dev package (for llvm-config that bindgen needs)
set -eo pipefail

# The directory into which the rust bindings file will be written. If left
# unset, the current directory is the default.
OUTPUT_DIR=${OUTPUT_DIR:-.}

# The list of unicode source headers to generate bindings for. This list is
# intended to be kept in sync with the static variable by the same name in the
# build.rs file. List order determines the #include order in the generated
# temporary header.
readonly BINDGEN_SOURCE_MODULES=(
  "ubrk"
  "ucal"
  "uclean"
  "ucnv"
  "ucol"
  "udat"
  "udatpg"
  "udata"
  "uenum"
  "ufieldpositer"
  "uformattable"
  "ulistformatter"
  "umisc"
  "umsg"
  "unum"
  "unumberformatter"
  "upluralrules"
  "uset"
  "ustring"
  "utext"
  "utrans"
  "unorm2"
)
# Types for which to generate the bindings. Expand this list if you need more.
# The syntax is regex. This list is intended to be kept in sync with the static
# variable by the same name in the build.rs file.
#
# NOTE: bash arrays are whitespace-separated; do NOT add commas between
# entries — a trailing comma becomes part of the pattern string.
readonly BINDGEN_ALLOWLIST_TYPES=(
  "UAcceptResult"
  "UBool"
  "UBreakIterator"
  "UBreakIteratorType"
  "UCalendar.*"
  "UChar.*"
  "UCol.*"
  "UCollation.*"
  "UCollator"
  "UConverter.*"
  "UData.*"
  "UDate.*"
  "UDateTime.*"
  "UDateFormat.*"
  "UDisplayContext.*"
  "UEnumeration.*"
  "UErrorCode"
  "UField.*"
  "UFormat.*"
  "UFormattedList.*"
  "ULOC.*"
  "ULineBreakTag"
  "UListFormatter.*"
  "ULoc.*"
  "UMessageFormat"
  "UNUM.*"
  "UNumber.*"
  "UParseError"
  "UPlural.*"
  "USentenceBreakTag"
  "USet"
  "UText"
  "UTransDirection"
  "UTransPosition"
  "UTransliterator"
  "UWordBreak"
  "UNorm.*"
)
# Functions for which to generate the bindings. Expand this list if you need
# more. This list is intended to be kept in sync with the static variable by
# the same name in the build.rs file. The syntax is regex; entries are joined
# with "|" in main() before being passed to bindgen.
readonly BINDGEN_ALLOWLIST_FUNCTIONS=(
  "u_.*"
  "ubrk_.*"
  "ucal_.*"
  "ucnv_.*"
  "ucol_.*"
  "udat_.*"
  "udatpg_.*"
  "udata_.*"
  "uenum_.*"
  "ufieldpositer_.*"
  "ufmt_.*"
  "ulistfmt_.*"
  "uloc_.*"
  "umsg_.*"
  "unum_.*"
  "unumf_.*"
  "uplrules_.*"
  "utext_.*"
  "utrans_.*"
  "unorm2_.*"
)
# Verifies that all external tools required for binding generation are
# available on PATH. Aborts the whole script (exit 1) with a hint when one is
# missing.
#
# Fix: the fallbacks used "( ...; exit 1 )", which only exits the subshell;
# with braces the exit terminates the script itself regardless of `set -e`.
function check_requirements() {
  icu-config --version &> /dev/null || \
    { echo "The generator requires icu-config to be in PATH; see README.md"; exit 1; }
  bindgen --version &> /dev/null || \
    { echo "The generator requires bindgen to be in PATH; see README.md"; exit 1; }
  awk --version &> /dev/null || \
    { echo "The generator requires awk to be installed; see README.md"; exit 1; }
  llvm-config --version &> /dev/null || \
    { echo "The generator requires llvm-config (package llvm-dev) to be installed; see README.md"; exit 1; }
}
# Generates a temporary header file to be supplied to bindgen for binding
# generation: one #include line per module in BINDGEN_SOURCE_MODULES, resolved
# against the icu-config include directory. The file is automatically removed
# upon exit, so if you are debugging you may want to remove that part.
#
# Fix: quoted all expansions (paths may contain spaces) and used rm -f so the
# EXIT trap cannot itself fail.
function generate_header_file() {
  MAIN_HEADER_FILE="$(mktemp --suffix=.h)"
  echo "MAIN_HEADER_FILE=${MAIN_HEADER_FILE}"
  # Single quotes defer expansion to trap time; the variable is already set.
  trap 'rm -f "${MAIN_HEADER_FILE}"' EXIT
  readonly ICU_INCLUDE_DIR="$(icu-config --prefix)/include/unicode"
  echo "// Automatically generated by run_bindgen.sh, DO NOT EDIT. " > "${MAIN_HEADER_FILE}"
  for module in "${BINDGEN_SOURCE_MODULES[@]}"; do
    echo "#include \"${ICU_INCLUDE_DIR}/${module}.h\"" >> "${MAIN_HEADER_FILE}"
  done
}
# Drives the generation: checks tools, builds the temp header, then invokes
# bindgen with the allowlisted types/functions and writes the bindings to
# ${OUTPUT_DIR}/lib_<icu major version>.rs.
function main() {
  check_requirements
  generate_header_file
  # Joins all with a |, so ("a" "b") becomes "a|b" — bindgen takes one regex.
  local _allowlist_types_concat="$(\
    echo "${BINDGEN_ALLOWLIST_TYPES[@]}" | tr ' ' '|')"
  local _functions_concat="$(\
    echo "${BINDGEN_ALLOWLIST_FUNCTIONS[@]}" | tr ' ' '|')"
  set -x
  # Example: "67.1", "66.0.1"
  local _icu_version="$(icu-config --version)"
  # Strip up to two trailing ".x" components to get the major version,
  # e.g. "66.0.1" -> "66". (Was declared `local` twice; second strip is a
  # plain assignment now.)
  local _icu_version_major="${_icu_version%.*}"
  _icu_version_major="${_icu_version_major%.*}"
  # Respectful code hack: decodes to the legacy bindgen flag name so the
  # non-inclusive term does not appear verbatim in this source file.
  local _allowlist="$(echo "d2hpdGVsaXN0Cg==" | base64 -d -)"
  bindgen \
    --default-enum-style=rust \
    --no-doc-comments \
    --with-derive-default \
    --with-derive-hash \
    --with-derive-partialord \
    --with-derive-partialeq \
    --"${_allowlist}"-type="${_allowlist_types_concat}" \
    --"${_allowlist}"-function="${_functions_concat}" \
    --opaque-type="" \
    --output="${OUTPUT_DIR}/lib_${_icu_version_major}.rs" \
    "${MAIN_HEADER_FILE}" \
    -- \
    $(icu-config --cppflags)
}

main
|
<filename>commonlib/src/main/java/com/common/biz/message/MessageApi.java
package com.common.biz.message;
/**
 * Marker interface for the common message module's public API.
 *
 * <p>NOTE(review): currently declares no methods — presumably implementations
 * or endpoint definitions live elsewhere; confirm this is intentional.
 *
 * @author Administrator
 */
public interface MessageApi {
}
|
package com.backend.fitpet.model;
import java.io.Serializable;
import java.util.Date;
/**
* Created by David on 7/11/2015.
*/
/**
 * Serializable value object describing a pet listing: display data (name,
 * description, avatar), pricing, and lifecycle dates.
 *
 * Created by David on 7/11/2015.
 */
public class Pet implements Serializable {

    // Explicit serialVersionUID keeps the serialized form stable across
    // compiler/field refactors (required best practice for Serializable).
    private static final long serialVersionUID = 1L;

    private String name;
    // NOTE(review): double is lossy for monetary values; consider BigDecimal
    // if price represents currency.
    private double price;
    private Date expiryDate;
    private String description;
    private boolean enabled;
    private Date createDate;
    private Date updateDate;
    // Avatar image reference (URL or path — confirm with callers).
    private String avatar;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public double getPrice() {
        return price;
    }

    public void setPrice(double price) {
        this.price = price;
    }

    public Date getExpiryDate() {
        return expiryDate;
    }

    public void setExpiryDate(Date expiryDate) {
        this.expiryDate = expiryDate;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public Date getCreateDate() {
        return createDate;
    }

    public void setCreateDate(Date createDate) {
        this.createDate = createDate;
    }

    public Date getUpdateDate() {
        return updateDate;
    }

    public void setUpdateDate(Date updateDate) {
        this.updateDate = updateDate;
    }

    public String getAvatar() {
        return avatar;
    }

    public void setAvatar(String avatar) {
        this.avatar = avatar;
    }
}
|
<gh_stars>0
/*
* Copyright (c) 2019 Ford Motor Company
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*
*/
package com.ford.labs.daab.publishers.job.jenkins;
import com.ford.labs.daab.config.event.properties.EventProperties;
import com.ford.labs.daab.config.event.properties.job.JenkinsJob;
import com.ford.labs.daab.config.event.properties.job.JenkinsJobProperties;
import com.ford.labs.daab.config.event.properties.job.JobProperties;
import com.ford.labs.daab.event.JobEvent;
import com.ford.labs.daab.publishers.EventPublishingService;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.util.Base64Utils;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Optional;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Collections.emptyList;
/**
 * Polls the configured Jenkins jobs on a fixed schedule and publishes each
 * job's state as a {@link JobEvent} through the {@link EventPublishingService}.
 */
@Service
public class JenkinsJobPublisher {
    private final EventPublishingService eventPublishingService;
    private final WebClient client;
    private final EventProperties eventProperties;

    public JenkinsJobPublisher(
            EventPublishingService eventPublishingService,
            WebClient client,
            EventProperties eventProperties) {
        this.eventPublishingService = eventPublishingService;
        this.client = client;
        this.eventProperties = eventProperties;
    }

    /**
     * Polls every configured Jenkins job (every 30 seconds) and publishes one
     * event per job. Any error in fetching or mapping a job is converted into
     * an event with status UNKNOWN rather than aborting the sweep. Jobs are
     * processed sequentially: each reactive pipeline is block()ed before the
     * next job starts.
     */
    @Scheduled(fixedRate = 30000)
    public void pollJobs() {
        for (var job : getJenkinsJobs()) {
            makeRequest(job)
                    .flatMap(response -> buildJenkinsJobEvent(job, response))
                    .onErrorResume(error -> {
                        // Degrade to an UNKNOWN-status event on any failure.
                        var event = new JobEvent();
                        event.setId("job.jenkins." + job.getId());
                        event.setName(job.getName());
                        event.setStatus(JobEvent.Status.UNKNOWN);
                        event.setTime(null);
                        return Mono.just(event);
                    })
                    .flatMap(eventPublishingService::publish)
                    .block();
        }
    }

    /** Fetches the job summary JSON from the job's configured URL. */
    private Mono<JenkinsJobResponse> makeRequest(JenkinsJob job) {
        return client.get()
                .uri(job.getUrl())
                .header("Authorization", buildBasicAuthHeader())
                .retrieve()
                .bodyToMono(JenkinsJobResponse.class);
    }

    /** Fetches a single build's JSON from the given absolute URL. */
    private Mono<JenkinsBuildResponse> makeBuildRequest(String url) {
        return client.get()
                .uri(url)
                .header("Authorization", buildBasicAuthHeader())
                .retrieve()
                .bodyToMono(JenkinsBuildResponse.class);
    }

    /**
     * Builds an HTTP Basic Authorization header value from the configured
     * Jenkins username and API token ("Basic base64(user:token)").
     */
    private String buildBasicAuthHeader() {
        return String.format(
                "Basic %s",
                Base64Utils.encodeToString((String.format(
                        "%s:%s",
                        this.eventProperties.getJob().getJenkins().getUsername(),
                        this.eventProperties.getJob().getJenkins().getToken())).getBytes(UTF_8)
                )
        );
    }

    /**
     * Returns the configured Jenkins jobs, or an empty list when any level of
     * the job/jenkins configuration is absent.
     */
    private List<JenkinsJob> getJenkinsJobs() {
        return Optional.of(this.eventProperties)
                .map(EventProperties::getJob)
                .map(JobProperties::getJenkins)
                .map(JenkinsJobProperties::getJobs)
                .orElse(emptyList());
    }

    /**
     * Maps a Jenkins job response onto a JobEvent. When the job has no builds
     * the event carries only id/name/status; otherwise the latest build's URL
     * and ISO-formatted timestamp are attached via a second request.
     * NOTE(review): getBuilds() and getColor() are assumed non-null here —
     * confirm the response type guarantees that.
     */
    private Mono<JobEvent> buildJenkinsJobEvent(JenkinsJob job, JenkinsJobResponse jenkinsJobResponse) {
        var event = new JobEvent();
        event.setId("job.jenkins." + job.getId());
        event.setName(job.getName());
        event.setStatus(getBuildStatus(jenkinsJobResponse));
        if (jenkinsJobResponse.getBuilds().size() == 0) {
            return Mono.just(event);
        }
        // builds[0] is taken as the most recent build.
        String lastBuildUrl = jenkinsJobResponse.getBuilds().get(0).getUrl();
        event.setUrl(lastBuildUrl);
        return getBuildTime(lastBuildUrl)
                .map(time -> time.format(DateTimeFormatter.ISO_DATE_TIME))
                .map(formattedTime -> {
                    event.setTime(formattedTime);
                    return event;
                });
    }

    /**
     * Resolves the latest build's start time (epoch millis from the build's
     * api/json endpoint) as a UTC OffsetDateTime.
     */
    private Mono<OffsetDateTime> getBuildTime(String lastBuildUrl) {
        return makeBuildRequest(String.format("%sapi/json", lastBuildUrl))
                .map(JenkinsBuildResponse::getTimestamp)
                .map(Instant::ofEpochMilli)
                .map(instant -> instant.atOffset(ZoneOffset.UTC));
    }

    /**
     * Translates the Jenkins ball "color" into a job status:
     * "disabled" -> DISABLED, any "*_anime" (blinking) -> IN_PROGRESS,
     * "blue" -> SUCCESS, anything else -> FAILURE.
     */
    private JobEvent.Status getBuildStatus(JenkinsJobResponse jenkinsJobResponse) {
        if (jenkinsJobResponse.getColor().equals("disabled")) {
            return JobEvent.Status.DISABLED;
        }
        if (jenkinsJobResponse.getColor().endsWith("_anime")) {
            return JobEvent.Status.IN_PROGRESS;
        }
        if (jenkinsJobResponse.getColor().equals("blue")) {
            return JobEvent.Status.SUCCESS;
        }
        return JobEvent.Status.FAILURE;
    }
}
|
def jaccard_index(str1, str2):
    """Return the Jaccard similarity of the character sets of two strings.

    The index is ``|A & B| / |A | B|`` where ``A`` and ``B`` are the sets of
    characters occurring in ``str1`` and ``str2``.

    Two empty strings are defined as identical (1.0) instead of raising
    ``ZeroDivisionError`` as the original implementation did.

    :param str1: first string (any iterable of hashable items works).
    :param str2: second string.
    :return: float in [0.0, 1.0].
    """
    s1 = set(str1)
    s2 = set(str2)
    union = s1 | s2
    if not union:
        # Both inputs empty: by convention the sets are identical.
        return 1.0
    return len(s1 & s2) / len(union)
<filename>src/reader/test_cases/test_unbound_bible_import.py<gh_stars>1-10
from . import TestReader
from reader.importer.unbound_bible import UnboundBibleTextImporter
from reader.models import Author, Work, Division, Verse
from reader.importer.batch_import import JSONImportPolicy
from reader import language_tools
class TestUnboundBibleImport(TestReader):
    """Tests for ``UnboundBibleTextImporter``: file import, import policies,
    and book-name resolution.

    Note: the deprecated ``assertEquals`` alias (removed in Python 3.12) has
    been replaced with ``assertEqual`` throughout.
    """

    def setUp(self):
        self.importer = UnboundBibleTextImporter()

    def test_import_file(self):
        """Import into a pre-created work and verify divisions and verses."""
        work = Work(title="LXX (Septuagint)", title_slug="lxx")
        work.save()
        self.importer.work = work
        self.importer.import_file(self.get_test_resource_file_name("lxx_a_accents_utf8.txt"))
        genesis = Division.objects.filter(work=work)[0]
        chapter_1 = Division.objects.filter(work=work)[1]
        chapter_2 = Division.objects.filter(work=work)[2]
        chapter_3 = Division.objects.filter(work=work)[3]
        self.assertEqual(Division.objects.filter(work=work).count(), 4)
        self.assertEqual(Verse.objects.count(), 65)
        self.assertEqual(Verse.objects.filter(indicator="1")[0].content, language_tools.normalize_unicode("ἐν ἀρχῇ ἐποίησεν ὁ θεὸς τὸν οὐρανὸν καὶ τὴν γῆν"))
        self.assertEqual(Verse.objects.filter(division=chapter_1).count(), 31)
        self.assertEqual(Verse.objects.filter(division=chapter_2).count(), 25)
        self.assertEqual(Verse.objects.filter(division=chapter_3).count(), 9)
        self.assertEqual(genesis.title, "Genesis")
        self.assertEqual(genesis.title_slug, "genesis")
        # Make sure the sequence numbers increase monotonically across books
        # and the chapters nested under each book.
        num = 0
        for book in Division.objects.filter(readable_unit=False).order_by('sequence_number'):
            num = num + 1
            self.assertEqual(book.sequence_number, num, str(book) + " does not have the expected sequence number (%i versus expected %i)" % (book.sequence_number, num))
            for chapter in Division.objects.filter(parent_division=book).order_by('sequence_number'):
                num = num + 1
                self.assertEqual(chapter.sequence_number, num, str(chapter) + " does not have the expected sequence number (%i versus expected %i)" % (chapter.sequence_number, num))

    def test_import_file_work_not_precreated(self):
        """Without a pre-created work, the title comes from the file header."""
        self.importer.import_file(self.get_test_resource_file_name("lxx_a_accents_utf8.txt"))
        self.assertEqual(self.importer.work.title, "Greek OT: LXX")

    def test_import_file_with_policy(self):
        """A JSON import policy can override the work's title and language."""
        import_policy_file = self.get_test_resource_file_name("unbound_bible_import_policy.json")
        import_policy = JSONImportPolicy()
        import_policy.load_policy(import_policy_file)
        self.importer.import_policy = import_policy.should_be_processed
        self.importer.import_file(self.get_test_resource_file_name("lxx_a_accents_utf8.txt"))
        self.assertEqual(self.importer.work.title, "Septuagint (LXX)")
        self.assertEqual(self.importer.work.language, "Greek")

    def test_load_book_names(self):
        book_names = self.importer.load_book_names(self.get_test_resource_file_name("book_names.txt"))
        self.assertEqual(book_names["01O"], "Genesis")

    def test_find_and_load_book_names_same_dir(self):
        """Book names are discovered next to the text file being imported."""
        book_names = self.importer.find_and_load_book_names(self.get_test_resource_file_name("lxx_a_accents_utf8.txt"))
        self.assertEqual(book_names["01O"], "Genesis")

    def test_find_and_load_book_names_constructor_arg(self):
        """An explicit book_names_file constructor argument takes effect."""
        self.importer = UnboundBibleTextImporter(book_names_file=self.get_test_resource_file_name("book_names.txt"))
        book_names = self.importer.find_and_load_book_names()
        self.assertEqual(book_names["01O"], "Genesis")

    def test_get_name_from_comment(self):
        name = self.importer.get_name_from_comment("#name\tGreek NT: Westcott/Hort, UBS4 variants")
        self.assertEqual(name, "Greek NT: Westcott/Hort, UBS4 variants")

    def test_get_name_from_comment_truncated(self):
        """Bracketed qualifiers in the name comment are stripped."""
        name = self.importer.get_name_from_comment("#name\tGreek OT: LXX [A] Accented")
        self.assertEqual(name, "Greek OT: LXX")
|
#!/bin/bash
# lokeshjindal15
# use /system/bin/sh with Asimbench disk image
# use /bin/bash with arm_ubuntu_natty_headless disk image
#
# This is a tricky script to understand. When run in M5, it creates
# a checkpoint after Linux boot up, but before any benchmarks have
# been run. By playing around with environment variables, we can
# detect whether the checkpoint has been taken.
# - If the checkpoint hasn't been taken, the script allows M5 to checkpoint the system,
# re-read this script into a new tmp file, and re-run it. On the
# second execution of this script (checkpoint has been taken), the
# environment variable is already set, so the script will exit the
# simulation
# - When we restore the simulation from a checkpoint, we can
# specify a new script for M5 to execute in the full-system simulation,
# and it will be executed as if a checkpoint had just been taken.
#
# Original Author:
# Joel Hestness, hestness@cs.utexas.edu
# while at AMD Research and Advanced Development Lab
# Date:
# 10/5/2010
#
#*********************************
# Modified by:
# Lokesh Jindal
# March, 2015
# lokeshjindal15@cs.wisc.edu
#*********************************
#################################################################################
# Tips:
# 1. If restoring from a previous ckpt created using this script and
# want to create a second ckpt using this script,
# make sure you rename RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES to a different variable that was not defined
# in the script used to create the first ckpt.
# 2. make sure to check what to use
# /bin/bash for ARM based disk image
# /system/bin/sh for x86 base disk image
# or something else. mount and check your disk image...
# 3. while reading a supplied script and writing to location '/tmp/runscript12.sh'
# use appropriate directory ('tmp').
# again mount and check your disk image...
#################################################################################
# Test if the RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES environment variable is already set
echo "***** Start TUX12 ckpt script! *****"
# First pass (variable unset): mark ourselves, fall through to network setup.
# Later passes (variable set): reload the next script via m5 readfile and run
# it, or exit the simulation if none was supplied.
if [ "${RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES+set}" != set ]
then
# Signal our future self that it's safe to continue
echo "RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES not set! So setting up network. Then reload self, don't set up network and do this again n again n again n ..."
export RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES=1
else
echo "Else part - RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES is set! So reload self and execute!"
echo "Else part - Loading new script..."
/sbin/m5 readfile > /tmp/runscript12.sh
chmod 755 /tmp/runscript12.sh
# Execute the new runscript
if [ -s /tmp/runscript12.sh ]
then
#/system/bin/sh /data/runscript12.sh
echo "Else part - executing newly loaded script ..."
/bin/bash /tmp/runscript12.sh
else
echo "Else part - Script not specified. Dropping into shell..."
echo "Else part - Exiting..."
/sbin/m5 exit
fi
fi
#############################################################################
# MODIFY IN THIS SECTION
# Add what you want to do after booting/restoring from a primary checkpoint
# and before taking the desired checkpoint
#busybox sleep 600
echo "Setting up network now ..."
echo "1. RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES is $RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES"
# Static network identity for this node (node 14 of the 16-node setup).
ifconfig eth0 10.0.0.14
ifconfig lo up
ifconfig eth0 hw ether 00:90:00:00:00:0d
#############################################################################
#THIS IS WHERE EXECUTION BEGINS FROM AFTER RESTORING FROM CKPT CREATED USING THIS SCRIPT
# Test if we previously okayed ourselves to run this script
echo "2. RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES is $RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES"
if [ "$RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES" -eq 1 ]
then
# Signal our future self not to recurse infinitely
export RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES=2
echo "3. RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES is $RUNSCRIPT_VAR_TUX12_NPB_CKPT_16NODES"
# Read the script for the checkpoint restored execution
echo "Loading new script..."
/sbin/m5 readfile > /tmp/runscript12.sh
chmod 755 /tmp/runscript12.sh
# Execute the new runscript
if [ -s /tmp/runscript12.sh ]
then
#/system/bin/sh /data/runscript12.sh
echo "executing newly loaded script ..."
/bin/bash /tmp/runscript12.sh
else
echo "Script not specified. Dropping into shell..."
fi
fi
# No further script to run: terminate the simulation.
echo "Fell through script. Exiting..."
/sbin/m5 exit
|
# Usage: <script> <gpu-id> <python-args...>
# Pins CUDA_VISIBLE_DEVICES to the given GPU and runs the remaining arguments
# under the py35 conda python, detached via nohup.
gpu=$1
shift
# Fix: "$@" (quoted) preserves arguments containing spaces; bare $@ re-splits.
CUDA_VISIBLE_DEVICES="$gpu" nohup /data/anaconda/envs/py35/bin/python "$@" &
// gRPC client for the NewsService defined in news.proto.
const grpc = require('@grpc/grpc-js');
const protoLoader = require('@grpc/proto-loader');

const PROTO_PATH = './news.proto';

// proto-loader options: preserve field casing; represent 64-bit ints and
// enums as strings; populate defaults and oneof virtual fields.
const loaderOptions = {
  keepCase: true,
  longs: String,
  enums: String,
  defaults: true,
  oneofs: true,
};

const packageDefinition = protoLoader.loadSync(PROTO_PATH, loaderOptions);
const { NewsService } = grpc.loadPackageDefinition(packageDefinition);

// Plaintext connection to the local gRPC server.
const client = new NewsService(
  'localhost:50051',
  grpc.credentials.createInsecure()
);

module.exports = client;
|
#!/bin/bash
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -xe

TAG_NAME=$1
BRANCH=$2
REPO=kubeflow/pipelines

if [[ -z "$BRANCH" || -z "$TAG_NAME" ]]; then
  echo "Usage: release.sh <release-tag> <release-branch>" >&2
  exit 1
fi

# Check out the repo's release branch in a throwaway clone.
clone_dir=$(mktemp -d)
git clone "git@github.com:${REPO}.git" "$clone_dir"
cd "$clone_dir"
git checkout "$BRANCH"
echo "$TAG_NAME" > ./VERSION

# Run the release script in the cloned repo (tag quoted so a malformed tag
# cannot word-split into extra arguments).
"hack/release-imp.sh" "$TAG_NAME"

# Commit the version/component changes and tag the release.
git add --all
git commit --message "Updated version to $TAG_NAME"
git tag -a "$TAG_NAME" -m "Kubeflow Pipelines $TAG_NAME release"

# Push only after explicit confirmation; -r stops backslash mangling in the
# reply, which is read from the default $REPLY variable.
read -r -p "Do you want to push the version change and tag $TAG_NAME tag to upstream? [y|n]"
if [ "$REPLY" != "y" ]; then
  exit
fi
git push --set-upstream origin "$BRANCH"
git push origin "$TAG_NAME"
|
#!/bin/bash
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Only enforce signed commits when pushing to the canonical GitHub repo.
# Fix: dots in the URL regex are now escaped — previously "github.com" etc.
# matched any character in that position.
if ! [[ "$2" =~ ^(git@)?(www\.)?github\.com(:|/)Bitcreds/BCRS(\.git)?$ ]]; then
  exit 0
fi
# Each stdin line is "<local ref> <local sha> <remote ref> <remote sha>".
while read -r LINE; do
  # The dummy leading word "A" keeps the positions stable even for an empty
  # line; $LINE is intentionally unquoted so it word-splits into fields:
  # $2=<local ref> $3=<local sha> $4=<remote ref> $5=<remote sha>.
  set -- A $LINE
  if [ "$4" != "refs/heads/master" ]; then
    continue
  fi
  if ! ./contrib/verify-commits/verify-commits.sh "$3" > /dev/null 2>&1; then
    echo "ERROR: A commit is not signed, can't push"
    # Re-run verbosely so the user sees which commit failed verification.
    ./contrib/verify-commits/verify-commits.sh
    exit 1
  fi
done < /dev/stdin
|
<filename>core/src/test/java/org/mammon/math/util/PrimeFactorsTest.java<gh_stars>1-10
package org.mammon.math.util;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import static org.junit.Assert.assertEquals;
/**
 * Parameterized test verifying that {@code PrimeFactors.of(n)} returns the
 * <em>distinct</em> prime factors of n in ascending order — note repeated
 * factors appear once (4 -> [2], 8 -> [2], 9 -> [3]).
 */
@RunWith(Parameterized.class)
public class PrimeFactorsTest {
    // Number under test.
    private final BigInteger n;
    // Expected distinct prime factors of n, in ascending order.
    private final List<BigInteger> expectedFactors = new ArrayList<BigInteger>();

    public PrimeFactorsTest(Integer n, Integer[] expectedFactors) {
        this.n = BigInteger.valueOf(n);
        for (Integer factor : expectedFactors) {
            this.expectedFactors.add(BigInteger.valueOf(factor));
        }
    }

    @Test
    public void shouldCorrectlyDeterminePrimeFactors() {
        List<BigInteger> factors = PrimeFactors.of(n);
        assertEquals(expectedFactors, factors);
    }

    /** Test data: each row is { n, expected distinct prime factors }. */
    @Parameters
    public static Collection<Object[]> data() {
        List<Object[]> data = new ArrayList<Object[]>();
        data.add(new Object[] { 2, new Integer[] { 2 } });
        data.add(new Object[] { 3, new Integer[] { 3 } });
        data.add(new Object[] { 4, new Integer[] { 2 } });
        data.add(new Object[] { 5, new Integer[] { 5 } });
        data.add(new Object[] { 6, new Integer[] { 2, 3 } });
        data.add(new Object[] { 7, new Integer[] { 7 } });
        data.add(new Object[] { 8, new Integer[] { 2 } });
        data.add(new Object[] { 9, new Integer[] { 3 } });
        data.add(new Object[] { 10, new Integer[] { 2, 5 } });
        return data;
    }
}
|
import React from "react";
import { useFetch } from "./useFetch";
import { CountryTable } from "./CountryTable";
// Fetches the country list and renders it as a table, with plain-text
// loading and error states.
const App = () => {
  const result = useFetch("https://example.com/countries.json");

  if (result.loading) {
    return <p>Loading...</p>;
  }
  if (result.error) {
    return <p>Error!</p>;
  }

  return (
    <div>
      <CountryTable countries={result.data} />
    </div>
  );
};

export default App;
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.