text stringlengths 1 1.05M |
|---|
#!/bin/sh
# Create and enable a swap file, then persist it in /etc/fstab.
# Usage: $0 <size> [path]
#   size - swap size understood by fallocate/dd (e.g. 1G, 2G, 1024M)
#   path - swap file location (default: /var/lib/swap)

# Must run as root: we write /etc/fstab and call mkswap/swapon/sysctl.
if [ "$(id -u)" -ne 0 ]; then
    echo "This script must be run as root"
    exit 1
fi

# Require at least the size argument.
if [ "$#" -lt 1 ]; then
    echo "Usage: $0 {size} [path]"
    echo "(Default path: /var/lib/swap)"
    echo "---------------------------------------"
    echo "Available options:"
    echo "size - Size of swap ( Example - 1G,2G or 1024M)"
    echo "path - Path to create a swapfile"
    exit 1
fi

SWAP_SIZE=$1

# Default swap file, overridable by the second argument.
SWAP_FILE=/var/lib/swap
if [ -n "$2" ]; then
    SWAP_FILE="$2"
fi

# Only create swap when /etc/fstab has no swap entry yet.
if ! grep -q "swap" /etc/fstab; then
    # Allocate the file. The original ran
    #   dd bs="$SWAP_SIZE" count="$SWAP_SIZE"
    # which multiplies the size by itself (1G x 1G); count must be 1 when
    # bs is the full size. Prefer fallocate, fall back to dd.
    fallocate -l "$SWAP_SIZE" "$SWAP_FILE" 2>/dev/null || \
        dd if=/dev/zero of="$SWAP_FILE" bs="$SWAP_SIZE" count=1
    chmod 600 "$SWAP_FILE"
    mkswap "$SWAP_FILE"
    swapon "$SWAP_FILE"
    # tee options must precede the file argument ("tee FILE -a" is wrong).
    echo "$SWAP_FILE none swap sw 0 0" | tee -a /etc/fstab
    sysctl vm.swappiness=10
    sysctl vm.vfs_cache_pressure=50
    echo "vm.swappiness=10" | tee -a /etc/sysctl.conf
    echo "vm.vfs_cache_pressure=50" | tee -a /etc/sysctl.conf
else
    echo 'swapfile found. No changes made.'
fi

echo '----------------------'
echo 'Checking list of swap'
echo '----------------------'
swapon -s
|
import Users from './components/Users'
import User from './components/User'

// Route table: user list at the root, user detail under /users/:id.
export default [
  {
    path: '/',
    component: Users
  }, {
    // ':id' is a dynamic segment made available to the User component.
    path: '/users/:id',
    component: User
  }
]
|
import requests
def send_request(url, timeout=None):
    """Fetch ``url`` with an HTTP GET and print the outcome.

    Prints the response body on HTTP 200, otherwise prints the status code.
    Network-level errors (DNS failure, refused connection, ...) propagate
    as ``requests`` exceptions, as in the original.

    :param url: URL to request.
    :param timeout: optional seconds to wait for the server. ``None`` (the
        default, preserving the original behavior) waits indefinitely —
        NOTE(review): consider a finite timeout in production, requests
        never times out by default.
    """
    response = requests.get(url, timeout=timeout)
    if response.status_code == 200:
        print("Success! The website responded with: ")
        print(response.text)
    else:
        print("Error! The website responded with: ")
        print("Status code: ", response.status_code)
<reponame>hispindia/BIHAR-2.7
package org.hisp.dhis.mapping;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import static junit.framework.Assert.assertEquals;
import org.hisp.dhis.DhisSpringTest;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementGroup;
import org.hisp.dhis.indicator.Indicator;
import org.hisp.dhis.indicator.IndicatorGroup;
import org.hisp.dhis.indicator.IndicatorService;
import org.hisp.dhis.indicator.IndicatorType;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitLevel;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.MonthlyPeriodType;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.user.User;
import org.junit.Test;
/**
* @author <NAME>
* @version $Id$
*/
public class MappingStoreTest
    extends DhisSpringTest
{
    private MappingStore mappingStore;

    // Fixture objects shared by the tests below.
    private OrganisationUnit organisationUnit;

    private OrganisationUnitLevel organisationUnitLevel;

    private IndicatorGroup indicatorGroup;

    private IndicatorType indicatorType;

    private Indicator indicator;

    private PeriodType periodType;

    private Period period;

    private MapLegendSet mapLegendSet;

    // -------------------------------------------------------------------------
    // Fixture
    // -------------------------------------------------------------------------

    /**
     * Builds and persists the object graph a MapView depends on: an
     * organisation unit and level, an indicator (with group and type), a
     * monthly period, and a map legend set.
     * NOTE(review): organisationUnitService, indicatorService and
     * periodService appear to be fields inherited from DhisSpringTest —
     * they are assigned here without local declarations.
     */
    @Override
    public void setUpTest()
    {
        mappingStore = (MappingStore) getBean( MappingStore.ID );
        organisationUnitService = (OrganisationUnitService) getBean( OrganisationUnitService.ID );
        indicatorService = (IndicatorService) getBean( IndicatorService.ID );
        periodService = (PeriodService) getBean( PeriodService.ID );
        organisationUnit = createOrganisationUnit( 'A' );
        organisationUnitLevel = new OrganisationUnitLevel( 1, "Level" );
        organisationUnitService.addOrganisationUnit( organisationUnit );
        organisationUnitService.addOrganisationUnitLevel( organisationUnitLevel );
        indicatorGroup = createIndicatorGroup( 'A' );
        indicatorService.addIndicatorGroup( indicatorGroup );
        indicatorType = createIndicatorType( 'A' );
        indicatorService.addIndicatorType( indicatorType );
        indicator = createIndicator( 'A', indicatorType );
        indicatorService.addIndicator( indicator );
        periodType = periodService.getPeriodTypeByName( MonthlyPeriodType.NAME );
        period = createPeriod( periodType, getDate( 2000, 1, 1 ), getDate( 2000, 2, 1 ) );
        periodService.addPeriod( period );
        mapLegendSet = createMapLegendSet( 'A', indicator );
        mappingStore.addMapLegendSet( mapLegendSet );
    }

    // -------------------------------------------------------------------------
    // MapView tests
    // -------------------------------------------------------------------------

    /**
     * Persists a MapView and verifies it can be fetched by id with its
     * indicator group, indicator, period type and period intact.
     */
    @Test
    public void testAddGetMapView()
    {
        MapView mapView = new MapView( "MapViewA", new User(), MappingService.MAP_VALUE_TYPE_INDICATOR, indicatorGroup,
            indicator, new DataElementGroup(), new DataElement(), MappingService.MAP_DATE_TYPE_FIXED, periodType,
            period, organisationUnit, organisationUnitLevel, MappingService.MAPLEGENDSET_TYPE_AUTOMATIC, 1, 1, "", "A",
            "B", mapLegendSet, 5, 20, "1", "1", 1 );
        int idA = mappingStore.addMapView( mapView );
        assertEquals( mapView, mappingStore.getMapView( idA ) );
        assertEquals( indicatorGroup, mappingStore.getMapView( idA ).getIndicatorGroup() );
        assertEquals( indicator, mappingStore.getMapView( idA ).getIndicator() );
        assertEquals( periodType, mappingStore.getMapView( idA ).getPeriodType() );
        assertEquals( period, mappingStore.getMapView( idA ).getPeriod() );
    }
}
lynx -dump -source https://microprediction.github.io/optimizer-elo-ratings/ > source.html |
import asyncio
import logging
import sys
from abc import abstractmethod
from typing import Any, Callable, Generator, List, Optional, Tuple, Union
from aiohttp import ClientResponse, ClientSession, hdrs
from aiohttp.typedefs import StrOrURL
from yarl import URL as YARL_URL
from .retry_options import ExponentialRetry, RetryOptionsBase
if sys.version_info >= (3, 8):
from typing import Protocol
else:
from typing_extensions import Protocol
class _Logger(Protocol):
    """
    _Logger defines which methods logger object should have
    (structural typing: anything exposing debug() and warning(), such as
    logging.Logger, satisfies this protocol).
    """

    @abstractmethod
    def debug(self, msg: str, *args: Any, **kwargs: Any) -> None: pass

    @abstractmethod
    def warning(self, msg: str, *args: Any, **kwargs: Any) -> None: pass
# url itself or list of urls for changing between retries
# A single URL, either a plain string or a yarl.URL.
_RAW_URL_TYPE = Union[StrOrURL, YARL_URL]
# Either one URL (reused for every attempt) or a per-attempt list/tuple.
_URL_TYPE = Union[_RAW_URL_TYPE, List[_RAW_URL_TYPE], Tuple[_RAW_URL_TYPE, ...]]
class _RequestContext:
    """Awaitable / async-context-manager wrapper around one logical request
    that is retried according to ``retry_options``.
    """

    def __init__(
        self,
        request: Callable[..., Any],  # Request operation, like POST or GET
        method: str,
        urls: Tuple[StrOrURL, ...],
        logger: _Logger,
        retry_options: RetryOptionsBase,
        raise_for_status: bool = False,
        **kwargs: Any
    ) -> None:
        self._request = request
        self._method = method
        # One URL per attempt; _url_to_urls guarantees len(urls) >= attempts.
        self._urls = urls
        self._logger = logger
        self._retry_options = retry_options
        self._kwargs = kwargs
        # Popped out so per-attempt info can be merged into it in _do_request.
        self._trace_request_ctx = kwargs.pop('trace_request_ctx', {})
        self._raise_for_status = raise_for_status
        self._response: Optional[ClientResponse] = None

    def _is_status_code_ok(self, code: int) -> bool:
        # "ok" = not explicitly listed as retryable and not a 5xx.
        return code not in self._retry_options.statuses and code < 500

    async def _do_request(self) -> ClientResponse:
        current_attempt = 0
        while True:
            self._logger.debug("Attempt {} out of {}".format(current_attempt, self._retry_options.attempts))
            # Sleep before every retry, but not before the first attempt.
            if current_attempt > 0:
                retry_wait = self._retry_options.get_timeout(current_attempt)
                await asyncio.sleep(retry_wait)
            current_attempt += 1
            try:
                response: ClientResponse = await self._request(
                    self._method,
                    self._urls[current_attempt - 1],
                    **self._kwargs,
                    trace_request_ctx={
                        'current_attempt': current_attempt,
                        **self._trace_request_ctx,
                    },
                )
            except Exception as e:
                # Retry only exception types listed in retry_options while
                # attempts remain; everything else propagates immediately.
                if current_attempt < self._retry_options.attempts:
                    is_exc_valid = any([isinstance(e, exc) for exc in self._retry_options.exceptions])
                    if is_exc_valid:
                        continue
                raise e
            # Stop on an acceptable status, or once attempts are exhausted.
            if self._is_status_code_ok(response.status) or current_attempt == self._retry_options.attempts:
                if self._raise_for_status:
                    response.raise_for_status()
                self._response = response
                return response

    def __await__(self) -> Generator[Any, None, ClientResponse]:
        return self.__aenter__().__await__()

    async def __aenter__(self) -> ClientResponse:
        return await self._do_request()

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        # Close the response when leaving the context if still open.
        if self._response is not None:
            if not self._response.closed:
                self._response.close()
def _url_to_urls(url: _URL_TYPE, attempts: int) -> Tuple[StrOrURL, ...]:
    """Normalize ``url`` into a tuple of per-attempt URLs.

    A single str/yarl URL is repeated ``attempts`` times. A list/tuple is
    padded with its last element when shorter than ``attempts`` and
    returned unchanged otherwise.

    :raises ValueError: for unsupported types or an empty list/tuple.
    """
    if isinstance(url, (str, YARL_URL)):
        return (url,) * attempts
    if isinstance(url, list):
        urls = tuple(url)
    elif isinstance(url, tuple):
        urls = url
    else:
        raise ValueError("you can pass url only by str or list/tuple")
    if len(urls) == 0:
        raise ValueError("you can pass url by str or list/tuple with attempts count size")
    if len(urls) < attempts:
        # Pad with the final URL. The original computed the pad length from
        # len(url) instead of len(urls); they only coincide because urls is
        # built directly from url — len(urls) is the intended quantity.
        return urls + (urls[-1],) * (attempts - len(urls))
    return urls
class RetryClient:
    """Wrapper around :class:`aiohttp.ClientSession` that retries requests
    according to a :class:`RetryOptionsBase` policy.

    Per-call ``retry_options`` / ``raise_for_status`` override the
    client-wide defaults. Use ``async with RetryClient() as client`` or
    call :meth:`close` explicitly when done.
    """

    def __init__(
        self,
        logger: Optional[_Logger] = None,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: bool = False,
        *args: Any, **kwargs: Any
    ) -> None:
        # Extra positional/keyword arguments go straight to ClientSession.
        self._client = ClientSession(*args, **kwargs)
        self._closed = False

        if logger is None:
            logger = logging.getLogger("aiohttp_retry")
        self._logger: _Logger = logger

        # The original default was `retry_options: RetryOptionsBase =
        # ExponentialRetry()`, evaluated once at class-definition time and
        # therefore SHARED by every RetryClient instance. Defaulting to
        # None and instantiating here gives each client its own options
        # object while remaining call-compatible.
        if retry_options is None:
            retry_options = ExponentialRetry()
        self._retry_options: RetryOptionsBase = retry_options

        self._raise_for_status = raise_for_status

    def __del__(self) -> None:
        # Best-effort leak warning only; closing needs a running event loop.
        if not self._closed:
            self._logger.warning("Aiohttp retry client was not closed")

    def _request(
        self,
        method: str,
        url: _URL_TYPE,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None,
        **kwargs: Any
    ) -> _RequestContext:
        """Build a retrying request context, filling in client defaults."""
        if retry_options is None:
            retry_options = self._retry_options
        if raise_for_status is None:
            raise_for_status = self._raise_for_status
        return _RequestContext(
            request=self._client.request,
            method=method,
            urls=_url_to_urls(url, retry_options.attempts),
            logger=self._logger,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    def request(
        self,
        method: str,
        url: StrOrURL,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None,
        **kwargs: Any
    ) -> _RequestContext:
        """Issue a request with an explicit HTTP method."""
        return self._request(
            method=method,
            url=url,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    def get(
        self,
        url: _URL_TYPE,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None,
        **kwargs: Any
    ) -> _RequestContext:
        return self._request(
            method=hdrs.METH_GET,
            url=url,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    def options(
        self,
        url: _URL_TYPE,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None,
        **kwargs: Any
    ) -> _RequestContext:
        return self._request(
            method=hdrs.METH_OPTIONS,
            url=url,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    def head(
        self,
        url: _URL_TYPE,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None, **kwargs: Any
    ) -> _RequestContext:
        return self._request(
            method=hdrs.METH_HEAD,
            url=url,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    def post(
        self,
        url: _URL_TYPE,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None,
        **kwargs: Any
    ) -> _RequestContext:
        return self._request(
            method=hdrs.METH_POST,
            url=url,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    def put(
        self,
        url: _URL_TYPE,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None,
        **kwargs: Any
    ) -> _RequestContext:
        return self._request(
            method=hdrs.METH_PUT,
            url=url,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    def patch(
        self,
        url: _URL_TYPE,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None,
        **kwargs: Any
    ) -> _RequestContext:
        return self._request(
            method=hdrs.METH_PATCH,
            url=url,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    def delete(
        self,
        url: _URL_TYPE,
        retry_options: Optional[RetryOptionsBase] = None,
        raise_for_status: Optional[bool] = None,
        **kwargs: Any
    ) -> _RequestContext:
        return self._request(
            method=hdrs.METH_DELETE,
            url=url,
            retry_options=retry_options,
            raise_for_status=raise_for_status,
            **kwargs
        )

    async def close(self) -> None:
        """Close the underlying session; await this before disposal."""
        await self._client.close()
        self._closed = True

    async def __aenter__(self) -> 'RetryClient':
        return self

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        await self.close()
|
<reponame>hhhyang/javatest
package com.javatest.webflux.router;
import com.javatest.webflux.handler.TestHandler;
import org.springdoc.core.annotations.RouterOperation;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.MediaType;
import org.springframework.web.reactive.function.server.RouterFunction;
import org.springframework.web.reactive.function.server.RouterFunctions;
import org.springframework.web.reactive.function.server.ServerResponse;
import static org.springframework.web.reactive.function.server.RequestPredicates.accept;
import static org.springframework.web.reactive.function.server.RequestPredicates.GET;
@Configuration
public class RouterConfig {

    /**
     * Functional routes for {@link TestHandler}:
     *   GET /api/user  (accepts JSON) -> TestHandler#hello
     *   GET /api/user2                -> TestHandler#hello2
     * The @RouterOperation annotation exposes the first route's handler
     * method to springdoc's generated OpenAPI documentation.
     */
    @Bean
    @RouterOperation(beanClass = TestHandler.class, beanMethod = "hello")
    public RouterFunction<ServerResponse> monoRouterFunction(TestHandler testHandler){
        return RouterFunctions.route(GET("/api/user").and(accept(MediaType.APPLICATION_JSON)),testHandler::hello)
            .andRoute(GET("/api/user2"),testHandler::hello2);
    }
}
|
#include "data-structure/sparse-table.hpp"
#pragma region fast_lca
class fast_lca {
    // Sparse table over the Euler tour holding (depth, node) pairs; a
    // range-minimum over a tour segment yields the shallowest node = LCA.
    sparse_table<pair<int, int>> st;
    // pos[u] = index of u's first occurrence in the Euler tour.
    vector<int> pos;

public:
    fast_lca() = default;

    // Build from an unweighted adjacency list rooted at s.
    fast_lca(const vector<vector<int>> &tree, int s = 0) { init(tree, s); }

    // Build from a weighted adjacency list; edge weights are discarded.
    template<typename T>
    fast_lca(const vector<vector<pair<int, T>>> &tree, int s = 0) {
        int n = tree.size();
        vector<vector<int>> _tree(n);
        for (int u = 0; u < n; u++)
            for (auto [v, w] : tree[u])
                _tree[u].push_back(v);
        init(_tree, s);
    }

    // Run an Euler tour from s, recording (depth, node) at every visit,
    // then build the sparse table for range-minimum queries.
    void init(const vector<vector<int>> &tree, int s = 0) {
        int n = tree.size();
        pos.resize(n);
        vector<pair<int, int>> A;
        A.reserve(2 * n);  // the tour of an n-node tree has 2n-1 entries
        const auto dfs = [&](const auto &self, int u, int d = 0, int p = -1) -> void {
            pos[u] = A.size();
            A.emplace_back(d, u);
            for (int v : tree[u]) {
                if (v == p)
                    continue;
                self(self, v, d + 1, u);
                A.emplace_back(d, u);  // record u again after each child
            }
        };
        dfs(dfs, s);
        st.init(A.begin(), A.end());
    }

    // Lowest common ancestor of u and v.
    // NOTE(review): assumes sparse_table::query takes an inclusive range
    // of tour indices — confirm against data-structure/sparse-table.hpp.
    int query(int u, int v) {
        if (pos[u] > pos[v])
            swap(u, v);
        auto [d, a] = st.query(pos[u], pos[v]);
        return a;
    }

    int operator()(int u, int v) { return query(u, v); }
};
#pragma endregion fast_lca |
#!/bin/sh
# Assume the ECS Fargate appliance role, export its temporary credentials,
# then start the application.

# Fail fast when the account id is missing — the ARN would be malformed
# and the assume-role call would fail with a confusing error.
if [ -z "$ACCOUNT_ID" ]; then
    echo "ACCOUNT_ID is not set" >&2
    exit 1
fi

export ROLE_ARN="arn:aws:iam::${ACCOUNT_ID}:role/otter-appliance-ecs-fargate"

# Quote all expansions so the JSON payload reaches jq intact (the
# original left $ROLE_ARN and the JSON unquoted, subject to word
# splitting and globbing).
export AWS_STS_ASSUME_ROLE_OUTPUT="$(aws sts assume-role --role-arn "$ROLE_ARN" --role-session-name otter)"
export AWS_ACCESS_KEY_ID="$(echo "$AWS_STS_ASSUME_ROLE_OUTPUT" | jq -r '.Credentials.AccessKeyId')"
export AWS_SECRET_ACCESS_KEY="$(echo "$AWS_STS_ASSUME_ROLE_OUTPUT" | jq -r '.Credentials.SecretAccessKey')"
export AWS_SESSION_TOKEN="$(echo "$AWS_STS_ASSUME_ROLE_OUTPUT" | jq -r '.Credentials.SessionToken')"

# Application Entrypoint
./app.py
|
def sum_of_primes(n):
    """Return the sum of the first ``n`` prime numbers.

    Relies on an ``is_prime`` predicate defined elsewhere in this module.

    :param n: how many primes to sum; ``n <= 0`` yields 0.
    :return: sum of the first ``n`` primes.
    """
    # Renamed the accumulator from ``sum`` to avoid shadowing the builtin.
    total = 0
    count = 0
    candidate = 2
    while count < n:
        if is_prime(candidate):
            total += candidate
            count += 1
        candidate += 1
    return total
function makeData() {
  "use strict";
  // Build two 20-point random datasets, tagging every point with the
  // color of its series.
  var datasets = [makeRandomData(20), makeRandomData(20)];
  var seriesColors = ["blue", "red"];
  datasets.forEach(function(set, i) {
    set.forEach(function(point) { point.color = seriesColors[i]; });
  });
  return datasets;
}
function run(div, data, Plottable) {
  "use strict";
  // Render a two-series scatter plot with a hover interaction that rings
  // the hovered point and shows its coordinates in the title.
  var svg = div.append("svg").attr("height", 500);
  var xScale = new Plottable.Scale.Linear();
  var yScale = new Plottable.Scale.Linear();
  var xAxis = new Plottable.Axis.Numeric(xScale, "bottom");
  var yAxis = new Plottable.Axis.Numeric(yScale, "left");
  var title = new Plottable.Component.TitleLabel("Hover over points");
  // Both datasets share one plot; each point's fill comes from d.color.
  var plot = new Plottable.Plot.Scatter(xScale, yScale)
    .addDataset(data[0])
    .addDataset(data[1])
    .project("r", 10)
    .project("fill", "color")
    .project("x", "x", xScale)
    .project("y", "y", yScale);
  // Table layout: title on top, y-axis left of the plot, x-axis below.
  var chart = new Plottable.Component.Table([
    [null, title],
    [yAxis, plot],
    [null, xAxis]]);
  chart.renderTo(svg);
  // Hidden highlight ring, repositioned over the hovered point.
  // NOTE(review): _foregroundContainer is a private Plottable member.
  var hoverCircle = plot._foregroundContainer.append("circle")
    .attr({
      "stroke": "black",
      "fill": "none",
      "r": 15
    })
    .style("visibility", "hidden");
  var hover = new Plottable.Interaction.Hover();
  hover.onHoverOver(function(hoverData) {
    // Report the first hovered point's series color and coordinates.
    var color = hoverData.data[0].color.toUpperCase();
    var xString = hoverData.data[0].x.toFixed(2);
    var yString = hoverData.data[0].y.toFixed(2);
    title.text(color + ": [ " + xString + ", " + yString + " ]");
    hoverCircle.attr({
      "cx": hoverData.pixelPositions[0].x,
      "cy": hoverData.pixelPositions[0].y
    }).style("visibility", "visible");
  });
  hover.onHoverOut(function(hoverData) {
    // Restore the default title and hide the ring.
    title.text("Hover over points");
    hoverCircle.style("visibility", "hidden");
  });
  plot.registerInteraction(hover);
}
|
#!/bin/bash
# Run_Fibonacci.sh
# Runs the fibonacci demo on the Imperas RISC-V instruction set simulator.

# Check Environment. Quoting the expansion makes the -z test reliable;
# the original `[ -z ${IMPERAS_HOME} ]` only worked by accident when the
# variable was unset.
if [ -z "${IMPERAS_HOME}" ]; then
    echo "IMPERAS_HOME not set. Please check environment setup."
    exit 1   # the original exited with status 0, hiding the failure
fi

# Extra command-line arguments ("$@") are forwarded to the ISS.
"${IMPERAS_ISS}" --verbose --output imperas.log \
    --program ../../../Applications/fibonacci/fibonacci.RISCV32-O0-g.elf \
    --processorvendor riscv.ovpworld.org --processorname riscv --variant RV32IMAC \
    --numprocessors 1 \
    --override iss/cpu0/verbose=1 \
    "$@" \
    -argv 39
|
<reponame>cfh0081/golib<filename>common/common_test.go
package common
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestRunFuncName verifies that RunFuncName reports the fully qualified
// name of its caller — this test function itself.
func TestRunFuncName(t *testing.T) {
	name := RunFuncName()
	expected := `github.com/cfh0081/golib/common.TestRunFuncName`
	// testify's assert.Equal signature is (t, expected, actual); the
	// original passed them swapped, which still passes but produces
	// misleading "expected/actual" labels in failure output.
	assert.Equal(t, expected, name)
}
|
#!/usr/bin/env bash
# Run the optional post-release hook when it is present.
hook=./scripts/release-after-hook.sh

if [[ ! -f "$hook" ]]; then
  echo "File 'release-after-hook.sh' not found."
else
  # Execute the hook script.
  "$hook"
fi
#! /bin/sh
#PBS -l nodes=1:ppn=1
#PBS -l walltime=1:00:00
#PBS -j oe

# Under PBS, load the login environment and return to the submission
# directory before running.
if [ -n "$PBS_JOBNAME" ]
then
    source "${PBS_O_HOME}/.bash_profile"
    cd "$PBS_O_WORKDIR"
    module load gcc/5.3.0
fi

# Output prefix shared by --prefix and the log file name. The original
# defined this variable but then repeated the literal path twice.
prefix=../../gekko-output/no-data-run-3

ecoevolity --seed 40451262 --prefix "$prefix" --ignore-data --relax-missing-sites --relax-constant-sites --relax-triallelic-sites ../../configs/gekko-conc044-rate002-nopoly-varonly.yml 1>"${prefix}-gekko-conc044-rate002-nopoly-varonly.out" 2>&1
|
package io.cattle.platform.servicediscovery.deployment.impl;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.servicediscovery.api.constants.ServiceDiscoveryConstants;
import io.cattle.platform.servicediscovery.api.util.ServiceDiscoveryUtil;
import io.cattle.platform.servicediscovery.deployment.ServiceDeploymentPlanner;
import io.cattle.platform.servicediscovery.deployment.ServiceDeploymentPlannerFactory;
import io.cattle.platform.servicediscovery.deployment.impl.DeploymentManagerImpl.DeploymentServiceContext;
import java.util.List;
import java.util.Map;
public class ServiceDeploymentPlannerFactoryImpl implements ServiceDeploymentPlannerFactory {

    /**
     * Chooses a deployment planner based on the first service in the list:
     * external/DNS services get an ExternalServiceDeploymentPlanner,
     * services labeled as global get a GlobalServiceDeploymentPlanner,
     * everything else gets the DefaultServiceDeploymentPlanner.
     * Returns null when {@code services} is empty.
     */
    @Override
    public ServiceDeploymentPlanner createServiceDeploymentPlanner(List<Service> services, List<DeploymentUnit> units,
            DeploymentServiceContext context) {
        if (services.isEmpty()) {
            return null;
        }
        // Only the first service's kind and labels drive the choice.
        // NOTE(review): this assumes all services in the list are of the
        // same kind — confirm with callers.
        Service service = services.get(0);
        Map<String, String> serviceLabels = ServiceDiscoveryUtil.getServiceLabels(service, context.allocatorService);
        String globalService = serviceLabels.get(ServiceDiscoveryConstants.LABEL_SERVICE_GLOBAL);
        if (service.getKind().equalsIgnoreCase(ServiceDiscoveryConstants.KIND.EXTERNALSERVICE.name())
                || service.getKind().equalsIgnoreCase(ServiceDiscoveryConstants.KIND.DNSSERVICE.name())) {
            return new ExternalServiceDeploymentPlanner(services, units, context);
        } else if (globalService != null) {
            return new GlobalServiceDeploymentPlanner(services, units, context);
        } else {
            return new DefaultServiceDeploymentPlanner(services, units, context);
        }
    }
}
|
<gh_stars>10-100
# ************************************************************
# Sequel Pro SQL dump
# Version 4541
#
# http://www.sequelpro.com/
# https://github.com/sequelpro/sequelpro
#
# Host: 172.16.58.3 (MySQL 5.7.22-0ubuntu0.16.04.1)
# Database: ethos-panel
# Generation Time: 2018-06-14 06:12:07 +0000
# ************************************************************
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
# Dump of table blockinfo
# ------------------------------------------------------------
# Daily network-level stats: one row per date with block reward and difficulty.
DROP TABLE IF EXISTS `blockinfo`;

CREATE TABLE `blockinfo` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `date` date DEFAULT NULL,
  `BlockReward` int(11) DEFAULT NULL,
  `Difficulty` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
# Dump of table hash
# ------------------------------------------------------------
# Per-rig hashrate samples; at most one row per (userid, date, rig),
# enforced by the `main` unique key.
DROP TABLE IF EXISTS `hash`;

CREATE TABLE `hash` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `userid` int(11) NOT NULL,
  `date` datetime NOT NULL,
  `rig` varchar(20) NOT NULL,
  `hash` decimal(10,2) NOT NULL,
  `miner_hashes` varchar(100) DEFAULT NULL,
  `temp` varchar(100) DEFAULT NULL,
  `fanrpm` varchar(100) DEFAULT NULL,
  `rack_loc` varchar(50) DEFAULT NULL,
  `ip` varchar(50) DEFAULT NULL,
  `uptime` int(11) DEFAULT NULL,
  `gpus` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `main` (`userid`,`date`,`rig`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
# Dump of table news
# ------------------------------------------------------------
# Site announcements shown in the panel.
# NOTE(review): uses latin1 while every other table is utf8 — confirm.
DROP TABLE IF EXISTS `news`;

CREATE TABLE `news` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `date` datetime DEFAULT CURRENT_TIMESTAMP,
  `content` text,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
# Dump of table remoteconf
# ------------------------------------------------------------
# One remote-config blob per user (uniqueness enforced on userid).
DROP TABLE IF EXISTS `remoteconf`;

CREATE TABLE `remoteconf` (
  `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `userid` int(11) NOT NULL,
  `conf` text,
  PRIMARY KEY (`id`),
  UNIQUE KEY `userid` (`userid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
# Dump of table users
# ------------------------------------------------------------
# Panel accounts, keyed by unique email.
DROP TABLE IF EXISTS `users`;

CREATE TABLE `users` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `email` varchar(55) NOT NULL,
  `password` varchar(100) NOT NULL DEFAULT '',
  `dataorigin` tinyint(4) NOT NULL DEFAULT '0',
  `datahash` varchar(50) DEFAULT NULL,
  `url` varchar(55) DEFAULT '',
  `resethash` varchar(255) DEFAULT NULL,
  `emailnotifications` tinyint(4) NOT NULL DEFAULT '0',
  PRIMARY KEY (`id`),
  UNIQUE KEY `email` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
<reponame>faizanu94/repeat-element<gh_stars>10-100
'use strict';
module.exports = function repeat(ele, num) {
var res = [ele];
while (res.length < num) {
res = res.concat(res);
}
return res.slice(0, num);
};
|
def reverseStringInPlace(my_string):
    """Return a reversed copy of ``my_string``.

    Note: despite the name, Python strings are immutable, so no in-place
    reversal is possible; a new string is returned (same behavior as the
    original list-based implementation, just via direct slicing).

    :param my_string: string to reverse.
    :return: the characters of ``my_string`` in reverse order.
    """
    # Slicing with step -1 reverses the sequence in one step, replacing
    # the original list(...) / [::-1] / "".join(...) round-trip.
    return my_string[::-1]
def generate_metric_output(instance_id, metric_data):
    """Format a database metric snapshot as a human-readable report.

    :param instance_id: identifier of the database instance.
    :param metric_data: mapping with keys ``dbname``, ``measure_interval``,
        ``n_commit`` and ``n_rollback`` (KeyError if any is missing).
    :return: multi-line report string without a trailing newline.
    """
    report_lines = [
        f"Instance ID: {instance_id}",
        f"Database Name: {metric_data['dbname']}",
        f"Measurement Interval: {metric_data['measure_interval']} seconds",
        f"Number of Commits: {metric_data['n_commit']}",
        f"Number of Rollbacks: {metric_data['n_rollback']}",
    ]
    return "\n".join(report_lines)
# Test the function
# Example snapshot exercising every key the formatter reads.
metric_data = {
    'dbname': 'example_db',
    'measure_interval': 60,
    'n_commit': 100,
    'n_rollback': 20
}
instance_id = 'ABC123'
# Prints a five-line report for instance ABC123.
print(generate_metric_output(instance_id, metric_data))
#!/bin/bash
## SLURM batch job: preprocess one ROM camera clip on a single GPU node.
## (#SBATCH lines below are scheduler directives, not plain comments.)
#SBATCH --account=def-dkulic
#SBATCH --gres=gpu:1 # request GPU generic resource
#SBATCH --cpus-per-task=2 #Maximum of CPU cores per GPU request: 6 on Cedar, 16 on Graham.
#SBATCH --mem=8000M # memory per node
#SBATCH --time=0-01:30 # time (DD-HH:MM)
#SBATCH --output=./job_script_output/Camera1_Sep_13_1500_1600_Prescribed_Behavior_0_%N-%j.out # %N for node name, %j for jobID

## Main processing command
## -v: path to the raw video file
## -o: directory to save processed video
python ./process_video_low_frequent_frame.py -v ../ROM_raw_videos_clips/Sep_13/Camera1_Sep_13_1500_1600_Prescribed_Behavior_0.mp4 -o ../ROM_raw_videos_clips_processed_camera2/Sep_13
|
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.project.mips;
import javafx.application.Platform;
import javafx.scene.Node;
import net.jamsimulator.jams.collection.Bag;
import net.jamsimulator.jams.event.SimpleEventBroadcast;
import net.jamsimulator.jams.gui.JamsApplication;
import net.jamsimulator.jams.gui.editor.FileEditor;
import net.jamsimulator.jams.gui.editor.FileEditorHolder;
import net.jamsimulator.jams.gui.editor.FileEditorTab;
import net.jamsimulator.jams.gui.mips.editor.MIPSFileEditor;
import net.jamsimulator.jams.gui.mips.editor.element.MIPSFileElements;
import net.jamsimulator.jams.gui.project.ProjectTab;
import net.jamsimulator.jams.project.FilesToAssemble;
import net.jamsimulator.jams.project.Project;
import net.jamsimulator.jams.project.mips.event.FileAddToAssembleEvent;
import net.jamsimulator.jams.project.mips.event.FileIndexChangedFromAssembleEvent;
import net.jamsimulator.jams.project.mips.event.FileRemoveFromAssembleEvent;
import net.jamsimulator.jams.utils.FileUtils;
import net.jamsimulator.jams.utils.Validate;
import org.json.JSONArray;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.util.*;
import java.util.stream.Collectors;
public class MIPSFilesToAssemble extends SimpleEventBroadcast implements FilesToAssemble {
// Name of the JSON metadata file that stores the list of files to assemble.
public static final String FILE_NAME = "files_to_assemble.json";

private final MIPSProject project;
// Files to assemble, kept in assembly order.
private final List<File> files;
// Parsed representation of each file in 'files'.
private final Map<File, MIPSFileElements> fileElements;
// Multiset of global labels declared across all files to assemble.
private final Bag<String> globalLabels;
/**
 * Creates an empty set of files to assemble for the given project.
 */
public MIPSFilesToAssemble(MIPSProject project) {
    this.project = project;
    files = new ArrayList<>();
    fileElements = new HashMap<>();
    globalLabels = new Bag<>();
}
/**
 * Returns the parsed elements of the given file, when the file is
 * registered to be assembled.
 */
public Optional<MIPSFileElements> getFileElements(File file) {
    return Optional.ofNullable(fileElements.get(file));
}
@Override
public Project getProject() {
    return project;
}

// This implementation always supports global labels.
@Override
public boolean supportsGlobalLabels() {
    return true;
}

@Override
public Bag<String> getGlobalLabels() {
    return globalLabels;
}

// Read-only view; mutate via addFile/removeFile/moveFileToIndex.
@Override
public List<File> getFiles() {
    return Collections.unmodifiableList(files);
}

@Override
public boolean containsFile(File file) {
    return files.contains(file);
}
/**
 * Registers a file, parsing its contents from disk.
 * Fires FileAddToAssembleEvent.Before/.After; Before may cancel.
 * NOTE(review): an IOException while reading is wrapped in a
 * RuntimeException rather than propagated — confirm this is intended.
 */
@Override
public void addFile(File file, boolean refreshGlobalLabels) {
    Validate.notNull(file, "File cannot be null!");
    if (files.contains(file)) return;
    FileAddToAssembleEvent.Before before = callEvent(new FileAddToAssembleEvent.Before(file));
    if (before.isCancelled()) return;
    MIPSFileElements elements = new MIPSFileElements(project);
    elements.setFilesToAssemble(this);
    try {
        String text = FileUtils.readAll(file);
        elements.refreshAll(text);
        files.add(file);
        fileElements.put(file, elements);
        if (refreshGlobalLabels) {
            refreshGlobalLabels();
        }
        callEvent(new FileAddToAssembleEvent.After(file));
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}

/**
 * Registers a file using already-parsed elements (e.g. from an open editor).
 * NOTE(review): unlike the overload above, the duplicate check here runs
 * AFTER the Before event is fired — confirm whether that is intentional.
 */
public void addFile(File file, MIPSFileElements elements, boolean refreshGlobalLabels) {
    Validate.notNull(file, "File cannot be null!");
    Validate.notNull(elements, "Elements cannot be null!");
    FileAddToAssembleEvent.Before before = callEvent(new FileAddToAssembleEvent.Before(file));
    if (before.isCancelled()) return;
    if (files.contains(file)) return;
    files.add(file);
    fileElements.put(file, elements);
    elements.setFilesToAssemble(this);
    if (refreshGlobalLabels) {
        refreshGlobalLabels();
    }
    callEvent(new FileAddToAssembleEvent.After(file));
}

/**
 * Registers a file, reusing the elements of an open MIPS editor tab when
 * one exists; otherwise falls back to parsing the file from disk.
 */
@Override
public void addFile(File file, FileEditorHolder holder, boolean refreshGlobalLabels) {
    Validate.notNull(file, "File cannot be null!");
    Validate.notNull(holder, "List cannot be null!");
    Optional<FileEditorTab> tab = holder.getFileDisplayTab(file, true);
    if (tab.isEmpty() || !(tab.get().getDisplay() instanceof MIPSFileEditor)) {
        addFile(file, refreshGlobalLabels);
        return;
    }
    addFile(file, ((MIPSFileEditor) tab.get().getDisplay()).getElements(), refreshGlobalLabels);
}
/**
 * Unregisters a file: detaches its elements, refreshes the affected
 * editor display and rebuilds the global label set.
 * Fires FileRemoveFromAssembleEvent.Before/.After; Before may cancel.
 */
@Override
public void removeFile(File file) {
    Validate.notNull(file, "File cannot be null!");
    FileRemoveFromAssembleEvent.Before before = callEvent(new FileRemoveFromAssembleEvent.Before(file));
    if (before.isCancelled()) return;
    if (!files.contains(file)) return;
    files.remove(file);
    var elements = fileElements.remove(file);
    elements.setFilesToAssemble(null);
    refreshDeletedDisplay(file, elements);
    refreshGlobalLabels();
    callEvent(new FileRemoveFromAssembleEvent.After(file));
}

/**
 * Moves a registered file to the given assembly index.
 * The Before event may veto the move or substitute a different index.
 * Returns true when the move happened, false otherwise.
 */
@Override
public boolean moveFileToIndex(File file, int index) {
    if (!files.contains(file) || index < 0 || index >= files.size()) return false;
    int old = files.indexOf(file);
    var before =
        callEvent(new FileIndexChangedFromAssembleEvent.Before(file, old, index));
    if (before.isCancelled()) return false;
    // A listener may have replaced the target index; re-validate it.
    index = before.getNewIndex();
    if (index < 0 || index >= files.size()) return false;
    files.remove(file);
    files.add(index, file);
    callEvent(new FileIndexChangedFromAssembleEvent.After(file, old, index));
    return true;
}
/**
 * Rebuilds the global label set from every registered file's elements and
 * refreshes all open MIPS editors whose labels may have changed.
 */
@Override
public void refreshGlobalLabels() {
// Keep the labels that WERE global so editors referencing them are also
// refreshed when a label stops being global.
Set<String> toUpdate = new HashSet<>(globalLabels);
globalLabels.clear();
for (MIPSFileElements elements : fileElements.values()) {
globalLabels.addAll(elements.getExistingGlobalLabels());
}
toUpdate.addAll(globalLabels);
// Without an open project tab there are no editors to refresh.
ProjectTab tab = JamsApplication.getProjectsTabPane().getProjectTab(project).orElse(null);
if (tab == null) return;
Node node = tab.getProjectTabPane().getWorkingPane().getCenter();
if (!(node instanceof FileEditorHolder holder)) return;
fileElements.forEach((file, elements) -> {
elements.seachForLabelsUpdates(toUpdate);
Optional<FileEditorTab> fTab = holder.getFileDisplayTab(file, true);
if (fTab.isPresent()) {
FileEditor display = fTab.get().getDisplay();
if (display instanceof MIPSFileEditor) {
// Repaint the open editor so it reflects the new label set.
elements.update(((MIPSFileEditor) display));
}
}
});
}
/**
 * Loads the list of files to assemble from FILE_NAME inside the given folder.
 * The JSON array contains paths relative to the project folder; entries that
 * no longer exist on disk are skipped. Does nothing if the file is absent.
 *
 * @throws IOException if the file cannot be read.
 */
public void load(File folder) throws IOException {
File file = new File(folder, FILE_NAME);
if (!file.isFile()) return;
String value = FileUtils.readAll(file);
JSONArray array = new JSONArray(value);
for (Object element : array) {
file = new File(project.getFolder(), element.toString());
if (!file.isFile()) continue;
addFile(file, false);
}
// Refresh once, on the JavaFX application thread, after all files are added.
Platform.runLater(this::refreshGlobalLabels);
}
/**
 * Saves the list of files to assemble into FILE_NAME inside the given folder,
 * as a JSON array of paths relative to the project folder.
 *
 * @param folder the folder to save into. Cannot be null.
 * @throws IOException if the file cannot be written.
 */
public void save(File folder) throws IOException {
Validate.notNull(folder, "Folder cannot be null!");
File file = new File(folder, FILE_NAME);
JSONArray array = new JSONArray();
Path projectPath = project.getFolder().toPath();
files.stream().map(target -> projectPath.relativize(target.toPath())).forEach(array::put);
// try-with-resources: the previous version leaked the writer when write()
// threw, because close() was only reached on the happy path.
try (Writer writer = new FileWriter(file)) {
writer.write(array.toString(1));
}
}
/**
 * Drops every registered file whose backing file no longer exists on disk.
 */
@Override
public void checkFiles() {
// Collect the missing files first: removeFile(...) mutates the underlying
// collection, so we must not remove while streaming over it.
var missing = files.stream().filter(candidate -> !candidate.isFile()).collect(Collectors.toList());
missing.forEach(this::removeFile);
}
/**
 * Refreshes the editor display of a file that was just removed from the
 * assemble set, so its label highlighting matches the current global labels.
 */
private void refreshDeletedDisplay(File file, MIPSFileElements elements) {
ProjectTab tab = JamsApplication.getProjectsTabPane().getProjectTab(project).orElse(null);
if (tab == null) return;
Node node = tab.getProjectTabPane().getWorkingPane().getCenter();
if (!(node instanceof FileEditorHolder holder)) return;
Optional<FileEditorTab> fTab = holder.getFileDisplayTab(file, true);
elements.seachForLabelsUpdates(globalLabels);
if (fTab.isPresent()) {
FileEditor display = fTab.get().getDisplay();
if (display instanceof MIPSFileEditor) {
// Only MIPS editors render label data; other displays need no update.
elements.update(((MIPSFileEditor) display));
}
}
}
}
|
<reponame>masaponto/gifweather
package com.example.masato.weatherforecast.model.weekweather;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/**
 * Gson-mapped root entity of the week-weather API response: a status string,
 * feed metadata, and the list of weather items.
 *
 * Created by masato on 17/03/20.
 */
public class WeekWeatherEntity {
@Expose
@SerializedName("status")
private String status;
@Expose
@SerializedName("feed")
private Feed feed;
@Expose
@SerializedName("items")
private List<Item> items;
public String getStatus() {
return status;
}
public Feed getFeed() {
return feed;
}
public List<Item> getItems() {
return items;
}
}
|
<filename>ALP/lacos/enquanto/L03_ex05.cpp
/*
5- Ler 10 valores inteiros, um de cada vez, e contar quantos deles estão no intervalo [10,20] e
quantos deles estão fora do intervalo, ao final mostrar estas informações.
*/
#include <iostream>
using namespace std;
int main() {
    using namespace std;
    // Read 10 integers, one at a time, counting how many fall inside the
    // closed interval [10, 20] and how many fall outside it.
    int dentro = 0, fora = 0;
    for (int i = 0; i < 10; ++i) {
        cout << "\n digite numero " << i + 1 << "\n";
        int n = 0;
        cin >> n;
        if (n >= 10 && n <= 20) {
            ++dentro;
        } else {
            ++fora;
        }
    }
    // Report both tallies.
    cout << "\n dentro do intervalo " << dentro << "\n";
    cout << "\n fora do intervalo " << fora << "\n";
}
|
import {
ɵComponentDef as ComponentDef,
ɵɵdefineComponent as defineComponent
} from '@angular/core';
import { fakeAsync, tick } from '@angular/core/testing';
import { stringify } from 'querystring';
import { defer, Observable, Subject } from 'rxjs';
import { ConnectState, connectState } from '..';
// Unit tests for connectState: subscription teardown on destroy, reload
// behaviour, the loading flag, and decorator bookkeeping. Components are
// built with the low-level Ivy APIs (defineComponent / ɵfac), so no TestBed
// is required.
describe('connectState', () => {
it('should automatically unsubscribe from observables on destroy', () => {
const spy = jest.fn();
@ConnectState()
class TestComponent {
static ɵcmp: ComponentDef<TestComponent> = defineComponent({
vars: 0,
decls: 0,
type: TestComponent,
selectors: [[]],
template: () => {}
});
constructor() {
}
ngOnDestroy() { }
state = connectState(this, {
value: new Observable<string>(subscriber => {
subscriber.next('foo');
return {
unsubscribe: spy,
};
}),
});
static ɵfac = () => new TestComponent();
}
const component = TestComponent.ɵfac();
// Invoking the Ivy onDestroy hook must tear down the subscription exactly once.
TestComponent.ɵcmp.onDestroy!.call(component);
expect(spy).toHaveBeenCalledTimes(1);
});
describe('reload', () => {
it('should reload all observables when called without parameters', () => {
const spy = jest.fn(() => 'foo');
const spy2 = jest.fn(() => 'foo');
@ConnectState()
class TestComponent {
static ɵcmp: ComponentDef<TestComponent> = defineComponent({
vars: 0,
decls: 0,
type: TestComponent,
selectors: [[]],
template: () => {}
});
constructor() {
}
ngOnDestroy() { }
state = connectState(this, {
value: defer(spy),
value2: defer(spy2),
});
static ɵfac = () => new TestComponent();
}
const component = TestComponent.ɵfac();
expect(spy).toHaveBeenCalledTimes(1);
expect(spy2).toHaveBeenCalledTimes(1);
// A bare reload() must re-subscribe EVERY deferred source.
component.state.reload();
expect(spy).toHaveBeenCalledTimes(2);
expect(spy2).toHaveBeenCalledTimes(2);
TestComponent.ɵcmp.onDestroy!.call(component);
});
it('should reload specific obervable based on the key name', () => {
const spy = jest.fn(() => 'foo');
const spy2 = jest.fn(() => 'foo');
@ConnectState()
class TestComponent {
static ɵcmp: ComponentDef<TestComponent> = defineComponent({
vars: 0,
decls: 0,
type: TestComponent,
selectors: [[]],
template: () => {}
});
constructor() {
}
ngOnDestroy() { }
state = connectState(this, {
value: defer(spy),
value2: defer(spy2),
});
static ɵfac = () => new TestComponent();
}
const component = TestComponent.ɵfac();
expect(spy).toHaveBeenCalledTimes(1);
expect(spy2).toHaveBeenCalledTimes(1);
// reload('value') must only re-subscribe the named source; 'value2' stays untouched.
component.state.reload('value');
expect(spy).toHaveBeenCalledTimes(2);
expect(spy2).toHaveBeenCalledTimes(1);
TestComponent.ɵcmp.onDestroy!.call(component);
});
});
it('should expose loading state', () => {
const value = new Subject<string>();
@ConnectState()
class TestComponent {
static ɵcmp: ComponentDef<TestComponent> = defineComponent({
vars: 0,
decls: 0,
type: TestComponent,
selectors: [[]],
template: () => {}
});
constructor() {
}
ngOnDestroy() { }
state = connectState(this, {
value,
});
static ɵfac = () => new TestComponent();
}
const component = TestComponent.ɵfac();
// loading stays true until the source emits its first value.
expect(component.state.loading.value).toEqual(true);
value.next('foo');
expect(component.state.loading.value).toEqual(false);
TestComponent.ɵcmp.onDestroy!.call(component);
});
it('should apply symbol to decorated class definition', () => {
@ConnectState()
class TestComponent {
static ɵcmp: ComponentDef<TestComponent> = defineComponent({
vars: 0,
decls: 0,
type: TestComponent,
selectors: [[]],
template: () => {}
});
}
// The decorator marks the component definition with a private symbol so the
// runtime can verify the decorator was applied.
const ownPropertySymbols = Object.getOwnPropertySymbols(TestComponent.ɵcmp);
const decoratorAppliedSymbol = ownPropertySymbols.find(
symbol => symbol.toString() === 'Symbol(__decoratorApplied)'
);
expect(decoratorAppliedSymbol).toBeDefined();
});
it('should throw if directive/component not decorated with ConnectState or UntilDestroy', () => {
class TestComponent {
static ɵcmp: ComponentDef<TestComponent> = defineComponent({
vars: 0,
decls: 0,
type: TestComponent,
selectors: [[]],
template: () => {}
});
ngOnDestroy() { }
state = connectState(this, {});
static ɵfac = () => new TestComponent();
}
// Without the decorator, construction must fail fast with a clear error.
expect(() => TestComponent.ɵfac()).toThrow(/untilDestroyed operator cannot be used inside directives or components or providers that are not decorated with UntilDestroy decorator/);
});
});
|
'use strict';
var expect = require("chai").expect;
var fs = require('fs');
var rewire = require('rewire');
var logger = require('../../../lib/log/logger');
var attributeValidator = require('../../../lib/validation/attributeValidator');
var attributeValidatorPrivate = rewire('../../../lib/validation/attributeValidator');
// Unit tests for attributeValidator.validatePolicy: cross-field compatibility
// checks (min/max instance counts, date/time ordering) and overlap detection
// for specific_date and recurring_schedule entries. An empty result array
// means the policy validated successfully.
describe('Validating Policy JSON properties',function(){
var fakePolicy;
beforeEach(function(){
// Fresh copy of the fixture before every test; each test mutates it freely.
fakePolicy = JSON.parse(fs.readFileSync(__dirname+'/../fakePolicy.json', 'utf8'));
});
it('Should validate the policy JSON successfully',function(){
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result).to.be.empty;
});
});
it('Should fail to validate the policy as instance_min_count is greater than instance_max_count',function(){
fakePolicy.instance_min_count = 10;
fakePolicy.instance_max_count = 5;
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0]).to.have.property('stack').and.equal('instance_min_count 10 is higher or equal to instance_max_count 5 in policy_json');
expect(result[0]).to.have.property('message').and.equal('instance_min_count and instance_max_count values are not compatible');
expect(result[0]).to.have.property('property').and.equal('instance_min_count');
});
});
it('Should fail to validate the policy as end_date is before start_date',function(){
fakePolicy.schedules.specific_date[0].start_date_time = '2016-06-19T10:30';
fakePolicy.schedules.specific_date[0].end_date_time = '2014-06-19T13:30';
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('specific_date.start_date_time');
expect(result[0].message).to.equal('specific_date.start_date_time and specific_date.end_date_time values are not compatible');
expect(result[0].stack).to.equal('start_date_time 2016-06-19T10:30 is same or after end_date_time 2014-06-19T13:30 in specific_date :[0]');
});
});
it('Should fail to validate the policy as instance_min_count is greater than instance_max_count in specific_date',function(){
fakePolicy.schedules.specific_date[0].instance_min_count = 10;
fakePolicy.schedules.specific_date[0].instance_max_count = 2;
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('specific_date.instance_min_count');
expect(result[0].message).to.equal('specific_date.instance_min_count and specific_date.instance_max_count values are not compatible');
expect(result[0].stack).to.equal('instance_min_count 10 is higher or equal to instance_max_count 2 in specific_date :[0]');
});
});
it('Should fail to validate the policy as initial_min_instance_count is greater than instance_max_count in specific_date',function(){
fakePolicy.schedules.specific_date[0].initial_min_instance_count = 5;
fakePolicy.schedules.specific_date[0].instance_max_count = 4;
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('specific_date.initial_min_instance_count');
expect(result[0].message).to.equal('specific_date.initial_min_instance_count and specific_date.instance_max_count values are not compatible');
expect(result[0].stack).to.equal('initial_min_instance_count 5 is higher than instance_max_count 4 in specific_date :[0]');
});
});
it('Should fail to validate the policy as initial_min_instance_count is less than instance_min_count in specific_date',function(){
fakePolicy.schedules.specific_date[0].initial_min_instance_count = 1;
fakePolicy.schedules.specific_date[0].instance_min_count = 2;
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('specific_date.initial_min_instance_count');
expect(result[0].message).to.equal('specific_date.initial_min_instance_count and specific_date.instance_min_count values are not compatible');
expect(result[0].stack).to.equal('initial_min_instance_count 1 is lower than instance_min_count 2 in specific_date :[0]');
});
});
it('Should fail to validate the Policy with overlapping time range in specific date',function(){
// Make entry [1] span a range that overlaps entry [0].
fakePolicy.schedules.specific_date[1].start_date_time = '2015-06-04T10:53';
fakePolicy.schedules.specific_date[1].end_date_time = '2015-06-21T23:45';
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('specific_date.start_date_time');
expect(result[0].stack).to.equal('Date range of specific_date[1] is overlapped with date range of specific_date[0]');
expect(result[0].message).to.equals('specific_date.start_date_time and specific_date.end_date_time ranges are overlapping');
});
});
it('should fail to validate policy with overlapping time range in days of week in recurring schedule',function(){
fakePolicy.schedules.recurring_schedule[2].days_of_week = [4,5,2];
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('recurring_schedule.start_time');
expect(result[0].message).to.equal('recurring_schedule.start_time and recurring_schedule.end_time ranges are overlapping');
expect(result[0].stack).to.equal('days_of_week based time range of recurring_schedule[0] is overlapped with time range of recurring_schedule[2]');
});
});
it('should fail to validate policy with overlapping time range in days of month in recurring schedule',function(){
fakePolicy.schedules.recurring_schedule[1].days_of_month = [12,20,24];
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('recurring_schedule.start_time');
expect(result[0].message).to.equal('recurring_schedule.start_time and recurring_schedule.end_time ranges are overlapping');
expect(result[0].stack).to.equal('days_of_month based time range of recurring_schedule[1] is overlapped with time range of recurring_schedule[3]');
});
});
it('should validate the policy successfully if days of month in recurring schedule is overlapping but start_time and end_time in overlapped date are non-overlapping',function(){
// Same days, but a disjoint time-of-day window: this must be accepted.
fakePolicy.schedules.recurring_schedule[1].days_of_week = [4,5,2];
fakePolicy.schedules.recurring_schedule[1].start_time = '19:00'
fakePolicy.schedules.recurring_schedule[1].end_time = '23:15';
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result).to.be.empty;
});
});
it('should fail to validate the policy if start_time is after end_time in recurring schedule',function(){
fakePolicy.schedules.recurring_schedule[1].start_time = '23:00'
fakePolicy.schedules.recurring_schedule[1].end_time = '13:15';
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('recurring_schedule.start_time');
expect(result[0].message).to.equal('recurring_schedule.start_time and recurring_schedule.end_time values are not compatible');
expect(result[0].stack).to.equal('start_time 23:00 is same or after end_time 13:15 in recurring_schedule :[1]');
});
});
it('should fail to validate the policy if start_date is after end_date in recurring schedule',function(){
fakePolicy.schedules.recurring_schedule[1].start_date = '2016-06-12'
fakePolicy.schedules.recurring_schedule[1].end_date = '2016-06-02';
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('recurring_schedule.start_date');
expect(result[0].message).to.equal('recurring_schedule.start_date and recurring_schedule.end_date values are not compatible');
expect(result[0].stack).to.equal('start_date 2016-06-12 is after end_date 2016-06-02 in recurring_schedule :[1]');
});
});
it('Should fail to validate the policy as instance_min_count is greater than instance_max_count in recurring_schedule',function(){
fakePolicy.schedules.recurring_schedule[0].instance_min_count = 10;
fakePolicy.schedules.recurring_schedule[0].instance_max_count = 2;
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('recurring_schedule.instance_min_count');
expect(result[0].message).to.equal('recurring_schedule.instance_min_count and recurring_schedule.instance_max_count values are not compatible');
expect(result[0].stack).to.equal('instance_min_count 10 is higher or equal to instance_max_count 2 in recurring_schedule :[0]');
});
});
it('Should fail to validate the policy as initial_min_instance_count is greater than instance_max_count in recurring_schedule',function(){
fakePolicy.schedules.recurring_schedule[0].initial_min_instance_count = 5;
fakePolicy.schedules.recurring_schedule[0].instance_max_count = 4;
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('recurring_schedule.initial_min_instance_count');
expect(result[0].message).to.equal('recurring_schedule.initial_min_instance_count and recurring_schedule.instance_max_count values are not compatible');
expect(result[0].stack).to.equal('initial_min_instance_count 5 is higher than instance_max_count 4 in recurring_schedule :[0]');
});
});
it('Should fail to validate the policy as initial_min_instance_count is less than instance_min_count in recurring_schedule',function(){
fakePolicy.schedules.recurring_schedule[0].initial_min_instance_count = 1;
fakePolicy.schedules.recurring_schedule[0].instance_min_count = 2;
attributeValidator.validatePolicy(fakePolicy,function(result){
expect(result[0].property).to.equal('recurring_schedule.initial_min_instance_count');
expect(result[0].message).to.equal('recurring_schedule.initial_min_instance_count and recurring_schedule.instance_min_count values are not compatible');
expect(result[0].stack).to.equal('initial_min_instance_count 1 is lower than instance_min_count 2 in recurring_schedule :[0]');
});
});
});
|
<gh_stars>1-10
from .dumpling import Dumpling, DumplingDriver
from .dumplingchef import DumplingChef
from .dumplingeater import DumplingEater
from .exceptions import (
InvalidDumpling, InvalidDumplingPayload, NetDumplingsError,
)
from .dumplinghub import DumplingHub
from .dumplingkitchen import DumplingKitchen
from ._version import __version__
# Workaround to avoid F401 "imported but unused" linter errors.
(
Dumpling,
DumplingDriver,
DumplingChef,
DumplingEater,
DumplingHub,
DumplingKitchen,
InvalidDumpling,
InvalidDumplingPayload,
NetDumplingsError,
__version__,
)
|
#!/bin/bash
# Build and deploy each sub-project in turn. set -e aborts on the first
# failure, -x traces executed commands, -v echoes script lines as read.
# Each build runs in a subshell so the cd does not leak between steps.
set -exv
(cd frontend;./build_deploy.sh)
(cd api;./build_deploy.sh)
(cd acceptance-test;./build_deploy.sh)
#!/bin/bash
# Install dependencies and start the app, but only when the node_modules
# directory is missing (i.e. dependencies have not been installed yet).
# Fixed typo: the directory was spelled "node_moduless", so the existence
# check never matched and the script reinstalled on every run.
dir="node_modules"
if [ ! -d "$dir" ]
then
echo "Installing node modules"
npm install && npm start
echo "APP started!"
fi
#!/usr/bin/env bash
# Copyright (c) 2018 WSO2 Inc. (http:www.wso2.org) All Rights Reserved.
#
# WSO2 Inc. licenses this file to you under the Apache License,
# Version 2.0 (the "License"); you may not use this file except
# in compliance with the License.
# You may obtain a copy of the License at
#
# http:www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -o errexit
set -o nounset
set -o pipefail
# Quote expansions so paths containing spaces/globs do not undergo word
# splitting (ShellCheck SC2086), and index BASH_SOURCE explicitly (SC2128).
SCRIPT_ROOT="$(dirname "${BASH_SOURCE[0]}")/.."
CODEGEN_PKG=${CODEGEN_PKG:-$(cd "${SCRIPT_ROOT}/../.."; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../code-generator)}
# generate the code with:
# --output-base because this script should also be able to run inside the vendor dir of
# k8s.io/kubernetes. The output-base is needed for the generators to output into the vendor dir
# instead of the $GOPATH directly. For normal projects this can be dropped.
"${CODEGEN_PKG}/generate-groups.sh" "deepcopy,client,informer,lister" \
github.com/wso2/product-vick/system/controller/pkg/client github.com/wso2/product-vick/system/controller/pkg/apis \
"vick:v1alpha1 istio/networking:v1alpha3" \
--go-header-file "${SCRIPT_ROOT}/hack/boilerplate.go.txt"
# --output-base "$(dirname ${BASH_SOURCE})/../../.." \
<reponame>ByteExceptionM/Nameless-Java-API<filename>src/com/namelessmc/java_api/exception/CannotReportSelfException.java
package com.namelessmc.java_api.exception;
import com.namelessmc.java_api.ApiError;
/**
 * Thrown when a user attempts to file a report against themselves; carries
 * the {@link ApiError#CANNOT_REPORT_YOURSELF} API error code.
 */
public class CannotReportSelfException extends ApiErrorException {
private static final long serialVersionUID = 1L;
public CannotReportSelfException() {
super(ApiError.CANNOT_REPORT_YOURSELF);
}
}
|
<reponame>GabrielMajeri/tutoriat-pa-2019-2020
# Read a word and normalise its case: if it has more uppercase than lowercase
# characters, print it fully uppercased; otherwise fully lowercased.
word = input()
# Python 3's input() never returns the trailing newline; strip() here is a
# defensive cleanup of any surrounding whitespace (the original comment about
# removing '\n' was inaccurate).
word = word.strip()
uppercase = 0
lowercase = 0
for character in word:
if character.isupper():
uppercase += 1
else:
# NOTE: every non-uppercase character (digits, punctuation, ...) is
# counted as lowercase by this branch.
lowercase += 1
# Ties favour lowercase: uppercase must be strictly greater to win.
if uppercase > lowercase:
word = word.upper()
else:
word = word.lower()
print(word)
package main
import (
"encoding/json"
"os"
stream "github.com/dynacrypt/go-bloxroute/stream"
"github.com/rs/zerolog"
)
// Package-level structured logger writing to stderr at warn level.
var log = zerolog.New(os.Stderr).Level(zerolog.WarnLevel).With().Timestamp().Logger()

// main connects to the bloXroute transaction stream using credentials from
// the environment and writes every received transaction to stdout as JSON.
func main() {
	accountID := os.Getenv("ACCOUNT_ID")
	if accountID == "" {
		log.Fatal().Msg("ACCOUNT_ID not set in environment!")
	}
	secretHash := os.Getenv("SECRET_HASH")
	if secretHash == "" {
		log.Fatal().Msg("SECRET_HASH not set in environment!")
	}
	url := os.Getenv("WS_URL")
	s, err := stream.NewStream(
		stream.Account(accountID, secretHash),
		stream.URL(url),
		stream.OnConnect(func() { log.Info().Msg("Connected to tx stream") }),
		stream.OnReconnect(func() { log.Info().Msg("Reconnected to tx stream") }),
		stream.OnError(func(err error) { log.Error().Msg(err.Error()) }),
	)
	if err != nil {
		log.Fatal().Msg(err.Error())
	}
	ch, err := s.Start()
	// Bug fix: the error returned by Start() was previously never checked,
	// and the encode loop compared that stale error against Encode's result
	// ("if err != enc.Encode(tx)"), which could even call Error() on a nil
	// error. Check both errors properly instead.
	if err != nil {
		log.Fatal().Msg(err.Error())
	}
	enc := json.NewEncoder(os.Stdout)
	for tx := range ch {
		if err := enc.Encode(tx); err != nil {
			log.Fatal().Msg(err.Error())
		}
	}
}
|
<filename>src/symbiotes/effects/columns.js<gh_stars>1-10
import { desksActions } from '@symbiotes/desks'
import { cardsActions } from '@symbiotes/cards'
import { post, del } from '@lib/request'
import { addColumn as createColumn } from '../helpers'
// Thunk: creates a column on the server, then mirrors it in the store using
// the server-assigned id.
export const addColumn = (name, deskId) => {
return dispatch => {
return post('/desks/columns', {
name: name,
deskId: deskId
})
.then(res => {
// The backend assigns the column id; reuse it in the local action.
dispatch(createColumn(name, deskId, res.id))
})
// NOTE(review): failures are only logged to the console, not reflected
// in the store — confirm this best-effort behaviour is intended.
.catch(err => console.log(err))
}
}
// Thunk: deletes a column on the server, then removes it from the store.
// NOTE(review): unlike addColumn, rejections are not caught here and will
// propagate to the caller — confirm that is intentional.
export const deleteColumn = columnId => {
return dispatch => {
return del(`/desks/columns/${columnId}`).then(() => {
dispatch(desksActions.deleteColumn(columnId))
})
}
}
// Thunk: optimistically applies the new column order to the store, then
// persists it to the server; on failure the error is stored in cards state
// (the optimistic update is NOT rolled back).
export const moveColumn = (deskId, columns, token) => {
return dispatch => {
dispatch(desksActions.updateDesk({ id: deskId, columns: columns }))
return post(
`/desks/${deskId}/moveColumn`,
{ columns: columns },
token
).catch(err => {
dispatch(cardsActions.setError(err))
})
}
}
|
import type { ApiInterfaceRx } from '@polkadot/api/types';
import type { Hash } from '@polkadot/types/interfaces';
import type { Observable } from '@polkadot/x-rxjs';
import type { DeriveProposalImage } from '../types';
/**
 * Returns a derive function that maps a proposal hash to an observable of its
 * proposal image, emitting undefined when none is available.
 */
export declare function preimage(instanceId: string, api: ApiInterfaceRx): (hash: Hash) => Observable<DeriveProposalImage | undefined>;
|
<gh_stars>0
package com.wagner.springmvcintro.sample2;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
/**
 * Spring MVC controller for the v1 hello-world form flow: one mapping shows
 * the form view, the other renders the result view.
 */
@Controller
@RequestMapping("/v1")
public class FormV1Controller {
/** Renders the form view (logical view name "v1/helloworld-form"). */
@RequestMapping("/showForm")
public String showForm() {
return "v1/helloworld-form";
}
/** Renders the result view (logical view name "v1/helloworld"). */
@RequestMapping("/processForm")
public String processForm() {
return "v1/helloworld";
}
}
package com.solid.lsp.structure;
import java.util.Optional;
/**
 * A Duck whose ability to walk depends on having a charged battery.
 */
public class MechanicalDuck extends Duck {

    private Optional<Battery> battery;

    public MechanicalDuck(Optional<Battery> battery) {
        this.battery = battery;
    }

    /**
     * Walks only when a battery is present and charged; otherwise returns an
     * error message instead of walking.
     */
    @Override
    public String walk() {
        boolean canWalk = battery.isPresent() && battery.get().isCharge();
        return canWalk
                ? "Walking... "
                : "Error! It is not walking because of needing a battery... ";
    }
}
|
<filename>internal/gitaly/config/prometheus/config.go
package prometheus
import (
"time"
grpcprometheus "github.com/grpc-ecosystem/go-grpc-prometheus"
"github.com/prometheus/client_golang/prometheus"
log "github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitaly/v14/internal/middleware/limithandler"
)
// Config contains additional configuration data for prometheus.
type Config struct {
// ScrapeTimeout is the allowed duration of a Prometheus scrape before timing out.
ScrapeTimeout time.Duration `toml:"scrape_timeout"`
// GRPCLatencyBuckets configures the histogram buckets used for gRPC
// latency measurements.
GRPCLatencyBuckets []float64 `toml:"grpc_latency_buckets"`
}
// DefaultConfig returns a new config with default values set.
func DefaultConfig() Config {
return Config{
ScrapeTimeout: 10 * time.Second,
GRPCLatencyBuckets: []float64{0.001, 0.005, 0.025, 0.1, 0.5, 1.0, 10.0, 30.0, 60.0, 300.0, 1500.0},
}
}
// Configure configures latency buckets for prometheus timing histograms.
// It is a no-op when no buckets are set; otherwise it applies the buckets to
// the gRPC server and client handling-time histograms and to the
// limithandler acquire-time histogram.
func (c *Config) Configure() {
if len(c.GRPCLatencyBuckets) == 0 {
return
}
log.WithField("latencies", c.GRPCLatencyBuckets).Info("grpc prometheus histograms enabled")
grpcprometheus.EnableHandlingTimeHistogram(func(histogramOpts *prometheus.HistogramOpts) {
histogramOpts.Buckets = c.GRPCLatencyBuckets
})
grpcprometheus.EnableClientHandlingTimeHistogram(func(histogramOpts *prometheus.HistogramOpts) {
histogramOpts.Buckets = c.GRPCLatencyBuckets
})
limithandler.EnableAcquireTimeHistogram(c.GRPCLatencyBuckets)
}
|
<gh_stars>0
package org.floric.studies.dellogics.model.classicalplanning;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Classical-planning domain: the available actions, the set of states and the
 * state-transition functions. Lombok generates the getters/setters,
 * equals/hashCode and all-args constructor.
 */
@Data
@AllArgsConstructor
public class Domain {
private Set<Action> actions;
private Set<State> states;
private Set<StateTransitionFunction> transitionFunctions;
}
|
<reponame>gisprogrammer/wsp.geo.pl
/**
* @example Inline Data
*
* This example creates a simple store that auto-loads its data from an ajax
* proxy. A global variable called "userStore" is created which is an instance of
* {@link Ext.data.Store}. Feel free to experiment with the "userStore" object on the console command line.
*/
// Model definition: a User record has a first and last name.
Ext.define('User', {
extend: 'Ext.data.Model',
fields: ['firstName', 'lastName']
});
// Declared at file scope so it is globally inspectable from the console,
// as described in the example header above.
var userStore;
Ext.require('Ext.data.Store');
Ext.onReady(function() {
// Create the store with inline (in-memory) data once the framework is ready.
userStore = Ext.create('Ext.data.Store', {
model: 'User',
data: [
{firstName: 'Ed', lastName: 'Spencer'},
{firstName: 'Tommy', lastName: 'Maintz'},
{firstName: 'Aaron', lastName: 'Conran'},
{firstName: 'Jamie', lastName: 'Avins'}
]
});
});
|
<!DOCTYPE html>
<!-- DOCTYPE added: without it browsers render in quirks mode. -->
<html lang="en">
<head>
<!-- Declare the encoding explicitly so the browser does not have to guess. -->
<meta charset="utf-8">
<title> About Us </title>
</head>
<body>
<h1> About Us </h1>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
</p>
<p>
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
</p>
</body>
</html>
#!/bin/sh
# CocoaPods-style "embed frameworks" build phase: copies each built framework
# into the app bundle, strips architectures not valid for the current build,
# and re-signs when code signing is enabled.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies one framework ($1) into the app's Frameworks folder, stripping
# invalid architectures and re-signing as needed.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Install the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "Pods-EmojiConstants_Example/EmojiConstants.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-EmojiConstants_Example/EmojiConstants.framework"
fi
|
<filename>openliberty-domino/bundles/org.openntf.openliberty.domino.reverseproxy/src/main/java/org/openntf/openliberty/domino/reverseproxy/ReverseProxyConfigProvider.java
/*
* Copyright © 2018-2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openntf.openliberty.domino.reverseproxy;
/**
* Represents a service that can provide configuration settings for a reverse
* proxy instance.
*
* @author <NAME>
* @since 2.1.0
*/
public interface ReverseProxyConfigProvider {
/**
 * Builds and returns the reverse-proxy configuration supplied by this
 * provider.
 */
ReverseProxyConfig createConfiguration();
}
|
<filename>.storybook/main.js
// Storybook configuration: story file glob, addons, and webpack overrides.
module.exports = {
stories: ["../src/**/*.story.tsx"],
addons: ["@storybook/addon-links", "@storybook/addon-essentials"],
webpackFinal: async config => {
// Keep the build output quiet except for errors.
config.stats = "errors-only"
// Treat server-only modules as externals so webpack does not bundle them.
config.externals = {
redis: "redis",
request: "request",
}
return config
},
}
|
def subset_sum(nums, s):
    """Return True if some subset of ``nums`` sums to exactly ``s``.

    Classic 0/1 subset-sum dynamic programming: dp_table[i][j] is True when
    some subset of the first i numbers sums to j.
    O(len(nums) * s) time and space.
    """
    n = len(nums)
    dp_table = [[False] * (s + 1) for _ in range(n + 1)]
    # Base case: a sum of 0 is always reachable via the empty subset.
    # (Bug fix: the original initialised the top ROW dp_table[0][j] to True
    # for every j — i.e. it claimed every sum is reachable with zero
    # elements — which made the function return True for any s >= 1 and
    # False for s == 0. The correct base case is the first COLUMN.)
    for i in range(n + 1):
        dp_table[i][0] = True
    # Fill the table bottom-up.
    for i in range(1, n + 1):
        for j in range(1, s + 1):
            if nums[i - 1] > j:
                # Current element is too large to include.
                dp_table[i][j] = dp_table[i - 1][j]
            else:
                # Either exclude the element, or include it and reach the
                # remainder j - nums[i-1] with the previous elements.
                dp_table[i][j] = dp_table[i - 1][j] or dp_table[i - 1][j - nums[i - 1]]
    # True iff sum s is reachable using any subset of all n numbers.
    return dp_table[n][s]
<reponame>fagossa/katas-java
package com.trip.original.user;
/**
 * Unchecked exception carrying a descriptive message for a failed call to an
 * external collaborator (see callers for the specific failure contexts).
 */
public class CollaboratorCallException extends RuntimeException {
public CollaboratorCallException(String message) {
super(message);
}
}
|
<reponame>ProjectD8/MediaWiki-Content-Replicator<gh_stars>0
/*
* Copyright 2015 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mediawiki.content.replicator;
/**
 * Snapshot of a {@code ProgressMonitor}'s state: limit, progress and
 * "monitored" flag for each of the three tracking levels (operation,
 * page, project). Supports copying the state out of a monitor
 * ({@link #save}) and pushing it back in ({@link #restore}).
 */
public class ProgressData
{
    // Limit / current-progress counters per tracking level.
    protected int operationLimit;
    protected int operationProgress;
    protected int pageLimit;
    protected int pageProgress;
    protected int projectLimit;
    protected int projectProgress;
    // Whether each level is currently being monitored.
    protected boolean operationMonitored;
    protected boolean pageMonitored;
    protected boolean projectMonitored;

    /** Creates an empty snapshot (all zeros / false). */
    public ProgressData()
    {
    }

    /** Creates a snapshot pre-populated from {@code progress}. */
    public ProgressData(ProgressMonitor progress)
    {
        save(progress);
    }

    /** Copies the monitor's current state into this snapshot. */
    public void save(ProgressMonitor progress)
    {
        operationLimit = progress.getOperationLimit();
        operationProgress = progress.getOperationProgress();
        operationMonitored = progress.isOperationMonitored();
        pageLimit = progress.getPageLimit();
        pageProgress = progress.getPageProgress();
        pageMonitored = progress.isPageMonitored();
        projectLimit = progress.getProjectLimit();
        projectProgress = progress.getProjectProgress();
        projectMonitored = progress.isProjectMonitored();
    }

    /** Pushes this snapshot's state back into {@code progress}. */
    public void restore(ProgressMonitor progress)
    {
        // Re-initialize the monitored flags first, then the counters.
        progress.initProgress(operationMonitored, pageMonitored, projectMonitored);
        progress.setOperationLimit(operationLimit);
        progress.setOperationProgress(operationProgress);
        progress.setPageLimit(pageLimit);
        progress.setPageProgress(pageProgress);
        progress.setProjectLimit(projectLimit);
        progress.setProjectProgress(projectProgress);
    }
}
|
<filename>shared/source/threading/SysCurrentThreadId.cpp
#include "Common.h"
#include "SysCurrentThreadId.h"
#if COMPILER != COMPILER_MICROSOFT && PLATFORM != PLATFORM_APPLE
#ifdef USE_GETTID
#include <unistd.h>
#include <sys/syscall.h>
#endif
#endif
/*
 * Returns a 32-bit identifier for the calling thread.
 *
 * Platform selection:
 *  - MSVC/Windows: GetCurrentThreadId() (a DWORD).
 *  - Apple: the Mach port of the current pthread.
 *  - Other POSIX: the kernel TID via syscall(SYS_gettid) when USE_GETTID
 *    is defined; otherwise the pthread_t handle is cast directly — note
 *    pthread_t is opaque, so that cast assumes it fits in 32 bits.
 *
 * The BUILD_BUG_ON checks fail the build if the native id type is wider
 * than uint32_t.
 */
uint32_t GetSysCurrentThreadId(void)
{
#if COMPILER == COMPILER_MICROSOFT
    BUILD_BUG_ON(sizeof(uint32_t) < sizeof(DWORD));
    return (uint32_t)GetCurrentThreadId();
#elif PLATFORM == PLATFORM_APPLE
    BUILD_BUG_ON(sizeof(uint32_t) < sizeof(mach_port_t));
    return (uint32_t)pthread_mach_thread_np(pthread_self());
#else
#ifdef USE_GETTID
#ifdef SYS_gettid
    BUILD_BUG_ON(sizeof(uint32_t) < sizeof(pid_t));
    return (uint32_t)syscall(SYS_gettid);
#else
#error "SYS_gettid unavailable on this system"
#endif
#else
    BUILD_BUG_ON(sizeof(uint32_t) < sizeof(pthread_t));
    return (uint32_t)pthread_self();
#endif
#endif
}
|
<gh_stars>0
package cmd
import (
"context"
"fmt"
"io/ioutil"
"net/url"
"os"
"strings"
"github.com/go-kit/kit/log"
"github.com/mitchellh/cli"
"github.com/pkg/errors"
"github.com/replicatedhq/ship/pkg/e2e"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
// init wires the "create" command into the parent ship-release command tree.
func init() {
	shipReleaseCommand.AddCommand(Cmd())
}
// Cmd builds the cobra "create" command, which promotes a ship release
// through the vendor GraphQL API. All flags may also be supplied through
// environment variables (dashes mapped to underscores via AutomaticEnv).
func Cmd() *cobra.Command {
	vip := viper.New()
	// Structured logfmt logger on stdout with UTC timestamps.
	var stdoutLogger log.Logger
	stdoutLogger = log.NewLogfmtLogger(os.Stdout)
	stdoutLogger = log.With(stdoutLogger, "ts", log.DefaultTimestampUTC)
	cmd := &cobra.Command{
		Use:   "create",
		Short: "API client for creating ship releases",
		Long: `
`,
		RunE: func(cmd *cobra.Command, args []string) error {
			// Assemble a Releaser with colored terminal UI and run it.
			releaser := &Releaser{
				viper:  vip,
				logger: stdoutLogger,
				ui: &cli.ColoredUi{
					OutputColor: cli.UiColorNone,
					ErrorColor:  cli.UiColorRed,
					WarnColor:   cli.UiColorYellow,
					InfoColor:   cli.UiColorGreen,
					Ui: &cli.BasicUi{
						Reader:      os.Stdin,
						Writer:      os.Stdout,
						ErrorWriter: os.Stderr,
					},
				},
			}
			err := releaser.Release(context.Background())
			if err != nil {
				return errors.Wrap(err, "promote release")
			}
			return nil
		},
	}
	cmd.Flags().String("vendor-token", "", "Token to use to communicate with https://g.replicated.com")
	cmd.Flags().String("graphql-api-address", "https://g.replicated.com/graphql", "upstream g. address")
	cmd.Flags().String("spec-file", "", "spec file to promote")
	cmd.Flags().String("channel-id", "", "channel id to promote")
	cmd.Flags().String("semver", "", "semver of the release")
	cmd.Flags().String("release-notes", "", "release notes")
	cmd.Flags().String("log-level", "off", "log level")
	// Bind flags into viper and allow env-var overrides (e.g. VENDOR_TOKEN).
	vip.BindPFlags(cmd.Flags())
	vip.BindPFlags(cmd.PersistentFlags())
	vip.AutomaticEnv()
	vip.SetEnvKeyReplacer(strings.NewReplacer("-", "_"))
	return cmd
}
// Releaser promotes ship releases via the vendor GraphQL API.
type Releaser struct {
	viper  *viper.Viper // flag/env configuration source
	logger log.Logger   // structured logger
	ui     cli.Ui       // colored terminal output
}
// getParams reads and validates the parameters required to promote a
// release: vendor token, spec file contents, semver, channel id and the
// GraphQL endpoint address. Named returns with bare `return` are used so
// validation failures surface via err; callers should only consume the
// other values when err is nil.
func (r *Releaser) getParams() (token, specContents, semver, channelID, gqlAddr string, err error) {
	token = r.viper.GetString("vendor-token")
	if token == "" {
		err = errors.New("param vendor-token is required")
		return
	}
	specFile := r.viper.GetString("spec-file")
	if specFile == "" {
		err = errors.New("param spec-file is required")
		return
	}
	// Read the spec up front so unreadable files fail early.
	specBytes, err := ioutil.ReadFile(specFile)
	if err != nil {
		err = errors.Wrapf(err, "read file %s", specFile)
		return
	}
	specContents = string(specBytes)
	semver = r.viper.GetString("semver")
	if semver == "" {
		err = errors.New("param semver is required")
		return
	}
	channelID = r.viper.GetString("channel-id")
	if channelID == "" {
		err = errors.New("param channel-id is required")
		return
	}
	gqlAddr = r.viper.GetString("graphql-api-address")
	return
}
func (r *Releaser) Release(ctx context.Context) error {
token, specContents, semver, channelID, gqlAddr, err := r.getParams()
if err != nil {
return errors.Wrap(err, "load params")
}
gqlServer, err := url.Parse(gqlAddr)
if err != nil {
return errors.Wrapf(err, "parse graphql-api-address URL \"%s\"", gqlAddr)
}
client := &e2e.GraphQLClient{
GQLServer: gqlServer,
Token: <PASSWORD>,
Logger: r.logger,
}
data, err := client.PromoteRelease(
string(specContents),
channelID,
semver,
r.viper.GetString("release-notes"),
)
r.ui.Info(fmt.Sprintf("received data %+v", data))
if err != nil {
return errors.Wrapf(err, "promote release")
}
return nil
}
|
def print_range():
    """Print the integers 0 through 9, one per line."""
    for value in range(10):
        print(value)
module Quickbooks
  module Service
    # Service wrapper for QuickBooks Account resources.
    class Account < BaseService
      include ServiceCrud

      # QuickBooks does not hard-delete accounts; instead the record is
      # deactivated via a sparse update.
      def delete(account)
        account.active = false
        update(account, sparse: true)
      end

      private

      # Base query used by ServiceCrud when listing accounts.
      def default_model_query
        "SELECT * FROM ACCOUNT"
      end

      # Model class materialized from API responses.
      def model
        Quickbooks::Model::Account
      end
    end
  end
end
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author <NAME>
* @version 1.3
* @date Sat Apr 29 13:22:47 EDT 2017
* @see LICENSE (MIT style license file).
*
* @see introcs.cs.princeton.edu/java/92symbolic/Polynomial.java.html
*/
package scalation.calculus
import scala.math.abs
import scalation.linalgebra.VectorD
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Poly` class provides simple operations on polynomials.
* <p>
* Poly (2, 3) => 3 x + 2
* <p>
* Note, reverse order of coefficients, i.e., coefficients for smallest terms first.
* @param c the coefficients of the polynomial
*/
case class Poly (c: VectorD)
{
    private val DEBUG = true                         // debug flag

    val deg = c.size - 1                             // degree of the polynomial

    if (DEBUG) println (s"Poly ($c) has degree $deg")

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Apply/evaluate the polynomial at 'x' (Horner-style accumulation).
     *  @param x  the value of the variable
     */
    def apply (x: Double): Double =
    {
        var sum = 0.0
        for (i <- deg to 0 by -1) sum = x * sum + c(i)
        sum
    } // apply

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add 'this' polynomial and the 'q' polynomial (the shorter coefficient
     *  vector is padded with the longer one's high-order terms).
     *  @param q  the other polynomial
     */
    def + (q: Poly): Poly =
    {
        if (deg < q.deg) Poly (c + q.c ++ q.c.slice (deg+1, q.deg+1))
        else Poly (q.c + c ++ c.slice (q.deg+1, deg+1))
    } // +

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Subtract the 'q' polynomial from 'this' polynomial.
     *  @param q  the other polynomial
     */
    def - (q: Poly): Poly =
    {
        if (deg < q.deg) Poly (c - q.c ++ q.c.slice (deg+1, q.deg+1))
        else Poly (-q.c + c ++ c.slice (q.deg+1, deg+1))
    } // -

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply 'this' polynomial by the 'q' polynomial (coefficient
     *  convolution). NOTE: the original comment said "Add" — it multiplies.
     *  @param q  the other polynomial
     */
    def * (q: Poly): Poly =
    {
        val cc = new VectorD (deg + q.deg + 1)
        for (i <- 0 to deg; j <- 0 to q.deg) cc(i+j) += c(i) * q.c(j)
        Poly (cc)
    } // *

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Take the derivative of 'this' polynomial, returning the result as a
     *  polynomial ('Ⅾ' is a unicode alias).
     */
    def derivative: Poly = Poly ((for (i <- 1 to deg) yield i * c(i)) :_*)

    def Ⅾ : Poly = Poly ((for (i <- 1 to deg) yield i * c(i)) :_*)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Integrate 'this' polynomial, returning the result as a polynomial
     *  ('∫' is a unicode alias).
     *  Note, the arbitrary constant 'c' for the indefinite integral is set to 1.
     */
    def integrate: Poly = Poly (1.0 +: (for (i <- 0 to deg) yield c(i) / (i+1.0)) :_*)

    def ∫ : Poly = Poly (1.0 +: (for (i <- 0 to deg) yield c(i) / (i+1.0)) :_*)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Integrate 'this' polynomial on the interval 'on', returning its value
     *  as a double (definite integral; the +1 constants cancel).
     *  @param on  the interval of integration
     */
    def integrate (on: Interval): Double =
    {
        val pl = Poly (1.0 +: (for (i <- 0 to deg) yield c(i) / (i+1.0)) :_*)
        pl (on._2) - pl (on._1)
    } // integrate

    def ∫ (on: Interval): Double =
    {
        val pl = Poly (1.0 +: (for (i <- 0 to deg) yield c(i) / (i+1.0)) :_*)
        pl (on._2) - pl (on._1)
    } // ∫

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Trim away trailing zero coefficients (i.e., those on highest order terms),
     *  returning the resulting polynomial of possibly lower degree.
     */
    def trim: Poly =
    {
        var i = deg; while (c(i) == 0.0) i -= 1      // skip trailing zeros
        Poly (c.slice (0, i+1))                      // keep the rest
    } // trim

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the polynomial to a human-readable string, highest term first.
     */
    override def toString: String =
    {
        val sb = new StringBuilder ()
        for (i <- deg to 0 by -1) sb.append (if (i >= 2) s"${c(i)} x^$i + "
                                             else if (i == 1) s"${c(i)} x + "
                                             else s"${c(i)}")
        sb.toString
    } // toString

} // Poly class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Poly` companion object provides factory methods for the 'Poly' class.
*/
/** The `Poly` companion object provides factory methods for the `Poly` class. */
object Poly
{
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a polynomial from repeated doubles (lowest-order term first).
     *  @param c  the coefficients as a repeated double
     */
    def apply (c: Double*): Poly = Poly (VectorD (c))

} // Poly object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PolyTest` object is used to test the `Poly` class.
* > run-main scalation.calculus.PolyTest
*/
object PolyTest extends App
{
    import scalation.plot.Plot

    val pl  = Poly (4.0, 3.0, 2.0)                   // example polynomial: 2 x^2 + 3 x + 4
    val dpl = pl.derivative                          // its derivative
    val ipl = pl.integrate                           // one of its indefinite integrals
    val jpl = pl.integrate ((0.0, 2.0))              // one of its definite integrals
    val spl = pl + dpl                               // sum of polynomial and its derivative
    val mpl = pl - dpl                               // difference of polynomial and its derivative
    val tpl = pl * dpl                               // product of polynomial and its derivative
    val zpl = Poly (4.0, 0.0, 3.0, 0.0, 0.0)         // polynomial with trailing zero (e.g., 0 x^2)

    println (s"pl = $pl")
    println (s"dpl = $dpl")
    println (s"ipl = $ipl")
    println (s"spl = $spl")
    println (s"mpl = $mpl")
    println (s"tpl = $tpl")
    println (s"zpl = $zpl")
    println (s"t(zpl) = ${zpl.trim}")
    println (s"pl (2) = ${pl (2)}")
    println (s"dpl (2) = ${dpl (2)}")
    println (s"ipl (2) = ${ipl (2)}")
    println (s"jpl = $jpl")
    println (s"spl (2) = ${spl (2)}")
    println (s"mpl (2) = ${mpl (2)}")
    println (s"tpl (2) = ${tpl (2)}")

    // Plot pl and dpl over x in [0, 4).
    val x = VectorD.range (0, 20) / 5.0
    val y = x.map (pl (_))
    val z = x.map (dpl (_))
    new Plot (x, y, z)

} // PolyTest object
|
package com.st.map;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
/*
需求:创建一个HashMap集合,存储三个键值对元素,每一个键值对元素的键是String,值是ArrayList,
每一个ArrayList的元素是String,并遍历
思路:
1:创建HashMap集合
2:创建ArrayList集合,并添加元素
3:把ArrayList作为元素添加到HashMap集合
4:遍历HashMap集合
给出如下的数据:
第一个ArrayList集合的元素:(三国演义)
诸葛亮
赵云
第二个ArrayList集合的元素:(西游记)
唐僧
孙悟空
第三个ArrayList集合的元素:(水浒传)
武松
鲁智深
*/
/**
 * Demo: a HashMap whose String keys (book titles) map to ArrayList values
 * (character names). Three entries are inserted and the whole map is then
 * printed, one title per line with its characters indented below it.
 */
public class HashMapIncludeArrayListDemo {
    public static void main(String[] args) {
        // title -> list of character names
        HashMap<String, ArrayList<String>> booksToCharacters =
                new HashMap<String, ArrayList<String>>();

        // Romance of the Three Kingdoms
        ArrayList<String> threeKingdoms = new ArrayList<String>();
        threeKingdoms.add("诸葛亮");
        threeKingdoms.add("赵云");
        booksToCharacters.put("三国演义", threeKingdoms);

        // Journey to the West
        ArrayList<String> journeyWest = new ArrayList<String>();
        journeyWest.add("唐僧");
        journeyWest.add("孙悟空");
        booksToCharacters.put("西游记", journeyWest);

        // Water Margin
        ArrayList<String> waterMargin = new ArrayList<String>();
        waterMargin.add("武松");
        waterMargin.add("鲁智深");
        booksToCharacters.put("水浒传", waterMargin);

        // Walk every title, then the list stored under it.
        Set<String> titles = booksToCharacters.keySet();
        for (String title : titles) {
            System.out.println(title);
            ArrayList<String> characters = booksToCharacters.get(title);
            for (String name : characters) {
                System.out.println("\t" + name);
            }
        }
    }
}
|
<gh_stars>0
package com.opalfire.foodorder.fcm;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Build.VERSION;
import android.provider.Settings;
import android.provider.Settings.System;
import android.support.v4.app.NotificationCompat.Builder;
import android.support.v4.app.NotificationCompat.InboxStyle;
import android.support.v4.app.NotificationCompat.Style;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import com.google.firebase.messaging.FirebaseMessagingService;
import com.google.firebase.messaging.RemoteMessage;
import com.opalfire.foodorder.HomeActivity;
import com.opalfire.foodorder.R;
/**
 * FCM entry point: receives push messages and surfaces them as local
 * notifications that open {@code HomeActivity}.
 */
public class MyFirebaseMessagingService extends FirebaseMessagingService {
    private static final String TAG = "MyFirebaseMsgService";

    /**
     * Called for every incoming FCM message; shows a notification built
     * from the "message" entry of the data payload.
     * NOTE(review): FCM documents getData() as non-null, so the failure
     * branch below is likely unreachable — confirm before relying on it.
     */
    @Override
    public void onMessageReceived(RemoteMessage remoteMessage) {
        if (remoteMessage.getData() != null) {
            Log.d(TAG, "From: " + remoteMessage.getFrom());
            Log.d(TAG, "Notification Message Body: " + remoteMessage.getData());
            sendNotification(remoteMessage.getData().get("message"));
            return;
        }
        Log.d(TAG, "FCM Notification failed");
    }

    /**
     * Builds and posts the notification. On API 26+ the channel
     * "my_channel_01" is (re-)created first, as the platform requires.
     *
     * @param str the notification body text
     */
    private void sendNotification(String str) {
        // Tapping the notification re-opens HomeActivity with the payload.
        Intent intent = new Intent(getApplicationContext(), HomeActivity.class);
        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        intent.putExtra("Notification", str);
        PendingIntent activity = PendingIntent.getActivity(this, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
        Builder builder = new Builder(this, "PUSH");
        InboxStyle inboxStyle = new InboxStyle();
        inboxStyle.addLine(str);
        long currentTimeMillis = java.lang.System.currentTimeMillis();
        String str3 = "my_channel_01";               // notification channel id
        CharSequence charSequence = "Channel human readable title";
        // Computed but never used — presumably a leftover importance value.
        int i = VERSION.SDK_INT >= 24 ? 4 : 0;
        Notification notification = builder.setSmallIcon(R.mipmap.ic_launcher)
                .setTicker(getString(R.string.app_name))
                .setWhen(currentTimeMillis)
                .setContentTitle(getString(R.string.app_name))
                .setContentIntent(activity)
                .setSound(System.DEFAULT_NOTIFICATION_URI)
                .setStyle(inboxStyle)
                .setWhen(currentTimeMillis)
                .setSmallIcon(getNotificationIcon(builder))
                .setContentText(str)
                .setChannelId(str3)
                .setDefaults(6)
                .build();
        NotificationManager notificationManager = (NotificationManager) getApplicationContext().getSystemService(NOTIFICATION_SERVICE);
        if (VERSION.SDK_INT >= 26) {
            // Channels are mandatory from Android O onward.
            notificationManager.createNotificationChannel(new NotificationChannel(str3, charSequence, NotificationManager.IMPORTANCE_MIN));
        }
        notificationManager.notify(0, notification);
    }

    /**
     * Returns the small icon resource; on API 21+ the accent color is also
     * applied (pre-Lollipop devices cannot tint small icons).
     */
    private int getNotificationIcon(Builder builder) {
        if (VERSION.SDK_INT < 21) {
            return R.drawable.ic_stat_push;
        }
        builder.setColor(ContextCompat.getColor(getApplicationContext(), R.color.colorAccent));
        return R.drawable.ic_stat_push;
    }
}
|
// Palette of drawing options toggled by the UI.
var values = {
    fixLength: false,
    fixAngle: false,
    showCircle: false,
    showAngleLength: true,
    showCoordinates: false
};
////////////////////////////////////////////////////////////////////////////////
// Variables
// Current rubber-band vector (start point, vector, previous vector).
var vectorStart, vector, vectorPrevious;
// Paper.js items for the vector arrow and its dashed helper graphics.
var vectorItem, items, dashedItems;
var new_w = 0.75; // stroke width for drawn vectors
/// Drawing document: name, groups of paths, free texts and guide lines.
var Dibujo = {
    Nombre_Dibujo : null,
    //User : myUser,
    grupos : [],
    texts : [],
    guides :[]
};
// Template for a group: name, stroke color, its paths and per-group texts.
var Grupo ={
    nombre_grupo : "",
    color : "#000000",
    paths : [],
    texts_g : [],
}
var grupoSeleccionado = null; // index of the active group in Dibujo.grupos
var grados_actuales = 0;      // last snapped angle, in degrees
// Assistant toggles: angle snapping, move, join, copy, guides, axis locks.
var DegreeAsistant = true;
var MoveLineAssitant = false;
var UnionAsistant= true;
var txtPosition = 0;          // last mouse-down point, used to place texts
var CopyAssitant = false;
var GuideAssistant = false;
var MoveOnX = false;
var MoveOnY = false;
///////////////////Local storage of Design
// Restore a previously saved design from localStorage, if any.
var storedDesign = window.localStorage.getItem("ML-PLAk-CAD");
if(storedDesign != null){
    var newDesign= JSON.parse(storedDesign);
    SetDesign(newDesign);
}
//////////
// Welcome texts, shown only when there is no stored design.
var Temporal_Txts =[]
function init_Texts() {
    if(storedDesign == null){
        var t1 = new PointText( new Point(view.center - 50));
        t1.content = "Bienvenido! Aqui algunas instrucciones: ";
        t1.justification = 'center';
        t1.fillColor = 'black';
        var t2 = new PointText( new Point(view.center - 30));
        t2.content = "Para comenzar crea un nuevo grupo en el boton de la derecha :)";
        t2.justification = 'center';
        t2.fillColor = 'black';
        Temporal_Txts.push(t1,t2);
    }
};
init_Texts();
/// Cargar la cookie si existe en el explorador al iniciar
// Funciones para generar el vector rojo de guia, y medidas de guia junto con grados
// Recomputes the rubber-band vector from the mouse event, applying the
// fixLength/fixAngle constraints against the previous vector, then redraws.
// `drag` suppresses the detailed labels while dragging.
function processVector(event, drag) {
    vector = event.point - vectorStart;
    if (vectorPrevious) {
        if (values.fixLength && values.fixAngle) {
            vector = vectorPrevious;
        } else if (values.fixLength) {
            vector.length = vectorPrevious.length;
        } else if (values.fixAngle) {
            // Project onto the previous direction to lock the angle.
            vector = vector.project(vectorPrevious);
        }
    }
    drawVector(drag);
}
// Se dibuja el vector rojo de guia
// Redraws the red guide vector and its helper graphics (angle arc, length
// labels, optional circle/coordinates). With the degree assistant on, the
// angle is snapped to the nearest multiple of 45 degrees; with the union
// assistant on, the endpoints snap onto nearby path-endpoint handles of the
// currently selected group. Also mirrors the measured length into #Mesure.
function drawVector(drag) {
    // --- Snap the angle to 0/45/90/135/180 (both signs) ---
    if(DegreeAsistant == true){
        if (vector.angle > 0){
            if (vector.angle <=30){
                grados_actuales = vector.angle = 0;
            }
            if(vector.angle >30){
                if (vector.angle <=60){
                    grados_actuales = vector.angle = 45;
                }
                if(vector.angle >60 ){
                    if (vector.angle <=120){
                        grados_actuales = vector.angle = 90;
                    }
                    if(vector.angle >120){
                        if (vector.angle <=150){
                            grados_actuales = vector.angle = 135;
                        }
                        if(vector.angle >150){
                            grados_actuales = vector.angle = 180;
                        }
                    }
                }
            }
        }
        if (vector.angle < 0){
            if (vector.angle >=-30){
                grados_actuales = vector.angle = 0;
            }
            if(vector.angle < -30){
                if (vector.angle >= -60){
                    grados_actuales = vector.angle = -45;
                }
                if(vector.angle < -60 ){
                    if (vector.angle >= -120){
                        grados_actuales = vector.angle = -90;
                    }
                    if(vector.angle < -120){
                        if (vector.angle >= -150){
                            grados_actuales = vector.angle = -135;
                        }
                        if(vector.angle < -150){
                            grados_actuales = vector.angle = -180;
                        }
                    }
                }
            }
        }
    } ;
    // --- Clear the previous frame's helper items and arrow ---
    if (items) {
        for (var i = 0, l = items.length; i < l; i++) {
            items[i].remove();
        }
    }
    if (vectorItem)
        vectorItem.remove();
    items = [];
    // --- Build the red arrow: shaft + two rotated head strokes ---
    var arrowVector = vector.normalize(10);
    var end = vectorStart + vector;
    vectorItem = new Group(
        new Path(vectorStart, end),
        new Path(
            end + arrowVector.rotate(135),
            end,
            end + arrowVector.rotate(-135)
        )
    );
    vectorItem.strokeWidth = new_w;
    vectorItem.strokeColor = '#e4141b';
    vectorItem.miterLimit = 30;
    // Display:
    dashedItems = [];
    // Draw Circle
    if (values.showCircle) {
        dashedItems.push(new Path.Circle(vectorStart, vector.length));
    }
    // Draw Labels
    if (values.showAngleLength) {
        drawAngle(vectorStart, vector, !drag);
        if (!drag)
            drawLength(vectorStart, end, vector.angle < 0 ? -1 : 1, true);
    }
    var quadrant = vector.quadrant;
    if (values.showCoordinates && !drag) {
        drawLength(vectorStart, vectorStart + [vector.x, 0],
            [1, 3].indexOf(quadrant) != -1 ? -1 : 1, true, vector.x, 'x: ');
        drawLength(vectorStart, vectorStart + [0, vector.y],
            [1, 3].indexOf(quadrant) != -1 ? 1 : -1, true, vector.y, 'y: ');
    }
    // Style all helper items as dashed black strokes.
    for (var i = 0, l = dashedItems.length; i < l; i++) {
        var item = dashedItems[i];
        item.strokeColor = 'black';
        item.dashArray = [1, 2];
        items.push(item);
    }
    // Update palette
    values.x = vector.x;
    values.y = vector.y;
    values.length = vector.length;
    values.angle = vector.angle;
    // --- Union assistant: snap either endpoint onto an existing path's
    // endpoint handle (children 0 and 2 of each stored path group) ---
    if(UnionAsistant == true){
        for(var i=0; i<Dibujo.grupos[grupoSeleccionado].paths.length;i++){
            for(var j=0; j<Dibujo.grupos[grupoSeleccionado].paths[i].children.length;j++){
                if(Dibujo.grupos[grupoSeleccionado].paths[i].children[j].contains(vectorItem.children[0].lastSegment.point)){
                    if ( j==0 || j ==2){
                        if (j ==0){
                            putLinesTogether(Dibujo.grupos[grupoSeleccionado].paths[i].children[j], 1);
                        }else{
                            putLinesTogether(Dibujo.grupos[grupoSeleccionado].paths[i].children[j], 1);
                        }
                    }
                }else if(Dibujo.grupos[grupoSeleccionado].paths[i].children[j].contains(vectorItem.children[0].firstSegment.point)){
                    if ( j==0 || j ==2){
                        if (j ==0){
                            putLinesTogether(Dibujo.grupos[grupoSeleccionado].paths[i].children[j], 0);
                            vectorStart.x = Dibujo.grupos[grupoSeleccionado].paths[i].children[j].position.x;
                            vectorStart.y = Dibujo.grupos[grupoSeleccionado].paths[i].children[j].position.y;
                        }else{
                            putLinesTogether(Dibujo.grupos[grupoSeleccionado].paths[i].children[j], 0);
                            vectorStart.x = Dibujo.grupos[grupoSeleccionado].paths[i].children[j].position.x;
                            vectorStart.y = Dibujo.grupos[grupoSeleccionado].paths[i].children[j].position.y;
                        }
                    }
                }
            }
        }
    }
    $('#Mesure').val(Math.floor(vectorItem.children[0].length));
}
// Snaps the in-progress vector's segment at `segmentIndex` onto the center
// of `pathCircle` (an endpoint handle) and drops the arrow-head child so the
// joined line renders cleanly.
function putLinesTogether(pathCircle, segmentIndex){
    vectorItem.children[0].insert(segmentIndex, pathCircle.position);
    vectorItem.children[0].removeSegment(segmentIndex + 1);
    if(vectorItem.children[1]){
        vectorItem.children[1].remove();
    }
}
// Se dibuja el angulo del vector
// Draws the dashed angle arc between the +x axis and `vector`, anchored at
// `center`, with an arrow head; when `label` is true the angle value (in
// degrees) is printed next to the arc.
function drawAngle(center, vector, label) {
    var radius = 25, threshold = 10;
    var from = new Point(radius, 0);
    var through = from.rotate(vector.angle / 2);
    var to = from.rotate(vector.angle);
    var end = center + to;
    // Baseline along the +x axis, then the arc itself.
    dashedItems.push(new Path.Line(center,
        center + new Point(radius + threshold, 0)));
    dashedItems.push(new Path.Arc(center + from, center + through, end));
    var arrowVector = to.normalize(7.5).rotate(vector.angle < 0 ? -90 : 90);
    dashedItems.push(new Path([
        end + arrowVector.rotate(135),
        end,
        end + arrowVector.rotate(-135)
    ]));
    if (label) {
        // Angle Label (two decimal places, degree sign)
        var text = new PointText(center + through.normalize(radius + 10) + new Point(0, 3));
        text.content = Math.floor(vector.angle * 100) / 100 + '\xb0';
        items.push(text);
    }
}
// Se dibuja la longitud del vector rojo
// Draws a dashed "measurement bracket" beside the segment from -> to, offset
// towards `sign`, and (when `label` is true) a rotated text with the length.
// `value`/`prefix` override the printed number and prepend a label (used for
// the x:/y: coordinate read-outs).
function drawLength(from, to, sign, label, value, prefix) {
    var lengthSize = 5;
    // Too short to decorate legibly.
    if ((to - from).length < lengthSize * 4)
        return;
    var vector = to - from;
    var awayVector = vector.normalize(lengthSize).rotate(90 * sign);
    var upVector = vector.normalize(lengthSize).rotate(45 * sign);
    var downVector = upVector.rotate(-90 * sign);
    var lengthVector = vector.normalize(vector.length / 2 - lengthSize * Math.SQRT2);
    var line = new Path();
    line.add(from + awayVector);
    line.lineBy(upVector);
    line.lineBy(lengthVector);
    line.lineBy(upVector);
    var middle = line.lastSegment.point;
    line.lineBy(downVector);
    line.lineBy(lengthVector);
    line.lineBy(downVector);
    dashedItems.push(line);
    if (label) {
        // Length Label — keep the text upright for steep angles.
        var textAngle = Math.abs(vector.angle) > 90
            ? textAngle = 180 + vector.angle : vector.angle;
        // Label needs to move away by different amounts based on the
        // vector's quadrant:
        var away = (sign >= 0 ? [1, 4] : [2, 3]).indexOf(vector.quadrant) != -1
            ? 8 : 0;
        var text = new PointText(middle + awayVector.normalize(away + lengthSize));
        text.rotate(textAngle);
        text.justification = 'center';
        value = value || vector.length;
        text.content = (prefix || '') + Math.floor(value * 1000) / 1000;
        items.push(text);
    }
}
////////////////////////////////////////////////////////////////////////////////
// Mouse Handling
/**Estas on funciones por defecto de manejo del mouse dentro de paperjs */
var dashItem; // dashed helper restored on mouse-up

// Mouse-down (paper.js handler): when Ctrl is NOT held and a group is
// selected, start or extend a vector; always remember the click point for
// later text placement.
function onMouseDown(event) {
    if (event.modifiers.control === false && Dibujo.grupos[grupoSeleccionado] != null){
        var end = vectorStart + vector;
        var create = false; // NOTE(review): written but never read — leftover
        if (vector && (event.modifiers.option || end && end.getDistance(event.point) < 10)) {
            create = false;
        } else {
            vectorStart = event.point;
        }
        processVector(event, true);
    }
    txtPosition = event.point;
}

// Mouse-drag: update the rubber-band vector, or pan the view when Ctrl is
// held / no group is selected.
function onMouseDrag(event) {
    if (event.modifiers.control === false && Dibujo.grupos[grupoSeleccionado] != null){
        if (!event.modifiers.shift && values.fixLength && values.fixAngle){
            vectorStart = event.point;
        }
        processVector(event, event.modifiers.shift);
    }else{
        var e = window.event || e; // NOTE(review): unused — likely leftover
        view.center -= event.delta/1.1;
        event.stopPropagation()
    }
}

// Mouse-up: finalize the vector and remember it for fixLength/fixAngle.
function onMouseUp(event) {
    if (event.modifiers.control === false && Dibujo.grupos[grupoSeleccionado] != null){
        processVector(event, false);
        if (dashItem) {
            dashItem.dashArray = [1, 2];
            dashItem = null;
        }
        vectorPrevious = vector;
    }
}
///////////////////////////////////////////////////////////////////////
//////////Botones
//// Asistente de Grados
//// Degree-snapping assistant toggle (checkbox).
$(document).on('click','#DegreeAsistant', function(){
    if (this.checked) {
        DegreeAsistant = true;
    }else{
        DegreeAsistant = false;
    }
});
//// Line-movement assistant: mutually exclusive with guide/copy assistants.
// Fix: the original wrote css("display", "nonde") — a typo — so the
// movement-options panel was never actually hidden when turning this off.
$(document).on('click','#MoveAsistant', function(){
    if (MoveLineAssitant == false && DegreeAsistant == true) {
        MoveLineAssitant = true;
        $('#extraOptions').css("display", "none");
        $('#MovementOptions').css("display", "flex");
        $('#line_menu').css("width", "15%");
        GuideAssistant = false;
        CopyAssitant = false;
    }else{
        MoveLineAssitant = false;
        if(DegreeAsistant == false) {
            $('#MovementOptions').css("display", "none");
        }
    }
});
/// Line-join (union) assistant toggle (checkbox).
$(document).on('click','#UnionAsistant', function(){
    if (this.checked) {
        UnionAsistant = true;
    }else{
        UnionAsistant = false;
    }
});
// Deletes every currently-selected item (group paths, group texts, free
// texts and guides), then refreshes the lines panel and persists the design.
// Fix: the original iterated FORWARD while splice()-ing, which skips the
// element immediately after each removal (and re-scanned the free texts once
// per group); iterating backwards removes all selected items in one pass.
$( '#Eraser' ).click(function() {
    for(var j=0; j<Dibujo.grupos.length; j++){
        for(var i = Dibujo.grupos[j].paths.length - 1; i >= 0; i--){
            if(Dibujo.grupos[j].paths[i].selected == true){
                Dibujo.grupos[j].paths[i].remove();
                Dibujo.grupos[j].paths.splice(i, 1);
            }
        }
        for(var i = Dibujo.grupos[j].texts_g.length - 1; i >= 0; i--){
            if(Dibujo.grupos[j].texts_g[i].selected == true){
                Dibujo.grupos[j].texts_g[i].remove();
                Dibujo.grupos[j].texts_g.splice(i, 1);
            }
        }
    }
    // Free-floating texts are not per-group; scan them once.
    for(var i = Dibujo.texts.length - 1; i >= 0; i--){
        if(Dibujo.texts[i].selected == true){
            Dibujo.texts[i].remove();
            Dibujo.texts.splice(i, 1);
        }
    }
    for(var j = Dibujo.guides.length - 1; j >= 0; j--){
        if(Dibujo.guides[j].selected == true){
            Dibujo.guides[j].remove();
            Dibujo.guides.splice(j, 1);
        }
    }
    LinesDiv();
    SetLocalStorage(Dibujo, "ML-PLAk-CAD");
});
// "Draw line" button: commits the current rubber-band vector as a real path.
$( ".Design" ).click(function() {
    DibujarLineas();
    $("#length").focus();
    LinesDiv();
});
//////////////////////////////-----------------------------
//----- group creation and management
$( "#Group" ).click(function() {
    // Advance the on-canvas tutorial texts, if still present.
    if(Temporal_Txts[0] && Temporal_Txts[1]){
        Temporal_Txts[0].content = " Ahora selecciona las caracteristicas de la linea que pertenece al grupo y presiona editar!";
        Temporal_Txts[1].content = "No olvides darle nombre a tu grupo ;)";
    }
    $( ".Design" ).css("display", 'inline');
    // Deep-clone the Grupo template so new groups do not share arrays.
    var newgroup = $.extend(true,{},Grupo);
    Dibujo.grupos.push(newgroup);
    /// Build the HTML panel for the new group.
    var g_id = Dibujo.grupos.length - 1;
    GroupDiv(g_id);
    LinesDiv();
    $("#length").focus();
});
/// Clears the whole canvas from the toolbar button.
// NOTE(review): this removes canvas items and empties the panels, but the
// Dibujo arrays themselves are not reset — confirm whether intentional.
$("#clear_canvas").click(function(){
    clearcanvas();
    for(var i=0; i<Dibujo.texts.length; i++){
        Dibujo.texts[i].remove();
    }
    $('#group_container').empty();
    $('#Lineas').empty();
    $("#length").focus();
});
//------------ change name
//------------ change name
// Stores the user-entered drawing name on the document object.
$( "#name_button" ).click(function() {
    Dibujo.Nombre_Dibujo = $('#name').val();
});
//------------ Insert Txt
// Inserts a text item at the last clicked position, or at the view center
// when nothing has been clicked yet.
// Fixes: "viewe.center" typo (a ReferenceError whenever txtPosition was
// null) and a duplicated onMouseDown handler assignment (the second
// assignment silently overwrote the identical first one).
$( "#InsertTextBtn" ).click(function() {
    var text;
    if(txtPosition != null){
        text = new PointText(new Point(txtPosition));
    }else{
        text = new PointText(new Point(view.center));
    }
    text.justification = 'right';
    text.fillColor = 'black';
    text.fontSize = $( "#TxtFont" ).val();
    text.content = $( "#InsertTextctn" ).val();
    // Clicking a text toggles its selection.
    text.onMouseDown = function(event) {
        if (text.selected == true) {
            text.selected = false;
        }else{
            text.selected = true;
        }
    }
    // Dragging moves the text only while the move assistant is active.
    text.onMouseDrag = function(event) {
        if(MoveLineAssitant == true){
            text.position += event.delta;
            SetLocalStorage(Dibujo, "ML-PLAk-CAD");
        }
    }
    Dibujo.texts.push(text);
    SetLocalStorage(Dibujo, "ML-PLAk-CAD");
    $("#length").focus();
})
// ----- Undo
// ----- Undo: removes the most recent path and its measurement text from
// the selected group.
$( "#Undo" ).click(function() {
    var last_id = Dibujo.grupos[grupoSeleccionado].paths.length - 1;
    Dibujo.grupos[grupoSeleccionado].paths[last_id].remove();
    last_id = Dibujo.grupos[grupoSeleccionado].texts_g.length - 1;
    Dibujo.grupos[grupoSeleccionado].texts_g[last_id].remove();
    Dibujo.grupos[grupoSeleccionado].texts_g.pop();
    Dibujo.grupos[grupoSeleccionado].paths.pop();
    LinesDiv();
    $("#length").focus();
});
// Clone-path mode: show the +/- side options and arm the copy assistant.
$( "#clone_path" ).click(function() {
    GuideAssistant = false;
    $('#extraOptions').css("display", "flex");
    $('#line_menu').css("width", "22%");
    CopyAssitant = true;
});
// Guide mode: show the +/- side options and arm the guide assistant.
$( "#Guide" ).click(function() {
    CopyAssitant = false;
    $('#extraOptions').css("display", "flex");
    $('#line_menu').css("width", "22%");
    GuideAssistant = true;
});
// Apply whichever assistant is armed, towards the negative side.
$( "#PlusSide" ).click(function() {
    if(CopyAssitant == true){
        ClonePathBySign(-1);
    }else if(GuideAssistant == true){
        CreateGuideLineBySign(-1);
    }
});
// Apply whichever assistant is armed, towards the positive side.
$( "#MinusSide" ).click(function() {
    if(CopyAssitant == true){
        ClonePathBySign(1);
    }else if(GuideAssistant == true){
        CreateGuideLineBySign(1);
    }
});
// Axis locks for movement: X and Y are mutually exclusive.
$( "#MoveOnX" ).click(function() {
    if(MoveOnX == false){
        MoveOnX = true;
        MoveOnY = false;
    }
});
$( "#MoveOnY" ).click(function() {
    if(MoveOnY == false){
        MoveOnY = true;
        MoveOnX = false;
    }
});
//Download as SVG
$( "#Get_SVG" ).click(function() {
    downloadAsSVG();
});
// Hide the line-tools menu panel.
$( "#HideMenu" ).click(function() {
    $( '#line_menu' ).css("display", "none");
});
//// function para crear divs de grupos
//// Builds the HTML panel for group `groupid`, makes it the selected group,
//// and wires up its edit button and selection radio.
// Fixes: (1) the radio's id attribute was written as "#GroupSelect_<n>" —
// the literal '#' inside the id meant the later $('#GroupSelect_'+id)
// selector could never match it; (2) the delegated '.GroupSelect' click
// handler was re-registered on every GroupDiv call, stacking duplicate
// handlers — it is now unbound before being (re)bound.
function GroupDiv(groupid){
    $('.GroupSelect').prop('checked', false);
    grupoSeleccionado = groupid;
    $( "#group_container" ).append("<div id='group'><div class='form-control w-input group-name-container'><input type='radio' class='form-check-input GroupSelect' name='GroupSelect' id='GroupSelect_"+groupid+"' value="+groupid+" checked><input class='group-name' placeholder='Nombre Grupo' id='name-group_"+groupid+"' value="+Dibujo.grupos[groupid].nombre_grupo+"></div><div class='group-caracts'><input id='color-line-group_"+groupid+"' class='form-control btn btn-primary color-container' type='color' value="+Dibujo.grupos[groupid].color+"><button id='Group_edit_btn_"+groupid+"' class='Group_edit btn btn-primary' value='"+groupid+"'>Editar</button></div></div>");
    // Seed the group object with the panel's initial values.
    Dibujo.grupos[grupoSeleccionado].color = $('#color-line-group_'+grupoSeleccionado).val();
    Dibujo.grupos[grupoSeleccionado].nombre_grupo = $('#name-group_'+grupoSeleccionado).val();
    // Edit button: re-reads name/color and recolors the group's paths.
    $( '#Group_edit_btn_'+groupid ).click(function() {
        var thisGroup = this.value;
        // Persist the edited values on the group object inside Dibujo.
        Dibujo.grupos[thisGroup].color = $('#color-line-group_'+thisGroup).val();
        Dibujo.grupos[thisGroup].nombre_grupo = $('#name-group_'+thisGroup).val();
        for(var j=0; j<Dibujo.grupos[thisGroup].paths.length; j++){
            Dibujo.grupos[thisGroup].paths[j].children[1].set({
                strokeColor: Dibujo.grupos[thisGroup].color,
            });
        }
        LinesDiv();
        // Keep this group's radio checked after editing.
        $('#GroupSelect_'+thisGroup).prop('checked', true);
        // Advance the on-canvas tutorial texts, if still present.
        if(Temporal_Txts[0] && Temporal_Txts[1]){
            Temporal_Txts[0].content = " Ahora Puedes Comenzar a dibujar!!";
            Temporal_Txts[1].content = "Haz click en el lienzo y luego desliza el mouse, deberas pulsar en Dibujar linea para dejar marcada la linea";
        }
    });
    // Selection radio (delegated once): picks the active working group.
    $(document).off('click','.GroupSelect').on('click','.GroupSelect', function(){
        $('.GroupSelect').prop('checked', false);
        $(this).prop('checked', true);
        if (this.checked) {
            grupoSeleccionado = this.value;
            LinesDiv();
        }
    });
}
////------------Stoke lines in canvas from button
////------------ Commits the current rubber-band vector to the selected group.
// If the #length input holds a number, the end point is recomputed from the
// snapped angle (grados_actuales) at exactly that length; the diagonal cases
// divide by 1.41479 (~sqrt(2)) to split the length across both axes.
function DibujarLineas(){
    // Remove the welcome texts on the first committed line.
    if(Temporal_Txts != null){
        for(var i=0; i<Temporal_Txts.length; i++){
            Temporal_Txts[i].remove();
        }
    }
    var length = parseInt($("#length").val());
    if(grupoSeleccionado!=null){
        var from = vectorItem.children[0].firstSegment.point;
        var to = vectorItem.children[0].lastSegment.point;
        if(!isNaN(length)){
            switch(grados_actuales){
                case 0 :
                    to = new Point((vectorItem.children[0].firstSegment.point.x + length), vectorItem.children[0].firstSegment.point.y);
                    break;
                case 45 :
                    to = new Point((vectorItem.children[0].firstSegment.point.x + (length/1.41479)), (vectorItem.children[0].firstSegment.point.y + (length/1.41479)));
                    break;
                case 90 :
                    to = new Point(vectorItem.children[0].firstSegment.point.x, (vectorItem.children[0].firstSegment.point.y + length));
                    break;
                case 135 :
                    to = new Point((vectorItem.children[0].firstSegment.point.x - (length/1.41479)), (vectorItem.children[0].firstSegment.point.y + (length/1.41479)));
                    break;
                case 180 :
                    to = new Point((vectorItem.children[0].firstSegment.point.x - length), vectorItem.children[0].firstSegment.point.y);
                    break;
                case -45 :
                    to = new Point((vectorItem.children[0].firstSegment.point.x + (length/1.41479)), (vectorItem.children[0].firstSegment.point.y - (length/1.41479)));
                    break;
                case -90 :
                    to = new Point(vectorItem.children[0].firstSegment.point.x, (vectorItem.children[0].firstSegment.point.y - length));
                    break;
                case -135 :
                    to = new Point((vectorItem.children[0].firstSegment.point.x - (length/1.41479)), (vectorItem.children[0].firstSegment.point.y - (length/1.41479)));
                    break;
                case -180 :
                    to = new Point((vectorItem.children[0].firstSegment.point.x - length), vectorItem.children[0].firstSegment.point.y);
                    break;
            }
        }
        createPath(from, to, grupoSeleccionado);
    }
    SetLocalStorage(Dibujo, "ML-PLAk-CAD");
}
/// Fin butons
////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////
/// Funciones
///-----clear canvas
// Removes every drawn item (group paths, length labels and guide lines) from
// the canvas, refreshes the line panel and drops the persisted drawing.
function clearcanvas(){
    for(var j=0; j<Dibujo.grupos.length; j++){
        for(var i=0; i< Dibujo.grupos[j].paths.length; i++){
            Dibujo.grupos[j].paths[i].remove();
        }
        for(var i=0; i< Dibujo.grupos[j].texts_g.length; i++){
            Dibujo.grupos[j].texts_g[i].remove();
        }
        // Drop the now-dead references so LinesDiv() does not re-render
        // removed paths afterwards.
        Dibujo.grupos[j].paths = [];
        Dibujo.grupos[j].texts_g = [];
    }
    // BUG FIX: the original called Dibujo.guides[j].splice(j, 1) — splice on
    // the guide itself (a paper.js Path, which has no splice method) threw a
    // TypeError, and mutating the array while iterating skipped entries.
    // Remove every guide first, then empty the array in one step.
    for(var j=0; j<Dibujo.guides.length; j++){
        Dibujo.guides[j].remove();
    }
    Dibujo.guides = [];
    LinesDiv();
    localStorage.removeItem("ML-PLAk-CAD");
}
//////////////////////////////////-------------------- quede con esto, no he tocado el zoom
//----- control de lineas
// Rebuilds the per-line side panel (#Lineas) for the currently selected
// group: one row per line with a color picker, the rounded length and an
// "Edit" button that applies the chosen color/width back to the canvas path.
function LinesDiv(){
    if(grupoSeleccionado!=null){
        $('#Lineas').empty();
        $.each(Dibujo.grupos[grupoSeleccionado].paths, function( index, path ) {
            // children[1] is the actual line inside the path group
            // (children[0] and [2] are the endpoint handles).
            var hexColor = rgbToHex(path.children[1].strokeColor.toCSS());
            $( "#Lineas" ).append("<div id='linea'><a class='btn btn-primary line-number'>Linea "+(index+1)+"</a><div class='group-caracts'><input id='color-line_"+index+"' class='form-control btn btn-primary color-container line-changes-table-item' type='color' value="+hexColor+"><input class='form-control w-input line-changes-table-item' value="+Math.floor(path.children[1].length)+"></div><div class='group-caracts'><button class='btn btn-primary modify_line line-changes-table-item' value='"+index+"'>Edit</button></div></div>");
            $('a').css({
                "text-decoration": "none",
                "color": "white"
            });
            // NOTE(review): binding .modify_line inside $.each attaches a new
            // handler to every existing row on each iteration, so earlier rows
            // accumulate duplicate handlers — confirm whether intended.
            $('.modify_line').click(function(){
                var id= this.value;
                var color = $('#color-line_'+id).val();
                // NOTE(review): no element with id 'grosor-line_<id>' is created
                // in the markup above, so this reads undefined — confirm.
                var strokeWidth = $('#grosor-line_'+id).val();
                for(var i=0; i<Dibujo.grupos[grupoSeleccionado].paths.length; i++){
                    if(id == i){
                        Dibujo.grupos[grupoSeleccionado].paths[i].children[1].set({
                            strokeColor: color,
                            strokeWidth: strokeWidth,
                        });
                        SetLocalStorage(Dibujo, "ML-PLAk-CAD");
                    }
                }
            });
        });
    }
    // Keep keyboard focus on the length input for fast consecutive entry.
    $("#length").focus();
}
// Transform color from RGB to HEX
// Converts a CSS color string to hex notation ("#rrggbb").
// A string that already starts with "#" is returned unchanged; anything that
// is neither hex nor an "rgb(r, g, b)" string yields undefined (same contract
// as before for unparseable input).
function rgbToHex(color) {
    color = ""+ color;
    if (!color) {
        return;
    }
    // BUG FIX: this passthrough used to sit *after* the indexOf("rgb") check,
    // so hex input like "#aabbcc" hit the early return and came back as
    // undefined, making the check dead code.
    if (color.charAt(0) == "#") {
        return color;
    }
    if (color.indexOf("rgb") < 0) {
        return;
    }
    var nums = /(.*?)rgb\((\d+),\s*(\d+),\s*(\d+)\)/i.exec(color),
        r = parseInt(nums[2], 10).toString(16),
        g = parseInt(nums[3], 10).toString(16),
        b = parseInt(nums[4], 10).toString(16);
    // Zero-pad each channel to two hex digits.
    return "#"+ (
        (r.length == 1 ? "0"+ r : r) +
        (g.length == 1 ? "0"+ g : g) +
        (b.length == 1 ? "0"+ b : b)
    );
}
//----------------------------zoom
/**Aqui dentro tambien se gestiona la funcionalidad de poner mas gruesas o mas delgadas las lineas en zoom o dezoom */
////-------------------------------
// Register the zoom handler on the canvas across browser families:
// standard addEventListener where available, legacy attachEvent otherwise.
if (canvas.addEventListener)
{
    // IE9, Chrome, Safari, Opera
    canvas.addEventListener("mousewheel", MouseWheelHandler, false);
    // Firefox (uses the non-standard DOMMouseScroll event)
    canvas.addEventListener("DOMMouseScroll", MouseWheelHandler, false);
}
// IE 6/7/8
else
{
    canvas.attachEvent("onmousewheel", MouseWheelHandler);
}
// Computes the new zoom level and the pan offset needed to zoom towards the
// cursor, and rescales stroke widths so lines keep a roughly constant
// on-screen thickness while zooming.
//   oldZoom — current view.zoom; delta — wheel direction (+/-);
//   c — view center; p — cursor position in project coordinates.
// Returns [newZoom, centerOffset].
// NOTE(review): new_w, beta, pc and a are assigned without var/let and leak
// as implicit globals; new_w in particular is also read by CreateGuideLine.
function changeZoom(oldZoom, delta, c, p) {
    var factor;
    var newZoom;
    factor = 1.05;
    if (delta > 0) {
        // Zooming in: thin strokes down (only while wider than 1).
        newZoom = oldZoom * factor;
        for(var i=0; i<Dibujo.grupos.length; i++){
            for(var j=0; j<Dibujo.grupos[i].paths.length; j++){
                if(Dibujo.grupos[i].paths[j].strokeWidth>1){
                    new_w= Dibujo.grupos[i].paths[j].strokeWidth / 1.05;
                }
                // NOTE(review): when the guard above does not fire, this reuses
                // whatever value new_w held from a previous call — confirm.
                Dibujo.grupos[i].paths[j].set({
                    strokeWidth: new_w
                });
                Dibujo.grupos[i].grosor = new_w;
            }
        }
        for(var i=0; i<Dibujo.guides.length; i++){
            if(Dibujo.guides[i].strokeWidth>1){
                new_w= Dibujo.guides[i].strokeWidth / 1.05;
            }
            Dibujo.guides[i].set({
                strokeWidth: new_w
            });
        }
    }
    if (delta < 0) {
        // Zooming out: thicken strokes while the view is zoomed out (< 1).
        newZoom = oldZoom / factor;
        for(var i=0; i<Dibujo.grupos.length; i++){
            for(var j=0; j<Dibujo.grupos[i].paths.length; j++){
                if( newZoom < 1){
                    new_w= Dibujo.grupos[i].paths[j].children[1].strokeWidth * 1.05;
                }
                Dibujo.grupos[i].paths[j].set({
                    strokeWidth: new_w
                });
                Dibujo.grupos[i].grosor = new_w;
            }
        }
        for(var i=0; i<Dibujo.guides.length; i++){
            if(Dibujo.guides[i].strokeWidth>1){
                new_w= Dibujo.guides[i].strokeWidth * 1.05;
            }
            Dibujo.guides[i].set({
                strokeWidth: new_w
            });
        }
    }
    // Pan so the point under the cursor stays fixed while zooming.
    beta = oldZoom / newZoom;
    // NOTE(review): the result of p.add(...) is discarded — paper.js
    // Point#add returns a new point and does not mutate p. Looks like a
    // leftover; confirm intent before removing.
    p.add(new Point(7.5, 7.5));
    pc = p.subtract(c);
    a = p.subtract(pc.multiply(beta)).subtract(c);
    return [newZoom, a];
};
// Mouse-wheel handler: zooms the paper.js view towards the cursor position.
// Returning false suppresses the default page scroll.
function MouseWheelHandler(e)
{
    // Normalize the event across browsers (old IE exposes window.event).
    var e = window.event || e; // old IE support
    // Clamp the wheel delta to -1..1 (wheelDelta: IE/WebKit, detail: Firefox).
    var delta = Math.max(-1, Math.min(1, (e.wheelDelta || -e.detail)));
    var mousePosition = new paper.Point(e.offsetX, e.offsetY);
    var viewPosition = view.viewToProject(mousePosition);
    // BUG FIX: _ref1 was assigned without `var`, leaking an implicit global.
    var _ref1 = changeZoom(view.zoom, delta, view.center, viewPosition);
    var newZoom = _ref1[0];
    var offset = _ref1[1];
    view.zoom = newZoom;
    view.center = view.center.add(offset);
    return false;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////--------- Control del centro del lienzo con el teclado
// Pan the canvas with the arrow keys: each press shifts the paper.js view
// center 10 units in the pressed direction. Other keys are left untouched so
// normal typing/scrolling still works.
$(document).keydown(function(e) {
    // key code -> [dx, dy] applied to the view center
    var panOffsets = {
        37: [-10, 0],  // left
        38: [0, -10],  // up
        39: [10, 0],   // right
        40: [0, 10]    // down
    };
    var shift = panOffsets[e.which];
    if (!shift) {
        return; // exit this handler for other keys
    }
    paper.view.center = new Point(paper.view.center.x + shift[0],
                                  paper.view.center.y + shift[1]);
    e.preventDefault(); // prevent the default action (scroll / move caret)
});
////--------- Put design Stored into canvas
// Rehydrates a drawing previously serialized to localStorage ("cookie"):
// recreates each group with its lines, then the guide lines and free texts.
// NOTE(review): the numeric index chains below (e.g. [1].children[1][1].segments)
// mirror the JSON layout produced by the serializer in use; they will break if
// that layout changes — confirm against the paper.js export format.
function SetDesign(cookie){
    $( ".Design" ).css("display", 'inline');
    Dibujo.Nombre_Dibujo = cookie.Nombre_Dibujo;
    $.each(cookie.grupos, function( index1, grupo ) {
        // Deep-copy the Grupo template so groups do not share state.
        var newgroup = $.extend(true,{},Grupo);
        newgroup.nombre_grupo = grupo.nombre_grupo;
        newgroup.color = grupo.color;
        Dibujo.grupos.push(newgroup);
        // Recreate every line of the group from its serialized segments.
        $.each(grupo.paths, function( index2, array1 ) {
            createPath(grupo.paths[index2][1].children[1][1].segments[0], grupo.paths[index2][1].children[1][1].segments[1], index1);
        });
        GroupDiv(index1);
        LinesDiv();
    });
    // Guides are stored as two raw segment coordinate pairs.
    $.each(cookie.guides, function( index3, guide ) {
        var point1= new Point(guide[1].segments[0][0], guide[1].segments[0][1]) ;
        var point2= new Point(guide[1].segments[1][0], guide[1].segments[1][1])
        CreateGuideLine(point1, point2);
    });
    // Free texts store their position as a transform matrix.
    $.each(cookie.texts, function( index2, texto ) {
        var matrix = new Matrix(texto[1].matrix);
        var point = new Point()
        point = point.transform(matrix);
        writeText(point, texto[1].content, texto[1].sizefont, texto[1].justification, texto[1].color)
    });
}
///------ Function to create paths
// Creates a complete line entity in the given group: the line flanked by two
// endpoint circles (combined into one Group), plus a PointText label showing
// the rounded length at the midpoint. Wires up selection and drag handlers
// and persists the drawing to localStorage.
// NOTE(review): the `position += event.delta` updates below rely on
// PaperScript operator overloading for Point — confirm this file is loaded
// as PaperScript rather than plain JavaScript.
function createPath(point1, point2, idGrupo){
    var Onepath = new Path.Line(point1,point2);
    Onepath.strokeWidth= Dibujo.grupos[idGrupo].grosor;
    Onepath.strokeColor = Dibujo.grupos[idGrupo].color;
    Onepath.miterLimit = 30;
    // NOTE(review): strokeColor is assigned twice — the second assignment is
    // redundant.
    Onepath.strokeColor = Dibujo.grupos[idGrupo].color;
    // We're going to be working with a half of the length
    // of the path as the offset:
    var offset = Onepath.length / 2;
    // Endpoint-handle radius scales with the line length.
    var Circle_r = Onepath.length / 15;
    // Find the point on the path:
    var point = Onepath.getPointAt(offset);
    var text = new PointText(new Point(point));
    text.justification = 'right';
    text.fillColor = 'black';
    text.content = Math.floor(parseInt(Onepath.length));
    // Group holding the paths that make up one line:
    // children[0] = start handle, children[1] = the line itself,
    // children[2] = end handle — these indexes are used throughout.
    var PathGroup = new Group(
        new Path.Circle(Onepath.firstSegment.point, Circle_r),
        Onepath,
        new Path.Circle(Onepath.lastSegment.point, Circle_r)
    );
    // Clicking the line toggles its selection and the floating context menu.
    PathGroup.children[1].onMouseDown = function(event) {
        if (PathGroup.children[1].selected == true) {
            text.selected = false;
            PathGroup.children[1].selected = false;
            document.getElementById('line_menu').style.display ="none";
        }else{
            unselectAll();
            PathGroup.children[1].selected = true;
            text.selected = true;
            PlaceDiv(event.clientX,event.clientY);
        }
    }
    // Dragging moves the whole entity (handles, line and label) together,
    // gated by the global move-assistant flags.
    PathGroup.children[1].onMouseDrag = function(event) {
        if(MoveLineAssitant == true){
            if(DegreeAsistant == false){
                PathGroup.children[0].position += event.delta;
                PathGroup.children[1].position += event.delta;
                PathGroup.children[2].position += event.delta;
                text.position += event.delta;
                SetLocalStorage(Dibujo, "ML-PLAk-CAD");
            }
        }
    }
    // The length label mirrors the line's selection behavior.
    text.onMouseDown = function(event) {
        if (PathGroup.children[1].selected == true) {
            text.selected = false;
            PathGroup.children[1].selected = false;
            document.getElementById('line_menu').style.display ="none";
        }else{
            unselectAll();
            text.selected = true;
            PathGroup.children[1].selected = true;
            PlaceDiv(event.clientX,event.clientY);
        }
    }
    // Dragging the label also moves the entity; with the degree assistant on,
    // movement is constrained to a single axis (MoveOnX / MoveOnY).
    text.onMouseDrag = function(event) {
        if(MoveLineAssitant == true){
            if(DegreeAsistant == false){
                PathGroup.children[0].position += event.delta;
                PathGroup.children[1].position += event.delta;
                PathGroup.children[2].position += event.delta;
                text.position += event.delta;
                SetLocalStorage(Dibujo, "ML-PLAk-CAD");
            }else{
                if(MoveOnX == true){
                    PathGroup.children[0].position.x += event.delta.x;
                    PathGroup.children[1].position.x += event.delta.x;
                    PathGroup.children[2].position.x += event.delta.x;
                    text.position.x += event.delta.x;
                    SetLocalStorage(Dibujo, "ML-PLAk-CAD");
                }else if (MoveOnY == true){
                    PathGroup.children[0].position.y += event.delta.y;
                    PathGroup.children[1].position.y += event.delta.y;
                    PathGroup.children[2].position.y += event.delta.y;
                    text.position.y += event.delta.y;
                    SetLocalStorage(Dibujo, "ML-PLAk-CAD");
                }
            }
        }
    }
    // Register the new entity and refresh UI + persistence.
    Dibujo.grupos[idGrupo].paths.push(PathGroup);
    Dibujo.grupos[idGrupo].texts_g.push(text);
    LinesDiv();
    SetLocalStorage(Dibujo, "ML-PLAk-CAD");
}
///--------Writing txt in canvas
// Creates a selectable, draggable free-text item on the canvas and registers
// it in Dibujo.texts so it can be serialized and restored by SetDesign.
//   point — position; content — text; sizefont — font size;
//   justification — paper.js alignment; color — fill color.
function writeText(point, content, sizefont, justification, color){
    var text = new PointText(new Point(point));
    text.justification = justification;
    text.fillColor = color;
    text.fontSize = sizefont;
    text.content = content;
    // Click toggles the selection highlight.
    text.onMouseDown = function(event) {
        if (text.selected == true) {
            text.selected = false;
        }else{
            text.selected = true;
        }
    }
    text.onMouseDrag = function(event) {
        if(MoveLineAssitant == true){
            text.position += event.delta;
            // Consistency fix: use the shared persistence helper instead of
            // duplicating the JSON.stringify + setItem logic inline.
            SetLocalStorage(Dibujo, "ML-PLAk-CAD");
        }
    }
    Dibujo.texts.push(text);
}
// Dibujar lineas con el enter
// Keyboard shortcut: Enter commits the pending line (same as the
// "Dibujar linea" button), refocuses the length input for the next entry
// and refreshes the per-line panel.
tool.onKeyDown = function(event) {
    if (event.key == 'enter') {
        DibujarLineas();
        $("#length").focus();
        LinesDiv();
        // Returning false stops paper.js from further propagating the key.
        return false;
    }
}
/// Change local storage
// Serializes an object and persists it under the given localStorage key.
// FIX: the first parameter used to be named `Object`, shadowing the global
// Object constructor inside the function body; renamed to `obj`
// (positional call sites are unaffected).
function SetLocalStorage(obj, NameStorageItem){
    var storage = JSON.stringify(obj);
    window.localStorage.setItem(NameStorageItem, storage);
}
// unselectAll paths and texts
// Clears the selection flag on every line, every length label and every
// guide in the drawing. Only flips items that are currently selected.
function unselectAll(){
    for(var g = 0; g < Dibujo.grupos.length; g++){
        var grupo = Dibujo.grupos[g];
        for (var p = 0; p < grupo.paths.length; p++) {
            // children[1] is the line inside the path group.
            if(grupo.paths[p].children[1].selected == true){
                grupo.paths[p].children[1].selected = false;
            }
        }
        for (var t = 0; t < grupo.texts_g.length; t++) {
            if(grupo.texts_g[t].selected == true){
                grupo.texts_g[t].selected = false;
            }
        }
    }
    for(var k = 0; k < Dibujo.guides.length; k++){
        if(Dibujo.guides[k].selected == true){
            Dibujo.guides[k].selected = false;
        }
    }
}
// Download as SVG
// Exports the current paper.js project as an SVG file and triggers a
// browser download via a synthetic anchor click.
function downloadAsSVG() {
    var svgMarkup = paper.project.exportSVG({asString:true});
    var dataUrl = "data:image/svg+xml;utf8," + encodeURIComponent(svgMarkup);
    var anchor = document.createElement("a");
    anchor.download = "paperjs_example.svg";
    anchor.href = dataUrl;
    anchor.click();
}
// Shows the floating line context menu (#line_menu) at the given screen
// coordinates.
// BUG FIX: the original assigned "<n>px !imoortant" (typo, and "!important"
// is invalid inside a direct el.style.left assignment anyway), so browsers
// rejected the value and the menu never moved. setProperty with an explicit
// priority applies the position correctly.
function PlaceDiv(ScreenX,ScreeY) {
    var x = ScreenX;
    var y = ScreeY;
    var el = document.getElementById('line_menu');
    el.style.display = 'flex';
    el.style.position = "absolute";
    el.style.setProperty('left', x + 'px', 'important');
    el.style.setProperty('top', y + 'px', 'important');
}
// Clones every currently selected line, offset by the distance read from the
// #CopyDistance input; `factor` gives the direction sign (+1 / -1).
// Vertical-ish lines are shifted along X, horizontal ones along Y.
function ClonePathBySign(factor){
    for(var i=0; i<Dibujo.grupos.length; i++){
        for (var j = 0; j < Dibujo.grupos[i].paths.length; j++) {
            if(Dibujo.grupos[i].paths[j].children[1].selected == true){
                // Clone the endpoints so the original line is untouched.
                var point1 = Dibujo.grupos[i].paths[j].children[1].firstSegment.point.clone();
                var point2 = Dibujo.grupos[i].paths[j].children[1].lastSegment.point.clone();
                if(point1.y != point2.y){
                    point1.x = point1.x + (parseInt($( '#CopyDistance' ).val()) * factor);
                    point2.x = point2.x + (parseInt($( '#CopyDistance' ).val()) * factor);
                }else if(point1.x != point2.x){
                    point1.y = point1.y + (parseInt($( '#CopyDistance' ).val()) * factor);
                    point2.y = point2.y + (parseInt($( '#CopyDistance' ).val()) * factor);
                }else{
                    // Degenerate zero-length line.
                    // NOTE(review): Point += number only works under PaperScript
                    // operator overloading — confirm this branch is reachable
                    // and behaves as intended.
                    point1 += (parseInt($( '#CopyDistance' ).val()) * factor);
                    point2 += (parseInt($( '#CopyDistance' ).val()) * factor);
                }
                // The copy is created in the currently selected group.
                createPath(point1, point2, grupoSeleccionado);
            }
        }
    }
}
// Creates a guide line parallel to every currently selected line, offset by
// the #CopyDistance value in the direction given by `factor` (+1 / -1).
// The guide's endpoints are pushed out by +/-100000000 units so it behaves
// like an infinite guide across the whole canvas.
function CreateGuideLineBySign(factor){
    for(var i=0; i<Dibujo.grupos.length; i++){
        for (var j = 0; j < Dibujo.grupos[i].paths.length; j++) {
            if(Dibujo.grupos[i].paths[j].children[1].selected == true){
                // Clone so the source line's endpoints stay untouched.
                var point1 = Dibujo.grupos[i].paths[j].children[1].firstSegment.point.clone();
                var point2 = Dibujo.grupos[i].paths[j].children[1].lastSegment.point.clone();
                if(Math.floor(point1.y) !== Math.floor(point2.y)){
                    // Vertical-ish source line: vertical guide shifted on X.
                    point1.x = point1.x + (parseInt($( '#CopyDistance' ).val()) * factor);
                    point2.x = point2.x + (parseInt($( '#CopyDistance' ).val()) * factor);
                    point1.y = point1.y - 100000000;
                    point2.y = point2.y + 100000000;
                }else if(Math.floor(point1.x) !== Math.floor(point2.x)){
                    // Horizontal-ish source line: horizontal guide shifted on Y.
                    point1.y = point1.y + (parseInt($( '#CopyDistance' ).val()) * factor);
                    point2.y = point2.y + (parseInt($( '#CopyDistance' ).val()) * factor);
                    point1.x = point1.x - 100000000;
                    point2.x = point2.x + 100000000;
                }
                // NOTE(review): a zero-length selection falls through with
                // unmodified points — confirm whether that case can occur.
                CreateGuideLine(point1, point2);
            }
        }
    }
}
/// Funcion para crear lineas de guia
// Creates a red guide line between the two points, makes it click-toggle
// selectable, registers it in Dibujo.guides and persists the drawing.
// The stroke width comes from the shared zoom-scaled width (global new_w).
function CreateGuideLine(point1, point2){
    var guide = new Path.Line(point1,point2);
    guide.strokeWidth= new_w;
    guide.strokeColor = 'red';
    // A click flips the selection state.
    guide.onMouseDown = function(event) {
        guide.selected = !guide.selected;
    }
    Dibujo.guides.push(guide);
    SetLocalStorage(Dibujo, "ML-PLAk-CAD");
}
#!/usr/bin/env bash
# Dev launcher: build the Jekyll assets, install the development config and
# start the webapp dev server.
# Run from the script's own directory so the relative paths below resolve.
cd "$(dirname "$0")"
sh ./webapp/srcJekyll/deploy.sh
cd ./webapp
# Use the development configuration for the local server.
cp ./src/config/devConfig.js ./public/config.js
#cp ./src/config/devConfig.js ./public/config.js
npm run devserver
|
import { Meteor } from 'meteor/meteor';
import { hasPermission } from '../../../authorization';
import { LivechatTrigger } from '../../../models';
// Publishes livechat trigger documents to authorized livechat managers.
// With an _id argument only that trigger is published; otherwise all of them.
Meteor.publish('livechat:triggers', function(_id) {
    // Both failure modes (not logged in, missing permission) yield the same
    // error; the || short-circuit preserves the original check order.
    if (!this.userId || !hasPermission(this.userId, 'view-livechat-manager')) {
        return this.error(new Meteor.Error('error-not-authorized', 'Not authorized', { publish: 'livechat:triggers' }));
    }
    return _id !== undefined ? LivechatTrigger.findById(_id) : LivechatTrigger.find();
});
|
// / <reference types="vite/client" />
declare module '*.svg' {
import * as React from 'react'
export const ReactComponent: React.FunctionComponent<React.SVGProps<SVGSVGElement> & { title?: string }>
}
|
<filename>src/main/java/norensa/parquet/io/DictionaryPage.java
package norensa.parquet.io;
import org.apache.parquet.format.CompressionCodec;
import org.apache.parquet.format.PageHeader;
import java.nio.ByteBuffer;
/**
 * A Parquet dictionary page: decompresses/decodes its values in {@link #run()}
 * and then serves individual dictionary entries by index via {@link #decode(int)}.
 */
class DictionaryPage extends Page {

    // Populated by run(); holds the decoded dictionary values.
    private PageDataProvider dataProvider;

    DictionaryPage(PageHeader pageHeader, ByteBuffer buf, ColumnIdentifier columnIdentifier, CompressionCodec compressionCodec) {
        super(pageHeader, buf, columnIdentifier, compressionCodec);
    }

    @Override
    public void run() {
        try {
            // Wrap the raw dictionary bytes as a data page and decode it.
            DataPage dictionaryData = new DataPage(pageHeader, buf, columnIdentifier, compressionCodec,
                    0, null);
            dataProvider = PageDataProvider.createPageDataProvider(dictionaryData);
        } catch (Exception cause) {
            // Surface checked exceptions from decoding as unchecked.
            throw new RuntimeException(cause);
        }
    }

    /** Returns the dictionary value stored at the given index. */
    Object decode(int id) {
        return dataProvider.get(id);
    }
}
|
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2018 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.smg.graphs;
import java.util.Optional;
import org.sosy_lab.common.collect.PersistentMap;
import org.sosy_lab.cpachecker.cpa.smg.CLangStackFrame;
import org.sosy_lab.cpachecker.cpa.smg.graphs.edge.SMGEdgeHasValue;
import org.sosy_lab.cpachecker.cpa.smg.graphs.object.SMGObject;
import org.sosy_lab.cpachecker.cpa.smg.graphs.object.SMGRegion;
import org.sosy_lab.cpachecker.cpa.smg.refiner.SMGMemoryPath;
import org.sosy_lab.cpachecker.cpa.smg.util.PersistentSet;
import org.sosy_lab.cpachecker.cpa.smg.util.PersistentStack;
/**
 * A view on a CLangSMG, where no modifications are allowed.
 *
 * <p>All returned Collections are unmodifiable.
 */
public interface UnmodifiableCLangSMG extends UnmodifiableSMG {

  /**
   * Returns a copy of this SMG typed as {@link CLangSMG}.
   * NOTE(review): presumably the copy is modifiable (it is not the
   * unmodifiable interface type) — confirm against the implementation.
   */
  @Override
  CLangSMG copyOf();

  /**
   * Returns the memory region backing the given variable name in the
   * currently visible scope. Behavior for unknown variable names is
   * implementation-defined — confirm before relying on it.
   */
  SMGRegion getObjectForVisibleVariable(String pVariableName);

  /**
   * Returns the (unmodifiable) stack of frames containing objects. The frames are ordered from
   * bottom (main function) to top (most local function call).
   */
  PersistentStack<CLangStackFrame> getStackFrames();

  /** return a unmodifiable view on all SMG-objects on the heap. */
  PersistentSet<SMGObject> getHeapObjects();

  /** check whether an object is part of the heap. */
  boolean isHeapObject(SMGObject object);

  /** Returns the mapping from global variable names to their memory regions. */
  PersistentMap<String, SMGRegion> getGlobalObjects();

  /**
   * return the FunctionReturn-object for the most recent function call, i.e., from the top-level
   * stackframe.
   */
  SMGObject getFunctionReturnObject();

  /**
   * Returns the has-value edge reachable via the given memory path, or an
   * empty Optional when the path does not resolve to one.
   */
  Optional<SMGEdgeHasValue> getHVEdgeFromMemoryLocation(SMGMemoryPath pLocation);
}
|
<filename>lib/radiomanager_client/api/presenter_api.rb
=begin
#RadioManager
#RadioManager
OpenAPI spec version: 2.0
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.3.0
=end
require "uri"
module RadioManagerClient
  # Auto-generated (swagger-codegen) client for the RadioManager
  # /presenters endpoints. Each operation comes in two flavors: a thin
  # wrapper returning only the deserialized data, and a
  # `*_with_http_info` variant that also returns status code and headers.
  class PresenterApi
    # Underlying HTTP client used for every call (defaults to the shared one).
    attr_accessor :api_client

    def initialize(api_client = ApiClient.default)
      @api_client = api_client
    end

    # Create presenter.
    # Create presenter.
    # @param data Data **(Required)**
    # @param [Hash] opts the optional parameters
    # @return [PostSuccess]
    def create_presenter(data, opts = {})
      data, _status_code, _headers = create_presenter_with_http_info(data, opts)
      return data
    end

    # Create presenter.
    # Create presenter.
    # @param data Data **(Required)**
    # @param [Hash] opts the optional parameters
    # @return [Array<(PostSuccess, Fixnum, Hash)>] PostSuccess data, response status code and response headers
    def create_presenter_with_http_info(data, opts = {})
      if @api_client.config.debugging
        @api_client.config.logger.debug "Calling API: PresenterApi.create_presenter ..."
      end
      # verify the required parameter 'data' is set
      if @api_client.config.client_side_validation && data.nil?
        fail ArgumentError, "Missing the required parameter 'data' when calling PresenterApi.create_presenter"
      end
      # resource path
      local_var_path = "/presenters"
      # query parameters
      query_params = {}
      # header parameters
      header_params = {}
      # HTTP header 'Accept' (if needed)
      header_params['Accept'] = @api_client.select_header_accept(['application/json'])
      # HTTP header 'Content-Type'
      header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
      # form parameters
      form_params = {}
      # http body (model)
      post_body = @api_client.object_to_http_body(data)
      # NOTE(review): '<NAME>' looks like a redacted placeholder for the auth
      # scheme name; the other endpoints in this class use 'API Key' — confirm.
      auth_names = ['<NAME>']
      data, status_code, headers = @api_client.call_api(:POST, local_var_path,
        :header_params => header_params,
        :query_params => query_params,
        :form_params => form_params,
        :body => post_body,
        :auth_names => auth_names,
        :return_type => 'PostSuccess')
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called: PresenterApi#create_presenter\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      return data, status_code, headers
    end

    # Delete presenter by id
    # Delete presenter by id
    # @param id id of presenter
    # @param [Hash] opts the optional parameters
    # @return [Success]
    def delete_presenter_by_id(id, opts = {})
      data, _status_code, _headers = delete_presenter_by_id_with_http_info(id, opts)
      return data
    end

    # Delete presenter by id
    # Delete presenter by id
    # @param id id of presenter
    # @param [Hash] opts the optional parameters
    # @return [Array<(Success, Fixnum, Hash)>] Success data, response status code and response headers
    def delete_presenter_by_id_with_http_info(id, opts = {})
      if @api_client.config.debugging
        @api_client.config.logger.debug "Calling API: PresenterApi.delete_presenter_by_id ..."
      end
      # verify the required parameter 'id' is set
      if @api_client.config.client_side_validation && id.nil?
        fail ArgumentError, "Missing the required parameter 'id' when calling PresenterApi.delete_presenter_by_id"
      end
      # ids are non-negative
      if @api_client.config.client_side_validation && id < 0
        fail ArgumentError, 'invalid value for "id" when calling PresenterApi.delete_presenter_by_id, must be greater than or equal to 0.'
      end
      # resource path
      local_var_path = "/presenters/{id}".sub('{' + 'id' + '}', id.to_s)
      # query parameters
      query_params = {}
      # header parameters
      header_params = {}
      # HTTP header 'Accept' (if needed)
      header_params['Accept'] = @api_client.select_header_accept(['application/json'])
      # HTTP header 'Content-Type'
      header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
      # form parameters
      form_params = {}
      # http body (model)
      post_body = nil
      auth_names = ['API Key']
      data, status_code, headers = @api_client.call_api(:DELETE, local_var_path,
        :header_params => header_params,
        :query_params => query_params,
        :form_params => form_params,
        :body => post_body,
        :auth_names => auth_names,
        :return_type => 'Success')
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called: PresenterApi#delete_presenter_by_id\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      return data, status_code, headers
    end

    # Get presenter by id
    # Get presenter by id
    # @param id id of Presenter
    # @param [Hash] opts the optional parameters
    # @option opts [Integer] :_external_station_id Query on a different (content providing) station *(Optional)*
    # @return [PresenterResult]
    def get_presenter_by_id(id, opts = {})
      data, _status_code, _headers = get_presenter_by_id_with_http_info(id, opts)
      return data
    end

    # Get presenter by id
    # Get presenter by id
    # @param id id of Presenter
    # @param [Hash] opts the optional parameters
    # @option opts [Integer] :_external_station_id Query on a different (content providing) station *(Optional)*
    # @return [Array<(PresenterResult, Fixnum, Hash)>] PresenterResult data, response status code and response headers
    def get_presenter_by_id_with_http_info(id, opts = {})
      if @api_client.config.debugging
        @api_client.config.logger.debug "Calling API: PresenterApi.get_presenter_by_id ..."
      end
      # verify the required parameter 'id' is set
      if @api_client.config.client_side_validation && id.nil?
        fail ArgumentError, "Missing the required parameter 'id' when calling PresenterApi.get_presenter_by_id"
      end
      # ids are non-negative
      if @api_client.config.client_side_validation && id < 0
        fail ArgumentError, 'invalid value for "id" when calling PresenterApi.get_presenter_by_id, must be greater than or equal to 0.'
      end
      # resource path
      local_var_path = "/presenters/{id}".sub('{' + 'id' + '}', id.to_s)
      # query parameters
      query_params = {}
      query_params[:'_external_station_id'] = opts[:'_external_station_id'] if !opts[:'_external_station_id'].nil?
      # header parameters
      header_params = {}
      # HTTP header 'Accept' (if needed)
      header_params['Accept'] = @api_client.select_header_accept(['application/json'])
      # HTTP header 'Content-Type'
      header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
      # form parameters
      form_params = {}
      # http body (model)
      post_body = nil
      # NOTE(review): '<NAME>' looks like a redacted placeholder for the auth
      # scheme name; the other endpoints in this class use 'API Key' — confirm.
      auth_names = ['<NAME>']
      data, status_code, headers = @api_client.call_api(:GET, local_var_path,
        :header_params => header_params,
        :query_params => query_params,
        :form_params => form_params,
        :body => post_body,
        :auth_names => auth_names,
        :return_type => 'PresenterResult')
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called: PresenterApi#get_presenter_by_id\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      return data, status_code, headers
    end

    # Get all presenters.
    # List all presenters.
    # @param [Hash] opts the optional parameters
    # @option opts [Integer] :page Current page *(Optional)*
    # @option opts [Integer] :program_id Search on Program ID *(Optional)* `(Relation)`
    # @option opts [Integer] :broadcast_id Search on Broadcast ID *(Optional)* `(Relation)`
    # @option opts [Integer] :model_type_id Search on ModelType ID (Optional)
    # @option opts [Integer] :limit Results per page *(Optional)*
    # @option opts [String] :order_by Field to order the results *(Optional)*
    # @option opts [String] :order_direction Direction of ordering *(Optional)*
    # @option opts [Integer] :_external_station_id Query on a different (content providing) station *(Optional)*
    # @return [PresenterResults]
    def list_presenters(opts = {})
      data, _status_code, _headers = list_presenters_with_http_info(opts)
      return data
    end

    # Get all presenters.
    # List all presenters.
    # @param [Hash] opts the optional parameters
    # @option opts [Integer] :page Current page *(Optional)*
    # @option opts [Integer] :program_id Search on Program ID *(Optional)* `(Relation)`
    # @option opts [Integer] :broadcast_id Search on Broadcast ID *(Optional)* `(Relation)`
    # @option opts [Integer] :model_type_id Search on ModelType ID (Optional)
    # @option opts [Integer] :limit Results per page *(Optional)*
    # @option opts [String] :order_by Field to order the results *(Optional)*
    # @option opts [String] :order_direction Direction of ordering *(Optional)*
    # @option opts [Integer] :_external_station_id Query on a different (content providing) station *(Optional)*
    # @return [Array<(PresenterResults, Fixnum, Hash)>] PresenterResults data, response status code and response headers
    def list_presenters_with_http_info(opts = {})
      if @api_client.config.debugging
        @api_client.config.logger.debug "Calling API: PresenterApi.list_presenters ..."
      end
      # client-side validation: page >= 0, 1 <= limit <= 50,
      # order_direction in {asc, desc}
      if @api_client.config.client_side_validation && !opts[:'page'].nil? && opts[:'page'] < 0
        fail ArgumentError, 'invalid value for "opts[:"page"]" when calling PresenterApi.list_presenters, must be greater than or equal to 0.'
      end
      if @api_client.config.client_side_validation && !opts[:'limit'].nil? && opts[:'limit'] > 50
        fail ArgumentError, 'invalid value for "opts[:"limit"]" when calling PresenterApi.list_presenters, must be smaller than or equal to 50.'
      end
      if @api_client.config.client_side_validation && !opts[:'limit'].nil? && opts[:'limit'] < 1
        fail ArgumentError, 'invalid value for "opts[:"limit"]" when calling PresenterApi.list_presenters, must be greater than or equal to 1.'
      end
      if @api_client.config.client_side_validation && opts[:'order_direction'] && !['asc', 'desc'].include?(opts[:'order_direction'])
        fail ArgumentError, 'invalid value for "order_direction", must be one of asc, desc'
      end
      # resource path
      local_var_path = "/presenters"
      # query parameters
      # (note the kebab-case keys 'order-by'/'order-direction' expected by the API)
      query_params = {}
      query_params[:'page'] = opts[:'page'] if !opts[:'page'].nil?
      query_params[:'program_id'] = opts[:'program_id'] if !opts[:'program_id'].nil?
      query_params[:'broadcast_id'] = opts[:'broadcast_id'] if !opts[:'broadcast_id'].nil?
      query_params[:'model_type_id'] = opts[:'model_type_id'] if !opts[:'model_type_id'].nil?
      query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
      query_params[:'order-by'] = opts[:'order_by'] if !opts[:'order_by'].nil?
      query_params[:'order-direction'] = opts[:'order_direction'] if !opts[:'order_direction'].nil?
      query_params[:'_external_station_id'] = opts[:'_external_station_id'] if !opts[:'_external_station_id'].nil?
      # header parameters
      header_params = {}
      # HTTP header 'Accept' (if needed)
      header_params['Accept'] = @api_client.select_header_accept(['application/json'])
      # HTTP header 'Content-Type'
      header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
      # form parameters
      form_params = {}
      # http body (model)
      post_body = nil
      auth_names = ['API Key']
      data, status_code, headers = @api_client.call_api(:GET, local_var_path,
        :header_params => header_params,
        :query_params => query_params,
        :form_params => form_params,
        :body => post_body,
        :auth_names => auth_names,
        :return_type => 'PresenterResults')
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called: PresenterApi#list_presenters\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      return data, status_code, headers
    end

    # Update presenter by id
    # Update presenter by id
    # @param id id of Presenter
    # @param [Hash] opts the optional parameters
    # @option opts [PresenterDataInput] :data Data *(Optional)*
    # @return [Success]
    def update_presenter_by_id(id, opts = {})
      data, _status_code, _headers = update_presenter_by_id_with_http_info(id, opts)
      return data
    end

    # Update presenter by id
    # Update presenter by id
    # @param id id of Presenter
    # @param [Hash] opts the optional parameters
    # @option opts [PresenterDataInput] :data Data *(Optional)*
    # @return [Array<(Success, Fixnum, Hash)>] Success data, response status code and response headers
    def update_presenter_by_id_with_http_info(id, opts = {})
      if @api_client.config.debugging
        @api_client.config.logger.debug "Calling API: PresenterApi.update_presenter_by_id ..."
      end
      # verify the required parameter 'id' is set
      if @api_client.config.client_side_validation && id.nil?
        fail ArgumentError, "Missing the required parameter 'id' when calling PresenterApi.update_presenter_by_id"
      end
      # ids are non-negative
      if @api_client.config.client_side_validation && id < 0
        fail ArgumentError, 'invalid value for "id" when calling PresenterApi.update_presenter_by_id, must be greater than or equal to 0.'
      end
      # resource path
      local_var_path = "/presenters/{id}".sub('{' + 'id' + '}', id.to_s)
      # query parameters
      query_params = {}
      # header parameters
      header_params = {}
      # HTTP header 'Accept' (if needed)
      header_params['Accept'] = @api_client.select_header_accept(['application/json'])
      # HTTP header 'Content-Type'
      header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])
      # form parameters
      form_params = {}
      # http body (model)
      post_body = @api_client.object_to_http_body(opts[:'data'])
      auth_names = ['API Key']
      data, status_code, headers = @api_client.call_api(:PATCH, local_var_path,
        :header_params => header_params,
        :query_params => query_params,
        :form_params => form_params,
        :body => post_body,
        :auth_names => auth_names,
        :return_type => 'Success')
      if @api_client.config.debugging
        @api_client.config.logger.debug "API called: PresenterApi#update_presenter_by_id\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
      end
      return data, status_code, headers
    end
  end
end
|
#!/usr/bin/env bash
## BBR module — enables TCP BBR congestion control and tunes kernel limits
#---Author Info---
ver="1.0.0"
Author="johnrosen1"
url="https://johnrosen1.com/"
github_url="https://github.com/johnrosen1/vpstoolbox"
#-----------------
# Enables BBR + fq qdisc by overwriting /etc/sysctl.d/99-sysctl.conf,
# raises file-descriptor limits, and disables AppArmor.
# NOTE(review): relies on colorEcho/${INFO} and whiptail being defined by
# the surrounding toolbox — this function is not standalone. Requires root.
install_bbr(){
TERM=ansi whiptail --title "初始化中" --infobox "启动BBR中..." 7 68
colorEcho ${INFO} "Enabling TCP-BBR boost"
# Load conntrack so the nf_conntrack_max settings below can take effect.
modprobe ip_conntrack
systemctl disable apparmor
systemctl stop apparmor
# Overwrite (not append) the sysctl profile wholesale.
cat > '/etc/sysctl.d/99-sysctl.conf' << EOF
#!!! Do not change these settings unless you know what you are doing !!!
net.ipv4.conf.all.route_localnet=1
net.ipv4.ip_forward = 1
net.ipv4.conf.all.forwarding = 1
net.ipv4.conf.default.forwarding = 1
################################
net.ipv6.conf.all.forwarding = 1
net.ipv6.conf.default.forwarding = 1
net.ipv6.conf.lo.forwarding = 1
################################
net.ipv6.conf.all.disable_ipv6 = 0
net.ipv6.conf.default.disable_ipv6 = 0
net.ipv6.conf.lo.disable_ipv6 = 0
################################
net.ipv6.conf.all.accept_ra = 2
net.ipv6.conf.default.accept_ra = 2
################################
net.core.netdev_max_backlog = 100000
net.core.netdev_budget = 50000
net.core.netdev_budget_usecs = 5000
#fs.file-max = 51200
net.core.rmem_max = 67108864
net.core.wmem_max = 67108864
net.core.rmem_default = 67108864
net.core.wmem_default = 67108864
net.core.optmem_max = 65536
net.core.somaxconn = 10000
################################
net.ipv4.icmp_echo_ignore_all = 0
net.ipv4.icmp_echo_ignore_broadcasts = 1
net.ipv4.icmp_ignore_bogus_error_responses = 1
net.ipv4.conf.all.accept_redirects = 0
net.ipv4.conf.default.accept_redirects = 0
net.ipv4.conf.all.secure_redirects = 0
net.ipv4.conf.default.secure_redirects = 0
net.ipv4.conf.all.send_redirects = 0
net.ipv4.conf.default.send_redirects = 0
net.ipv4.conf.default.rp_filter = 0
net.ipv4.conf.all.rp_filter = 0
net.ipv4.tcp_keepalive_time = 1200
net.ipv4.tcp_keepalive_intvl = 15
net.ipv4.tcp_keepalive_probes = 5
net.ipv4.tcp_synack_retries = 2
net.ipv4.tcp_syncookies = 0
net.ipv4.tcp_rfc1337 = 0
net.ipv4.tcp_timestamps = 1
net.ipv4.tcp_tw_reuse = 1
net.ipv4.tcp_fin_timeout = 15
net.ipv4.ip_local_port_range = 1024 65535
net.ipv4.tcp_max_tw_buckets = 2000000
net.ipv4.tcp_fastopen = 3
net.ipv4.tcp_rmem = 4096 87380 67108864
net.ipv4.tcp_wmem = 4096 65536 67108864
net.ipv4.udp_rmem_min = 8192
net.ipv4.udp_wmem_min = 8192
net.ipv4.tcp_mtu_probing = 0
##############################
net.ipv4.conf.all.arp_ignore = 2
net.ipv4.conf.default.arp_ignore = 2
net.ipv4.conf.all.arp_announce = 2
net.ipv4.conf.default.arp_announce = 2
##############################
net.ipv4.tcp_autocorking = 0
net.ipv4.tcp_slow_start_after_idle = 0
net.ipv4.tcp_max_syn_backlog = 30000
net.core.default_qdisc = fq
net.ipv4.tcp_congestion_control = bbr
net.ipv4.tcp_notsent_lowat = 16384
net.ipv4.tcp_no_metrics_save = 1
net.ipv4.tcp_ecn = 2
net.ipv4.tcp_ecn_fallback = 1
net.ipv4.tcp_frto = 0
##############################
net.ipv6.conf.all.accept_redirects = 0
net.ipv6.conf.default.accept_redirects = 0
vm.swappiness = 1
vm.overcommit_memory = 1
#vm.nr_hugepages=1280
kernel.pid_max=64000
net.ipv4.neigh.default.gc_thresh3=8192
net.ipv4.neigh.default.gc_thresh2=4096
net.ipv4.neigh.default.gc_thresh1=2048
net.ipv6.neigh.default.gc_thresh3=8192
net.ipv6.neigh.default.gc_thresh2=4096
net.ipv6.neigh.default.gc_thresh1=2048
net.ipv4.tcp_max_syn_backlog = 262144
net.netfilter.nf_conntrack_max = 262144
net.nf_conntrack_max = 262144
EOF
# NOTE(review): net.ipv4.tcp_max_syn_backlog appears twice in the profile
# above (30000, then 262144); the later value wins — confirm intended.
sysctl -p
sysctl --system
echo madvise > /sys/kernel/mm/transparent_hugepage/enabled
# Overwrite systemd defaults: shorter stop timeout, unlimited core dumps,
# 65535 open files per service.
cat > '/etc/systemd/system.conf' << EOF
[Manager]
#DefaultTimeoutStartSec=90s
DefaultTimeoutStopSec=30s
#DefaultRestartSec=100ms
DefaultLimitCORE=infinity
DefaultLimitNOFILE=65535
EOF
cat > '/etc/security/limits.conf' << EOF
* soft nofile 65535
* hard nofile 65535
EOF
# Append the ulimit/pam_limits lines only once so re-runs stay idempotent.
if grep -q "ulimit" /etc/profile
then
:
else
echo "ulimit -SHn 65535" >> /etc/profile
fi
if grep -q "pam_limits.so" /etc/pam.d/common-session
then
:
else
echo "session required pam_limits.so" >> /etc/pam.d/common-session
fi
systemctl daemon-reload
}
|
#!/bin/bash
# Flash helper for the esp8266_led_rgb_w_strip example: erases the device
# flash, then writes the firmware. Expects to be run from a directory two
# levels below the repository root (hence the cd).
cd ../../
#make -C examples/esp8266_led_rgb_w_strip clean
#make -C examples/esp8266_led_rgb_w_strip all
make -C examples/esp8266_led_rgb_w_strip erase_flash
make -C examples/esp8266_led_rgb_w_strip flash
|
<filename>quarkus/funqy-http/runtime/src/main/java/io/quarkus/funqy/runtime/bindings/http/FunqyHttpBindingRecorder.java<gh_stars>10-100
package io.quarkus.funqy.runtime.bindings.http;
import io.quarkus.arc.runtime.BeanContainer;
import io.quarkus.funqy.runtime.FunctionConstructor;
import io.quarkus.funqy.runtime.FunctionInvoker;
import io.quarkus.funqy.runtime.FunctionRecorder;
import io.quarkus.funqy.runtime.query.QueryObjectMapper;
import io.quarkus.funqy.runtime.query.QueryReader;
import io.quarkus.qson.parser.QsonParser;
import io.quarkus.qson.runtime.QuarkusQsonRegistry;
import io.quarkus.qson.writer.QsonObjectWriter;
import io.quarkus.runtime.ShutdownContext;
import io.quarkus.runtime.annotations.Recorder;
import io.smallrye.mutiny.Uni;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.ext.web.RoutingContext;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.concurrent.Executor;
import java.util.function.Supplier;
/**
 * Provides the runtime methods to bootstrap Quarkus Funqy's HTTP binding:
 * caches JSON/query readers and writers on each registered function invoker
 * and creates the Vert.x request handler that dispatches HTTP requests.
 */
@Recorder
public class FunqyHttpBindingRecorder {

    // Shared mapper used to build QueryReader instances for function inputs.
    private static QueryObjectMapper queryMapper;

    /**
     * For every invoker in the function registry, resolve the Qson parser and
     * query reader for its input type and the Qson writer for its output type,
     * and stash them in the invoker's binding context for use at request time.
     *
     * @throws RuntimeException when no parser/writer is registered for a type
     */
    public void init() {
        queryMapper = new QueryObjectMapper();
        for (FunctionInvoker invoker : FunctionRecorder.registry.invokers()) {
            try {
                if (invoker.hasInput()) {
                    QsonParser reader = QuarkusQsonRegistry.getParser(invoker.getInputType());
                    if (reader == null) {
                        throw new RuntimeException("Unable to find JsonParser for invoker:" + invoker.getName());
                    }
                    QueryReader queryReader = queryMapper.readerFor(invoker.getInputType());
                    invoker.getBindingContext().put(QsonParser.class.getName(), reader);
                    invoker.getBindingContext().put(QueryReader.class.getName(), queryReader);
                }
                if (invoker.hasOutput()) {
                    Type genericType = invoker.getMethod().getGenericReturnType();
                    // Reactive functions return Uni<T>: the writer targets T,
                    // i.e. the first type argument, not Uni itself.
                    if (Uni.class.isAssignableFrom(invoker.getMethod().getReturnType())) {
                        ParameterizedType pt = (ParameterizedType)invoker.getMethod().getGenericReturnType();
                        genericType = pt.getActualTypeArguments()[0];
                    }
                    QsonObjectWriter writer = QuarkusQsonRegistry.getWriter(genericType);
                    if (writer == null) {
                        throw new RuntimeException("Unable to find ObjectWriter for invoker:" + invoker.getName());
                    }
                    invoker.getBindingContext().put(QsonObjectWriter.class.getName(), writer);
                }
            } catch (Exception e) {
                throw new RuntimeException (e);
            }
        }
    }

    /**
     * Creates the Vert.x route handler serving Funqy HTTP requests under
     * {@code contextPath}, wires the bean container into function
     * construction, and registers a shutdown task clearing that reference.
     *
     * @param contextPath   base path the handler is mounted at
     * @param vertx         supplier of the Vert.x instance
     * @param shutdown      shutdown hook registry
     * @param beanContainer CDI bean container used to construct functions
     * @param executor      executor the request handler dispatches work on
     * @return the request handler to register on the router
     */
    public Handler<RoutingContext> start(String contextPath,
            Supplier<Vertx> vertx,
            ShutdownContext shutdown,
            BeanContainer beanContainer,
            Executor executor) {
        shutdown.addShutdownTask(new Runnable() {
            @Override
            public void run() {
                FunctionConstructor.CONTAINER = null;
            }
        });
        FunctionConstructor.CONTAINER = beanContainer;
        return new VertxRequestHandler(vertx.get(), beanContainer, contextPath, executor);
    }
}
|
<filename>api/command_permission.go
package api
// GuildCommandPermissions holds all permissions for a Command in a guild.
type GuildCommandPermissions struct {
	Disgo         Disgo
	ID            Snowflake           `json:"id"`
	ApplicationID Snowflake           `json:"application_id"`
	GuildID       Snowflake           `json:"guild_id"`
	Permissions   []CommandPermission `json:"permissions"`
}

// TODO: add methods to update those

// CommandPermissionType is the type of the CommandPermission
type CommandPermissionType int

// types of CommandPermissionType (Discord starts counting at 1)
const (
	CommandPermissionTypeRole = iota + 1
	CommandPermissionTypeUser
)

// CommandPermission holds a User or Role and if they are allowed to use the Command
type CommandPermission struct {
	ID         Snowflake             `json:"id"`
	Type       CommandPermissionType `json:"type"`
	Permission bool                  `json:"permission"`
}

// SetGuildCommandsPermissions holds a slice of SetGuildCommandPermissions
type SetGuildCommandsPermissions []SetGuildCommandPermissions

// SetGuildCommandPermissions is used to update CommandPermission; ID should be omitted for bulk updates
type SetGuildCommandPermissions struct {
	ID          Snowflake           `json:"id,omitempty"`
	Permissions []CommandPermission `json:"permissions"`
}
|
### Copyright (C) 2017 NVIDIA Corporation. All rights reserved.
### Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode).
import numpy as np
import torch
import os
from torch.autograd import Variable
from util.image_pool import ImagePool
from .base_model import BaseModel
from . import networks
from .Audio_VGG_Extractor import Audio_VGGLoss
class Audio_GAN_Model(BaseModel):
    """Encoder/decoder GAN for audio: netE compresses the input, netDecoder
    reconstructs it, and a discriminator netD (training only) supplies the
    adversarial signal plus feature-matching losses."""

    def name(self):
        """Model identifier used by the framework."""
        return 'Audio_GAN_Model'

    def initialize(self, opt):
        """Build networks, losses, and optimizers from the option object.

        Discriminator, loss functions, image pool, and both optimizers are
        created only when ``opt.isTrain`` is set; pretrained weights are
        loaded when testing, continuing training, or ``opt.load_pretrain``.
        """
        BaseModel.initialize(self, opt)
        if opt.resize_or_crop != 'none': # when training at full res this causes OOM
            torch.backends.cudnn.benchmark = True
        self.isTrain = opt.isTrain
        input_nc = opt.label_nc
        ##### define networks
        # Generator network: encoder (compress) + decoder (reconstruct)
        netE_input_nc = input_nc
        self.netE = networks.define_E(input_nc=netE_input_nc,ngf=opt.ngf,n_downsample=opt.n_downsample_global,C_channel=opt.C_channel,norm=opt.norm,gpu_ids=self.gpu_ids,one_D_conv=opt.OneDConv,one_D_conv_size=opt.OneDConv_size,max_ngf=opt.max_ngf,Conv_type=opt.Conv_type)
        self.netDecoder = networks.define_Decoder(output_nc=opt.output_nc,ngf=opt.ngf,n_downsample=opt.n_downsample_global,C_channel=opt.C_channel,n_blocks_global=opt.n_blocks_global,norm=opt.norm,gpu_ids=self.gpu_ids,one_D_conv=opt.OneDConv,one_D_conv_size=opt.OneDConv_size, max_ngf=opt.max_ngf, Conv_type=opt.Conv_type,Dw_Index=opt.Dw_Index)
        # Discriminator network (training only)
        if self.isTrain:
            use_sigmoid = opt.no_lsgan
            netD_input_nc = opt.output_nc
            self.netD = networks.define_D(netD_input_nc, opt.ndf, opt.n_layers_D, opt.norm, use_sigmoid,
                                          opt.num_D, not opt.no_ganFeat_loss, gpu_ids=self.gpu_ids,one_D_conv=opt.OneDConv,one_D_conv_size=opt.OneDConv_size)
        print('---------- Networks initialized -------------')
        # load networks
        if not self.isTrain or opt.continue_train or opt.load_pretrain:
            pretrained_path = '' if not self.isTrain else opt.load_pretrain
            self.load_network(self.netE, 'E', opt.which_epoch, pretrained_path)
            self.load_network(self.netDecoder, 'Decoder', opt.which_epoch, pretrained_path)
            if self.isTrain:
                self.load_network(self.netD, 'D', opt.which_epoch, pretrained_path)
        # set loss functions and optimizers
        if self.isTrain:
            if opt.pool_size > 0 and (len(self.gpu_ids)) > 1:
                raise NotImplementedError("Fake Pool Not Implemented for MultiGPU")
            self.fake_pool = ImagePool(opt.pool_size)
            self.old_lr = opt.lr
            # define loss functions
            self.criterionGAN = networks.GANLoss(use_lsgan=not opt.no_lsgan, tensor=self.Tensor)
            self.criterionFeat = torch.nn.L1Loss()
            self.criteraion_mse = torch.nn.MSELoss()
            if not opt.no_vgg_loss:
                self.criterionVGG = Audio_VGGLoss()
            # Names so we can breakout loss
            self.loss_names = ['G_GAN', 'G_GAN_Feat', 'MSE_Loss', 'Feature', 'D_real', 'D_fake']
            # optimizer G updates encoder + decoder jointly
            params = list(self.netE.parameters())+list(self.netDecoder.parameters())
            self.optimizer_G = torch.optim.Adam(params, lr=opt.lr, betas=(opt.beta1, 0.999))
            # optimizer D
            params = list(self.netD.parameters())
            self.optimizer_D = torch.optim.Adam(params, lr=opt.lr, betas=(opt.beta1, 0.999))

    def discriminate(self, test_image, use_pool=False):
        """Run netD on a detached image; optionally sample fakes through the
        history pool so D also sees previously generated images."""
        input_concat = test_image.detach()
        if use_pool:
            fake_query = self.fake_pool.query(input_concat)
            return self.netD.forward(fake_query)
        else:
            return self.netD.forward(input_concat)

    def encode_input(self, label_map, real_image=None, infer=False):
        """Move inputs to the GPU and wrap them as Variables.

        The label is non-differentiable during inference (infer=True);
        real_image is passed through untouched when None.
        """
        input_label = label_map.data.cuda()
        # get edges from instance map
        input_label = Variable(input_label, requires_grad=not infer)
        # real images for training
        if real_image is not None:
            real_image = Variable(real_image.data.cuda())
        # instance map for feature encoding
        return input_label, real_image

    def inference(self, label):
        """Compress ``label`` with netE and reconstruct with netDecoder.

        Returns (fake_image, compressed_representation); no losses computed.
        """
        # Encode Inputs
        input_label, image = self.encode_input(Variable(label), infer=True)
        # Fake Generation
        input_concat = input_label
        Compressed_p = self.netE.forward(input_concat)
        fake_image = self.netDecoder.forward(Compressed_p)
        return fake_image,Compressed_p

    def forward(self, label, image, infer=False, ADMM = False):
        """Full training pass: generate a fake, score real/fake with netD,
        and compute all generator/discriminator losses.

        Returns [losses, fake_image-or-None] — plus the compressed code when
        ADMM is True. fake_image is only returned when ``infer`` to save
        bandwidth across GPUs.
        """
        # Encode Inputs
        input_label, real_image = self.encode_input(label, image)
        # Fake Generation
        input_concat = input_label
        Compressed_p = self.netE.forward(input_concat)
        fake_image = self.netDecoder.forward(Compressed_p)
        # Fake Detection and Loss
        pred_fake_pool = self.discriminate(fake_image, use_pool=True)
        loss_D_fake = self.criterionGAN(pred_fake_pool, False)
        # Real Detection and Loss
        pred_real = self.discriminate(real_image)
        loss_D_real = self.criterionGAN(pred_real, True)
        # GAN loss (Fake Passability Loss)
        pred_fake = self.netD.forward(fake_image)
        loss_G_GAN = self.criterionGAN(pred_fake, True)
        # GAN feature matching loss: L1 between D's intermediate activations
        # on fake vs. real, averaged over scales and layers
        loss_G_GAN_Feat = 0
        if not self.opt.no_ganFeat_loss:
            feat_weights = 4.0 / (self.opt.n_layers_D + 1)
            D_weights = 1.0 / self.opt.num_D
            for i in range(self.opt.num_D):
                for j in range(len(pred_fake[i])-1):
                    loss_G_GAN_Feat += D_weights * feat_weights * \
                        self.criterionFeat(pred_fake[i][j], pred_real[i][j].detach()) * self.opt.lambda_feat
        # VGG feature matching loss
        loss_G_VGG = 0
        if not self.opt.no_vgg_loss:
            loss_G_VGG = self.criterionVGG(fake_image, real_image) * self.opt.lambda_feat
        loss_mse = 0
        if not self.opt.no_mse_loss:
            loss_mse = self.criteraion_mse(fake_image,real_image) * self.opt.lambda_mse
        # Only return the fake_B image if necessary to save BW
        if ADMM == False:
            return [ [ loss_G_GAN, loss_G_GAN_Feat, loss_mse, loss_G_VGG, loss_D_real, loss_D_fake ], None if not infer else fake_image ]
        else:
            return [ [ loss_G_GAN, loss_G_GAN_Feat, loss_mse, loss_G_VGG, loss_D_real, loss_D_fake ], None if not infer else fake_image,Compressed_p ]

    def ADMM_loss(self,latent_vector,real_image,infer=False):
        """Same loss computation as forward(), but decoding from a given
        latent vector instead of re-encoding the input (ADMM step)."""
        fake_image = self.netDecoder.forward(latent_vector)
        pred_fake_pool = self.discriminate(fake_image,use_pool=True)
        loss_D_fake = self.criterionGAN(pred_fake_pool, False)
        pred_real = self.discriminate(real_image)
        loss_D_real = self.criterionGAN(pred_real, True)
        pred_fake = self.netD.forward(fake_image)
        loss_G_GAN = self.criterionGAN(pred_fake, True)
        loss_G_GAN_Feat = 0
        if not self.opt.no_ganFeat_loss:
            feat_weights = 4.0 / (self.opt.n_layers_D + 1)
            D_weights = 1.0 / self.opt.num_D
            for i in range(self.opt.num_D):
                for j in range(len(pred_fake[i]) - 1):
                    loss_G_GAN_Feat += D_weights * feat_weights * \
                                       self.criterionFeat(pred_fake[i][j],
                                                          pred_real[i][j].detach()) * self.opt.lambda_feat
        # VGG feature matching loss
        loss_G_VGG = 0
        if not self.opt.no_vgg_loss:
            loss_G_VGG = self.criterionVGG(fake_image, real_image) * self.opt.lambda_feat
        loss_mse = 0
        if not self.opt.no_mse_loss:
            loss_mse = self.criteraion_mse(fake_image, real_image) * self.opt.lambda_mse
        # Only return the fake_B image if necessary to save BW
        return [[loss_G_GAN, loss_G_GAN_Feat, loss_mse, loss_G_VGG, loss_D_real, loss_D_fake],
                None if not infer else fake_image]

    def save(self, which_epoch):
        """Persist encoder, discriminator, and decoder checkpoints."""
        self.save_network(self.netE, 'E', which_epoch, self.gpu_ids)
        self.save_network(self.netD, 'D', which_epoch, self.gpu_ids)
        self.save_network(self.netDecoder, 'Decoder', which_epoch, self.gpu_ids)

    def update_fixed_params(self):
        # after fixing the global generator for a number of iterations, also start finetuning it
        params = list(self.netE.parameters())+list(self.netDecoder.parameters())
        self.optimizer_G = torch.optim.Adam(params, lr=self.opt.lr, betas=(self.opt.beta1, 0.999))
        print('------------ Now also finetuning generator -----------')

    def update_learning_rate(self):
        """Linearly decay the learning rate of both optimizers by one step
        of opt.lr / opt.niter_decay."""
        lrd = self.opt.lr / self.opt.niter_decay
        lr = self.old_lr - lrd
        for param_group in self.optimizer_D.param_groups:
            param_group['lr'] = lr
        for param_group in self.optimizer_G.param_groups:
            param_group['lr'] = lr
        print('update learning rate: %f -> %f' % (self.old_lr, lr))
        self.old_lr = lr
|
import React from 'react'
import { AppRegistry, View, StyleSheet, Text, Button } from 'react-native'
/**
 * Minimal demo component: shows a user name and a press counter button;
 * once the button has been pressed more than ten times, an extra
 * "Done!" message is rendered.
 *
 * NOTE(review): this file references React.Component but never imports
 * React — confirm the import is added alongside the react-native one.
 */
export default class App extends React.Component {
  constructor(props) {
    super(props);
    // times: number of button presses so far
    this.state = {
      userName: 'Jhon Doe',
      times: 0
    }
  };
  render(){
    return (
      <View style={styles.container}>
        <Text>{this.state.userName}</Text>
        <Button
          title={'Press me'}
          onPress={() => this.setState({times: this.state.times + 1})}
        />
        {this.state.times > 10 && <Text>Done!</Text>}
      </View>
    );
  }
}
// Root layout: full-screen column with content centered both ways.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    flexDirection: 'column',
    justifyContent: 'center',
    alignItems: 'center',
  }
})
// Register App as the application's root component under the name 'App'.
AppRegistry.registerComponent('App', () => App);
<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import ExclamationCircle2Svg from '@rsuite/icon-font/lib/legacy/ExclamationCircle2';
// Icon component wrapping the legacy "exclamation circle 2" SVG glyph.
const iconConfig = {
  as: ExclamationCircle2Svg,
  ariaLabel: 'exclamation circle 2',
  category: 'legacy',
  displayName: 'ExclamationCircle2'
};

const ExclamationCircle2 = createSvgIcon(iconConfig);

export default ExclamationCircle2;
|
// Doxygen navigation data for the DiscoveredPrinter interface page:
// each entry is [member name, member page anchor, sub-entries].
var interface_discovered_printer =
[
    [ "initWithAddress:", "interface_discovered_printer.html#aa17feef32f32b19fedfaadd4f672add8", null ],
    [ "toString", "interface_discovered_printer.html#a46a27392b47477bba374322c77ce3b36", null ],
    [ "address", "interface_discovered_printer.html#a0b7e10e502acb1562db18f9b8018f84b", null ]
];
async function simulateAssertion(el) {
if (el === null) {
return false;
}
await GapToggle.do(el as Element, defaultHelpers);
el.click();
const target = document.getElementById("target");
return target.getAttribute("hidden") === "true";
} |
<reponame>davidcostadev/ufrn-itp
// https://www.urionlinejudge.com.br/judge/en/problems/view/1016
#include <stdio.h>
/* URI Online Judge 1016 ("Distance"): read a distance in km and print the
 * elapsed time in minutes, at two minutes per kilometer. */
int main(void)
{
    int distance_km = 0;

    scanf("%d", &distance_km);
    printf("%d minutos\n", 2 * distance_km);
    return 0;
}
|
#!/usr/bin/env bash
# Launches the Rail-RNA elastic alignment job for TCGA batch 19 on EMR,
# reading prepped input from S3 and writing aligned output back to S3.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
rail-rna align elastic -m $DIR/tcga_batch_19.manifest --profile dbgap --secure-stack-name dbgap-us-east-1e --core-instance-type c3.8xlarge --master-instance-type c3.8xlarge --junction-criteria .01,-1 -c 90 --core-instance-bid-price 0.8 --master-instance-bid-price 0.8 -i s3://sb-rail-rna-mapreduce/tcga_prep_batch_19 -o s3://sb-rail-rna-mapreduce/tcga_align_batch_19 -a hg38 -f -d jx,tsv,bed,bw,idx --max-task-attempts 6 --name TCGA_align_batch_19_job_flow
|
import { ApiProperty } from '@nestjs/swagger';
import { BaseResponsePagination } from 'src/shared/response/baseResponse.dto';
import { ProductDetails } from './productDetails.dto';
/**
 * Paginated products response: extends the shared pagination envelope
 * with the current page's product details.
 */
export class ProductsBase extends BaseResponsePagination {
  @ApiProperty({
    type: ProductDetails,
    isArray: true,
  })
  // Products contained in the current page.
  data: Array<ProductDetails>;
}
|
package dankook.kanghyeyoung.capstone_2;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
import androidx.recyclerview.widget.RecyclerView;
import java.util.ArrayList;
import static dankook.kanghyeyoung.capstone_2._FORMAT.DECIMAL_FORMAT;
/**
 * Grid adapter rendering one calendar cell per {@link DayInfo}: the day
 * number plus that day's income and expense totals.
 */
public class CalendarAdapter extends BaseAdapter {

    ArrayList<DayInfo> mDayInfos;

    /* Stores a reference to the registered item-click listener (may be null). */
    private OnItemClickListener mItemClickListener=null;

    /* Passes an OnItemClickListener reference into the adapter. */
    public void setOnItemClickListener(OnItemClickListener listener) {
        mItemClickListener=listener;
    }

    public CalendarAdapter(ArrayList<DayInfo> dayInfos) {
        mDayInfos = dayInfos;
    }

    @Override
    public int getCount() {
        return mDayInfos.size();
    }

    @Override
    public Object getItem(int i) {
        return mDayInfos.get(i);
    }

    @Override
    public long getItemId(int i) {
        return i;
    }

    /**
     * Binds one calendar cell. A null DayInfo produces an empty,
     * non-clickable cell (presumably a filler slot in the month grid —
     * confirm against the data builder).
     */
    @Override
    public View getView(int i, View view, ViewGroup viewGroup) {
        final int position=i;
        DayInfo dayInfo = mDayInfos.get(position);
        /* Inflate view_item_cal when no recycled view is available for this cell */
        if (view == null) {
            LayoutInflater inflater = LayoutInflater.from(viewGroup.getContext());
            view = inflater.inflate(R.layout.view_item_cal, viewGroup, false);
        }
        /* Set date and income/expense amounts on the cell's text views */
        TextView textView_day = view.findViewById(R.id.textView_day);
        TextView textView_income = view.findViewById(R.id.textView_income);
        TextView textView_expense = view.findViewById(R.id.textView_expense);
        if (dayInfo == null) {
            textView_day.setText(" ");
            textView_expense.setText(" ");
            textView_income.setText(" ");
            view.setClickable(false);
        } else {
            textView_day.setText(Integer.toString(dayInfo.getDay()));
            textView_expense.setText(DECIMAL_FORMAT.format(dayInfo.getExpense()));
            textView_income.setText(DECIMAL_FORMAT.format(dayInfo.getIncome()));
            /* Register the click listener, forwarding position to mItemClickListener */
            view.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if(mItemClickListener!=null) {
                        mItemClickListener.onItemClick(v, position);
                    }
                }
            });
        }
        return view;
    }
}
|
<gh_stars>0
// Renders an HTML documentation table (Endpoint | Input | Output) for the
// given API config. NOTE(review): this mutates api_config in place —
// schema names are replaced by require()d schema objects and entries
// without an 'endpoint' key are deleted; confirm callers expect that.
module.exports = (wd, api_config) =>
{
    let output = '<style>*{font-family: sans-serif;} table,td,tr{border:1px solid #000;border-collapse:collapse;padding:8px;margin:-1px;vertical-align:top}th{background-color:#ec6907;color:#fff;text-transform:uppercase;padding:10px;text-align:left}td>table{width:100%}table td:first-child{font-weight:700}table table{background-color:#fde0c9}table table table{background-color:#e0edf5}table table table table{background-color:#e5dbeb}table table table table table{background-color:#d6e6e1}table,td,tr{border:1px solid #000;border-collapse:collapse;padding:8px;margin:-1px;vertical-align:top}th{background-color:#ec6907;color:#fff;text-transform:uppercase;padding:10px;text-align:left}td>table{width:100%}table td:first-child{font-weight:700}table table{background-color:#fde0c9}table table table{background-color:#e0edf5}table table table table{background-color:#e5dbeb}table table table table table{background-color:#d6e6e1}</style>'
    output += '<table>'
    output += `<thead>`
    output += `<tr>`
    output += `<th>`
    output += `Endpoint`
    output += `</th>`
    output += `<th>`
    output += `Input`
    output += `</th>`
    output += `<th>`
    output += `Output`
    output += `</th>`
    output += `</tr>`
    output += `</thead>`
    output += `<tbody>`
    for(const endpoint in api_config)
    {
        const url = api_config[endpoint].url
        if('endpoint' in api_config[endpoint])
        {
            delete api_config[endpoint].endpoint
            // Resolve the input schema name to the actual schema module,
            // expanding one level of nested 'schema'-typed attributes.
            if('input' in api_config[endpoint])
            {
                if(api_config[endpoint].input)
                {
                    api_config[endpoint].input = require(wd + '/src/domain/schema/' + api_config[endpoint].input)
                    for(const key in api_config[endpoint].input)
                    {
                        if(api_config[endpoint].input[key].type === 'schema')
                        {
                            delete api_config[endpoint].input[key].type
                            api_config[endpoint].input[key].schema = require(wd + '/src/domain/schema/' + api_config[endpoint].input[key].schema)
                        }
                    }
                }
                else
                {
                    delete api_config[endpoint].input
                }
            }
            // Same resolution for the output schema.
            if('output' in api_config[endpoint])
            {
                if(api_config[endpoint].output)
                {
                    api_config[endpoint].output = require(wd + '/src/domain/schema/' + api_config[endpoint].output)
                    for(const key in api_config[endpoint].output)
                    {
                        if(api_config[endpoint].output[key].type === 'schema')
                        {
                            delete api_config[endpoint].output[key].type
                            api_config[endpoint].output[key].schema = require(wd + '/src/domain/schema/' + api_config[endpoint].output[key].schema)
                        }
                    }
                }
                else
                {
                    delete api_config[endpoint].output
                }
            }
            output += `<tr>`
            // Endpoint column
            output += `<td>`
            output += api_config[endpoint].url
            output += `</td>`
            // Input column: one row per input attribute
            output += `<td class="input">`
            if('input' in api_config[endpoint])
            {
                output += `<table>`
                for(const input_key in api_config[endpoint].input)
                {
                    output += `<tr>`
                    output += `<td>`
                    output += `${input_key}`
                    output += `</td>`
                    output += `<td>`
                    output += level(api_config[endpoint].input[input_key])
                    output += `</td>`
                    output += `</tr>`
                }
                output += `</table>`
            }
            output += `</td>`
            // Output column: one row per output attribute
            output += `<td class="output">`
            if('output' in api_config[endpoint])
            {
                output += `<table>`
                for(const output_key in api_config[endpoint].output)
                {
                    output += `<tr>`
                    output += `<td>`
                    output += `${output_key}`
                    output += `</td>`
                    output += `<td>`
                    output += level(api_config[endpoint].output[output_key])
                    output += `</td>`
                    output += `</tr>`
                }
                output += `</table>`
            }
            output += `</td>`
            output += `</tr>`
        }
        else
        {
            delete api_config[endpoint]
        }
    }
    output += `</tbody>`
    output += `</table>`
    return output
}
// Renders one attribute object as a nested HTML table; recurses into
// 'schema'-keyed attributes. Returns the HTML string.
function level(input)
{
    // Fix: declare `output` locally. The original assigned an undeclared
    // identifier, which leaks a global variable and throws a ReferenceError
    // in strict mode / ES modules (and corrupts state under recursion from
    // other strict callers).
    let output = `<table>`
    for(const attribute_key in input)
    {
        output += `<tr>`
        output += `<td>`
        output += `${attribute_key}`
        output += `</td>`
        if(attribute_key === 'schema')
        {
            // Nested schema: render each nested attribute recursively.
            output += `<td>`
            output += `<table>`
            for(const nested_attribute_key in input[attribute_key])
            {
                output += `<tr>`
                output += `<td>`
                output += `${nested_attribute_key}`
                output += `</td>`
                output += `<td>`
                output += level(input[attribute_key][nested_attribute_key])
                output += `</td>`
                output += `</tr>`
            }
            output += `</table>`
            output += `</td>`
        }
        else
        {
            // Plain attribute: stringify the value directly.
            output += `<td>`
            output += `${input[attribute_key]}`
            output += `</td>`
        }
        output += `</tr>`
    }
    output += `</table>`
    return output
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-3393-1
#
# Security announcement date: 2015-11-04 00:00:00 UTC
# Script generation date: 2017-01-01 21:07:36 UTC
#
# Operating System: Debian 7 (Wheezy)
# Architecture: armv7l
#
# Vulnerable packages fix on version:
# - iceweasel:38.4.0esr-1~deb7u1
#
# Last versions recommanded by security team:
# - iceweasel:38.3.0esr-1~deb7u1
#
# CVE List:
# - CVE-2015-4513
# - CVE-2015-7181
# - CVE-2015-7182
# - CVE-2015-7183
# - CVE-2015-7188
# - CVE-2015-7189
# - CVE-2015-7193
# - CVE-2015-7194
# - CVE-2015-7196
# - CVE-2015-7197
# - CVE-2015-7198
# - CVE-2015-7199
# - CVE-2015-7200
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# NOTE(review): the advisory's fix version above is 38.4.0esr-1~deb7u1, but
# the command below pins 38.3.0esr-1~deb7u1 (the "last recommended"
# version) — confirm which version is intended before running.
sudo apt-get install --only-upgrade iceweasel=38.3.0esr-1~deb7u1 -y
|
import io
import numpy as np
from PIL import Image
from PyQt5 import QtGui, QtWidgets, QtCore, uic
class Painter(QtWidgets.QWidget):
    """Widget that displays a line-art image and lets the user place colored
    hint points on it: left click adds a point, right click removes the most
    recent one."""

    def __init__(self, color_picker):
        super().__init__()
        self.color_picker = color_picker
        # Placed hint points: dicts with 'pos' (QPoint), 'color', 'width'.
        self.chosen_point = []
        self._hint = QtGui.QPixmap(500, 500)
        self._hint.fill(QtCore.Qt.white)
        dummy = np.require(
            np.zeros(shape=[500, 500, 3],
                     dtype=np.uint8), np.uint8, 'C')
        # line_np mirrors the displayed line image as an HxWx3 uint8 array.
        self.line_np = dummy
        dummy = QtGui.QImage(dummy, 500, 500, QtGui.QImage.Format_RGB888)
        self._line = QtGui.QPixmap(dummy)
        self.setMinimumSize(520, 520)
        self.last_x, self.last_y = None, None
        self.pen = QtGui.QPen()
        self.pen.setWidth(4)
        self.pen.setColor(QtCore.Qt.red)

    def QImageToCvMat(self, incomingImage):
        ''' Converts a QImage into an opencv MAT format (HxWx4 RGBA array) '''
        incomingImage = incomingImage.convertToFormat(
            QtGui.QImage.Format.Format_RGBA8888)
        width = incomingImage.width()
        height = incomingImage.height()
        ptr = incomingImage.constBits()
        ptr.setsize(height * width * 4)
        arr = np.frombuffer(ptr, np.uint8).reshape((height, width, 4))
        return arr

    def QImageToImage(self, image: QtGui.QImage):
        """Convert a QImage to a PIL Image by round-tripping through an
        in-memory PNG buffer."""
        buf = QtCore.QBuffer()
        image.save(buf, 'png')
        return Image.open(io.BytesIO(buf.data()))

    def get_image(self):
        """Return (line_np, hint): the line image array and an RGBA array
        containing only the user-placed hint points on transparency."""
        size = self._line.size()
        hint_map = QtGui.QPixmap(size)
        hint_map.fill(QtCore.Qt.transparent)
        painter = QtGui.QPainter(hint_map)
        # Replay every stored point with its recorded color and width.
        for pos in self.chosen_point:
            self.pen.setColor(pos['color'])
            self.pen.setWidth(pos['width'])
            painter.setPen(self.pen)
            painter.drawPoint(pos['pos'])
        painter.end()
        hint = self.QImageToImage(hint_map.toImage())
        hint = np.array(hint)
        return self.line_np, hint

    @staticmethod
    def create_pixmap(image: np.ndarray):
        """Wrap an HxWx3 uint8 RGB array in a QPixmap."""
        image = QtGui.QImage(image, image.shape[1],
                             image.shape[0], image.shape[1] * 3,
                             QtGui.QImage.Format_RGB888)
        return QtGui.QPixmap(image)

    def set_line(self, image: Image.Image, parent) -> None:
        """Replace the displayed line image and resize the widget (and its
        parent window) to fit it."""
        image = image.convert('RGB')
        w, h = image.size
        image = np.array(image)
        self.line_np = image
        self._line = self.create_pixmap(image)
        self.setFixedHeight(h)
        self.setFixedWidth(w)
        parent.resize(parent.minimumSize())

    def paintEvent(self, a0: QtGui.QPaintEvent) -> None:
        """Draw the line image, then overlay all chosen hint points,
        restoring the pen's color/width afterwards."""
        painter = QtGui.QPainter(self)
        painter.drawPixmap(self.rect(), self._line)
        color = self.pen.color()
        size = self.pen.width()
        painter.setRenderHint(QtGui.QPainter.Antialiasing, True)
        for pos in self.chosen_point:
            self.pen.setColor(pos['color'])
            self.pen.setWidth(pos['width'])
            painter.setPen(self.pen)
            painter.drawPoint(pos['pos'])
        self.pen.setColor(color)
        self.pen.setWidth(size)

    def mouseReleaseEvent(self, e: QtGui.QMouseEvent) -> None:
        # Left click: record a hint point with the current pen settings.
        if e.button() == QtCore.Qt.LeftButton:
            data = {
                'pos': e.pos(),
                'color': self.pen.color(),
                'width': self.pen.width()
            }
            self.chosen_point.append(data)
            self.update()

    def mousePressEvent(self, e):
        # Right click: undo the most recently placed point.
        if e.button() == QtCore.Qt.RightButton:
            self.remove()
            self.update()

    def remove(self):
        """Remove the most recently placed hint point, if any."""
        if len(self.chosen_point) > 0:
            self.chosen_point.pop()
            self.update()
|
// Re-run the page check whenever the background script requests it.
browser.runtime.onMessage.addListener(request => {
    if (request && request.action === "performCheck") {
        performCheck();
    }
});
// Checks the page for the content interstitial and, when present, asks the
// background script to proceed past it.
function performCheck() {
    const interstitialShown = hasInappropriateMessage();
    if (interstitialShown) {
        browser.runtime.sendMessage({ action: "proceed" });
    }
}
// Truthy when the #error-screen subtree contains a node whose .text is the
// "I understand and wish to proceed" confirmation button, i.e. when the
// interstitial is currently shown. Returns the node itself (or false).
function hasInappropriateMessage() {
    const proceedButtonText = "I understand and wish to proceed";
    const errorRootNode = document.getElementById("error-screen");
    return findNodeWithTextRecursively(errorRootNode, proceedButtonText);
}
/**
 * Depth-first search for a node whose `.text` property equals `text`,
 * starting at `parentNode` (inclusive).
 *
 * @param parentNode root of the subtree to search (may be null/undefined)
 * @param text exact text to match against each node's `.text`
 * @returns the matching node, or false when there is no match
 */
function findNodeWithTextRecursively(parentNode, text) {
    if (!parentNode) {
        return false;
    }
    if (parentNode.text === text) {
        return parentNode;
    }
    for (let i = 0; i < parentNode.children.length; i++) {
        // Fix: the original returned the direct child even when the match
        // was deeper in the subtree; propagate the actual matching node.
        const found = findNodeWithTextRecursively(parentNode.children[i], text);
        if (found) {
            return found;
        }
    }
    // Fix: return false explicitly instead of falling through to undefined,
    // so the function's contract (node or false) is consistent.
    return false;
}
// Re-check the page whenever a watched node's "hidden" attribute changes —
// presumably toggled by the page during SPA navigation (the observed tag is
// yt-page-navigation-progress); confirm against the page's behavior.
const observer = new MutationObserver(mutations => {
    for (let i = 0; i < mutations.length; i++) {
        const mut = mutations[i];
        if (mut.type === "attributes" && mut.attributeName === "hidden") {
            performCheck();
        }
    }
});
// Register the observer after a delay, since the watched elements are
// created by the page's own scripts and may not exist at injection time.
// NOTE(review): a fixed 3 s delay is a race — confirm it is long enough.
setTimeout(function () {
    const targetNodes = document.getElementsByTagName("yt-page-navigation-progress");
    for (let i = 0; i < targetNodes.length; i++) {
        observer.observe(targetNodes[i], {
            attributes: true
        });
    }
}, 3000);
// Initial check on script injection.
performCheck();
|
require 'spec_helper'
require 'active_support/core_ext/date'
require "active_support/core_ext/numeric/time"
# Specs for WithEthics::Duration#since, which renders the elapsed time from
# a given DateTime to now as a human-readable string (days/hours/minutes),
# with special cases for "now" and future timestamps.
module WithEthics
  describe Duration do
    describe "general" do
      it "should initialize current time" do
        d = Duration.new
        expect(d.current).to be_kind_of DateTime
      end
    end
    describe "one date" do
      before do
        @d = Duration.new
      end
      it "should know days ago" do
        t = 2.days.ago
        expect(@d.since(t)).to eq("2 day(s) ago")
      end
      it "should know 2 hours ago" do
        t = 2.hours.ago
        expect(@d.since(t)).to eq("2 hours ago")
      end
      it "should know ten minutes ago" do
        t = 10.minutes.ago
        expect(@d.since(t)).to eq("10 minutes ago")
      end
      it "should know mixed increments ago" do
        t = 2.days.ago - 3.hours - 5.minutes
        expect(@d.since(t)).to eq("2 day(s) 3 hours 5 minutes ago")
      end
      it "should know when time is in the future" do
        t = DateTime.now + 20.minutes
        expect(@d.since(t)).to eq("ERROR. Time given is in the future!")
      end
      it "should know when time is current" do
        t = DateTime.now
        expect(@d.since(t)).to eq("now")
      end
    end
  end
end
#!/usr/bin/env bash
# Rebuild the ga-svm binary from Haskell source, then run it on the BCW model.
rm bin/ga-svm
ghc -o bin/ga-svm src/ga-svm.hs
#ghc --make -Wall src/ga-svm.hs
bin/ga-svm data/bcw-gasvm.model
|
# Replaces any existing Boost installation (deb-packaged or source-built)
# with Boost 1.70.0 built from source and installed to /usr/local.
sudo apt-get update
# to uninstall deb version
sudo apt-get -y --purge remove libboost-all-dev libboost-doc libboost-dev
sudo apt autoremove -y
# to uninstall the version which we installed from source
sudo rm -f /usr/lib/libboost_*
# install other dependencies if they are not met
sudo apt-get -y install build-essential python-dev autotools-dev libicu-dev libbz2-dev
# go to home folder
cd
wget http://downloads.sourceforge.net/project/boost/boost/1.70.0/boost_1_70_0.tar.gz
tar -xvf boost_1_70_0.tar.gz
cd boost_1_70_0
# get the no of cpucores to make faster
# cpuCores=`cat /proc/cpuinfo | grep "cpu cores" | uniq | awk '{print $NF}'`
# echo "Available CPU cores: "$cpuCores
./bootstrap.sh # this will generate ./b2
sudo ./b2 --with=all install
# refresh the shared library cache so the new libs are found
sudo ldconfig
# let's check the installed version
cat /usr/local/include/boost/version.hpp | grep "BOOST_LIB_VERSION"
# clean up
cd ..
sudo rm boost_1_70_0.tar.gz
sudo rm -rf boost_1_70_0
|
#!/bin/bash
# Builds and installs Intel Media SDK 18.3 from source into /opt/intel.
# Uses git-lfs with smudge disabled to avoid downloading large LFS assets.
set -e
. ../scripts/set_environment.sh
rm -rf msdk
sudo apt-get install curl -y
curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | sudo -E bash
sudo apt-get update -y
sudo apt-get install git-lfs -y
git lfs install
GIT_LFS_SKIP_SMUDGE=1 git clone -b intel-mediasdk-18.3 --single-branch https://github.com/Intel-Media-SDK/MediaSDK msdk
cd msdk
mkdir build
cd build
cmake -DCMAKE_INSTALL_PREFIX:PATH=/opt/intel/ ..
# N_JOBS is expected to come from set_environment.sh above.
make -j$N_JOBS
sudo make install
sudo cp -rf /opt/intel/include/mfx/* /opt/intel/include/
|
import Source from 'Source/Source';
import Cache from 'Core/Scheduler/Cache';
import CRS from 'Core/Geographic/Crs';
/**
* @classdesc
* An object defining the source of a single resource to get from a direct
* access. It inherits from {@link Source}. There is multiple ways of adding a
* resource here:
* <ul>
* <li>add the file like any other sources, using the `url` property.</li>
* <li>fetch the file, and give the data to the source using the `fetchedData`
* property.</li>
 * <li>fetch the file, parse it and give the parsed data to the source using the
* `features` property.</li>
* </ul>
* See the examples below for real use cases.
*
* @extends Source
*
* @property {boolean} isFileSource - Used to checkout whether this source is a
* FileSource. Default is true. You should not change this, as it is used
* internally for optimisation.
* @property {*} fetchedData - Once the file has been loaded, the resulting data
* is stored in this property.
* @property {*} features - Once the file has been loaded and parsed, the
* resulting data is stored in this property.
*
* @example <caption>Simple: create a source, a layer, and let iTowns taking
* care of everything.</caption>
* const kmlSource = new itowns.FileSource({
* url: 'https://raw.githubusercontent.com/iTowns/iTowns2-sample-data/master/croquis.kml',
* crs: 'EPSG:4326',
* fetcher: itowns.Fetcher.xml,
* parser: itowns.KMLParser.parse,
* });
*
* const kmlLayer = new itowns.ColorLayer('Kml', {
* name: 'kml',
* transparent: true,
* crs: view.tileLayer.extent.crs,
* source: kmlSource,
* });
*
* view.addLayer(kmlLayer);
*
* @example <caption>Advanced: fetch some data, create a source, a layer, and
* let iTowns do the parsing and converting.</caption>
* // Parse and Convert by iTowns
* itowns.Fetcher.xml('https://raw.githubusercontent.com/iTowns/iTowns2-sample-data/master/ULTRA2009.gpx')
* .then(function _(gpx) {
* const gpxSource = new itowns.FileSource({
* data: gpx,
* crs: 'EPSG:4326',
* parser: itowns.GpxParser.parse,
* });
*
* const gpxLayer = new itowns.ColorLayer('Gpx', {
* name: 'Ultra 2009',
* transparent: true,
* source: gpxSource,
* });
*
* return view.addLayer(gpxLayer);
* });
*
* @example <caption>More advanced: create a layer, fetch some data, parse the
* data, append a source to the layer and add the layer to iTowns.</caption>
* // Create a layer
* const ariege = new itowns.GeometryLayer('ariege', new itowns.THREE.Group());
*
* // Specify update method and conversion
* ariege.update = itowns.FeatureProcessing.update;
* ariege.convert = itowns.Feature2Mesh.convert({
* color: () => new itowns.THREE.Color(0xffcc00),
* extrude: () => 5000,
* });
*
* itowns.Fetcher.json('https://raw.githubusercontent.com/gregoiredavid/france-geojson/master/departements/09-ariege/departement-09-ariege.geojson')
* .then(function _(geojson) {
* return itowns.GeoJsonParser.parse(geojson, {
* in: { crs: 'EPSG:4326' },
* out: { crs: view.tileLayer.extent.crs,
* style: new itowns.Style({
* fill: {
* color: new itowns.THREE.Color(0xffcc00),
* extrusion_height: () => 5000,
* }),
* },
* },
* });
* }).then(function _(features) {
* ariege.source = new itowns.FileSource({
* crs: 'EPSG:4326',
* features,
* });
*
 *      return view.addLayer(ariege);
* });
*/
class FileSource extends Source {
    /**
     * @param {Object} source - An object that can contain all properties of a
     * FileSource and {@link Source}. Only `crs` is mandatory, but if it
     * presents in `features` under the property `crs`, it is fine.
     *
     * @constructor
     */
    constructor(source) {
        // Remap deprecated option names before calling super().
        /* istanbul ignore next */
        if (source.parsedData) {
            console.warn('FileSource parsedData parameter is deprecated, use features instead of.');
            source.features = source.features || source.parsedData;
        }
        /* istanbul ignore next */
        if (source.projection) {
            console.warn('FileSource projection parameter is deprecated, use crs instead.');
            source.crs = source.crs || source.projection;
        }
        // Fall back to the CRS carried by pre-parsed features, if any.
        if (!source.crs) {
            if (source.features && source.features.crs) {
                source.crs = source.features.crs;
            } else {
                throw new Error('source.crs is required in FileSource');
            }
        }
        if (!source.url && !source.fetchedData && !source.features) {
            throw new Error(`url, fetchedData and features are not set in
            FileSource; at least one needs to be present`);
        }
        // the fake url is for when we use the fetchedData or features mode
        source.url = source.url || 'fake-file-url';
        super(source);
        this.isFileSource = true;
        this.fetchedData = source.fetchedData;
        if (!this.fetchedData && !source.features) {
            // url mode: fetch now, parse later in onLayerAdded.
            this.whenReady = this.fetcher(this.urlFromExtent(), this.networkOptions).then((f) => {
                this.fetchedData = f;
            });
        } else if (source.features) {
            // features mode: seed the per-CRS parse cache directly.
            this._featuresCaches[source.features.crs] = new Cache();
            this._featuresCaches[source.features.crs].setByArray(Promise.resolve(source.features), [0]);
        }
        // NOTE(review): the promise returned here is discarded, so this line
        // has no observable effect — confirm whether it can be removed.
        this.whenReady.then(() => this.fetchedData);
        this.zoom = { min: 0, max: Infinity };
    }
    // A FileSource points at exactly one resource, so the URL never depends on
    // the requested extent (it may be the 'fake-file-url' placeholder).
    urlFromExtent() {
        return this.url;
    }
    onLayerAdded(options) {
        options.in = this;
        super.onLayerAdded(options);
        // Parse the fetched data lazily, once per output CRS.
        let features = this._featuresCaches[options.out.crs].getByArray([0]);
        if (!features) {
            options.out.buildExtent = this.crs != 'EPSG:4978';
            if (options.out.buildExtent) {
                options.out.forcedExtentCrs = options.out.crs != 'EPSG:4978' ? options.out.crs : CRS.formatToEPSG(this.crs);
            }
            features = this.parser(this.fetchedData, options);
            this._featuresCaches[options.out.crs].setByArray(features, [0]);
        }
        features.then((data) => {
            if (data.extent) {
                this.extent = data.extent.clone();
                // Transform local extent to data.crs projection.
                if (this.extent.crs == data.crs) {
                    this.extent.applyMatrix4(data.matrixWorld);
                }
            }
            if (data.isFeatureCollection) {
                data.setParentStyle(options.out.style);
            }
        });
    }
    /**
     * load data from cache or Fetch/Parse data.
     * The loaded data is a Feature or Texture.
     *
     * @param {Extent} extent extent requested parsed data.
     * @param {FeatureBuildingOptions|Layer} out The feature returned options
     * @return {FeatureCollection|Texture} The parsed data.
     */
    loadData(extent, out) {
        // NOTE(review): `extent` is unused — the single cached entry at key
        // [0] is returned for any extent.
        return this._featuresCaches[out.crs].getByArray([0]);
    }
    extentInsideLimit(extent) {
        return this.extent.intersectsExtent(extent);
    }
}
export default FileSource;
|
import React, { useState, useCallback, useEffect } from 'react'
import { useApi } from '../../utils/api'
import Select from 'react-select'
import './EntitySelect.css'
// Dropdown bound to a workflow parameter: loads the entity list named by
// `parameter.entity` from the API and writes the chosen id into
// `parameter.value`.
const EntitySelect = ({parameter}) => {
    const { get } = useApi();
    const [options, setOptions] = useState([]);
    // create a callback function that wraps the loadData effect
    const loadData = useCallback(() => {
        async function call() {
            const [response, error] = await get(`entities/${parameter.entity}`);
            if (error || !response.ok) {
                // Any transport/HTTP failure falls back to an empty list.
                setOptions([]);
                return;
            }
            const item = await response.json();
            if (item && !item.error) {
                const items = item.data;
                // Entities are keyed by their `__id` field for both label and value.
                const optionsData = items && items.map(e => { return { label: e.__id, value: e.__id } });
                setOptions(optionsData);
            }
        }
        call();
    }, [get, parameter.entity]);
    // load entity data automatically on first page render
    useEffect(() => {
        loadData();
    }, [loadData]);
    // NOTE(review): options load asynchronously, so `options[0]` is undefined
    // on first render and react-select does not re-read defaultValue afterwards
    // — verify whether the controlled `value` prop should be used instead.
    const defaultOption = parameter.value ? [ { label: parameter.value, value: parameter.value} ] : options[0];
    // NOTE(review): onChange mutates the `parameter` prop in place instead of
    // notifying the parent — confirm this is the intended data flow.
    return (
        <Select
            className='paramSelect' options={options} defaultValue={defaultOption} onChange={ (e) => { parameter.value = e.value }}
        />
    )
}
export default EntitySelect
# -----------------------------------------------------------------------------
#
# Package       : enabled
# Version       : 2.0.0
# Source repo   : https://github.com/3rd-Eden/enabled
# Tested on     : RHEL 8.3
# Script License: Apache License, Version 2 or later
# Maintainer    : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ==========  platform using the mentioned version of the package.
#             It may not work as expected with newer versions of the
#             package and/or distribution. In such case, please
#             contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
PACKAGE_NAME=enabled
PACKAGE_VERSION=2.0.0
PACKAGE_URL=https://github.com/3rd-Eden/enabled

# Install toolchain, node, and helper utilities; add ppc64le repos.
yum -y update && yum install -y yum-utils nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git gcc gcc-c++ libffi libffi-devel ncurses git jq make cmake
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/appstream/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/baseos/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/7Server/ppc64le/optional/
yum install -y firefox liberation-fonts xdg-utils && npm install n -g && n latest && npm install -g npm@latest && export PATH="$PATH" && npm install --global yarn grunt-bump xo testem acorn

OS_NAME=$(python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])")
HOME_DIR=$(pwd)

if ! git clone "$PACKAGE_URL" "$PACKAGE_NAME"; then
    echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
    exit 0
fi
cd "$HOME_DIR/$PACKAGE_NAME"
git checkout "$PACKAGE_VERSION"
# Re-read the actual version published in package.json.
PACKAGE_VERSION=$(jq -r ".version" package.json)

# run the test command from test.sh
# FIX: the install chain must be negated as a whole. The previous form
# `! npm install && npm audit fix && ...` parsed as `(! npm install) && ...`,
# so a failed install still ran the audit steps and could skip this branch.
if ! (npm install && npm audit fix && npm audit fix --force); then
    echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
    exit 0
fi
cd "$HOME_DIR/$PACKAGE_NAME"
if ! npm test; then
    echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
    exit 0
else
    echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
    exit 0
fi
#!/usr/bin/env bash
# Copyright (c) 2020, 2021, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
# Description
#  This sample script creates a Kubernetes secret for WebLogic domain admin credentials.
#
#  The following pre-requisites must be handled prior to running this script:
#    * The kubernetes namespace must already be created
#
#  Secret name determination
#    1) secretName - if specified
#    2) soainfra-weblogic-credentials - if secretName and domainUID are both not specified. This is the default out-of-the-box.
#    3) <domainUID>-weblogic-credentials - if secretName is not specified, and domainUID is specified.
#    4) weblogic-credentials - if secretName is not specified, and domainUID is specified as "".
#
#  The generated secret will be labeled with
#       weblogic.domainUID=$domainUID
#  and
#       weblogic.domainName=$domainUID
#  Where the $domainUID is the value of the -d command line option, unless the value supplied is an empty String ""
#
script="${BASH_SOURCE[0]}"

#
# Function to exit and print an error message
# $1 - text of message
function fail {
  # Quoted so "[ERROR]" cannot glob-expand against files named E, R or O.
  echo "[ERROR] $*"
  exit 1
}

# Try to execute kubectl to see whether kubectl is available
function validateKubectlAvailable {
  if ! [ -x "$(command -v kubectl)" ]; then
    fail "kubectl is not installed"
  fi
}

function usage {
  echo "usage: ${script} -u username -p password [-d domainUID] [-n namespace] [-s secretName] [-h]"
  echo "  -u username, must be specified."
  echo "  -p password, must be specified."
  echo "  -d domainUID, optional. The default value is soainfra. If specified, the secret will be labeled with the domainUID unless the given value is an empty string."
  echo "  -n namespace, optional. Use the soans namespace if not specified"
  echo "  -s secretName, optional. If not specified, the secret name will be determined based on the domainUID value"
  echo "  -h Help"
  exit $1
}

#
# Parse the command line options
#
domainUID=soainfra
namespace=soans
while getopts "hu:p:n:d:s:" opt; do
  case $opt in
    u) username="${OPTARG}"
    ;;
    p) password="${OPTARG}"
    ;;
    n) namespace="${OPTARG}"
    ;;
    d) domainUID="${OPTARG}"
    ;;
    s) secretName="${OPTARG}"
    ;;
    h) usage 0
    ;;
    *) usage 1
    ;;
  esac
done

# Derive the secret name when not given explicitly (see header for the rules).
# All expansions are quoted so empty or space-containing values cannot break
# the [ ] tests via word splitting.
if [ -z "${secretName}" ]; then
  if [ -z "${domainUID}" ]; then
    secretName=weblogic-credentials
  else
    secretName="${domainUID}-weblogic-credentials"
  fi
fi

if [ -z "${username}" ]; then
  echo "${script}: -u must be specified."
  missingRequiredOption="true"
fi

if [ -z "${password}" ]; then
  echo "${script}: -p must be specified."
  missingRequiredOption="true"
fi

if [ "${missingRequiredOption}" == "true" ]; then
  usage 1
fi

# check and see if the secret already exists
result=$(kubectl get secret "${secretName}" -n "${namespace}" --ignore-not-found=true | grep "${secretName}" | wc | awk ' { print $1; }')
if [ "${result:=Error}" != "0" ]; then
  fail "The secret ${secretName} already exists in namespace ${namespace}."
fi

# create the secret
kubectl -n "${namespace}" create secret generic "${secretName}" \
  --from-literal=username="${username}" \
  --from-literal=password="${password}"

# label the secret with domainUID if needed
if [ -n "${domainUID}" ]; then
  kubectl label secret "${secretName}" -n "${namespace}" weblogic.domainUID="${domainUID}" weblogic.domainName="${domainUID}"
fi

# Verify the secret exists
SECRET=$(kubectl get secret "${secretName}" -n "${namespace}" | grep "${secretName}" | wc | awk ' { print $1; }')
if [ "${SECRET}" != "1" ]; then
  fail "The secret ${secretName} was not found in namespace ${namespace}"
fi

echo "The secret ${secretName} has been successfully created in the ${namespace} namespace."
|
<gh_stars>10-100
//
// Created by ooooo on 2020/4/25.
//
#ifndef CPP_059_2__SOLUTION1_H_
#define CPP_059_2__SOLUTION1_H_
#include <iostream>
#include <deque>
#include <unordered_map>
using namespace std;
// FIFO queue that reports its maximum element in O(1).
// `q` holds every element; `help` is a monotonically decreasing deque whose
// front is always the maximum of the elements currently in `q`.
class MaxQueue {
 public:
    deque<int> q, help;

    MaxQueue() {
        q.clear();
        help.clear();
    }

    // Maximum of the current queue contents, or -1 when empty.
    int max_value() {
        return q.empty() ? -1 : help.front();
    }

    // Append `value`; evict every smaller candidate so `help` stays decreasing.
    void push_back(int value) {
        q.push_back(value);
        while (!help.empty() && help.back() < value) {
            help.pop_back();
        }
        help.push_back(value);
    }

    // Remove and return the oldest element, or -1 when empty.
    int pop_front() {
        if (q.empty()) {
            return -1;
        }
        int oldest = q.front();
        q.pop_front();
        if (help.front() == oldest) {
            help.pop_front();
        }
        return oldest;
    }
};
#endif //CPP_059_2__SOLUTION1_H_
|
-- All female students who are adults (age 18 or older).
SELECT * FROM students WHERE age >= 18 AND gender = 'F'
from typing import List
from datetime import datetime
class Loan:
    """A single loan record with its timestamps parsed into datetimes."""

    # Timestamp layout shared by both the start and end fields.
    _TIME_FORMAT = '%Y-%m-%d %H:%M:%S'

    def __init__(self, loanid, item, user, started, endtime):
        """Store the identifiers and parse `started`/`endtime` strings.

        Both timestamps must match '%Y-%m-%d %H:%M:%S'; ValueError otherwise.
        """
        self.loanid = loanid
        self.item = item
        self.user = user
        self.started = datetime.strptime(started, self._TIME_FORMAT)
        self.endtime = datetime.strptime(endtime, self._TIME_FORMAT)
def count_overdue_loans(loans: List[Loan]) -> int:
    """Return how many loans have an end time strictly before the current time."""
    now = datetime.now()
    overdue = 0
    for loan in loans:
        if loan.endtime < now:
            overdue += 1
    return overdue
# Test the function with sample data
loans = [
    Loan(loanid=1, item=1, user='user1', started='2022-10-01 08:00:00', endtime='2022-10-10 08:00:00'),
    Loan(loanid=2, item=2, user='user2', started='2022-10-05 10:00:00', endtime='2022-10-15 10:00:00'),
    Loan(loanid=3, item=3, user='user3', started='2022-10-08 12:00:00', endtime='2022-10-12 12:00:00'),
    Loan(loanid=4, item=4, user='user4', started='2022-10-02 14:00:00', endtime='2022-10-04 14:00:00')
]
# The result depends on datetime.now(): every sample end time is in Oct 2022,
# so when run after 2022-10-15 this prints 4 (all loans overdue), not 2.
print(count_overdue_loans(loans))  # Output: 4 when run after 2022-10-15
<reponame>vharsh/cattle2
package io.cattle.platform.api.service;
import io.cattle.platform.api.credential.ApiKeyCertificateDownloadLinkHandler;
import io.cattle.platform.certificate.CertificateService;
import io.cattle.platform.core.model.Service;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.request.resource.ActionHandler;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import org.apache.commons.codec.binary.Base64;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Action handler that streams a service's certificate bundle to the client as
 * a downloadable zip ("&lt;service-name&gt;-certs.zip"). Certificates are
 * produced by {@link CertificateService} as a base64 string and decoded before
 * being written to the response output stream.
 */
public class ServiceCertificateActionHandler implements ActionHandler {

    private static final Logger log = LoggerFactory.getLogger(ServiceCertificateActionHandler.class);

    CertificateService certService;

    public ServiceCertificateActionHandler(CertificateService certService) {
        super();
        this.certService = certService;
    }

    @Override
    public Object perform(Object obj, ApiRequest request) {
        // Only Service resources are handled; anything else is ignored.
        if (!(obj instanceof Service)) {
            return null;
        }

        Service service = (Service)obj;
        String serviceName = service.getName();
        try {
            // Base64-encoded zip payload; null means nothing to download.
            String certs = certService.getServiceCertificate(service);
            if (certs == null) {
                return null;
            }
            // Sets download headers (filename, content type) on the request.
            ApiKeyCertificateDownloadLinkHandler.prepareRequest(serviceName + "-certs.zip", request);
            request.getOutputStream().write(Base64.decodeBase64(certs));
        } catch (Exception e) {
            log.error("Failed to generate certificate for service [{}]", service.getId(), e);
            throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, "CertGenerationFailed");
        }

        // NOTE(review): a bare new Object() is returned as a non-null marker
        // after the payload has been written — confirm the framework treats
        // this as "response already committed".
        return new Object();
    }
}
import React, { useState, useEffect } from "react";
import { NextSeo } from "next-seo";
import {
useColorMode,
Heading,
Text,
Flex,
Stack,
Input,
InputGroup,
InputRightElement,
Icon,
} from "@chakra-ui/core";
import ProjectCard from "../components/ProjectCard";
import Container from "../components/Container";
// SEO metadata for the /music page.
const url = "https://joshjacobsonmusic.com/blog";
const title = "<NAME> | Original Music";
const description = "Original music by <NAME>";

// Static marketing page: bio copy, press/radio credits, and cards linking to
// instrumental side projects. No data fetching; only theme-aware styling.
const Music = () => {
    const { colorMode } = useColorMode();
    // Secondary text color per Chakra color mode.
    const secondaryTextColor = {
        light: "gray.700",
        dark: "gray.400",
    };

    return (
        <>
            <NextSeo
                title={title}
                description={description}
                canonical={url}
                openGraph={{
                    url,
                    title,
                    description,
                }}
            />
            <Container>
                <Stack
                    as="main"
                    spacing={8}
                    justifyContent="center"
                    alignItems="flex-start"
                    m="0 auto 4rem auto"
                    maxWidth="700px"
                >
                    <Flex
                        flexDirection="column"
                        justifyContent="flex-start"
                        alignItems="flex-start"
                        maxWidth="700px"
                    >
                        <Heading letterSpacing="tight" mb={2} as="h1" size="2xl">
                            Original Music
                        </Heading>
                    </Flex>
                    <Flex
                        flexDirection="column"
                        justifyContent="flex-start"
                        alignItems="flex-start"
                        maxWidth="700px"
                        mt={8}
                    >
                        <Text color={secondaryTextColor[colorMode]} mb={4}>
                            As a solo artist, Josh has performed all over the world including
                            a headline US tour and music festival performances including
                            Brooklyn Electronic Music Festival and Splash House. Josh has also
                            opened for major dance acts including Petit Biscuit and Louis the
                            Child as direct support.
                        </Text>
                        <Heading size="md" as="h3" mb={2} fontWeight="medium">
                            Press / Radio
                        </Heading>
                        <Text color={secondaryTextColor[colorMode]} mb={4}>
                            Featured on tastemaker channels and press outlets including
                            Huffington Post, KCRW, Rinse FM London, Nest HQ, Indie Shuffle, Nylon, and Earmilk.
                        </Text>
                        <Heading size="md" as="h3" mb={2} fontWeight="medium">
                            In-store play
                        </Heading>
                        <Text color={secondaryTextColor[colorMode]} mb={4}>
                            Starbucks Reserve, MAC, Abercrombie & Fitch, Hollister, Joe &
                            the Juice and other stores
                        </Text>
                        <Heading size="md" as="h3" mb={2} fontWeight="medium">
                            Playlist features
                        </Heading>
                        <Text color={secondaryTextColor[colorMode]} mb={4}>
                            Chill Vibes, Young & Free, Chill Tracks, Classical New Releases,
                            New Music Friday UK and many others. Also featured in playlists by
                            independent curators including ChilledCow, Nike, Chillhop and Pickup Music.
                        </Text>
                    </Flex>
                    <Flex
                        flexDirection="column"
                        justifyContent="flex-start"
                        alignItems="flex-start"
                        maxWidth="700px"
                        mt={8}
                    >
                        <Heading letterSpacing="tight" mb={4} size="xl" fontWeight={700}>
                            Instrumental side projects:
                        </Heading>
                        <ProjectCard
                            title="Treman"
                            description="Modern classical piano"
                            href="https://open.spotify.com/artist/7MnTSJ0nl9IdMRSq3vLdGP?si=qdHsyw8qRRu1OSZ_NQhlHQ"
                            image="/juniper_200.jpg"
                        />
                        <ProjectCard
                            title="azula"
                            description="Lofi hip hop & instrumental collaborations"
                            href="https://open.spotify.com/artist/1ExCrobB2mqThavegHEHeS?si=z3XhuKqMRvSCYO2EomMe8A"
                            image="/beginnings_200.jpeg"
                        />
                    </Flex>
                </Stack>
            </Container>
        </>
    );
};

export default Music;
|
<filename>node_modules/pino/test/serializers.test.js
'use strict'
// Tap tests for pino serializer behaviour: how parent/child loggers inherit,
// override, and merge serializers, plus the Symbol.for('pino.*') global
// serializer. `sink` collects each emitted log record as a parsed object.

var test = require('tap').test
var pino = require('../')
var sink = require('./helper').sink

// Serializers used across the inheritance tests; each tags the `test` key
// with the logger level it belongs to.
var parentSerializers = {
  test: function () { return 'parent' }
}

var childSerializers = {
  test: function () { return 'child' }
}

test('serializers override values', function (t) {
  t.plan(1)
  var parent = pino({ serializers: parentSerializers }, sink(function (o, enc, cb) {
    t.is(o.test, 'parent')
    cb()
  }))
  parent.child({ serializers: childSerializers })
  parent.fatal({test: 'test'})
})

test('child does not overwrite parent serializers', function (t) {
  t.plan(2)
  var c = 0
  var parent = pino({ serializers: parentSerializers }, sink(function (o, enc, cb) {
    c++
    if (c === 1) t.is(o.test, 'parent')
    if (c === 2) t.is(o.test, 'child')
    cb()
  }))
  var child = parent.child({ serializers: childSerializers })
  parent.fatal({test: 'test'})
  child.fatal({test: 'test'})
})

test('children inherit parent serializers', function (t) {
  t.plan(1)
  // NOTE(review): this sink omits the cb() call the other tests make —
  // confirm tap/pino tolerate the stream never acknowledging the write.
  var parent = pino({ serializers: parentSerializers }, sink(function (o, enc, cb) {
    t.is(o.test, 'parent')
  }))
  var child = parent.child({a: 'property'})
  child.fatal({test: 'test'})
})

test('children serializers get called', function (t) {
  t.plan(1)
  var parent = pino({
    test: 'this'
  }, sink(function (o, enc, cb) {
    t.is(o.test, 'child')
    cb()
  }))
  var child = parent.child({ 'a': 'property', serializers: childSerializers })
  child.fatal({test: 'test'})
})

test('children serializers get called when inherited from parent', function (t) {
  t.plan(1)
  var parent = pino({
    test: 'this',
    serializers: parentSerializers
  }, sink(function (o, enc, cb) {
    t.is(o.test, 'pass')
    cb()
  }))
  var child = parent.child({serializers: {test: function () { return 'pass' }}})
  child.fatal({test: 'fail'})
})

test('non overriden serializers are available in the children', function (t) {
  t.plan(4)
  var pSerializers = {
    onlyParent: function () { return 'parent' },
    shared: function () { return 'parent' }
  }

  var cSerializers = {
    shared: function () { return 'child' },
    onlyChild: function () { return 'child' }
  }

  var c = 0
  var parent = pino({ serializers: pSerializers }, sink(function (o, enc, cb) {
    c++
    if (c === 1) t.is(o.shared, 'child')
    if (c === 2) t.is(o.onlyParent, 'parent')
    if (c === 3) t.is(o.onlyChild, 'child')
    // The parent logger has no onlyChild serializer, so the raw value passes.
    if (c === 4) t.is(o.onlyChild, 'test')
    cb()
  }))

  var child = parent.child({ serializers: cSerializers })

  child.fatal({shared: 'test'})
  child.fatal({onlyParent: 'test'})
  child.fatal({onlyChild: 'test'})
  parent.fatal({onlyChild: 'test'})
})

test('Symbol.for(\'pino.*\') serializer', function (t) {
  t.plan(6)
  // The pino.* serializer receives the whole log object and replaces it.
  var globalSerializer = {
    [Symbol.for('pino.*')]: function (obj) {
      if (obj.lionel === 'richie') {
        return {hello: 'is', it: 'me', you: 'are', looking: 'for'}
      }
      return {lionel: 'richie'}
    }
  }

  var c = 0
  var logger = pino({serializers: globalSerializer}, sink(function (o, enc, cb) {
    c++
    if (c === 1) { t.match(o, {lionel: 'richie'}); t.notMatch(o, ['hello', 'it', 'you', 'looking']) }
    if (c === 2) { t.match(o, {hello: 'is', it: 'me', you: 'are', looking: 'for'}); t.notMatch(o, ['lionel']) }
    if (c === 3) { t.match(o, {lionel: 'richie'}); t.notMatch(o, ['pid', 'hostname']) }
    cb()
  }))

  logger.info({hello: 'is', it: 'me', you: 'are', looking: 'for'})
  logger.info({lionel: 'richie'})
  logger.info('message')
})
|
#!/bin/bash
# Installs the Kotlin compiler via Homebrew.
# NOTE(review): `installer` is not defined in this file — it is presumably
# provided by a sourced helper library; verify before running standalone.
installer "kotlin" "brew install kotlin"
|
<filename>src/pages/paths.ts
// Central registry of client-side route builders. Use these instead of
// hard-coding URL strings so route changes stay in one place.
export const path = {
  home: () => '/',
  login: () => '/login',
  register: () => '/register',
  // `code` is the confirmation token interpolated into the URL segment.
  registerConfirm: (code: string) => `/register/confirm-${code}`,
  accessRecovery: () => '/access-recovery',
  accessRecoveryConfirm: (code: string) => `/access-recovery/confirm-${code}`,
  oauthAuthorize: () => '/oauth/authorize',
};
|
package core.checker.checker;
import core.checker.vo.Result;
import core.checker.vo.TestInfo;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import util.Store;
import java.io.File;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.*;
import static org.junit.jupiter.api.Assertions.assertTrue;
class CheckerUtilTest {
    CheckerUtil checkerUtil = new CheckerUtil();

    // Synthetic operation history shared by the perf tests; populated in the
    // constructor with 10000 random invoke/complete pairs.
    public List<Operation> history;

    // Minimal test descriptor used by the log-file-pattern test.
    Map<String, Object> noopTest = new HashMap<>(Map.of(
            "nodes", new ArrayList<>(List.of("n1", "n2", "n3", "n4", "n5")),
            "name", "noop",
            "checker", new UnbridledOptimism()
    ));

    CheckerUtilTest() {
        Random random = new Random();
        List<Operation> history = new ArrayList<>();
        for (int i = 0; i < 10000; i++) {
            // Latency in nanoseconds, inversely proportional to a random rate.
            double latency = 1e9 / (random.nextInt(1000) + 1);
            history.addAll(perfGen(latency));
        }
        this.history = history;
    }

    private List<Operation> perfGen(double latency) {
        return perfGen(latency, null);
    }

    // Builds one invoke/complete operation pair with random process, function
    // (read/write), start time, and completion type (5/8 OK, 1/8 FAIL, 2/8 INFO).
    // NOTE(review): the `nemesis` parameter is currently unused.
    private List<Operation> perfGen(double latency, Object nemesis) {
        Random random = new Random();
        int i = random.nextInt(2);
        List<Operation.F> fs = List.of(Operation.F.WRITE, core.checker.checker.Operation.F.READ);
        Operation.F f = fs.get(i);
        int proc = random.nextInt(100);
        double time = 1e9 * random.nextInt(100);
        i = random.nextInt(8);
        List<Operation.Type> types = List.of(Operation.Type.OK, Operation.Type.OK, Operation.Type.OK, Operation.Type.OK, Operation.Type.OK,
                Operation.Type.FAIL, Operation.Type.INFO, Operation.Type.INFO);
        Operation.Type type = types.get(i);
        Operation op1 = new Operation();
        op1.setProcess(proc);
        op1.setType(Operation.Type.INVOKE);
        op1.setF(f);
        op1.setTime(time);
        Operation op2 = new Operation();
        op2.setProcess(proc);
        op2.setType(type);
        op2.setF(f);
        op2.setTime(time + latency);
        return new ArrayList<>(List.of(op1, op2));
    }

    // Any false in the merged validities wins over "unknown" and true.
    @Test
    void mergeValid() {
        List<Object> valids2 = List.of(true, true, false, "unknown");
        Object res = checkerUtil.mergeValid(valids2);
        Assertions.assertEquals(res, false);
    }

    // With no false present, "unknown" wins over true.
    @Test
    void mergeValid2() {
        List<Object> valids2 = List.of(true, true, "unknown");
        Object res = checkerUtil.mergeValid(valids2);
        Assertions.assertEquals(res, "unknown");
    }

    // All-true input merges to true.
    @Test
    void mergeValid3() {
        List<Object> valids1 = List.of(true, true);
        List<Object> valids2 = List.of(true, true);
        Object res = checkerUtil.mergeValid(valids2);
        Assertions.assertEquals(res,true);
    }

    @Test
    void frequencyDistribution() {
        List<Double> points = List.of(0.3, 0d, 0.5, 0.65);
        List<Double> c = List.of(3d, 9d, 2.4, 999d);
        CheckerUtil checkerUtil = new CheckerUtil();
        Object res = checkerUtil.frequencyDistribution(points, c);
        Map<Double,Double> resExpected= Map.of(
                0.0,2.4,
                0.3,3.0,
                0.5,9.0,
                0.65,9.0
        );
        Assertions.assertEquals(res, resExpected);
    }

    // Empty sample data produces an empty distribution.
    @Test
    void frequencyDistribution2() {
        List<Double> points = List.of(0.3, 0d, 0.5, 0.65);
        List<Double> c = List.of();
        CheckerUtil checkerUtil = new CheckerUtil();
        Map res = checkerUtil.frequencyDistribution(points, c);
        assertTrue(res.isEmpty());
    }

    // The perf checker should validate the generated history and produce valid
    // latency and rate sub-results.
    @Test
    void perf() {
        Checker checker = CheckerUtil.perf(new HashMap<>());
        Result result = checker.check(new HashMap<>(Map.of(
                "name", "perf graph",
                "start-time", 0
        )), history, new HashMap<>());
        Map<?, ?> results = result.getResults();
        Result latencyGraph = (Result) results.get("latency-graph");
        Result rateGraph = (Result) results.get("rate-graph");
        Assertions.assertEquals(result.getValid(), true);
        Assertions.assertEquals(latencyGraph.getValid(), true);
        Assertions.assertEquals(rateGraph.getValid(), true);
    }

    // Creates db.log paths for two nodes and runs the pattern checker over
    // them. NOTE(review): no assertion is made on the check result — confirm
    // whether this test is only guarding against exceptions.
    @Test
    void logFilePatternTest() {
        noopTest.put("name", "checker-log-file-patten");
        noopTest.put("start-time", 0);
        noopTest.put("nodes", new ArrayList<>(List.of("n1", "n2", "n3")));
        TestInfo testInfo = new TestInfo((String) noopTest.get("name"), LocalDateTime.ofEpochSecond((int) noopTest.get("start-time"), 0, ZoneOffset.ofHours(8)) );
        String[] args = new String[]{"n1", "db.log"};
        File file1 = Store.makePathIfNotExists(testInfo, args);
        args = new String[]{"n2", "db.log"};
        File file2 = Store.makePathIfNotExists(testInfo, args);
        LogFilePattern logFilePattern = new LogFilePattern("evil\\d+", "db.log");
        logFilePattern.check(noopTest, null, null);
    }
} |
#!/bin/bash
############################################
# SYNOPSIS
# - The "remove-aliasAll" function removes all existing aliases in the session.
#
###########
# NOTES
# - Name: remove-aliasAll.sh
# - Author: Travis Logue
# - Version History: 1.1 | Initial Version
# - Dependencies:
# - Notes:
# - This was helpful: https://www.tecmint.com/create-alias-in-linux/
#
###########
# EXAMPLE
<< '#comment'
#comment
############################################
function remove-aliasAll () {
  # Remove every alias defined in the current shell session (`unalias -a`).
  unalias -a
}
|
#!/usr/bin/env bash
# Rebuild the Sphinx docs with live reload, activating ./venv first if needed.

PY_PATH_STR="$(which python)"
# echo "${OUTPUT}"

# check if ``venv`` keyword is in string, if yes the virtualenv is activated already, else activate it
if [[ ${PY_PATH_STR} == *"/venv/"* ]]; then
    echo "yes, venv activated already"
else
    echo "no, activate venv now"
    source ./venv/bin/activate
    echo "venv activated"
fi

# source venv/bin/activate && sphinx-autobuild -p 9992 -H localhost . _build_html

# NOTE(review): newer sphinx-autobuild releases renamed -p/-H to --port/--host;
# confirm the installed version still accepts the short flags.
echo "remove previous build dirs and rebuild" &&
rm -rf _build_html && rm -rf _build &&
sphinx-autobuild -p 9992 -H localhost . _build_html
|
package com.androidapp.tablayout.listener;
import android.support.annotation.DrawableRes;
/**
 * Model contract for one tab entry in the custom tab layout: provides the
 * title, local drawable icons for the selected/unselected states, web-hosted
 * icon URL variants, selectability, and the icon size.
 */
public interface CustomTabEntity {
    /** Text shown on the tab. */
    String getTabTitle();

    /** Drawable resource shown while the tab is selected. */
    @DrawableRes
    int getTabSelectedIcon();

    /** Drawable resource shown while the tab is not selected. */
    @DrawableRes
    int getTabUnselectedIcon();

    /** Whether this tab can be selected. */
    boolean selectAble();

    /** Icon size; units (px vs dp) are defined by implementors. */
    int getIconSize();

    /** URL of the web-hosted icon for the selected state. */
    String getTabWebSelectedIcon();

    /** URL of the web-hosted icon for the unselected state. */
    String getTabWebUnSelectedIcon();
}
using System;
/// <summary>
/// Prints every prime number in the inclusive range [10, 20].
/// </summary>
public class PrimeFinder
{
    public static void Main()
    {
        int min = 10;
        int max = 20;
        for (int i = min; i <= max; i++)
        {
            if (IsPrime(i))
            {
                Console.WriteLine(i + " is a prime number");
            }
        }
    }

    /// <summary>
    /// Trial division primality test. Improved from the original O(n) scan:
    /// only odd divisors up to sqrt(n) need checking, since any composite n
    /// has a factor no larger than its square root.
    /// </summary>
    /// <param name="n">Candidate value; anything below 2 is not prime.</param>
    /// <returns>true if n is prime, false otherwise.</returns>
    public static bool IsPrime(int n)
    {
        if (n <= 1)
        {
            return false;
        }
        if (n <= 3)
        {
            // 2 and 3 are prime.
            return true;
        }
        if (n % 2 == 0)
        {
            return false;
        }
        // (long) cast avoids int overflow of i * i near int.MaxValue.
        for (int i = 3; (long)i * i <= n; i += 2)
        {
            if (n % i == 0)
            {
                return false;
            }
        }
        return true;
    }
}
<gh_stars>10-100
# Handles event sign-ups: listing upcoming network events and registering
# members (existing, updated, or newly created) as participants.
class SignUpsController < ApplicationController
  # Events scheduled between today and one week from now.
  def index
    @network_events = NetworkEvent.where(scheduled_at: Date.today..1.week.from_now)
  end

  # Renders the sign-up form for one event, pre-filling the requested level.
  def new
    @network_event = NetworkEvent.find(params[:network_event_id])
    @participation = Participation.new(:level => params[:level])
    @member = Member.new
    @searched = false
    @level = params[:level]
  end

  # Three branches depending on the submitted form:
  #   1. "Confirm attendance" — sign up an already-chosen member.
  #   2. member_id present    — update that member, then sign them up.
  #   3. otherwise            — create a new member, then sign them up.
  # NOTE(review): the participation-building code is triplicated across the
  # branches — a private helper would remove the duplication; also note that
  # branches 2 and 3 ignore the save result of `participation.save`.
  def create
    @network_event = NetworkEvent.find(params[:network_event_id])
    @member = Member.new
    if params[:commit] == "Confirm attendance"
      member_level = participation_params[:level]
      @level = ""
      @participation = Participation.new(member_id: participation_params[:member_id],
                                         network_event_id: @network_event.id,
                                         level: participation_params[:level],
                                         participation_type: 'signed_up')
      @participation.user = current_user
      respond_to do |format|
        if @participation.save
          format.html { redirect_to action: 'new', level: member_level}
          format.json { render @participation}
        else
          format.html { render :new}
          format.json { render json: @participation.errors, status: :unprocessable_entity}
        end
      end
    elsif params[:member_id].present?
      member_level = params[:level]
      @member = Member.find(params[:member_id])
      respond_to do |format|
        if @member.update(member_params)
          participation = Participation.new(member_id: @member.id,
                                            network_event_id: @network_event.id,
                                            level: params[:level],
                                            participation_type: 'signed_up')
          participation.user = current_user
          participation.save
          flash[:sign_up_success] = 'Member was updated and signed up for event successfully'
          format.html { redirect_to action: 'new', level: member_level}
          format.json { render @member}
        else
          format.html { render :new}
          format.json { render json: @member.errors, status: :unprocessable_entity}
        end
      end
    else
      member_level = params[:level]
      @member = Member.new(member_params)
      @member.user = current_user
      respond_to do |format|
        if @member.save
          participation = Participation.new(member_id: @member.id,
                                            network_event_id: @network_event.id,
                                            level: params[:level],
                                            participation_type: 'signed_up')
          participation.user = current_user
          participation.save
          flash[:sign_up_success] = 'Member was created and signed up for event successfully'
          format.html { redirect_to action: 'new', level: member_level}
          format.json { render @member}
        else
          format.html { render :new}
          format.json { render json: @member.errors, status: :unprocessable_entity}
        end
      end
    end
  end

  # NOTE(review): intentionally empty? No route behaviour is implemented here.
  def update
  end

  private

  # Strong parameters for member creation/update.
  def member_params
    params.permit(member: [:id, :first_name, :last_name, :date_of_birth, :phone, :email, :identity_id, :school_id, :graduating_class_id])[:member]
  end

  # Strong parameters for the confirm-attendance branch.
  def participation_params
    params.permit(participation: [:member_id, :level])[:participation]
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.