text stringlengths 1 1.05M |
|---|
/*
* Copyright 2012 Yichun "agentzh" Zhang
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file.
*/
#include <re2/re2.h>
#include <re2/stringpiece.h>
#include <cassert>
#include <cstring>
#include <cstdio>
#include <cerrno>
#include <ctime>
#include <cstdlib>
static void usage(int rc);
static void run_engine(RE2 *re, char *input);
#define TIMER_START \
if (clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &begin) == -1) { \
perror("clock_gettime"); \
exit(2); \
}
#define TIMER_STOP \
if (clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &end) == -1) { \
perror("clock_gettime"); \
exit(2); \
} \
elapsed = (end.tv_sec - begin.tv_sec) * 1e3 + (end.tv_nsec - begin.tv_nsec) * 1e-6;
int
main(int argc, char **argv)
{
    int i;
    RE2 *re;
    char *re_str, *p;
    char *input;
    FILE *f;
    size_t len;
    long rc;

    if (argc < 3) {
        usage(1);
    }

    /* no options are actually supported: anything starting with '-' is an error */
    for (i = 1; i < argc; i++) {
        if (argv[i][0] != '-') {
            break;
        }
        fprintf(stderr, "unknown option: %s\n", argv[i]);
        exit(1);
    }

    /* exactly <regexp> and <file> must remain */
    if (argc - i != 2) {
        usage(1);
    }

    re_str = argv[i++];
    len = strlen(re_str);

    /* wrap the user pattern in a capturing group: "(<regexp>)"
     * (len + sizeof("()") == len + 2 chars + NUL terminator) */
    p = (char *) malloc(len + sizeof("()"));
    if (p == NULL) {
        return 2;
    }

    p[0] = '(';
    memcpy(&p[1], re_str, len);
    p[len + 1] = ')';
    p[len + 2] = '\0';

    /* RE2's constructor copies the pattern, so p can be freed right away.
     * Note: `new` throws on OOM rather than returning NULL, so the old
     * NULL check after it was dead code and has been removed. */
    re = new RE2(p);
    free(p);

    if (!re->ok()) {
        delete re;
        return 2;
    }

    errno = 0;

    f = fopen(argv[i], "rb");
    if (f == NULL) {
        perror("open file");
        delete re;
        return 1;
    }

    /* determine the file size via seek-to-end + ftell */
    if (fseek(f, 0L, SEEK_END) != 0) {
        perror("seek to file end");
        fclose(f);
        delete re;
        return 1;
    }

    rc = ftell(f);
    if (rc == -1) {
        perror("get file offset by ftell");
        fclose(f);
        delete re;
        return 1;
    }

    len = (size_t) rc;

    if (fseek(f, 0L, SEEK_SET) != 0) {
        perror("seek to file beginning");
        fclose(f);
        delete re;
        return 1;
    }

    input = (char *) malloc(len + 1);
    if (input == NULL) {
        /* bug fix: %zu is the correct conversion for size_t (was %ld) */
        fprintf(stderr, "failed to allocate %zu bytes.\n", len);
        fclose(f);
        delete re;
        return 1;
    }

    if (fread(input, 1, len, f) < len) {
        if (feof(f)) {
            fprintf(stderr, "file truncated.\n");
        } else {
            perror("read file");
        }
        /* bug fix: the original fell through on a non-EOF read error and
         * benchmarked against a partially-read buffer */
        fclose(f);
        free(input);
        delete re;
        return 1;
    }

    input[len] = '\0';

    if (fclose(f) != 0) {
        perror("close file");
        free(input);
        delete re;
        return 1;
    }

    run_engine(re, input);

    delete re;
    free(input);
    return 0;
}
/* Run a single RE2::PartialMatch over the whole input buffer, timing it
 * with the TIMER_START/TIMER_STOP macros (CPU time, milliseconds), and
 * print either "match (start, end)" byte offsets or "no match". */
static void
run_engine(RE2 *re, char *input)
{
    bool rc;
    re2::StringPiece cap;        /* first (and only) capture group */
    struct timespec begin, end;
    double elapsed;              /* set by TIMER_STOP, in milliseconds */
    const char *p;

    printf("re2 ");

    TIMER_START
    rc = RE2::PartialMatch(input, *re, &cap);
    TIMER_STOP

    if (rc) {
        /* cap points into input, so p - input is the match start offset */
        p = cap.data();
        printf("match (%ld, %ld)", (long) (p - input),
            (long) (p - input + cap.size()));
    } else {
        printf("no match");
    }

    printf(": %.02lf ms elapsed.\n", elapsed);
}
/* Print the command-line synopsis to stderr and terminate with rc. */
static void
usage(int rc)
{
    fputs("usage: re2 <regexp> <file>\n", stderr);
    exit(rc);
}
|
<reponame>NYCMOTI/open-bid
# Drops the awardee_paid_at column from the auctions table.
# NOTE(review): remove_column without the column type is not reversible in
# older Rails versions — confirm rollback is not needed for this migration.
class RemoveAwardeePaidAtFromAuctions < ActiveRecord::Migration
  def change
    remove_column :auctions, :awardee_paid_at
  end
end
|
/*
* Copyright (c) 2019 Ford Motor Company
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*
*/
package com.ford.labs.daab.subscribers.slack;
import com.ford.labs.daab.WireMockExtension;
import com.ford.labs.daab.event.HealthEvent;
import com.ford.labs.daab.event.JobEvent;
import com.ford.labs.daab.subscribers.EventSubscriptionService;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
import static com.github.tomakehurst.wiremock.client.WireMock.equalToJson;
import static com.github.tomakehurst.wiremock.client.WireMock.okJson;
import static com.github.tomakehurst.wiremock.client.WireMock.post;
import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.*;
/**
 * Tests for SlackSubscriber: verifies that job/health events delivered by the
 * EventSubscriptionService are posted to Slack's chat.postMessage endpoint
 * with the configured bearer token, channel, and attachment payload.
 */
class SlackSubscriberTest {

    // WireMock stands in for the Slack HTTP API at http://localhost:8123.
    @RegisterExtension
    static WireMockExtension wireMock = new WireMockExtension();

    SlackSubscriber subject;
    SlackClient client = new SlackClient(WebClient.create(), "http://localhost:8123");
    EventSubscriptionService mockEventSubscriptionService = mock(EventSubscriptionService.class);
    SlackClientProperties configuration = new SlackClientProperties("mockToken", "channel");

    @BeforeEach
    void setup() {
        this.subject = new SlackSubscriber(
                mockEventSubscriptionService,
                client,
                configuration
        );
    }

    @Test
    void subscribeToEvents_postsEachFailedJobEventToSlack() {
        // Slack endpoint accepts anything and returns an empty JSON body.
        wireMock.getServer().stubFor(post(urlEqualTo("/chat.postMessage")).willReturn(okJson("{}")));
        OffsetDateTime now = OffsetDateTime.now();
        String timestamp = now.format(DateTimeFormatter.ISO_DATE_TIME);

        // A successful job event should NOT produce a Slack message.
        JobEvent successfulJobEvent = new JobEvent();
        successfulJobEvent.setId("job.success");
        successfulJobEvent.setName("job.success");
        successfulJobEvent.setTime(timestamp);
        successfulJobEvent.setStatus(JobEvent.Status.SUCCESS);

        // The failed job event is the only one expected to reach Slack.
        JobEvent failedJobEvent = new JobEvent();
        failedJobEvent.setId("job.failure");
        failedJobEvent.setName("FAILURE");
        failedJobEvent.setUrl("fakeurl");
        failedJobEvent.setTime(timestamp);
        failedJobEvent.setStatus(JobEvent.Status.FAILURE);

        when(mockEventSubscriptionService.subscribe(anyString()))
                .thenReturn(Flux.just(
                        successfulJobEvent,
                        failedJobEvent
                ));

        // Only the failed event produces an emission.
        StepVerifier.create(subject.subscribeToEvents())
                .expectNextCount(1)
                .verifyComplete();

        // Verify token, channel, failure color (#B71C1C), link and timestamp.
        wireMock.getServer().verify(
                postRequestedFor(urlEqualTo("/chat.postMessage"))
                        .withHeader("Authorization", equalTo("Bearer mockToken"))
                        .withRequestBody(equalToJson("{\"channel\": \"channel\", \"text\": \"Job FAILURE has failed. \", \"as_user\": true, \"attachments\": [{\"fallback\": \"Job FAILURE has failed. \", \"color\": \"#B71C1C\", \"title\": \"Job FAILURE has failed. \", \"title_link\": \"fakeurl\", \"ts\": " + now.toEpochSecond() + "}] }"))
        );
    }

    @Test
    void subscribeToEvents_ifEventFailsAndThenSucceeds_postSuccessToSlack() {
        wireMock.getServer().stubFor(post(urlEqualTo("/chat.postMessage")).willReturn(okJson("{}")));
        OffsetDateTime now = OffsetDateTime.now();
        String timestamp = now.format(DateTimeFormatter.ISO_DATE_TIME);

        // First transition: app goes DOWN -> failure message expected.
        HealthEvent firstHealthEvent = new HealthEvent();
        firstHealthEvent.setId("job.willChange");
        firstHealthEvent.setName("<NAME>");
        firstHealthEvent.setTime(timestamp);
        firstHealthEvent.setStatus(HealthEvent.Status.DOWN);
        when(mockEventSubscriptionService.subscribe(anyString()))
                .thenReturn(Flux.just(firstHealthEvent));
        StepVerifier.create(subject.subscribeToEvents())
                .expectNextCount(1)
                .verifyComplete();
        String failureMessage = "App Will Change is down! ";
        wireMock.getServer().verify(
                postRequestedFor(urlEqualTo("/chat.postMessage"))
                        .withHeader("Authorization", equalTo("Bearer mockToken"))
                        .withRequestBody(equalToJson("{\"channel\": \"channel\", \"text\": \"" + failureMessage + "\", \"as_user\": true, \"attachments\": [{\"fallback\": \"" + failureMessage + "\", \"color\": \"#B71C1C\", \"title\": \"" + failureMessage + "\", \"title_link\": null, \"ts\": " + now.toEpochSecond() + "}] }"))
        );

        // Second transition: same id comes back UP -> success message
        // (green color #1B5E20) expected.
        HealthEvent secondHealthEvent = new HealthEvent();
        secondHealthEvent.setId("job.willChange");
        secondHealthEvent.setName("<NAME>");
        secondHealthEvent.setTime(timestamp);
        secondHealthEvent.setStatus(HealthEvent.Status.UP);
        when(mockEventSubscriptionService.subscribe(anyString()))
                .thenReturn(Flux.just(secondHealthEvent));
        StepVerifier.create(subject.subscribeToEvents())
                .expectNextCount(1)
                .verifyComplete();
        String successMessage = "App Will Change is back up! ";
        wireMock.getServer().verify(
                postRequestedFor(urlEqualTo("/chat.postMessage"))
                        .withHeader("Authorization", equalTo("Bearer mockToken"))
                        .withRequestBody(equalToJson("{\"channel\": \"channel\", \"text\": \"" + successMessage + "\", \"as_user\": true, \"attachments\": [{\"fallback\": \"" + successMessage + "\", \"color\": \"#1B5E20\", \"title\": \"" + successMessage + "\", \"title_link\": null, \"ts\": " + now.toEpochSecond() + "}] }"))
        );

        // A repeated UP for an already-UP app produces no further emission.
        StepVerifier.create(subject.subscribeToEvents())
                .verifyComplete();
    }
}
<gh_stars>1-10
/**
 * Demo driver: plays a Musica2 song eight times, then rewinds it eleven
 * times, printing any exception message raised along the way.
 */
public class Banda2 {

    public static void main(String[] args) {
        Musica2 songOne = new Musica2("<NAME>","Biligean",360,"Biligean you gotcha my hearth");
        new Musica3("<NAME>", "Pagodeira", 160, "Le le le le le");
        System.out.println(Musica3.nota);

        try {
            // Same call sequence as before: 8 plays followed by 11 rewinds.
            for (int play = 0; play < 8; play++) {
                songOne.tocar();
            }
            for (int rewind = 0; rewind < 11; rewind++) {
                songOne.rebobinar();
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    }
}
|
import asyncio
import os
import queue
import time
import traceback
from datetime import date, datetime, timedelta
from typing import Dict, List, Optional, Tuple
import pandas as pd
import requests
from alpaca_trade_api.entity import Order as AlpacaOrder
from alpaca_trade_api.rest import REST, URL, Entity
from alpaca_trade_api.stream import Stream
from pytz import timezone
from requests.auth import HTTPBasicAuth
from liualgotrader.common import config
from liualgotrader.common.tlog import tlog
from liualgotrader.common.types import Order, QueueMapper, Trade
from liualgotrader.trading.base import Trader
nyc = timezone("America/New_York")
class AlpacaTrader(Trader):
    """liualgotrader ``Trader`` implementation backed by the Alpaca APIs.

    Handles both the personal trading account (REST + websocket stream)
    and — when the ALPACA_BROKER_API_* environment variables are set —
    orders placed on behalf of brokerage sub-accounts via Alpaca's
    Broker API.
    """

    def __init__(self, qm: QueueMapper = None):
        self.market_open: Optional[datetime]
        self.market_close: Optional[datetime]

        # Broker-API credentials; all None unless brokerage support is
        # configured. (The "brokage" spelling is kept for backward
        # compatibility: these attributes are part of the public surface.)
        self.alpaca_brokage_api_baseurl = os.getenv(
            "ALPACA_BROKER_API_BASEURL", None
        )
        self.alpaca_brokage_api_key = os.getenv("ALPACA_BROKER_API_KEY", None)
        self.alpaca_brokage_api_secret = os.getenv(
            "ALPACA_BROKER_API_SECRET", None
        )
        self.alpaca_rest_client = REST(
            base_url=URL(config.alpaca_base_url),
            key_id=config.alpaca_api_key,
            secret_key=config.alpaca_api_secret,
        )
        if qm:
            # Only open the websocket stream when a queue mapper is
            # supplied: trade updates are fanned out to those queues.
            self.alpaca_ws_client = Stream(
                base_url=URL(config.alpaca_base_url),
                key_id=config.alpaca_api_key,
                secret_key=config.alpaca_api_secret,
            )
            if not self.alpaca_ws_client:
                raise AssertionError(
                    "Failed to authenticate Alpaca web_socket client"
                )
            self.alpaca_ws_client.subscribe_trade_updates(
                AlpacaTrader.trade_update_handler
            )
        self.running_task: Optional[asyncio.Task] = None

        # Resolve today's market open/close from the Alpaca calendar.
        now = datetime.now(nyc)
        calendar = self.alpaca_rest_client.get_calendar(
            start=now.strftime("%Y-%m-%d"), end=now.strftime("%Y-%m-%d")
        )[0]
        if now.date() >= calendar.date.date():
            self.market_open = now.replace(
                hour=calendar.open.hour,
                minute=calendar.open.minute,
                second=0,
                microsecond=0,
            )
            self.market_close = now.replace(
                hour=calendar.close.hour,
                minute=calendar.close.minute,
                second=0,
                microsecond=0,
            )
        else:
            # The first calendar entry is after today -> market closed today.
            self.market_open = self.market_close = None
        super().__init__(qm)

    @staticmethod
    def _status_to_event(status: str) -> Order.EventType:
        """Map an Alpaca order-status string to an ``Order.EventType``.

        Consolidates a mapping that was previously duplicated in four
        places (personal/brokerage order polling and order translation).
        """
        if status in ("canceled", "expired", "replaced"):
            return Order.EventType.canceled
        if status in ("pending_cancel", "pending_replace"):
            return Order.EventType.pending
        if status == "filled":
            return Order.EventType.fill
        if status == "partially_filled":
            return Order.EventType.partial_fill
        return Order.EventType.other

    async def _throttle(self, response) -> None:
        """Sleep until the Broker API rate-limit window resets.

        Bug fixes vs. the original: ``asyncio.sleep`` was called without
        ``await`` (so it never actually slept), and the duration was
        computed as ``now - reset`` (negative) instead of ``reset - now``.
        Also replaces the retry boilerplate that was copy-pasted into the
        three request helpers (see the old TODO).
        """
        if "x-ratelimit-reset" in response.headers:
            tlog(
                f"ALPACA BROKERAGE rate-limit till {response.headers['x-ratelimit-reset']}"
            )
            await asyncio.sleep(
                max(
                    int(response.headers["x-ratelimit-reset"])
                    - int(time.time()),
                    1,
                )
            )
            tlog("ALPACA BROKERAGE going to retry")
        else:
            tlog(
                f"ALPACA BROKERAGE push-back w/ {response.status_code} and no x-ratelimit-reset header"
            )
            await asyncio.sleep(10.0)

    async def _is_personal_order_completed(
        self, order_id: str
    ) -> Tuple[Order.EventType, float, float, float]:
        """Poll a personal-account order.

        Returns (event, avg_fill_price, filled_qty, trade_fees); Alpaca
        charges no per-trade fees, hence the trailing 0.0.
        """
        alpaca_order = self.alpaca_rest_client.get_order(order_id=order_id)
        return (
            self._status_to_event(alpaca_order.status),
            float(alpaca_order.filled_avg_price or 0.0),
            float(alpaca_order.filled_qty or 0.0),
            0.0,
        )

    async def is_fractionable(self, symbol: str) -> bool:
        """True if the asset supports fractional-share orders."""
        asset_details = self.alpaca_rest_client.get_asset(symbol)
        return asset_details.fractionable

    async def _is_brokerage_account_order_completed(
        self, order_id: str, external_order_id: Optional[str] = None
    ) -> Tuple[Order.EventType, float, float, float]:
        """Poll an order placed on behalf of a brokerage sub-account.

        ``external_order_id`` is the sub-account id used in the endpoint
        path. Raises AssertionError if brokerage support is unconfigured.
        """
        if not self.alpaca_brokage_api_baseurl:
            raise AssertionError(
                "order_on_behalf can't be called, if brokerage configs incomplete"
            )
        endpoint: str = (
            f"/v1/trading/accounts/{external_order_id}/orders/{order_id}"
        )
        tlog(f"_is_brokerage_account_order_completed:{endpoint}")
        url: str = self.alpaca_brokage_api_baseurl + endpoint
        response = await self._get_request(url)
        tlog(f"_is_brokerage_account_order_completed: response: {response}")
        return (
            self._status_to_event(response["status"]),
            float(response.get("filled_avg_price") or 0.0),
            float(response.get("filled_qty") or 0.0),
            0.0,
        )

    async def is_order_completed(
        self, order_id: str, external_order_id: Optional[str] = None
    ) -> Tuple[Order.EventType, float, float, float]:
        """Dispatch order polling to personal or brokerage flow."""
        if not external_order_id:
            return await self._is_personal_order_completed(order_id)
        return await self._is_brokerage_account_order_completed(
            order_id, external_order_id
        )

    def get_market_schedule(
        self,
    ) -> Tuple[Optional[datetime], Optional[datetime]]:
        """Return today's (open, close); both None when market is closed."""
        return self.market_open, self.market_close

    def get_trading_days(
        self, start_date: date, end_date: Optional[date] = None
    ) -> pd.DataFrame:
        """Return the trading calendar between the two dates, indexed by date.

        Bug fix: the default ``end_date`` used to be ``date.today()`` in
        the signature, which is evaluated once at import time; it is now
        resolved at call time.
        """
        if end_date is None:
            end_date = date.today()
        calendars = self.alpaca_rest_client.get_calendar(
            start=str(start_date), end=str(end_date)
        )
        _df = pd.DataFrame.from_dict([calendar._raw for calendar in calendars])
        _df["date"] = pd.to_datetime(_df.date)
        return _df.set_index("date")

    def get_position(self, symbol: str) -> float:
        """Signed position size: positive for long, negative for short."""
        pos = self.alpaca_rest_client.get_position(symbol)
        return float(pos.qty) if pos.side == "long" else -1.0 * float(pos.qty)

    def to_order(self, alpaca_order: AlpacaOrder) -> Order:
        """Translate an alpaca_trade_api Order entity into a local Order."""
        return Order(
            order_id=alpaca_order.id,
            symbol=alpaca_order.symbol.lower(),
            event=self._status_to_event(alpaca_order.status),
            price=float(alpaca_order.limit_price or 0.0),
            side=Order.FillSide[alpaca_order.side],
            filled_qty=float(alpaca_order.filled_qty),
            remaining_amount=float(alpaca_order.qty)
            - float(alpaca_order.filled_qty),
            submitted_at=alpaca_order.submitted_at,
            avg_execution_price=alpaca_order.filled_avg_price,
            trade_fees=0.0,
        )

    def _json_to_order(
        self,
        brokerage_response: dict,
        external_account_id: Optional[str] = None,
    ) -> Order:
        """Translate a Broker-API JSON payload into a local Order."""
        return Order(
            order_id=brokerage_response["id"],
            symbol=brokerage_response["symbol"].lower(),
            event=self._status_to_event(brokerage_response["status"]),
            price=float(brokerage_response["limit_price"] or 0.0),
            side=Order.FillSide[brokerage_response["side"]],
            filled_qty=float(brokerage_response["filled_qty"]),
            remaining_amount=float(brokerage_response["qty"])
            - float(brokerage_response["filled_qty"]),
            submitted_at=pd.Timestamp(
                ts_input=brokerage_response["submitted_at"],
                unit="ms",
                tz="US/Eastern",
            ),
            avg_execution_price=brokerage_response["filled_avg_price"],
            trade_fees=0.0,
            external_account_id=external_account_id,
        )

    async def get_order(self, order_id: str) -> Order:
        """Fetch a personal-account order by id."""
        return self.to_order(self.alpaca_rest_client.get_order(order_id))

    def is_market_open_today(self) -> bool:
        return self.market_open is not None

    def get_time_market_close(self) -> Optional[timedelta]:
        """Time remaining until close; raises if the market is closed today."""
        if not self.is_market_open_today():
            raise AssertionError("Market closed today")
        return (
            self.market_close - datetime.now(nyc)
            if self.market_close
            else None
        )

    async def reconnect(self):
        """Re-create the REST client.

        Consistency fix: pass ``base_url`` like ``__init__`` does;
        previously a reconnect silently fell back to the library default.
        """
        self.alpaca_rest_client = REST(
            base_url=URL(config.alpaca_base_url),
            key_id=config.alpaca_api_key,
            secret_key=config.alpaca_api_secret,
        )

    async def run(self) -> asyncio.Task:
        """Start (once) the websocket listener task and return it."""
        if not self.running_task:
            tlog("starting Alpaca listener")
            self.running_task = asyncio.create_task(
                self.alpaca_ws_client._trading_ws._run_forever()
            )
        return self.running_task

    async def close(self):
        """Stop the websocket listener, if one was started."""
        if not self.alpaca_ws_client:
            raise AssertionError("Must call w/ authenticated Alpaca client")
        if self.running_task:
            await self.alpaca_ws_client.stop_ws()

    async def get_tradeable_symbols(self) -> List[str]:
        data = self.alpaca_rest_client.list_assets()
        return [asset.symbol.lower() for asset in data if asset.tradable]

    async def get_shortable_symbols(self) -> List[str]:
        data = self.alpaca_rest_client.list_assets()
        return [
            asset.symbol.lower()
            for asset in data
            if asset.tradable and asset.easy_to_borrow and asset.shortable
        ]

    async def is_shortable(self, symbol) -> bool:
        asset = self.alpaca_rest_client.get_asset(symbol.upper())
        return (
            asset.tradable is not False
            and asset.shortable is not False
            and asset.status != "inactive"
            and asset.easy_to_borrow is not False
        )

    async def _cancel_personal_order(self, order_id: str) -> bool:
        self.alpaca_rest_client.cancel_order(order_id)
        return True

    async def _cancel_brokerage_order(
        self, account_id: str, order_id: str
    ) -> bool:
        """Cancel a sub-account order; True iff the API returned 204."""
        if not self.alpaca_brokage_api_baseurl:
            raise AssertionError(
                "_cancel_brokerage_order can't be called, if brokerage configs incomplete"
            )
        endpoint: str = f"/v1/trading/accounts/{account_id}/orders/{order_id}"
        url: str = self.alpaca_brokage_api_baseurl + endpoint
        response_code = await self._delete_request(url)
        tlog(
            f"cancel_brokerage_order {account_id},{order_id} -> {response_code}"
        )
        return response_code == 204

    async def cancel_order(self, order: Order) -> bool:
        """Cancel via the brokerage flow when the order belongs to a sub-account."""
        if order.external_account_id:
            return await self._cancel_brokerage_order(
                order.external_account_id, order.order_id
            )
        return await self._cancel_personal_order(order.order_id)

    async def _personal_submit(
        self,
        symbol: str,
        qty: float,
        side: str,
        order_type: str,
        time_in_force: str,
        limit_price: str = None,
        stop_price: str = None,
        client_order_id: str = None,
        extended_hours: bool = None,
        order_class: str = None,
        take_profit: dict = None,
        stop_loss: dict = None,
        trail_price: str = None,
        trail_percent: str = None,
        on_behalf_of: str = None,
    ) -> Order:
        """Submit an order on the personal account via the REST client."""
        o = self.alpaca_rest_client.submit_order(
            symbol.upper(),
            str(qty),
            side,
            order_type,
            time_in_force,
            limit_price,
            stop_price,
            client_order_id,
            extended_hours,
            order_class,
            take_profit,
            stop_loss,
            trail_price,
            trail_percent,
        )
        return self.to_order(o)

    async def _post_request(self, url: str, payload: Dict) -> Dict:
        """POST to the Broker API, retrying on 429/504 push-back."""
        response = requests.post(
            url=url,
            json=payload,
            auth=HTTPBasicAuth(
                self.alpaca_brokage_api_key, self.alpaca_brokage_api_secret
            ),
        )
        if response.status_code in (429, 504):
            await self._throttle(response)
            return await self._post_request(url, payload)
        if response.status_code in (200, 201, 204):
            return response.json()
        raise AssertionError(
            f"HTTP ERROR {response.status_code} from ALPACA BROKERAGE API with error {response.text}"
        )

    async def _get_request(self, url: str) -> Dict:
        """GET from the Broker API, retrying on 429/504 push-back."""
        response = requests.get(
            url=url,
            auth=HTTPBasicAuth(
                self.alpaca_brokage_api_key, self.alpaca_brokage_api_secret
            ),
        )
        if response.status_code in (429, 504):
            await self._throttle(response)
            return await self._get_request(url)
        if response.status_code in (200, 201, 204):
            return response.json()
        raise AssertionError(
            f"HTTP ERROR {response.status_code} from ALPACA BROKERAGE API with error {response.text}"
        )

    async def _delete_request(self, url: str) -> int:
        """DELETE against the Broker API; returns the final HTTP status code."""
        response = requests.delete(
            url=url,
            auth=HTTPBasicAuth(
                self.alpaca_brokage_api_key, self.alpaca_brokage_api_secret
            ),
        )
        if response.status_code in (429, 504):
            await self._throttle(response)
            return await self._delete_request(url)
        return response.status_code

    async def _order_on_behalf(
        self,
        symbol: str,
        qty: float,
        side: str,
        order_type: str,
        time_in_force: str,
        limit_price: str = None,
        stop_price: str = None,
        client_order_id: str = None,
        extended_hours: bool = None,
        order_class: str = None,
        take_profit: dict = None,
        stop_loss: dict = None,
        trail_price: str = None,
        trail_percent: str = None,
        on_behalf_of: str = None,
    ) -> Order:
        """Submit an order for the sub-account ``on_behalf_of``.

        NOTE(review): only limit_price and time_in_force of the optional
        parameters are forwarded to the Broker API — confirm the rest are
        intentionally unsupported.
        """
        if not self.alpaca_brokage_api_baseurl:
            raise AssertionError(
                "order_on_behalf can't be called, if brokerage configs incomplete"
            )
        endpoint: str = f"/v1/trading/accounts/{on_behalf_of}/orders"
        url: str = self.alpaca_brokage_api_baseurl + endpoint
        payload = {
            "symbol": symbol.upper(),
            "qty": qty,
            "side": side,
            "type": order_type,
        }
        if limit_price:
            payload["limit_price"] = limit_price
        if time_in_force:
            payload["time_in_force"] = time_in_force
        json_response: Dict = await self._post_request(
            url=url, payload=payload
        )
        tlog(f"ALPACA BROKERAGE RESPONSE: {json_response}")
        return self._json_to_order(json_response, on_behalf_of)

    async def submit_order(
        self,
        symbol: str,
        qty: float,
        side: str,
        order_type: str,
        time_in_force: str = "day",
        limit_price: str = None,
        stop_price: str = None,
        client_order_id: str = None,
        extended_hours: bool = None,
        order_class: str = None,
        take_profit: dict = None,
        stop_loss: dict = None,
        trail_price: str = None,
        trail_percent: str = None,
        on_behalf_of: str = None,
    ) -> Order:
        """Public entry point: route to brokerage or personal submission."""
        if on_behalf_of:
            return await self._order_on_behalf(
                symbol,
                qty,
                side,
                order_type,
                time_in_force,
                limit_price,
                stop_price,
                client_order_id,
                extended_hours,
                order_class,
                take_profit,
                stop_loss,
                trail_price,
                trail_percent,
                on_behalf_of,
            )
        else:
            return await self._personal_submit(
                symbol,
                qty,
                side,
                order_type,
                time_in_force,
                limit_price,
                stop_price,
                client_order_id,
                extended_hours,
                order_class,
                take_profit,
                stop_loss,
                trail_price,
                trail_percent,
                on_behalf_of,
            )

    @classmethod
    def _trade_from_dict(cls, trade_dict: Entity) -> Optional[Trade]:
        """Translate a stream trade-update entity into a Trade.

        Returns None for "new" events, which carry no fill information.
        (Stream *events* use a different vocabulary than order *statuses*,
        hence this mapping is separate from _status_to_event.)
        """
        if trade_dict.event == "new":
            return None
        if trade_dict.event in (
            "canceled",
            "suspended",
            "expired",
            "cancel_rejected",
        ):
            event = Order.EventType.canceled
        elif trade_dict.event == "rejected":
            event = Order.EventType.rejected
        elif trade_dict.event == "fill":
            event = Order.EventType.fill
        elif trade_dict.event == "partial_fill":
            event = Order.EventType.partial_fill
        else:
            event = Order.EventType.other
        return Trade(
            order_id=trade_dict.order["id"],
            symbol=trade_dict.order["symbol"].lower(),
            event=event,
            filled_qty=float(trade_dict.qty),
            trade_fee=0.0,
            filled_avg_price=float(
                trade_dict.order["filled_avg_price"] or 0.0
            ),
            liquidity="",
            updated_at=pd.Timestamp(
                ts_input=trade_dict.order["updated_at"],
                unit="ms",
                tz="US/Eastern",
            ),
            side=Order.FillSide[trade_dict.order["side"]],
        )

    @classmethod
    async def trade_update_handler(cls, data):
        """Websocket callback: fan a trade update out to all consumer queues."""
        try:
            trade = cls._trade_from_dict(data)
            if not trade:
                # "new" events carry no fill information; nothing to forward.
                return
            to_send = {
                "EV": "trade_update",
                "symbol": trade.symbol.lower(),
                "trade": trade.__dict__,
            }
            for q in cls.get_instance().queues.get_allqueues():
                q.put(to_send, timeout=1)
        except queue.Full as f:
            tlog(
                f"[EXCEPTION] process_message(): queue for {trade.symbol} is FULL:{f}, sleeping for 2 seconds and re-trying."
            )
            raise
        except Exception as e:
            tlog(f"[EXCEPTION] process_message(): exception {e}")
            if config.debug_enabled:
                traceback.print_exc()
|
#!/bin/bash
# Run the exact sampling-based gradient mesh experiment inside the
# nbwpgmain conda environment. Takes no arguments.
NCPU=1
CFG=cfg/exsig.yaml

if [ "$#" -ne 0 ]; then
    echo "USAGE:"
    echo "bash run.sh"
    exit 1
fi

eval "$(conda shell.bash hook)"
conda activate nbwpgmain
REPODIR=~/ws/nbwpg

# Fix: quote all expansions (word-splitting/glob hazard, ShellCheck
# SC2086) and use $(...) instead of legacy backticks.
echo "BEGIN --- $(date '+%Y-%m-%d %H:%M:%S')"
python "$REPODIR/make_gradsamplingbasedexact_mesh.py" \
    --cfg "$REPODIR/xprmt/biasgradexact-mesh/$CFG" \
    --ncpu "$NCPU" \
    --repo "$REPODIR"
echo "END --- $(date '+%Y-%m-%d %H:%M:%S')"
|
#==============================================================================
# CONTEXT ASSERTIONS - These require running the testable function with the
# 'run' command first.
#==============================================================================
#==============================================================================
# ASSERT_STATUS
#------------------------------------------------------------------------------
# Asserts the status of the last function or command run by the 'run'
# command.
#==============================================================================
# Verifies that 'assert_status' passes when the asserted status matches
# the tested function's return code.
test__assert_status() {
  dummy_function() {
    return 3
  }
  run dummy_function
  assert_status 3
}
# Expected failure: status 3 is asserted as 2.
should_fail__assert_status() {
  dm_tools__echo 'Expected [assert_status] failure.'
  dummy_function() {
    return 3
  }
  run dummy_function
  assert_status 2
}
#==============================================================================
# ASSERT_OUTPUT
#------------------------------------------------------------------------------
# Asserts the whole captured output of the last function or command run by
# the 'run' command.
#==============================================================================
# Verifies that 'assert_output' matches the whole captured output.
test__assert_output() {
  dummy_function() {
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output 'hello'
}
# Expected failure: asserted output differs from the actual output.
should_fail__assert_output__mismatch() {
  dm_tools__echo 'Expected [assert_output] failure.'
  dummy_function() {
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output 'bye'
}
# Expected failure: 'assert_output' cannot match multi-line output.
should_fail__assert_output__multiline_output_fails_assertion() {
  dm_tools__echo 'Expected [assert_output] failure.'
  dummy_function() {
    dm_tools__echo 'hello 1'
    dm_tools__echo 'hello 2'
  }
  run dummy_function
  assert_output 'hello 1\nhello 2'
}
#==============================================================================
# ASSERT_OUTPUT_LINE_COUNT
#------------------------------------------------------------------------------
# Asserts the line count of the output of the last function or command run
# by the 'run' command.
#==============================================================================
# Verifies that 'assert_output_line_count' counts the output lines.
test__assert_output_line_count() {
  dummy_function() {
    dm_tools__echo 'hello 1'
    dm_tools__echo 'hello 2'
    dm_tools__echo 'hello 3'
  }
  run dummy_function
  assert_output_line_count 3
}
# Expected failure: asserted line count differs from the actual count.
should_fail__assert_output_line_count() {
  dm_tools__echo 'Expected [assert_output_line_count] failure.'
  dummy_function() {
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output_line_count 42
}
#==============================================================================
# ASSERT_OUTPUT_LINE_AT_INDEX
#------------------------------------------------------------------------------
# Asserts the indexed line of the output of the last function or command
# run by the 'run' command. Lines are indexed from 1.
#==============================================================================
# Verifies exact matching of a single output line selected by 1-based index.
test__assert_output_line_at_index() {
  dummy_function() {
    dm_tools__echo 'hello 1'
    dm_tools__echo 'hello 2'
    dm_tools__echo 'hello 3'
  }
  run dummy_function
  assert_output_line_at_index 2 'hello 2'
}
# Expected failure: index is beyond the last output line.
should_fail__assert_output_line_at_index__invalid_index__case_1() {
  dm_tools__echo 'Expected [assert_output_line_at_index] failure.'
  dummy_function() {
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output_line_at_index 42 'invalid index'
}
# Expected failure: index 0 is invalid (lines are indexed from 1).
should_fail__assert_output_line_at_index__invalid_index__case_2() {
  dm_tools__echo 'Expected [assert_output_line_at_index] failure.'
  dummy_function() {
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output_line_at_index 0 'invalid index'
}
# Expected failure: a negative index is invalid.
should_fail__assert_output_line_at_index__invalid_index__case_3() {
  dm_tools__echo 'Expected [assert_output_line_at_index] failure.'
  dummy_function() {
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output_line_at_index -1 'invalid index'
}
# Expected failure: a partial value must not match, since the assertion
# compares the whole line.
should_fail__assert_output_line_at_index__no_match() {
  dm_tools__echo 'Expected [assert_output_line_at_index] failure.'
  dummy_function() {
    dm_tools__echo 'hello 1'
    dm_tools__echo 'hello 2'
    dm_tools__echo 'hello 3'
  }
  run dummy_function
  # This assertion matches the whole line.
  assert_output_line_at_index 2 'hello'
}
# Verifies that an empty output line can be asserted as empty.
# NOTE(review): the doubled "line_line" in the name looks like a typo, but
# renaming could affect test discovery — confirm before changing.
test__assert_output_line_line_at_index__empty_line_can_be_validated() {
  dummy_function() {
    dm_tools__echo ''
  }
  run dummy_function
  assert_output_line_at_index 1 ''
}
# Expected failure: an empty output line is compared, not skipped.
should_fail__assert_output_line_at_index__empty_line_wont_get_ignored() {
  dummy_function() {
    dm_tools__echo ''
  }
  run dummy_function
  assert_output_line_at_index 1 'not empty line'
}
#==============================================================================
# ASSERT_OUTPUT_LINE_PARTIALLY_AT_INDEX
#------------------------------------------------------------------------------
# Partially (substring) asserts the indexed line of the output of the last
# function or command run by the 'run' command. Lines are indexed from 1.
#==============================================================================
# Verifies substring matching of an output line selected by 1-based index.
test__assert_output_line_partially_at_index() {
  dummy_function() {
    dm_tools__echo 'hello 1'
    dm_tools__echo 'hello 2'
    dm_tools__echo 'hello 3'
  }
  run dummy_function
  assert_output_line_partially_at_index 2 'hello'
}
# Expected failure: index is beyond the last output line.
should_fail__assert_output_line_partially_at_index__invalid_index__case_1() {
  dm_tools__echo 'Expected [assert_output_line_partially_at_index] failure.'
  dummy_function() {
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output_line_partially_at_index 42 'hello'
}
# Expected failure: index 0 is invalid (lines are indexed from 1).
should_fail__assert_output_line_partially_at_index__invalid_index__case_2() {
  dm_tools__echo 'Expected [assert_output_line_partially_at_index] failure.'
  dummy_function() {
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output_line_partially_at_index 0 'hello'
}
# Expected failure: a negative index is invalid.
should_fail__assert_output_line_partially_at_index__invalid_index__case_3() {
  dm_tools__echo 'Expected [assert_output_line_partially_at_index] failure.'
  dummy_function() {
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
    dm_tools__echo 'hello'
  }
  run dummy_function
  assert_output_line_partially_at_index -1 'hello'
}
# Expected failure: the substring is not contained in the selected line.
should_fail__assert_output_line_partially_at_index__no_match() {
  dm_tools__echo 'Expected [assert_output_line_partially_at_index] failure.'
  dummy_function() {
    dm_tools__echo 'hello 1'
    dm_tools__echo 'hello 2'
    dm_tools__echo 'hello 3'
  }
  run dummy_function
  assert_output_line_partially_at_index 2 'unrelated content'
}
# Verifies that an empty line can be partially matched with an empty string.
test__assert_output_line_partially_at_index__empty_line_can_be_validated() {
  dummy_function() {
    dm_tools__echo ''
  }
  run dummy_function
  assert_output_line_partially_at_index 1 ''
}
# Expected failure: an empty output line is compared, not skipped.
should_fail__assert_output_line_partially_at_index__empty_line_wont_get_ignored() {
  dummy_function() {
    dm_tools__echo ''
  }
  run dummy_function
  assert_output_line_partially_at_index 1 'not empty line'
}
#==============================================================================
# ASSERT_ERROR
#------------------------------------------------------------------------------
# Asserts the whole captured error output of the last function or command
# run by the 'run' command.
#==============================================================================
test__assert_error() {
dummy_function() {
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error 'hello'
}
should_fail__assert_error__mismatch() {
dm_tools__echo 'Expected [assert_error] failure.'
dummy_function() {
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error 'bye'
}
should_fail__assert_error__multiline_output_fails_assertion() {
dm_tools__echo 'Expected [assert_error] failure.'
dummy_function() {
dm_tools__echo 'hello 1' >&2
dm_tools__echo 'hello 2' >&2
}
run dummy_function
assert_error 'hello 1\nhello 2'
}
#==============================================================================
# ASSERT_NO_ERROR
#------------------------------------------------------------------------------
# Asserts that the error output captured from the last function or command
# run by the 'run' command is empty.
#==============================================================================
test__assert_no_error() {
dummy_function() {
dm_tools__echo 'hello'
}
run dummy_function
assert_no_error
}
should_fail__assert_no_error() {
  # Consistency fix: every other should_fail__* case announces the expected
  # assertion failure before triggering it; this one was missing the echo.
  dm_tools__echo 'Expected [assert_no_error] failure.'
  dummy_function() {
    dm_tools__echo 'error line 1' >&2
    dm_tools__echo 'error line 2' >&2
  }
  run dummy_function
  assert_no_error
}
#==============================================================================
# ASSERT_ERROR_LINE_COUNT
#------------------------------------------------------------------------------
# Asserts the line count of the error output of the last function or command
# run by the 'run' command.
#==============================================================================
test__assert_error_line_count() {
dummy_function() {
dm_tools__echo 'hello 1' >&2
dm_tools__echo 'hello 2' >&2
dm_tools__echo 'hello 3' >&2
}
run dummy_function
assert_error_line_count 3
}
should_fail__assert_error_line_count() {
dm_tools__echo 'Expected [assert_error_line_count] failure.'
dummy_function() {
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error_line_count 42
}
#==============================================================================
# ASSERT_ERROR_LINE_AT_INDEX
#------------------------------------------------------------------------------
# Asserts the indexed line of the error output of the last function or
# command run by the 'run' command. Lines are indexed from 1.
#==============================================================================
test__assert_error_line_at_index() {
dummy_function() {
dm_tools__echo 'hello 1' >&2
dm_tools__echo 'hello 2' >&2
dm_tools__echo 'hello 3' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_at_index 2 'hello 2'
}
should_fail__assert_error_line_at_index__invalid_index__case_1() {
dm_tools__echo 'Expected [assert_error_line_at_index] failure.'
dummy_function() {
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_at_index 42 'invalid index'
}
should_fail__assert_error_line_at_index__invalid_index__case_2() {
dm_tools__echo 'Expected [assert_error_line_at_index] failure.'
dummy_function() {
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_at_index 0 'invalid index'
}
should_fail__assert_error_line_at_index__invalid_index__case_3() {
dm_tools__echo 'Expected [assert_error_line_at_index] failure.'
dummy_function() {
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_at_index -1 'invalid index'
}
should_fail__assert_error_line_at_index__no_match() {
dm_tools__echo 'Expected [assert_error_line_at_index] failure.'
dummy_function() {
dm_tools__echo 'hello 1' >&2
dm_tools__echo 'hello 2' >&2
dm_tools__echo 'hello 3' >&2
}
run dummy_function
assert_error_line_count 3
# This assertion matches the whole line.
assert_error_line_at_index 2 'hello'
}
test__assert_error_line_at_index__empty_line_can_be_validated() {
  # Renamed from 'test__assert_output_line_line_at_index__...': this case
  # exercises the error-line assertions (note the >&2 redirect below), not
  # the output-line ones, and the old name duplicated the 'line' token.
  dummy_function() {
    dm_tools__echo '' >&2
  }
  run dummy_function
  assert_error_line_count 1
  assert_error_line_at_index 1 ''
}
should_fail__assert_error_line_at_index__empty_line_wont_get_ignored() {
dummy_function() {
dm_tools__echo '' >&2
}
run dummy_function
assert_error_line_count 1
assert_error_line_at_index 1 'not empty line'
}
#==============================================================================
# ASSERT_ERROR_LINE_PARTIALLY_AT_INDEX
#------------------------------------------------------------------------------
# Asserts, by partial match, the indexed line of the error output of the last
# function or command run by the 'run' command. Lines are indexed from 1.
#==============================================================================
test__assert_error_line_partially_at_index() {
dummy_function() {
dm_tools__echo 'hello 1' >&2
dm_tools__echo 'hello 2' >&2
dm_tools__echo 'hello 3' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_partially_at_index 2 'hello'
}
should_fail__assert_error_line_partially_at_index__invalid_index__case_1() {
dm_tools__echo 'Expected [assert_error_line_partially_at_index] failure.'
dummy_function() {
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_partially_at_index 42 'hello'
}
should_fail__assert_error_line_partially_at_index__invalid_index__case_2() {
dm_tools__echo 'Expected [assert_error_line_partially_at_index] failure.'
dummy_function() {
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_partially_at_index 0 'hello'
}
should_fail__assert_error_line_partially_at_index__invalid_index__case_3() {
dm_tools__echo 'Expected [assert_error_line_partially_at_index] failure.'
dummy_function() {
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
dm_tools__echo 'hello' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_partially_at_index -1 'hello'
}
should_fail__assert_error_line_partially_at_index__no_match() {
dm_tools__echo 'Expected [assert_error_line_partially_at_index] failure.'
dummy_function() {
dm_tools__echo 'hello 1' >&2
dm_tools__echo 'hello 2' >&2
dm_tools__echo 'hello 3' >&2
}
run dummy_function
assert_error_line_count 3
assert_error_line_partially_at_index 2 'unrelated content'
}
test__assert_error_line_partially_at_index__empty_line_can_be_validated() {
dummy_function() {
dm_tools__echo '' >&2
}
run dummy_function
assert_error_line_count 1
assert_error_line_partially_at_index 1 ''
}
should_fail__assert_error_line_partially_at_index__empty_line_wont_get_ignored() {
dummy_function() {
dm_tools__echo '' >&2
}
run dummy_function
assert_error_line_count 1
assert_error_line_partially_at_index 1 'not empty line'
}
#==============================================================================
# WORD SPLITTING IS NOT A PROBLEM WITH THE RUN FUNCTION
#------------------------------------------------------------------------------
# Re-splitting should not occur when using the 'run' command to provide a
# testing context.
#==============================================================================
test__word_splitting_validation() {
dummy_function() {
dm_tools__echo "$#"
}
param_3='param 3'
# If word splitting would occur inside the run function, it would report 6
# parameters instead of the correct 3 params here.
run dummy_function 'param 1' 'param 2' "$param_3"
assert_output '3'
}
|
package com.meterware.pseudoserver;
/********************************************************************************************************************
* $Id: PseudoServer.java 826 2008-03-28 00:30:12Z russgold $
*
* Copyright (c) 2000-2003, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
*******************************************************************************************************************/
import java.io.*;
import java.net.HttpURLConnection;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.*;
import com.meterware.httpunit.HttpUnitUtils;
/**
* A basic simulated web-server for testing user agents without a web server.
**/
public class PseudoServer {
private static final int DEFAULT_SOCKET_TIMEOUT = 1000;
private static final int INPUT_POLL_INTERVAL = 10;
/** Time in msec to wait for an outstanding server socket to be released before creating a new one. **/
private static int _socketReleaseWaitTime = 50;
/** Number of outstanding server sockets that must be present before trying to wait for one to be released. **/
private static int _waitThreshhold = 10;
private static int _numServers = 0;
private int _serverNum = 0;
private int _connectionNum = 0;
private ArrayList _classpathDirs = new ArrayList();
private String _maxProtocolLevel = "1.1";
private final int _socketTimeout;
/**
* Returns the amount of time the pseudo server will wait for a server socket to be released (in msec)
* before allocating a new one. See also {@link #getWaitThreshhold getWaitThreshhold}.
*/
public static int getSocketReleaseWaitTime() {
return _socketReleaseWaitTime;
}
/**
* Returns the amount of time the pseudo server will wait for a server socket to be released (in msec)
* before allocating a new one. See also {@link #getWaitThreshhold getWaitThreshhold}.
*/
public static void setSocketReleaseWaitTime( int socketReleaseWaitTime ) {
_socketReleaseWaitTime = socketReleaseWaitTime;
}
/**
* Returns the number of server sockets that must have been allocated and not returned before waiting for one
* to be returned.
*/
public static int getWaitThreshhold() {
return _waitThreshhold;
}
/**
* Specifies the number of server sockets that must have been allocated and not returned before waiting for one
* to be returned.
*/
public static void setWaitThreshhold( int waitThreshhold ) {
_waitThreshhold = waitThreshhold;
}
public PseudoServer() {
this( DEFAULT_SOCKET_TIMEOUT );
}
public PseudoServer( int socketTimeout ) {
_socketTimeout = socketTimeout;
_serverNum = ++_numServers;
Thread t = new Thread( "PseudoServer " + _serverNum ) {
public void run() {
while (_active) {
try {
handleNewConnection( getServerSocket().accept() );
Thread.sleep( 20 );
} catch (InterruptedIOException e) {
} catch (IOException e) {
System.out.println( "Error in pseudo server: " + e );
HttpUnitUtils.handleException(e);
} catch (InterruptedException e) {
System.out.println( "Interrupted. Shutting down" );
_active = false;
}
}
try {
if (_serverSocket != null) ServerSocketFactory.release( _serverSocket );
_serverSocket = null;
} catch (IOException e) {
}
debug( "Pseudoserver shutting down" );
}
};
debug( "Starting pseudoserver" );
t.start();
}
public void shutDown() {
debug( "Requested shutdown of pseudoserver" );
_active = false;
}
private void debug( String message ) {
if (!_debug) return;
message = replaceDebugToken( message, "thread", "thread (" + Thread.currentThread().getName() + ")" );
message = replaceDebugToken( message, "server", "server " + _serverNum );
System.out.println( "** " + message );
}
private static String replaceDebugToken( String message, String token, String replacement ) {
return message.indexOf( token ) < 0 ? message : message.replaceFirst( token, replacement );
}
public void setMaxProtocolLevel( int majorLevel, int minorLevel ) {
_maxProtocolLevel = majorLevel + "." + minorLevel;
}
/**
* Returns the port on which this server is listening.
**/
public int getConnectedPort() throws IOException {
return getServerSocket().getLocalPort();
}
/**
* Defines the contents of an expected resource.
**/
public void setResource( String name, String value ) {
setResource( name, value, "text/html" );
}
/**
* Defines the contents of an expected resource.
**/
public void setResource( String name, PseudoServlet servlet ) {
_resources.put( asResourceName( name ), servlet );
}
/**
* Defines the contents of an expected resource.
**/
public void setResource( String name, String value, String contentType ) {
_resources.put( asResourceName( name ), new WebResource( value, contentType ) );
}
/**
* Defines the contents of an expected resource.
**/
public void setResource( String name, byte[] value, String contentType ) {
_resources.put( asResourceName( name ), new WebResource( value, contentType ) );
}
/**
* Defines a resource which will result in an error message.
**/
public void setErrorResource( String name, int errorCode, String errorMessage ) {
_resources.put( asResourceName( name ), new WebResource( errorMessage, errorCode ) );
}
/**
* Enables the sending of the character set in the content-type header.
**/
public void setSendCharacterSet( String name, boolean enabled ) {
WebResource resource = (WebResource) _resources.get( asResourceName( name ) );
if (resource == null) throw new IllegalArgumentException( "No defined resource " + name );
resource.setSendCharacterSet( enabled );
}
/**
* Specifies the character set encoding for a resource.
**/
public void setCharacterSet( String name, String characterSet ) {
WebResource resource = (WebResource) _resources.get( asResourceName( name ) );
if (resource == null) {
resource = new WebResource( "" );
_resources.put( asResourceName( name ), resource );
}
resource.setCharacterSet( characterSet );
}
/**
* Adds a header to a defined resource.
**/
public void addResourceHeader( String name, String header ) {
WebResource resource = (WebResource) _resources.get( asResourceName( name ) );
if (resource == null) {
resource = new WebResource( "" );
_resources.put( asResourceName( name ), resource );
}
resource.addHeader( header );
}
public void mapToClasspath( String directory ) {
_classpathDirs.add( directory );
}
public void setDebug( boolean debug ) {
_debug = debug;
}
//------------------------------------- private members ---------------------------------------
private Hashtable _resources = new Hashtable();
private boolean _active = true;
private boolean _debug;
private String asResourceName( String rawName ) {
if (rawName.startsWith( "http:" ) || rawName.startsWith( "/" )) {
return escape( rawName );
} else {
return escape( "/" + rawName );
}
}
private static String escape( String urlString ) {
if (urlString.indexOf( ' ' ) < 0) return urlString;
StringBuffer sb = new StringBuffer();
int start = 0;
do {
int index = urlString.indexOf( ' ', start );
if (index < 0) {
sb.append( urlString.substring( start ) );
break;
} else {
sb.append( urlString.substring( start, index ) ).append( "%20" );
start = index+1;
}
} while (true);
return sb.toString();
}
private void handleNewConnection( final Socket socket ) {
Thread t = new Thread( "PseudoServer " + _serverNum + " connection " + (++_connectionNum) ) {
public void run() {
try {
serveRequests( socket );
} catch (IOException e) {
e.printStackTrace(); //To change body of catch statement use Options | File Templates.
}
}
};
t.start();
}
private void serveRequests( Socket socket ) throws IOException {
socket.setSoTimeout( _socketTimeout );
socket.setTcpNoDelay( true );
debug( "Created server thread " + socket.getInetAddress() + ':' + socket.getPort() );
final BufferedInputStream inputStream = new BufferedInputStream( socket.getInputStream() );
final HttpResponseStream outputStream = new HttpResponseStream( socket.getOutputStream() );
try {
while (_active) {
HttpRequest request = new HttpRequest( inputStream );
boolean keepAlive = respondToRequest( request, outputStream );
if (!keepAlive) break;
while (_active && 0 == inputStream.available()) {
try { Thread.sleep( INPUT_POLL_INTERVAL ); } catch (InterruptedException e) {}
}
}
} catch (IOException e) {
outputStream.restart();
outputStream.setProtocol( "HTTP/1.0" );
outputStream.setResponse( HttpURLConnection.HTTP_BAD_REQUEST, e.toString() );
}
debug( "Closing server thread" );
outputStream.close();
socket.close();
debug( "Server thread closed" );
}
private boolean respondToRequest( HttpRequest request, HttpResponseStream response ) {
debug( "Server thread handling request: " + request );
boolean keepAlive = isKeepAlive( request );
WebResource resource = null;
try {
response.restart();
response.setProtocol( getResponseProtocol( request ) );
resource = getResource( request );
if (resource == null) {
response.setResponse( HttpURLConnection.HTTP_NOT_FOUND, "unable to find " + request.getURI() );
} else {
if (resource.closesConnection()) keepAlive = false;
if (resource.getResponseCode() != HttpURLConnection.HTTP_OK) {
response.setResponse( resource.getResponseCode(), "" );
}
String[] headers = resource.getHeaders();
for (int i = 0; i < headers.length; i++) {
debug( "Server thread sending header: " + headers[i] );
response.addHeader( headers[i] );
}
}
} catch (UnknownMethodException e) {
response.setResponse( HttpURLConnection.HTTP_BAD_METHOD, "unsupported method: " + e.getMethod() );
} catch (Throwable t) {
t.printStackTrace();
response.setResponse( HttpURLConnection.HTTP_INTERNAL_ERROR, t.toString() );
}
try { response.write( resource ); } catch (IOException e) { System.out.println( "*** Failed to send reply: " + e ); }
return keepAlive;
}
    /**
     * Reports whether the connection should stay open after this request:
     * the client must have asked for keep-alive AND this server must be
     * configured for protocol level 1.1.
     *
     * NOTE(review): this compares with equals("1.1") while
     * getResponseProtocol uses equalsIgnoreCase — confirm the inconsistency
     * is intentional. It is harmless today because _maxProtocolLevel is only
     * ever built from digits ("major.minor").
     */
    private boolean isKeepAlive( HttpRequest request ) {
        return request.wantsKeepAlive() && _maxProtocolLevel.equals( "1.1" );
    }
private String getResponseProtocol( HttpRequest request ) {
return _maxProtocolLevel.equalsIgnoreCase( "1.1" ) ? request.getProtocol() : "HTTP/1.0";
}
private WebResource getResource( HttpRequest request ) throws IOException {
Object resource = _resources.get( request.getURI() );
if (resource == null) resource = _resources.get( withoutParameters( request.getURI() ) );
if (request.getCommand().equals( "GET" ) && resource instanceof WebResource) {
return (WebResource) resource;
} else if (resource instanceof PseudoServlet) {
return getResource( (PseudoServlet) resource, request );
} else if (request.getURI().endsWith( ".class" )) {
for (Iterator iterator = _classpathDirs.iterator(); iterator.hasNext();) {
String directory = (String) iterator.next();
if (request.getURI().startsWith( directory )) {
String resourceName = request.getURI().substring( directory.length()+1 );
return new WebResource( getClass().getClassLoader().getResourceAsStream( resourceName ), "application/class", 200 );
}
}
return null;
} else if (request.getURI().endsWith( ".zip" ) || request.getURI().endsWith( ".jar" )) {
for (Iterator iterator = _classpathDirs.iterator(); iterator.hasNext();) {
String directory = (String) iterator.next();
if (request.getURI().startsWith( directory )) {
String resourceName = request.getURI().substring( directory.length()+1 );
String classPath = System.getProperty( "java.class.path" );
StringTokenizer st = new StringTokenizer( classPath, ":;," );
while (st.hasMoreTokens()) {
String file = st.nextToken();
if (file.endsWith( resourceName )) {
File f = new File( file );
return new WebResource( new FileInputStream( f ), "application/zip", 200 );
}
}
}
}
return null;
} else {
return null;
}
}
private String withoutParameters( String uri ) {
return uri.indexOf( '?' ) < 0 ? uri : uri.substring( 0, uri.indexOf( '?' ) );
}
private WebResource getResource( PseudoServlet servlet, HttpRequest request ) throws IOException {
servlet.init( request );
return servlet.getResponse( request.getCommand() );
}
private ServerSocket getServerSocket() throws IOException {
synchronized (this) {
if (_serverSocket == null) _serverSocket = ServerSocketFactory.newServerSocket();
}
return _serverSocket;
}
private ServerSocket _serverSocket;
}
class HttpResponseStream {
final private static String CRLF = "\r\n";
void restart() {
_headersWritten = false;
_headers.clear();
_responseCode = HttpURLConnection.HTTP_OK;
_responseText = "OK";
}
void close() throws IOException {
flushHeaders();
_pw.close();
}
    /**
     * Wraps the raw socket output stream; headers and body text are written
     * through a PrintWriter whose initial character set is us-ascii.
     */
    HttpResponseStream( OutputStream stream ) {
        _stream = stream;
        try {
            setCharacterSet( "us-ascii" );
        } catch (UnsupportedEncodingException e) {
            // us-ascii is supported by every JVM; fall back to the platform
            // default writer just in case.
            _pw = new PrintWriter( new OutputStreamWriter( _stream ) );
        }
    }
void setProtocol( String protocol ) {
_protocol = protocol;
}
void setResponse( int responseCode, String responseText ) {
_responseCode = responseCode;
_responseText = responseText;
}
void addHeader( String header ) {
_headers.addElement( header );
}
void write( String contents, String charset ) throws IOException {
flushHeaders();
setCharacterSet( charset );
sendText( contents );
}
void write( WebResource resource ) throws IOException {
flushHeaders();
if (resource != null) resource.writeTo( _stream );
_stream.flush();
}
private void setCharacterSet( String characterSet ) throws UnsupportedEncodingException {
if (_pw != null) _pw.flush();
_pw = new PrintWriter( new OutputStreamWriter( _stream, characterSet ) );
}
private void flushHeaders() {
if (!_headersWritten) {
sendResponse( _responseCode, _responseText );
for (Enumeration e = _headers.elements(); e.hasMoreElements();) {
sendLine( (String) e.nextElement() );
}
sendText( CRLF );
_headersWritten = true;
_pw.flush();
}
}
private void sendResponse( int responseCode, String responseText ) {
sendLine( _protocol + ' ' + responseCode + ' ' + responseText );
}
private void sendLine( String text ) {
sendText( text );
sendText( CRLF );
}
private void sendText( String text ) {
_pw.write( text );
}
private OutputStream _stream;
private PrintWriter _pw;
private Vector _headers = new Vector();
private String _protocol = "HTTP/1.0";
private int _responseCode = HttpURLConnection.HTTP_OK;
private String _responseText = "OK";
private boolean _headersWritten;
}
/**
 * Pools server sockets across PseudoServer instances so that rapid
 * create/destroy cycles in tests do not exhaust ephemeral ports.
 */
class ServerSocketFactory {

    static private ArrayList _sockets = new ArrayList();
    /** Sockets handed out by newServerSocket and not yet released. */
    static private int _outstandingSockets;
    static private Object _releaseSemaphore = new Object();

    /**
     * Hands out a server socket, reusing a pooled one when available. When
     * the pool is empty and many sockets are outstanding, briefly waits for
     * one to be released before allocating a fresh socket.
     */
    static synchronized ServerSocket newServerSocket() throws IOException {
        if (_sockets.isEmpty() && _outstandingSockets > PseudoServer.getWaitThreshhold()) {
            try {
                synchronized (_releaseSemaphore) {
                    _releaseSemaphore.wait( PseudoServer.getSocketReleaseWaitTime() );
                }
            } catch (InterruptedException e) {
            }
        }
        _outstandingSockets++;
        if (!_sockets.isEmpty()) {
            return (ServerSocket) _sockets.remove( 0 );
        } else {
            ServerSocket serverSocket = new ServerSocket( 0 );   // port 0: OS picks a free port
            serverSocket.setSoTimeout( 1000 );
            return serverSocket;
        }
    }

    /**
     * Returns a socket to the pool, or closes it when the pool is already
     * large enough.
     *
     * Bug fix: _outstandingSockets was only decremented on the pooling
     * branch; sockets that were closed outright left the counter inflated,
     * eventually making every newServerSocket() call wait unnecessarily.
     * The counter is now decremented on both paths.
     */
    static synchronized void release( ServerSocket serverSocket ) throws IOException {
        _outstandingSockets--;
        if (_sockets.size() >= 2 * PseudoServer.getWaitThreshhold()) {
            serverSocket.close();
        } else {
            _sockets.add( serverSocket );
            synchronized (_releaseSemaphore) { _releaseSemaphore.notify(); }
        }
    }
}
class RecordingOutputStream extends OutputStream {
private OutputStream _nestedStream;
private PrintStream _log;
public RecordingOutputStream( OutputStream nestedStream, PrintStream log ) {
_nestedStream = nestedStream;
_log = log;
}
public void write( int b ) throws IOException {
_nestedStream.write( b );
_log.println( "sending " + Integer.toHexString( b ) );
}
public void write( byte b[], int offset, int len ) throws IOException {
_nestedStream.write( b, offset, len );
_log.print( "sending" );
for (int i = offset; i < offset+len; i++) {
_log.print( ' ' + Integer.toHexString( b[i] ) );
}
_log.println();
}
}
class RecordingInputStream extends InputStream {
private InputStream _nestedStream;
private PrintStream _log;
public RecordingInputStream( InputStream nestedStream, PrintStream log ) {
_nestedStream = nestedStream;
_log = log;
}
public int read() throws IOException {
int value = _nestedStream.read();
if (value != -1) _log.print( ' ' + Integer.toHexString( value ) );
return value;
}
} |
let _ = require("lodash");
class WidgetResolutionService {
    constructor() {
        // Resolvers are consulted in registration order.
        this.resolvers = [];
    }

    addResolver(resolver) {
        this.resolvers.push(resolver);
    }

    /**
     * Returns the widget produced by the first registered resolver that can
     * handle `type`, or undefined when none match.
     *
     * Bug fix: the previous implementation misused _.reduce — the iteratee
     * returned `undefined` whenever a resolver did not match, so the
     * accumulator was lost and `result.push` threw on the next iteration as
     * soon as two or more resolvers were registered. A plain loop with an
     * early return implements the intended "first match wins" behavior.
     */
    resolve(type) {
        for (const resolver of this.resolvers) {
            const widget = resolver.resolve(type);
            if (widget) {
                return widget;
            }
        }
        return undefined;
    }
}
// Angular-style module registration hook.
// NOTE(review): the provider registration is commented out, so requiring
// this module currently registers nothing — confirm whether this is
// intentional or leftover from a refactor.
module.exports = function(mod) {
    //mod.provider("WidgetResolutionService", WidgetResolutionService);
};
|
<reponame>xyproto/copy
// progress.go
package main
import (
"fmt"
"sync"
"time"
)
type ProgressKeeper interface {
Register(id string, progress float64, donemsg string)
String() string
Get() float64
Message(id string) string
SetMessage(msg string)
}
type SimpleProgressKeeper struct {
progress map[string]float64
msgs map[string]string
mut *sync.RWMutex
currentID string
}
// NewSimpleProgressKeeper creates a ready-to-use SimpleProgressKeeper.
//
// Bug fix: the progress and msgs maps must be allocated up front. Register,
// Complete and SetMessage all write to them, and writing to a nil map
// panics at runtime; the previous constructor only initialized the mutex.
func NewSimpleProgressKeeper() *SimpleProgressKeeper {
	return &SimpleProgressKeeper{
		progress: make(map[string]float64),
		msgs:     make(map[string]string),
		mut:      &sync.RWMutex{},
	}
}
// Register records a progress value and, when the given id is not the one
// currently tracked, schedules Complete for that id after this call returns.
//
// NOTE(review): the progress value is stored under spk.currentID — not under
// the id argument — and currentID is never assigned anywhere in this file.
// TODO confirm whether this should be spk.progress[id] = progress and/or
// spk.currentID = id.
func (spk *SimpleProgressKeeper) Register(id string, progress float64, donemsg string) {
	spk.mut.Lock()
	if id != spk.currentID {
		// Deferred call runs after the Unlock below, so Complete can safely
		// take the lock itself without deadlocking.
		defer spk.Complete(id, donemsg)
	}
	spk.progress[spk.currentID] = progress
	spk.mut.Unlock()
}
// String renders the progress of the currently tracked id as a percentage,
// rounding to the nearest whole percent (+0.5 before truncation).
func (spk *SimpleProgressKeeper) String() string {
	spk.mut.RLock()
	percentage := int(100.0*spk.progress[spk.currentID] + 0.5)
	spk.mut.RUnlock()
	return fmt.Sprintf("The current progress is %d%%", percentage)
}

// Get returns the raw progress (0.0–1.0) of the currently tracked id.
func (spk *SimpleProgressKeeper) Get() float64 {
	spk.mut.RLock()
	progress := spk.progress[spk.currentID]
	spk.mut.RUnlock()
	return progress
}

// Complete marks the given id as finished (progress 1.0) and stores its
// completion message.
func (spk *SimpleProgressKeeper) Complete(id string, msg string) {
	// id is done
	spk.mut.Lock()
	spk.progress[id] = 1.0
	spk.msgs[id] = msg
	spk.mut.Unlock()
}

// Message returns the stored message for the given id ("" when none).
func (spk *SimpleProgressKeeper) Message(id string) string {
	spk.mut.RLock()
	msg := spk.msgs[id]
	spk.mut.RUnlock()
	return msg
}

// SetMessage stores a message for the currently tracked id.
func (spk *SimpleProgressKeeper) SetMessage(msg string) {
	spk.mut.Lock()
	spk.msgs[spk.currentID] = msg
	spk.mut.Unlock()
}
// Write can report the progress of an implementation of the ProgressKeeper.
// delay is how long to wait between each status update of the progress
// indicator. Calls wg.Done() once the progress reaches 1.0.
//
// NOTE(review): both branches print p identically; the if/else only decides
// whether to break — TODO confirm the completed line was not meant to look
// different.
func Write(wg *sync.WaitGroup, p ProgressKeeper, delay time.Duration) {
	// endless loop without a timeout, on purpose
	for {
		if p.Get() >= 1.0 {
			fmt.Println(p)
			break
		} else {
			fmt.Println(p)
		}
		time.Sleep(delay)
	}
	wg.Done()
}
// back moves the terminal cursor one column to the left (backspace).
func back() {
	fmt.Print("\b")
}
// startOfLine moves the terminal cursor to the start of the current line.
func startOfLine() {
	fmt.Print("\r")
}
// TODO: Think this through to allow multiple file copying operations at once,
// that all report progress.

// Spin reports the progress of a ProgressKeeper implementation with a
// rotating terminal spinner, redrawing on the same line. delay is how long
// to wait between each status update. Calls wg.Done() when the progress
// reaches 1.0.
func Spin(wg *sync.WaitGroup, p ProgressKeeper, delay time.Duration) {
	frames := []string{"|", "/", "-", "\\"}
	frame := 0
	// Endless loop without a timeout, on purpose.
	for {
		startOfLine()
		id := "?"
		fmt.Printf("[%s] %s", frames[frame], p.Message(id))
		frame = (frame + 1) % len(frames)
		if p.Get() >= 1.0 {
			break
		}
		time.Sleep(delay)
	}
	fmt.Println()
	fmt.Printf("[✓] done")
	fmt.Println(" ")
	wg.Done()
}
|
#!/bin/bash
# Setup default tool path. All tools will be located here.
PROJ_ROOT="$HOME/binkit/"
TOOL_PATH="$HOME/tools/"
export PROJ_ROOT TOOL_PATH
mkdir -p "$TOOL_PATH"
# crosstool-NG locations: the ct-ng binary, its checkout, this project's
# configuration fragments, and a cache directory for downloaded tarballs.
CTNG_BIN="$TOOL_PATH/crosstool-ng/ct-ng"
CTNG_PATH="$TOOL_PATH/crosstool-ng"
CTNG_CONF_PATH="$PROJ_ROOT/ctng_conf"
CTNG_TARBALL_PATH="$TOOL_PATH/ctng_tarballs"
export CTNG_CONF_PATH CTNG_BIN CTNG_PATH CTNG_TARBALL_PATH
mkdir -p "$CTNG_TARBALL_PATH"
# Parallel build settings consumed by the build scripts.
NUM_JOBS=8
MAX_JOBS=8
export NUM_JOBS MAX_JOBS
|
<gh_stars>0
package com.github.masiuchi.mtdataapi;
/**
 * Holds a username/password pair for HTTP Basic authentication.
 * An empty string means the credential has not been supplied.
 */
public class BasicAuth {
    public String username = "";
    public String password = "";

    /**
     * Reports whether both credentials have been supplied, i.e. neither is
     * null nor the empty string.
     */
    public boolean isSet() {
        final boolean hasUser = username != null && !username.isEmpty();
        final boolean hasPassword = password != null && !password.isEmpty();
        return hasUser && hasPassword;
    }
}
|
export * from './useChild'
export * from './useStyle'
|
<gh_stars>10-100
/*
* Copyright 2019 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.smallrye.jwt.build;
import static org.junit.Assert.assertThrows;
import java.security.Key;
import org.jose4j.jwe.JsonWebEncryption;
import org.jose4j.jwt.JwtClaims;
import org.junit.Assert;
import org.junit.Test;
import io.smallrye.jwt.algorithm.KeyEncryptionAlgorithm;
import io.smallrye.jwt.util.KeyUtils;
public class JwtEncryptJwkTest {
@Test
public void testEncryptA256KW() throws Exception {
String jwt = Jwt.preferredUserName("alice").jwe().encrypt("/privateKey.jwk");
JsonWebEncryption jwe = getJsonWebEncryption(jwt, readSecretKey("/privateKey.jwk"));
Assert.assertEquals("secretkey1", jwe.getHeader("kid"));
// A256KW is a default value
Assert.assertEquals("A256KW", jwe.getHeader("alg"));
JwtClaims claims = JwtClaims.parse(jwe.getPayload());
Assert.assertEquals("alice", claims.getClaimValue("preferred_username"));
}
@Test
public void testEncryptA128KW() throws Exception {
String jwt = Jwt.preferredUserName("alice").jwe().encrypt("/privateKeyA128KW.jwk");
JsonWebEncryption jwe = getJsonWebEncryption(jwt,
readSecretKey("/privateKeyA128KW.jwk", KeyEncryptionAlgorithm.A128KW));
Assert.assertEquals("secretkey3", jwe.getHeader("kid"));
Assert.assertEquals("A128KW", jwe.getHeader("alg"));
JwtClaims claims = JwtClaims.parse(jwe.getPayload());
Assert.assertEquals("alice", claims.getClaimValue("preferred_username"));
}
@Test
public void testAlgorithmMismatch() throws Exception {
assertThrows("JwtEncryptionException is expected", JwtEncryptionException.class,
() -> Jwt.preferredUserName("alice").jwe().keyAlgorithm(KeyEncryptionAlgorithm.A256KW)
.encrypt("/privateKeyA128KW.jwk"));
}
@Test
public void testEncryptJwkSetNoConfiguredKid() throws Exception {
assertThrows("JwtEncryptionException is expected", JwtEncryptionException.class,
() -> Jwt.preferredUserName("alice").jwe().encrypt("/privateEncryptionKeys.jwks"));
}
@Test
public void testSignJwkSetWithKid() throws Exception {
String jwt = Jwt.preferredUserName("alice").jwe().keyId("secretkey1").encrypt("/privateEncryptionKeys.jwks");
JsonWebEncryption jwe = getJsonWebEncryption(jwt, readSecretKey("/privateKey.jwk"));
Assert.assertEquals("secretkey1", jwe.getHeader("kid"));
// A256KW is a default value
Assert.assertEquals("A256KW", jwe.getHeader("alg"));
JwtClaims claims = JwtClaims.parse(jwe.getPayload());
Assert.assertEquals("alice", claims.getClaimValue("preferred_username"));
}
// Select a key from a JWK set via the key id supplied through
// JwtBuildConfigSource; reset in 'finally' so the config does not leak
// into other tests.
@Test
public void testSignJwkSetWithConfiguredKid() throws Exception {
    JwtBuildConfigSource configSource = JwtSignTest.getConfigSource();
    try {
        configSource.setEncryptonKeyId("secretkey3");
        String jwt = Jwt.preferredUserName("alice").jwe().encrypt("/privateEncryptionKeys.jwks");
        JsonWebEncryption jwe = getJsonWebEncryption(jwt,
                readSecretKey("/privateKeyA128KW.jwk", KeyEncryptionAlgorithm.A128KW));
        Assert.assertEquals("secretkey3", jwe.getHeader("kid"));
        Assert.assertEquals("A128KW", jwe.getHeader("alg"));
        JwtClaims claims = JwtClaims.parse(jwe.getPayload());
        Assert.assertEquals("alice", claims.getClaimValue("preferred_username"));
    } finally {
        configSource.setEncryptonKeyId(null);
    }
}
// Load a decryption key, defaulting the key-encryption algorithm to A256KW.
private Key readSecretKey(String keyLocation) throws Exception {
    return readSecretKey(keyLocation, KeyEncryptionAlgorithm.A256KW);
}

// Load a decryption key for the given key-encryption algorithm.
private Key readSecretKey(String keyLocation, KeyEncryptionAlgorithm keyAlg) throws Exception {
    return KeyUtils.readEncryptionKey(keyLocation, null, keyAlg);
}
// Wrap a compact JWE serialization and attach the decryption key; callers
// read headers and the decrypted payload from the returned object.
private static JsonWebEncryption getJsonWebEncryption(String compactJwe, Key decryptionKey) throws Exception {
    JsonWebEncryption jwe = new JsonWebEncryption();
    jwe.setCompactSerialization(compactJwe);
    jwe.setKey(decryptionKey);
    return jwe;
}
}
|
import React, { Component } from 'react';
import { MdDelete } from "react-icons/md";
import { MdAddCircle , MdDelete } from "react-icons/md";
import {FaMinusCircle} from "react-icons/fa";
class Cartitem extends Component {
state={item:this.props.it}
item = this.props.it;
render() {
return (
<tr >
<td>
<button style={{border:'none' , backgroundColor: "inherit" }} onClick={() =>{
this.props.deleteFromCart(this.props.index)}}><MdDelete/></button>
</td>
<td>
{this.state.item.price}
</td>
<td>
<FaMinusCircle size="13px" color="red"/>{this.state.item.quantity}<MdAddCircle color="green"/>
</td>
<td>
{this.state.item.price}
</td>
<td>
{this.state.item.name}
</td>
<td>
<img style={{height:"35px"}} alt={this.state.item.name} src={"." + this.state.item.imgurl}></img>
</td>
</tr>
);
}
}
export default Cartitem; |
#!/usr/bin/env bash
# Download and (re)install Double Commander into ~/.local/share, refresh its
# hicolor icons, desktop entry and configuration.

downloads="$HOME/Downloads"
share="$HOME/.local/share"

# Fetch and unpack the release tarball.
rm -vfr "$downloads"/doublecmd*
wget https://github.com/doublecmd/doublecmd/releases/download/v1.0.2/doublecmd-1.0.2.gtk2.x86_64.tar.xz -O "$downloads/doublecmd.tar.xz"
unxz -vf "$downloads/doublecmd.tar.xz"
tar -xvf "$downloads/doublecmd.tar" -C "$downloads"

# Replace any previous installation.
rm -vfr "$share/doublecmd"
mv -vf "$downloads/doublecmd" "$share"
rm -vf "$HOME/.local/bin/doublecmd"
rm -vfr "$downloads"/doublecmd*

# Install the Papirus icon at every hicolor size.
# (Replaces nine copy-pasted rm/mkdir/wget blocks with one loop.)
for size in 512 256 128 96 64 48 32 24 16; do
    icon_dir="$share/icons/hicolor/${size}x${size}/apps"
    rm -vf "$icon_dir/doublecmd.png"
    mkdir -vp "$icon_dir"
    wget "https://icons.iconarchive.com/icons/papirus-team/papirus-apps/${size}/doublecmd-icon.png" -O "$icon_dir/doublecmd.png"
done

# Recreate the desktop entry from scratch.
rm -vf "$share/applications/doublecmd.desktop"
touch "$share/applications/doublecmd.desktop"
echo "[Desktop Entry]
Name=Double Commander
Comment=Double Commander is a cross platform open source file manager with two panels side by side.
Terminal=false
Icon=doublecmd
Exec=$HOME/.local/share/doublecmd/doublecmd.sh
Type=Application
MimeType=inode/directory;
Categories=Utility;FileTools;FileManager;
Keywords=folder;manager;explore;disk;filesystem;orthodox;copy;queue;queuing;operations;" >> "$share/applications/doublecmd.desktop"
echo "doublecmd"

# Reset configuration from the dotfiles repo.
rm -vf "$share/doublecmd/doublecmd.inf"
rm -vrf "$HOME/.config/doublecmd"
mkdir -vp "$HOME/.config/doublecmd"
cp -vfr "$HOME/proj/dotfiles/config/doublecmd" "$HOME/.config"
|
# frozen_string_literal: true
module Kruger
  class Client
    # Lightweight adapter over an HTTP client response object, exposing just
    # the pieces the rest of the client cares about.
    class Response
      attr_reader :response

      # response: the underlying HTTP response; may be nil.
      def initialize(response = nil)
        @response = response
      end

      # Delegate the public surface to the wrapped response:
      #   body     -> parsed_response (decoded payload)
      #   success? -> success?        (success indicator)
      #   status   -> code            (numeric HTTP status)
      { body: :parsed_response, success?: :success?, status: :code }.each do |name, target|
        define_method(name) { response.public_send(target) }
      end
    end
  end
end
|
"use strict";
// TypeScript-emitted runtime helper that drives a generator as a Promise
// chain (down-leveled async/await). Machine-generated; do not edit by hand.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.mounted = exports.methods = exports.watch = exports.data = exports.computed = exports.props = void 0;
const clickgo = require("clickgo");
// Vue props for the Monaco editor wrapper component.
exports.props = {
    // Disable all editing when truthy (forces read-only; see the watchers).
    'disabled': {
        'default': false
    },
    // Make the editor read-only when truthy.
    'readonly': {
        'default': false
    },
    // v-model text content of the editor.
    'modelValue': {
        'default': ''
    },
    // Monaco language id; lower-cased before being handed to Monaco.
    'language': {
        'default': undefined
    },
    // Monaco theme name; applied when set.
    'theme': {
        'default': undefined
    },
    // Extra lib sources keyed by file path; passed to the TypeScript
    // language service via setExtraLibs (see 'filesComp').
    'files': {
        'default': {}
    }
};
exports.computed = {
'isDisabled': function () {
return clickgo.tool.getBoolean(this.disabled);
},
'isReadonly': function () {
return clickgo.tool.getBoolean(this.readonly);
},
'showMask': function () {
return clickgo.dom.is.move;
},
'filesComp': function () {
const list = [];
for (const path in this.files) {
list.push({
'content': this.files[path],
'filePath': path
});
}
return list;
}
};
exports.data = {
    // Set to true in mounted() when the 'monaco' core module is unavailable.
    'notInit': false,
    // Labels for the custom context menu, keyed by locale
    // (en / simplified Chinese / traditional Chinese / Japanese).
    'localeData': {
        'en': {
            'copy': 'Copy',
            'cut': 'Cut',
            'paste': 'Paste'
        },
        'sc': {
            'copy': '复制',
            'cut': '剪下',
            'paste': '粘上'
        },
        'tc': {
            'copy': '複製',
            'cut': '剪貼',
            'paste': '貼上'
        },
        'ja': {
            'copy': 'コピー',
            'cut': '切り取り',
            'paste': '貼り付け'
        }
    }
};
// Push prop changes into the live Monaco instance.
// Every watcher no-ops until mounted() has created `monacoInstance`.
exports.watch = {
    'isReadonly': function () {
        if (!this.monacoInstance) {
            return;
        }
        // `disabled` always wins over `readonly`.
        this.monacoInstance.updateOptions({
            'readOnly': this.isDisabled ? true : this.isReadonly
        });
    },
    'isDisabled': function () {
        if (!this.monacoInstance) {
            return;
        }
        this.monacoInstance.updateOptions({
            'readOnly': this.isDisabled ? true : this.isReadonly
        });
    },
    'modelValue': function () {
        if (!this.monacoInstance) {
            return;
        }
        // Avoid feedback loops: only set when the text actually differs.
        if (this.modelValue === this.monacoInstance.getValue()) {
            return;
        }
        this.monacoInstance.setValue(this.modelValue);
    },
    'language': function () {
        if (!this.monacoInstance) {
            return;
        }
        this.monaco.editor.setModelLanguage(this.monacoInstance.getModel(), this.language.toLowerCase());
    },
    'filesComp': function () {
        if (!this.monacoInstance) {
            return;
        }
        // Refresh the TS language service's extra libs from the files prop.
        this.monaco.languages.typescript.typescriptDefaults.setExtraLibs(this.filesComp);
    },
    'theme': function () {
        if (!this.monacoInstance) {
            return;
        }
        this.monaco.editor.setTheme(this.theme);
    }
};
exports.methods = {
    // Execute a clipboard action ('copy' | 'cut' | 'paste') triggered from
    // the custom context menu.
    execCmd: function (ac) {
        return __awaiter(this, void 0, void 0, function* () {
            switch (ac) {
                case 'copy': {
                    clickgo.tool.execCommand(ac);
                    break;
                }
                case 'cut': {
                    // Copy first, then delete the selected range in-place.
                    clickgo.tool.execCommand('copy');
                    const selection = this.monacoInstance.getSelection();
                    this.monacoInstance.executeEdits('', [
                        {
                            range: new this.monaco.Range(selection.startLineNumber, selection.startColumn, selection.endLineNumber, selection.endColumn),
                            text: ''
                        }
                    ]);
                    break;
                }
                case 'paste': {
                    // Replace the current selection with the clipboard text.
                    const str = yield navigator.clipboard.readText();
                    const selection = this.monacoInstance.getSelection();
                    this.monacoInstance.executeEdits('', [
                        {
                            range: new this.monaco.Range(selection.startLineNumber, selection.startColumn, selection.endLineNumber, selection.endColumn),
                            text: str
                        }
                    ]);
                    break;
                }
            }
        });
    }
};
// Mounted hook: builds the editor inside a sandbox <iframe>, loads the Monaco
// AMD loader from the CDN, creates the editor instance and wires it back to
// the component (v-model sync, resize handling, custom context menu).
const mounted = function () {
    const iframeEl = this.$refs.iframe;
    if (!iframeEl.contentWindow) {
        return;
    }
    const iwindow = iframeEl.contentWindow;
    const idoc = iwindow.document;
    idoc.body.style.margin = '0';
    // Host element for the editor, filling the iframe.
    const monacoEl = idoc.createElement('div');
    monacoEl.id = 'monaco';
    monacoEl.style.height = '100%';
    idoc.body.append(monacoEl);
    const monaco = clickgo.core.getModule('monaco');
    if (monaco) {
        const loaderEl = idoc.createElement('script');
        loaderEl.addEventListener('load', () => {
            iwindow.require.config({
                paths: {
                    'vs': clickgo.getCdn() + '/npm/monaco-editor@0.29.1/min/vs'
                }
            });
            // Serve the editor worker through a same-origin blob URL so the
            // cross-origin CDN worker script can still be loaded.
            const proxy = iwindow.URL.createObjectURL(new Blob([`
self.MonacoEnvironment = {
    baseUrl: '${clickgo.getCdn()}/npm/monaco-editor@0.29.1/min/'
};
importScripts('${clickgo.getCdn()}/npm/monaco-editor@0.29.1/min/vs/base/worker/workerMain.js');
`], { type: 'text/javascript' }));
            iwindow.MonacoEnvironment = {
                getWorkerUrl: () => proxy
            };
            iwindow.require(['vs/editor/editor.main'], (monaco) => {
                this.monaco = monaco;
                this.monacoInstance = this.monaco.editor.create(monacoEl, {
                    'language': this.language.toLowerCase(),
                    'value': this.modelValue,
                    'contextmenu': false,
                    'minimap': {
                        'enabled': false
                    },
                    'readOnly': this.readonly
                });
                // Keep the editor laid out when the iframe is resized.
                clickgo.dom.watchSize(this.$refs.iframe, () => {
                    this.monacoInstance.layout();
                });
                // Propagate edits back to v-model.
                this.monacoInstance.getModel().onDidChangeContent(() => {
                    this.$emit('update:modelValue', this.monacoInstance.getValue());
                });
                monaco.languages.typescript.typescriptDefaults.setExtraLibs(this.filesComp);
                if (this.theme) {
                    this.monaco.editor.setTheme(this.theme);
                }
                // Custom context menu is only offered when the async
                // Clipboard API exists (execCmd's paste path needs it).
                if (navigator.clipboard) {
                    monacoEl.addEventListener('contextmenu', (e) => {
                        e.preventDefault();
                        if (clickgo.dom.hasTouchButMouse(e)) {
                            return;
                        }
                        const rect = this.$el.getBoundingClientRect();
                        clickgo.form.showPop(this.$el, this.$refs.pop, {
                            'x': rect.left + e.clientX,
                            'y': rect.top + e.clientY
                        });
                    });
                }
                const down = (e) => {
                    if (clickgo.dom.hasTouchButMouse(e)) {
                        return;
                    }
                    // Long-press opens the menu on touch devices.
                    if (e instanceof TouchEvent) {
                        clickgo.dom.bindLong(e, () => {
                            clickgo.form.showPop(this.$el, this.$refs.pop, e);
                        });
                    }
                    clickgo.form.changeFocus(this.formId);
                    clickgo.form.hidePop();
                };
                monacoEl.addEventListener('mousedown', down);
                monacoEl.addEventListener('touchstart', down);
                this.$emit('init', this.monacoInstance);
            });
        });
        loaderEl.src = monaco;
        idoc.head.append(loaderEl);
    }
    else {
        // 'monaco' module unavailable — flag it.
        // NOTE(review): presumably the template renders a fallback off
        // `notInit`; the template is not visible here — confirm.
        this.notInit = true;
    }
};
exports.mounted = mounted;
|
package com.stellmangreene.pbprdf.model;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Marks a type with the ontology class(es) it is a subclass of
 * (presumably rdfs:subClassOf targets — confirm against the consumer of
 * this annotation). Retained at runtime so the values can be read
 * reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface OntologySubClassOf {
    /** Identifiers of the parent ontology classes. */
    public String[] subClassOf();
}
|
#! /bin/sh
# Bootstrap this working copy: init the repo, include the shared gitconfig,
# and point 'origin' at the GitHub remote.
git init
# Load repo-local settings from the gitconfig one directory up.
git config --local include.path '../.gitconfig'
# Replace any existing 'origin'; without 'set -e' the rm simply errors and
# the script continues when no origin exists yet.
git remote rm origin
git remote add origin https://github.com/chris-mueller/musik-schulbuecher.git
|
<filename>node_modules/phaser/src/physics/arcade/tilemap/TileCheckY.js<gh_stars>1-10
/**
* @author <NAME> <<EMAIL>>
* @copyright 2020 Photon Storm Ltd.
* @license {@link https://opensource.org/licenses/MIT|MIT License}
*/
var ProcessTileSeparationY = require('./ProcessTileSeparationY');
/**
* Check the body against the given tile on the Y axis.
* Used internally by the SeparateTile function.
*
* @function Phaser.Physics.Arcade.Tilemap.TileCheckY
* @since 3.0.0
*
* @param {Phaser.Physics.Arcade.Body} body - The Body object to separate.
* @param {Phaser.Tilemaps.Tile} tile - The tile to check.
* @param {number} tileTop - The top position of the tile within the tile world.
* @param {number} tileBottom - The bottom position of the tile within the tile world.
* @param {number} tileBias - The tile bias value. Populated by the `World.TILE_BIAS` constant.
* @param {boolean} isLayer - Is this check coming from a TilemapLayer or an array of tiles?
*
* @return {number} The amount of separation that occurred.
*/
/**
 * Resolve a body/tile overlap on the Y axis and return the separation that
 * was applied (0 when no separation was needed or the overlap exceeded the
 * tile bias).
 */
var TileCheckY = function (body, tile, tileTop, tileBottom, tileBias, isLayer)
{
    var separation = 0;

    //  When checking a plain array of tiles rather than a layer, treat every
    //  face as interesting and every direction as collidable.
    var topFace = isLayer ? tile.faceTop : true;
    var bottomFace = isLayer ? tile.faceBottom : true;
    var canCollideUp = isLayer ? tile.collideUp : true;
    var canCollideDown = isLayer ? tile.collideDown : true;

    var movingUp = body.deltaY() < 0;
    var movingDown = body.deltaY() > 0;

    if (movingUp && canCollideDown && body.checkCollision.up && bottomFace && body.y < tileBottom)
    {
        //  Body moving UP into the tile's bottom face.
        separation = body.y - tileBottom;

        //  Overlaps deeper than the bias mean the body tunnelled — ignore.
        if (separation < -tileBias)
        {
            separation = 0;
        }
    }
    else if (movingDown && canCollideUp && body.checkCollision.down && topFace && body.bottom > tileTop)
    {
        //  Body moving DOWN onto the tile's top face.
        separation = body.bottom - tileTop;

        if (separation > tileBias)
        {
            separation = 0;
        }
    }

    if (separation !== 0)
    {
        if (body.customSeparateY)
        {
            //  Caller opted out of automatic separation; just report overlap.
            body.overlapY = separation;
        }
        else
        {
            ProcessTileSeparationY(body, separation);
        }
    }

    return separation;
};

module.exports = TileCheckY;
|
# module_name="Utilities"
# module_about="Misc. utilities."
# module_version=1
# module_image="magnet.png"
# copyright_notice="Copyright 2019 Arclogic Software"
# Scratch file used by this module's unit tests.
_g_utlTestFile="${arcTmpDir}/utl$$.test"
# Set by utl_zip_file to the path of the last file it compressed.
_g_utlZipLastFilePath=

function __readmeUtilities {
   # Emit this module's Markdown documentation (consumed by the doc generator).
   cat <<EOF
> Always code as if the guy who ends up maintaining your code will be a violent psychopath who knows where you live. -- Rick Osborne
# Utilities
**Misc. utilities.**
There are a number of general utilities here which do useful things and don't have a better place to go.
EOF
}

function test_file_setup {
   # Create the scratch test file and assert it exists.
   touch "${_g_utlTestFile}"
   echo "${_g_utlTestFile}" | assert -f
}

function test_function_setup {
   :
}

function utl_return_matching_loaded_functions {
   # Return the list of matching function names from the current environment.
   # >>> utl_return_matching_loaded_functions ["regex"]
   # regex: Functions matching the regular expression are returned.
   ${arcRequireBoundVariables}
   typeset regex
   regex="${1:-".*"}"
   if boot_is_valid_bash; then
      # bash: 'declare -F' emits "declare -f name"; field 3 is the name.
      declare -F | grep "^${regex}" | egrep -v "_grub" | cut -d" " -f3 | sort
   else
      # Assumes ksh.
      typeset +f | grep "${regex}" | egrep -v "_grub" | sort
   fi
}

function test_utl_return_matching_loaded_functions {
   :
}

function utl_confirm {
   # Return true if the user responds with a "truthy" value.
   # utl_confirm
   # __utl_confirm_skip: If this variable is set to 1 confirmations are skipped.
   ${arcRequireBoundVariables}
   typeset x
   (( ${__utl_confirm_skip:-0} )) && ${returnTrue}
   printf "Please confirm (y/n): "
   read x
   # NOTE(review): the tty check happens *after* the prompt/read, so a
   # non-tty caller still has a line of stdin consumed before the
   # auto-confirm — confirm this ordering is intentional.
   ! is_tty_device && ${returnTrue}
   if is_truthy "${x:-0}"; then
      ${returnTrue}
   else
      ${returnFalse}
   fi
}

function utl_format_tags {
   # Formats the list of tags per standard ArcShell rules for tags
   # (comma-separated, lower-cased).
   # >>> utl_format_tags "tags"
   # tags: A list of tags.
   utl_format_single_item_list "$*" | str_to_lower_case -stdin
}

function test_utl_format_tags {
   utl_format_tags "t, k,a d ,f" | assert "t,k,a,d,f"
   utl_format_tags "g ,a f t, #hello" | assert "g,a,f,t,#hello"
   utl_format_tags "Hi! HOWDY!" | assert "hi!,howdy!"
   utl_format_tags "#g #h #j #3 #4" | assert "#g,#h,#j,#3,#4"
}

function utl_format_single_item_list {
   # Turns a list with commas or spaces into a single list with commas.
   # >>> utl_format_single_item_list "tags"
   # tags: A list of tags.
   echo "$*" | tr ' ' ',' | str_split_line -stdin "," | \
      utl_remove_blank_lines -stdin | str_to_csv ","
}

function test_utl_format_single_item_list {
   utl_format_single_item_list "t, k,a d ,f" | assert "t,k,a,d,f"
   utl_format_single_item_list "g ,a f t, #hello" | assert "g,a,f,t,#hello"
   utl_format_single_item_list "Hi! HOWDY!" | assert "Hi!,HOWDY!"
   utl_format_single_item_list "#g #h #j #3 #4" | assert "#g,#h,#j,#3,#4"
}
function utl_get_function_body {
   # Returns the function body. Removes first 3 characters which should be spaces.
   # NOTE(review): the line is echoed unmodified below, so the "removes first
   # 3 characters" claim does not match the code — confirm which is intended.
   # >>> utl_get_function_body "file_path" "func_name"
   # file_path: Path to file.
   # func_name: Name of function.
   ${arcRequireBoundVariables}
   typeset file_path func_name start lines x
   file_path="${1}"
   func_name="${2}"
   # Line number of the "function name {" header; the body starts next line.
   start=$(grep -n "^function ${func_name} " "${file_path}" | cut -d":" -f1)
   ((start=start+1))
   lines=$(wc -l "${file_path}" | cut -d" " -f1)
   if [[ -n "${start}" && -n "${lines}" ]] && (( ${start} > 1 )); then
      echo ""
      while IFS= read -r x; do
         # A "}" in column one terminates the function body.
         if [[ "${x:0:1}" == "}" ]]; then
            break
         fi
         echo "${x}"
      done < <(sed -n "${start},${lines}p" "${file_path}")
      ${returnTrue}
   else
      ${returnFalse}
   fi
}

function utl_get_function_def {
   # Returns a function definition from a file.
   # >>> utl_get_function_def "file_path" "func_name"
   # file_path: Path to file.
   # func_name: Name of function.
   ${arcRequireBoundVariables}
   typeset file_path func_name start lines x
   file_path="${1}"
   func_name="${2}"
   start=$(grep -n "^function ${func_name} " "${file_path}" | cut -d":" -f1)
   lines=$(wc -l "${file_path}" | cut -d" " -f1)
   echo ""
   if [[ -n "${start}" && -n "${lines}" ]]; then
      while IFS= read -r x; do
         echo "${x}"
         # Unlike utl_get_function_body, the closing "}" line is included.
         if [[ "${x:0:1}" == "}" ]]; then
            break
         fi
      done < <(sed -n "${start},${lines}p" "${file_path}")
   fi
}

function utl_get_function_doc {
   # Returns the function documentation from a file: the run of "#" lines
   # beginning on the second line of the function definition.
   # >>> utl_get_function_doc "file_path" "func_name"
   # file_path: Path to file.
   # func_name: Name of function.
   ${arcRequireBoundVariables}
   typeset file_path func_name line_no started line
   file_path="${1}"
   func_name="${2}"
   line_no=0
   started=0
   while read line; do
      ((line_no=line_no+1))
      if (( ${line_no} == 2 )) && [[ "${line:0:1}" == "#" ]]; then
         started=1
      fi
      if [[ "${line:0:1}" == "#" ]] && (( ${started} )); then
         echo "${line}"
      fi
      # First non-comment line ends the doc block.
      if [[ "${line:0:1}" != "#" ]] && (( ${started} )); then
         started=0
      fi
   done < <(utl_get_function_def "${file_path}" "${func_name}" | utl_remove_blank_lines -stdin)
}

function utl_inspect_model_definition {
   # Warn about variables in 'actual_definition' that are absent from
   # 'model_definition' (likely typos in a configuration block).
   # >>> utl_inspect_model_definition "model_definition" "actual_definition"
   ${arcRequireBoundVariables}
   typeset model_definition actual_definition var
   model_definition="${1}"
   actual_definition="${2}"
   while read var; do
      if (( $(echo "${model_definition}" | grep "^${var}=" | wc -l) == 0 )); then
         echo "Make sure '${var}' isn't a typo."
      fi
   done < <(echo "${actual_definition:-}" | _utlReturnPossibleVars)
}

function test_utl_inspect_model_definition {
   m="
foo=
not=
"
   d="
foo='bar'
zim='zab'"
   utl_inspect_model_definition "${m}" "${d}" | assert_match "zim.*typo" "Inspection should provide warning for zim."
   d="
not=1
if (( 1 == 2 )); then
zim=0
fi
"
   utl_inspect_model_definition "${m}" "${d}" | assert_match "zim.*typo" "Inspection should provide warning for zim."
}

function _utlReturnPossibleVars {
   # Return things which look like variables from ```stdin```.
   # >>> _utlReturnPossibleVars
   cat | str_trim_line -stdin | egrep "^[A-Z|a-z|_]*=" | awk -F"=" '{print $1}'
}

function _sshInspectNodeDefinition {
   # Return warnings for variables which are not found in the model.
   # >>> _sshInspectNodeDefinition "node" "definition"
   ${arcRequireBoundVariables}
   typeset node_name node_definition var x
   node_name="${1}"
   node_definition="${2:-}"
   debug0 "Inspecting definition for '${node_name}'..."
   while read var; do
      if (( $(_sshNodeModel | grep "^${var}=" | wc -l) == 0 )); then
         echo "Make sure '${var}' isn't a typo."
      fi
   done < <(echo "${node_definition:-}" | _sshReturnPossibleVars)
}
function utl_add_dirs_to_unix_path {
   # Adds a bunch of values to the current path string if they don't exist
   # and returns the new string (PATH itself is not modified).
   # >>> utl_add_dirs_to_unix_path "path" "path" "path"
   # path: One or more values you would like to add to the path.
   ${arcRequireBoundVariables}
   # Declare tmpFile locally; it previously leaked into the caller's scope.
   typeset tmpFile
   tmpFile="$(mktempf)"
   # The awk command here enables us to get a unique list and also maintain the order of that list.
   echo "${PATH:-}" | str_split_line ":" | str_uniq -stdin > "${tmpFile}"
   while (( $# > 0 )); do
      # -F -x: literal whole-line match. The old unanchored regex match meant
      # adding "/usr/local" was suppressed by an existing "/usr/local/bin"
      # entry, and regex metacharacters in paths (dots) matched too broadly.
      if (( $(grep -Fx "${1}" "${tmpFile}" | wc -l) == 0 )); then
         echo "${1}" >> "${tmpFile}"
      fi
      shift
   done
   cat "${tmpFile}" | str_to_csv ":"
   rm "${tmpFile}"
}

function test_utl_add_dirs_to_unix_path {
   :
}
function utl_zip_file {
   # Zip a file using gzip or compress depending on which program is available.
   # On success the resulting path is stored in _g_utlZipLastFilePath (see
   # utl_zip_get_last_file_path).
   # >>> utl_zip_file "file"
   ${arcRequireBoundVariables}
   debug2 "utl_zip_file: $*"
   typeset filePath fileEnding zipProgram
   _g_utlZipLastFilePath=
   filePath="${1}"
   file_raise_file_not_found "${filePath}" && ${returnFalse}
   if boot_is_program_found "gzip"; then
      fileEnding=".gz"
      zipProgram="gzip"
   elif boot_is_program_found "compress"; then
      fileEnding=".Z"
      zipProgram="compress"
   else
      _utlThrowError "'gzip' and 'compress' not found: $*: utl_zip_file"
      ${returnFalse}
   fi
   # Remove a stale archive so the zip program does not prompt or fail.
   if [[ -f "${filePath}${fileEnding}" ]]; then
      rm "${filePath}${fileEnding}"
   fi
   "${zipProgram}" "${filePath}"
   if [[ -f "${filePath}${fileEnding}" ]]; then
      _g_utlZipLastFilePath="${filePath}${fileEnding}"
      ${returnTrue}
   else
      _utlThrowError "Error zipping file: ${filePath}${fileEnding}: utl_zip_file"
      # On failure, report the original (uncompressed) path.
      _g_utlZipLastFilePath="${filePath}"
      ${returnFalse}
   fi
}

function test_utl_zip_file {
   :
}

function utl_zip_get_last_file_path {
   # Return the full path to the last file compressed or zipped.
   # utl_zip_get_last_file_path
   echo "${_g_utlZipLastFilePath:-}"
}

function test_utl_zip_get_last_file_path {
   :
}

function utl_raise_empty_var {
   # Throw error and return true if $1 is not set.
   # >>> utl_raise_empty_var "error_message" "${check_variable}"
   # error_message: The message to display if the second argument is empty/null/undefined.
   # check_variable: The variable itself is passed in here.
   ${arcRequireBoundVariables}
   typeset error_message check_variable
   error_message="${1}"
   check_variable="${2:-}"
   if [[ -z "${check_variable:-}" ]]; then
      _utlThrowError "${error_message}: utl_raise_empty_var"
      ${returnTrue}
   else
      ${returnFalse}
   fi
}

function test_utl_raise_empty_var {
   :
}

function utl_does_file_end_with_newline {
   # Return true if the file ends with a new line character.
   # >>> utl_does_file_end_with_newline "file"
   ${arcRequireBoundVariables}
   typeset file_name x
   file_name="${1}"
   if [[ -f "${file_name}" ]]; then
      # 'wc -l' over the last line is 1 only when it is newline-terminated.
      x=$(wc -l < <(tail -1 "${file_name}"))
      if (( "${x}" )); then
         ${returnTrue}
      else
         ${returnFalse}
      fi
   else
      # NOTE(review): no explicit return on this path; the exit status is
      # whatever _utlThrowError returns — confirm that is intended.
      _utlThrowError "File not found: $*: utl_does_file_end_with_newline"
   fi
}

function utl_add_missing_newline_to_end_of_file {
   # Adds \n to the end of a file if it is missing.
   # >>> utl_add_missing_newline_to_end_of_file "file"
   ${arcRequireBoundVariables}
   typeset file_name
   file_name="${1}"
   if [[ -f "${file_name}" ]]; then
      ! utl_does_file_end_with_newline "${file_name}" && echo "" >> "${file_name}"
   else
      _utlThrowError "File not found: $*: utl_add_missing_newline_to_end_of_file"
   fi
}

function utl_raise_invalid_option {
   # Checks for some common issues when processing command line args.
   # >>> utl_raise_invalid_option "function" "(( \$# <= 9 ))" ["\$*"]
   # function: A string to identify the source of the call, usually the function name.
   # (( \$# <= 9 )): How many args should there be? If false throw error.
   # \$*: Argument list. If next arg starts is -something throw an error.
   ${arcRequireBoundVariables}
   typeset function_name arg_list arg_assertion
   function_name="${1}"
   arg_assertion="${2}"
   arg_list="${3:-}"
   if [[ "${arg_list:0:1}" == "-" ]]; then
      _utlThrowError "Next argument in list appears to be invalid: $*: ${function_name}"
      ${returnTrue}
   elif ! eval "${arg_assertion}"; then
      _utlThrowError "The number of remaining arguments appears to be incorrect: $*: ${function_name}"
      ${returnTrue}
   else
      ${returnFalse}
   fi
}

function test_utl_raise_invalid_option {
   utl_raise_invalid_option "foo" "(( $# <= 0 ))" "bluff" && fail_test "Valid options should return false." || pass_test
   utl_raise_invalid_option "foo" "(( 5 <= 0 ))" "bluff" 2> /dev/null && pass_test || fail_test "Invalid arg count should return true."
   utl_raise_invalid_option "foo" "(( 0 <= 0 ))" "-bluff" 2> /dev/null && pass_test || fail_test "Should have returned true if next remaining arg started with a -."
}

function utl_raise_invalid_arg_option {
   # Raise an error and return true if the provided arg begins with a dash.
   # >>> utl_raise_invalid_arg_option "errorText" "\$*"
   # errorText: Error string to include in general error message.
   ${arcRequireBoundVariables}
   typeset argList errorText
   errorText="${1}"
   argList="${2}"
   if [[ "${argList:0:1}" == "-" ]]; then
      _utlThrowError "Named arg not recognized: $*: ${errorText}"
      ${returnTrue}
   else
      ${returnFalse}
   fi
}

function test_utl_raise_invalid_arg_option {
   utl_raise_invalid_arg_option "test_function" "foo" | assert -l 0
   ! utl_raise_invalid_arg_option "test_function" "foo" && pass_test || fail_test
   utl_raise_invalid_arg_option "test_function" "-foo" 2>&1 | assert_match "ERROR"
   utl_raise_invalid_arg_option "test_function" "-foo" 2> /dev/null && pass_test || fail_test
}
function utl_raise_invalid_arg_count {
   # Throw error and return true if expression passed into the function is not true.
   # >>> utl_raise_invalid_arg_count "errorText" "(( \$# == X ))"
   # errorText: Error string to include in general error message.
   # (( \$# == X )): Test the number of args remaining. If this is not true an error is raised.
   ${arcRequireBoundVariables}
   # 'errorText' was previously missing from typeset and leaked as a global.
   typeset errorText testExpression
   errorText="${1}"
   testExpression="${2}"
   # Evaluate the named variable (the old code eval'd "${2}" directly,
   # leaving testExpression assigned but unused).
   if eval "${testExpression}"; then
      ${returnFalse}
   else
      _utlThrowError "Argument count is incorrect: $*: ${errorText:-}"
      ${returnTrue}
   fi
}

function test_utl_raise_invalid_arg_count {
   utl_raise_invalid_arg_count "test_function" "(( 1 == 1 ))" | assert -l 0
   ! utl_raise_invalid_arg_count "test_function" "(( 1 == 1 ))" && pass_test || fail_test
   utl_raise_invalid_arg_count "test_function" "(( 2 == 1 ))" 2>&1 | assert_match "ERROR"
   utl_raise_invalid_arg_count "test_function" "(( 2 == 1 ))" 2> /dev/null && pass_test || fail_test
}
function utl_raise_dir_not_found {
   # Throw error and return true if the provided directory is not found or executable bit is not set.
   # >>> utl_raise_dir_not_found "directory"
   ${arcRequireBoundVariables}
   typeset dirPath
   dirPath="${1}"
   if [[ -d "${dirPath}" && -x "${dirPath}" ]]; then
      ${returnFalse}
   else
      _utlThrowError "Directory not found or executable bit is not set: $*: utl_raise_dir_not_found"
      ${returnTrue}
   fi
}

function test_utl_raise_dir_not_found {
   ! utl_raise_dir_not_found "/tmp" && pass_test || fail_test
   utl_raise_dir_not_found "/tmp_not" 2> /dev/null && pass_test || fail_test
   rm -rf "/tmp/test$$" 2> /dev/null
   mkdir "/tmp/test$$"
   ! utl_raise_dir_not_found "/tmp/test$$" && pass_test || fail_test
   chmod 600 "/tmp/test$$"
   utl_raise_dir_not_found "/tmp/test$$" 2>&1 | assert_match "ERROR"
   chmod 700 "/tmp/test$$"
   rm -rf "/tmp/test$$"
}

function utl_set_version {
   # Record the version number for a named object in the "objectVersions" cache.
   # >>> utl_set_version "name" version
   # name: A simple string identifying the object to set the version for.
   # version: Must be a number.
   ${arcRequireBoundVariables}
   typeset objectName objectVersion storedVersion
   objectName="${1}"
   objectVersion=${2}
   if num_is_num "${objectVersion}"; then
      storedVersion=$(utl_get_version "${objectName}")
      cache_save -group "objectVersions" "${objectName}" ${objectVersion}
      # Only log when the version actually changed.
      if (( ${storedVersion} != ${objectVersion} )); then
         debug1 "'${objectName}' set to version ${objectVersion}."
      fi
   else
      _utlThrowError "Version must be a number: $*: utl_set_version"
   fi
}

function utl_get_version {
   # Return the version number for an object. 0 is returned if the object is not found.
   # >>> utl_get_version "name"
   ${arcRequireBoundVariables}
   typeset objectName objectVersion v
   objectName="${1}"
   v=$(cache_get -default 0 -group "objectVersions" "${objectName}")
   echo ${v}
}

function utl_remove_trailing_blank_lines {
   # Remove trailing blank lines from a file or input stream.
   # >>> utl_remove_trailing_blank_lines ["file"]
   # file: Optional file name, otherwise expects input stream from standard input.
   #
   # **Example**
   # ```
   # $ (
   # > cat <<EOF
   # >
   # > A
   # > B
   # >
   # > EOF
   # > ) | utl_remove_trailing_blank_lines
   #
   # A
   # B
   #
   # ```
   ${arcRequireBoundVariables}
   debug3 "utl_remove_trailing_blank_lines: $*"
   if [[ -n "${1:-}" && -f "${1:-}" ]]; then
      cat "${1}" | utl_remove_trailing_blank_lines
   else
      # Reverse the stream, strip what are now leading blanks, reverse back.
      str_reverse_cat | str_remove_leading_blank_lines | str_reverse_cat
   fi
}

function test_utl_remove_trailing_blank_lines {
   (
   cat <<EOF
a
1
z
2
EOF
   ) > "${_g_utlTestFile}"
   #debug_start 3
   utl_remove_trailing_blank_lines "${_g_utlTestFile}" | tail -1 | assert 2
   #debug_dump
   #debug_start 3
   cat "${_g_utlTestFile}" | utl_remove_trailing_blank_lines | tail -1 | assert 2
   #debug_dump
   cat "${_g_utlTestFile}" | utl_remove_blank_lines -stdin | utl_remove_trailing_blank_lines | tail -1 | assert 2
}

function utl_first_unblank_line {
   # Return the first unblank line in a file or input stream.
   # >>> utl_first_unblank_line ["file"]
   # file: Optional file name, otherwise expects input stream from standard input.
   debug2 "utl_first_unblank_line: $*"
   ${arcRequireBoundVariables}
   if [[ -n "${1:-}" ]]; then
      cat "${1}"| utl_first_unblank_line
   else
      utl_remove_blank_lines -stdin | head -1
   fi
}

function test_utl_first_unblank_line {
   (
   cat <<EOF
LINE2
LINE3
EOF
   ) | utl_first_unblank_line | assert "LINE2"
}

function utl_remove_blank_lines {
   # Removes blank lines from a file or input stream.
   # >>> utl_remove_blank_lines [-stdin|"file_path"]
   # -stdin: Reads input from standard in.
   # file_path: Path to file.
   #
   # **Example**
   # ```bash
   # cat /tmp/example.txt | utl_remove_blank_lines -stdin
   # ```
   ${arcRequireBoundVariables}
   if [[ "${1:-}" == "-stdin" ]]; then
      # Drop lines that are empty or contain only spaces.
      egrep -v "^ *$|^$"
   else
      cat "${1}" | utl_remove_blank_lines -stdin
   fi
}

function test_utl_remove_blank_lines {
   (
   cat <<EOF
FOO
FOO
EOF
   ) | utl_remove_blank_lines -stdin | wc -l | assert 2
}
function utl_found_in_path_def {
   # Return true if the given directory appears as an entry in ${PATH}.
   # (The old doc comment said "not defined", which was inverted.)
   # >>> utl_found_in_path_def "directory"
   ${arcRequireBoundVariables}
   typeset directoryPath
   directoryPath="${1}"
   # -F -x: literal whole-entry match. The old unanchored regex grep meant
   # "." matched every non-empty entry (always true) and "/usr/local"
   # matched an existing "/usr/local/bin" entry.
   if (( $(echo "${PATH}" | str_split_line ":" | grep -Fx "${directoryPath}" | wc -l) )); then
      ${returnTrue}
   else
      ${returnFalse}
   fi
}

function test_utl_found_in_path_def {
   originalPath="${PATH}"
   PATH="${originalPath}:/tmp/foo:.:"
   ! utl_found_in_path_def "/tmp/zoo" && pass_test || fail_test
   utl_found_in_path_def "/tmp/foo" && pass_test || fail_test
   utl_found_in_path_def "." && pass_test || fail_test
   PATH="${originalPath}"
}
function is_not_defined {
# Return true if provided variable is not defined.
# >>> is_not_defined "variable"
# variable: Variable to check.
#
# **Example**
# ```
# $ foo=
# $ is_not_defined "${foo}" && echo "OK" || echo "Not Defined"
# OK
# ```
${arcRequireBoundVariables}
debug3 "is_not_defined: $*"
if [[ -z "${1:-}" ]]; then
${returnTrue}
else
${returnFalse}
fi
}
function test_is_not_defined {
X=
is_not_defined "${X}" && pass_test || fail_test
X=0
! is_not_defined "${X}" && pass_test || fail_test
}
function is_defined {
# Return true if provided variable is defined.
# >>> is_defined "X"
# X: Variable to check.
#
# **Example**
# ```
# $ foo=
# $ is_defined "${foo}" && echo "OK" || echo "Not Defined"
# Not Defined
# ```
${arcRequireBoundVariables}
if [[ -n "${1:-}" ]]; then
${returnTrue}
else
${returnFalse}
fi
}
function test_is_defined {
X=0
is_defined "${X}" && pass_test || fail_test
X=
! is_defined "${X}" && pass_test || fail_test
}
function get_shell_type {
   # Determine if current shell is bash or ksh.
   # >>> get_shell_type
   # Relies on the bash-only ${BASH} variable; anything else is assumed ksh.
   ${arcRequireBoundVariables}
   [[ -n "${BASH:-}" ]] && echo "bash" || echo "ksh"
}
function test_get_shell_type {
   # The reported shell must be one of the two supported values.
   get_shell_type | egrep "ksh|bash" | assert -l 1
}
function is_linux {
   # Return true if current OS is Linux.
   # >>> is_linux
   # Compares the upper-cased `uname -s` output against "LINUX".
   ${arcRequireBoundVariables}
   if [[ $(uname -s | str_to_upper_case -stdin) == "LINUX" ]]; then
      ${returnTrue}
   else
      ${returnFalse}
   fi
}
function test_is_linux {
   # On Linux hosts is_linux must return true; on everything else it must
   # return false. The original version asserted nothing at all when run
   # on a non-Linux host.
   if [[ "${arcOSType}" == "LINUX" ]]; then
      is_linux && pass_test || fail_test
   else
      ! is_linux && pass_test || fail_test
   fi
}
function is_email_address {
   # Return true if provided string contains an @ and is therefore likely an email address.
   # >>> is_email_address "emailAddressStringToCheck"
   # Note: presence of "@" is the only heuristic used; this is not full
   # RFC-style address validation.
   ${arcRequireBoundVariables}
   if (( $(str_instr "@" "${1}") > 0 )); then
      ${returnTrue}
   else
      ${returnFalse}
   fi
}
function test_is_email_address {
   # A string with "@" is accepted; one without is rejected.
   is_email_address "post.ethan@gmail.com" && pass_test || fail_test
   ! is_email_address "post.ethan" && pass_test || fail_test
}
function utl_to_stderr {
   # Write a text string to standard error.
   # >>> utl_to_stderr "textString"
   # textString: Text to emit on stderr.
   ${arcRequireBoundVariables}
   typeset textString
   textString="${1}"
   # Redirect stdout to stderr directly; the previous fd swap
   # (3>&1 1>&2 2>&3) achieved the same result in a needlessly
   # convoluted way and leaked fd 3 into the child.
   echo "${textString}" >&2
}
function test_utl_to_stderr {
   # Exercise the function under test: the original called a non-existent
   # 'utility_write_stderr'. Discard stdout, route stderr to the pipe, and
   # assert something was written.
   utl_to_stderr "foo" 2>&1 >/dev/null | assert -n
}
function _is_truthy {
   # Return true if parameter is truthy, (e.g., 'y', 1, 'yes', true).
   # >>> _is_truthy "truthyValue"
   # Internal helper for is_truthy; matches a fixed set of literal values.
   # (The original doc line referenced "is_truthy" by mistake.)
   ${arcRequireBoundVariables}
   typeset x
   x="${1:-}"
   case "${x}" in
      "y"|"Y"|1|"true"|"True"|"TRUE"|"yes"|"Yes"|"YES")
         ${returnTrue}
         ;;
      *)
         ${returnFalse}
         ;;
   esac
}
function is_truthy {
   # Return true if value is truthy.
   # Truthy values are true cron expressions, 1, y, yes, t, true. Upper or lower-case.
   # >>> is_truthy "truthyValue"|"cronExpression"
   ${arcRequireBoundVariables}
   # Missing or empty input is never truthy.
   [[ -z "${1:-}" ]] && ${returnFalse}
   # A single word is checked against the literal truthy values; anything
   # with multiple words is treated as a cron expression.
   if (( $(echo "${1:-0}" | wc -w) == 1 )); then
      if _is_truthy "${1:-0}"; then
         ${returnTrue}
      else
         ${returnFalse}
      fi
   elif [[ -n "${1:-}" ]] && cron_is_true "${1:-}"; then
      ${returnTrue}
   else
      ${returnFalse}
   fi
}
function test_is_truthy {
   typeset x
   # All literal truthy spellings plus an always-true cron expression.
   for x in 'y' 1 'yes' 'Y' 'YES' 'Yes' 'TRUE' 'True' 'true' '* * * * *'; do
      is_truthy "${x}" && pass_test || fail_test
   done
   # Falsy spellings plus a cron expression that is rarely true.
   for x in 'n' 0 'no' 'N' 'NO' 'No' 'FALSE' 'False' 'false' '23 0 * * 6'; do
      ! is_truthy "${x}" && pass_test || fail_test
   done
   # No argument at all must be falsy.
   ! is_truthy && pass_test || fail_test
}
function mktempf {
   # Return path to a newly created temp file.
   # >>> mktempf ["string"]
   # string: A string which can be used to identify the source of the file.
   ${arcRequireBoundVariables}
   typeset x tmpDir tmpFile
   x=${RANDOM:-0}
   # Tagged files live under a per-process subdirectory so rmtempf "string"
   # can remove them as a group; untagged files go to the shared tmp dir.
   if is_defined "${1:-}"; then
      tmpDir="${arcTmpDir}/tmp/$$/${1}"
   else
      tmpDir="${arcTmpDir}/tmp"
   fi
   mkdir -p "${tmpDir}"
   tmpFile="${tmpDir}/${x}.tmp"
   # Bump the counter until an unused name is found.
   while [[ -f "${tmpFile}" ]]; do
      ((x=x+1))
      tmpFile="${tmpDir}/${x}.tmp"
   done
   # Owner-only permissions; the umask change is confined to the subshell.
   (umask 077 && touch "${tmpFile}")
   echo "${tmpFile}"
}
function test_mktempf {
   # Two calls with the same tag must yield distinct, existing files in the
   # tagged directory. 'y' is now declared local; it previously leaked as a
   # global variable.
   typeset x y
   rmtempf
   x="$(mktempf "foo")"
   touch "${x}"
   [[ -f "${x}" ]] && pass_test || fail_test
   y="$(mktempf "foo")"
   touch "${y}"
   ! [[ "${x}" -ef "${y}" ]] && pass_test || fail_test
   ls "${arcTmpDir}/tmp/$$/foo"* | assert -l 2
}
function rmtempf {
   # Deletes any temp files this session has created. If ```string``` is provided, deletes are limited to matching files.
   # >>> rmtempf "string"
   # string: A string to easily identify a group of tmp files.
   ${arcRequireBoundVariables}
   typeset str tmpDir
   str="${1:-}"
   tmpDir="${arcTmpDir}/tmp/$$/${str}"
   # Call the predicate directly. Wrapping it in $(...) executed whatever
   # the function printed instead of testing its exit status.
   if file_is_dir "${tmpDir}"; then
      rm -rf "${tmpDir}"
   fi
}
function test_rmtempf {
   # Should not return an error or any output when no files exist.
   rmtempf 2>&1 | assert -z
   rmtempf 2>&1 >/dev/null | assert -z
   echo "${arcTmpDir}/tmp/$$" | assert ! -d
   # Creating a tagged tmp file materializes the session directory...
   x=$(mktempf "unittest")
   echo "${arcTmpDir}/tmp/$$" | assert -d
   # ...and rmtempf removes it again.
   rmtempf
   echo "${arcTmpDir}/tmp/$$" | assert ! -d
}
function mktempd {
   # Returns the path to a new temporary directory.
   # >>> mktempd
   # The name combines PID, ${RANDOM} and the epoch to avoid collisions.
   ${arcRequireBoundVariables}
   typeset tmpDir
   tmpDir="${arcTmpDir}/tmp/$$_${RANDOM:-0}_$(dt_epoch)"
   mkdir -p "${tmpDir}"
   echo "${tmpDir}"
}
function test_mktempd {
   # A created directory must exist, be removable, and successive calls
   # must return distinct paths.
   T=$(mktempd)
   [[ -d "${T}" ]] && pass_test || fail_test
   rmtempd "${T}"
   [[ -d "${T}" ]] && fail_test || pass_test
   X=$(mktempd)
   Y=$(mktempd)
   ! [[ "${X}" -ef "${Y}" ]] && pass_test || fail_test
   rmtempd "${X}"
   rmtempd "${Y}"
}
function rmtempd {
   # A safe way to delete a directory created with mktempd.
   # >>> rmtempd "directory"
   # directory: Path previously returned by mktempd.
   ${arcRequireBoundVariables}
   typeset file directory
   directory="${1:-}"
   if [[ -d "${directory:-}" ]]; then
      # Quote the basename argument: an unquoted path word-splits if it
      # ever contains whitespace.
      file="$(basename "${directory}")"
      # Only remove paths that resolve under the managed tmp root.
      if [[ -d "${arcTmpDir}/tmp/${file}" ]]; then
         rm -rf "${arcTmpDir}/tmp/${file}"
      fi
   else
      _utlThrowError "Directory not found: $*: rmtempd"
   fi
}
function test_rmtempd {
   # Create and remove a temp dir, then confirm a missing argument raises
   # the module error.
   typeset t
   t="$(mktempd)"
   echo "${t}" | assert -d
   rmtempd "${t}"
   echo "${t}" | assert ! -d
   rmtempd 2>&1 >/dev/null | assert_match "ERROR"
}
function _utlThrowError {
   # Utility module error handler.
   # >>> _utlThrowError "errorText"
   # errorText: Message to report, attributed to arcshell_utl.sh.
   throw_error "arcshell_utl.sh" "${1}"
}
function test_file_teardown {
   # Remove test artifacts created by earlier tests and assert they are gone.
   # ${_g_utlTestFile} is defined elsewhere in this module's test setup.
   rm "${_g_utlTestFile}"* 2> /dev/null
   echo "${_g_utlTestFile}" | assert ! -f
}
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import os
import tensorflow as tf
import math
# Configuration constants for an FPN-based rotated-box detector; values are
# read by model-building code elsewhere in the repository.
ROOT_PATH = os.path.abspath('../../')
print(ROOT_PATH)
SUMMARY_PATH = os.path.join(ROOT_PATH, 'output/summary')
# backbone
NET_NAME = 'resnet50_v1d'
RESTORE_FROM_RPN = False
IS_FILTER_OUTSIDE_BOXES = False
FREEZE_BLOCKS = [True, True, False, False, False]  # for gluoncv backbone
FIXED_BLOCKS = 0  # allow 0~3
# neck
FPN_MODE = 'fpn'
SHARE_HEADS = True
FPN_CHANNEL = 512
# rpn head
LEVEL = ['P2', 'P3', 'P4', 'P5', 'P6']
BASE_ANCHOR_SIZE_LIST = [32, 64, 128, 256, 512]
ANCHOR_STRIDE = [4, 8, 16, 32, 64]
ANCHOR_SCALES = [1.0]
ANCHOR_RATIOS = [0.5, 1., 2.0, 1/4.0, 4.0, 1/6.0, 6.0]
ANCHOR_ANGLES = [-90, -75, -60, -45, -30, -15]
ROI_SCALE_FACTORS = [10., 10., 5.0, 5.0, 2.0]
ANCHOR_SCALE_FACTORS = None
USE_CENTER_OFFSET = False
ANCHOR_MODE = 'H'
ANGLE_RANGE = 90
INITIALIZER = tf.random_normal_initializer(mean=0.0, stddev=0.01)
BBOX_INITIALIZER = tf.random_normal_initializer(mean=0.0, stddev=0.001)
WEIGHT_DECAY = 0.00004 if NET_NAME.startswith('Mobilenet') else 0.0001
ADD_GLOBAL_CTX = False
# roi head
ROI_SIZE = 14
ROI_POOL_KERNEL_SIZE = 2
USE_DROPOUT = False
KEEP_PROB = 1.0
# loss
RPN_LOCATION_LOSS_WEIGHT = 1.
RPN_CLASSIFICATION_LOSS_WEIGHT = 1.0
FAST_RCNN_LOCATION_LOSS_WEIGHT = 1.0
FAST_RCNN_CLASSIFICATION_LOSS_WEIGHT = 1.0
RPN_SIGMA = 3.0
FASTRCNN_SIGMA = 1.0
# rpn sample
KERNEL_SIZE = 3
RPN_IOU_POSITIVE_THRESHOLD = 0.7
RPN_IOU_NEGATIVE_THRESHOLD = 0.3
# NOTE(review): "CLOOBER" is a typo for "CLOBBER"; name kept unchanged
# because other modules may import it by this spelling.
TRAIN_RPN_CLOOBER_POSITIVES = False
RPN_MINIBATCH_SIZE = 512
RPN_POSITIVE_RATE = 0.5
RPN_NMS_IOU_THRESHOLD = 0.7
RPN_TOP_K_NMS_TRAIN = 12000
# NOTE(review): "TARIN" is a typo for "TRAIN"; name kept for import
# compatibility with consumers of this config.
RPN_MAXIMUM_PROPOSAL_TARIN = 2000
RPN_TOP_K_NMS_TEST = 6000
RPN_MAXIMUM_PROPOSAL_TEST = 1000
# roi sample
CUDA8 = False  # assign level
FAST_RCNN_IOU_POSITIVE_THRESHOLD = 0.5
FAST_RCNN_IOU_NEGATIVE_THRESHOLD = 0.0   # 0.0 < IOU < 0.5 is negative
FAST_RCNN_MINIBATCH_SIZE = 512
FAST_RCNN_POSITIVE_RATE = 0.25
ADD_GTBOXES_TO_TRAIN = False
# post-processing
VIS_SCORE = 0.6
FILTERED_SCORE = 0.05
ROTATE_NMS_USE_GPU = True
SOFT_NMS = False
FAST_RCNN_NMS_IOU_THRESHOLD = 0.3
FAST_RCNN_NMS_MAX_BOXES_PER_CLASS = 200
# test and eval
TEST_SAVE_PATH = os.path.join(ROOT_PATH, 'tools/test_result')
EVALUATE_R_DIR = os.path.join(ROOT_PATH, 'output/evaluate_result_pickle/')
USE_07_METRIC = True
EVAL_THRESHOLD = 0.5
|
#!/usr/bin/env bash
# Replace the local Sublime Text 2 settings directory with a symlink to the
# Dropbox-synced copy (first run only; a pre-existing symlink is left alone).
local_sublime="$HOME/Library/Application Support/Sublime Text 2"
synced_sublime="$HOME/Dropbox/Personalization/Sublime Text 2"

# Guard: if Dropbox has not synced the settings yet, the original script
# would delete the local settings and create a dangling symlink.
if [ ! -d "$synced_sublime" ]; then
    echo "Synced settings not found: $synced_sublime" >&2
    exit 1
fi

if [ ! -L "$local_sublime" ]; then
    rm -rf "$local_sublime"
    ln -s "$synced_sublime" "$local_sublime"
fi
|
<reponame>mhdaouas/goiodi<gh_stars>1-10
package main
// Dictionary word
type Word struct {
	CreationTime int64 `json:"creation_time" bson:"creation_time"` // Unix timestamp set at creation
	Definition string `json:"definition" bson:"definition"`
	Word string `json:"word" bson:"word"`
	// NOTE(review): the bson tag carries no field name (",omitempty"), so the
	// driver falls back to the lowercased Go field name -- confirm this
	// matches the documents already stored.
	Comments []Comment `json:"comments" bson:",omitempty"`
}
// Structure for the comments created by users for dictionary words
type Comment struct {
	Content string `json:"content" bson:"content"`
	CreationTime int64 `json:"creation_time" bson:"creation_time"` // Unix timestamp set at creation
	Creator string `json:"creator" bson:"creator"`
	// NOTE(review): the extra db:"word" tag is unusual here -- no other field
	// in these structs carries a db tag; confirm whether a sql mapper needs it.
	Word string `json:"word" bson:"word" db:"word"`
}
// Application user
type User struct {
	// Id string `json:"id"`
	CreationTime int64 `json:"creation_time" bson:"creation_time"` // Unix timestamp set at creation
	Email string `json:"email" bson:"email"`
	PwdHash string `json:"pwd_hash" bson:"pwd_hash"`
	PwdSalt string `json:"pwd_salt" bson:"pwd_salt"`
	LastLogTime int64 `json:"last_login_time" bson:"last_login_time"`
	Logged bool `json:"logged" bson:"logged"` // presumably true while a session is active -- confirm against auth code
	Username string `json:"username" bson:"username"`
}
|
#!/bin/bash -
#
# build-db-alpha.sh
#
# Description: Builds studydeck database with optional arguments to
# auto-populate with data.
#
# Prerequisite: $MYSQL_USER must exist so script can login and create database.
# This user must have privileges on $MYSQL_DB.
# To create $MYSQL_USER in mysql, login as the root user ($ mysql -u root -p) and run:
#
# mysql> CREATE USER 'username'@'localhost' IDENTIFIED BY 'password';
# mysql> GRANT SELECT, INSERT, UPDATE, DELETE ON 'database_name'.* to 'username'@"localhost" IDENTIFIED BY 'username';
#
# Example usage:
# ./build-db-alpha.sh
# Resolve the script's own directory; $(...) replaces the legacy backticks
# and quoting "$0" keeps paths containing spaces intact.
ROOTDIR=$(dirname "$0")
MYSQL_USER=studydec_root
MYSQL_PASSWORD=password
MYSQL_DB=studydec_alpha
# NOTE(review): passing the password on the command line exposes it to `ps`;
# consider ~/.my.cnf or MYSQL_PWD instead.
MYSQL_EXEC="mysql -u $MYSQL_USER -p${MYSQL_PASSWORD}"
SCHEMA_SQL_ORIG=$ROOTDIR/flashcards.sql
SCHEMA_DB_ORIG_NAME=flashcards
SCHEMA_SQL_TMP=/tmp/${MYSQL_DB}.sql
rename_schema() {
    # Rename instances of $SCHEMA_DB_ORIG_NAME in $SCHEMA_SQL_ORIG file.
    echo "  Modifying DB name to $MYSQL_DB in $SCHEMA_SQL_ORIG"
    # /g replaces every occurrence on a line (a dump line can mention the
    # schema more than once, e.g. `flashcards`.`table`); file arguments are
    # quoted against word-splitting.
    sed "s/${SCHEMA_DB_ORIG_NAME}/${MYSQL_DB}/g" "$SCHEMA_SQL_ORIG" > "$SCHEMA_SQL_TMP"
}
create_schema() {
    # Run mysqldump output through mysql to set up the (empty) tables.
    echo "  Creating schema"
    # $MYSQL_EXEC is intentionally unquoted so the command string word-splits
    # into the mysql binary and its arguments; the redirect target is quoted.
    $MYSQL_EXEC < "$SCHEMA_SQL_TMP"
}
cleanup() {
    # Clean tmp files.
    echo "  Cleaning temporary files"
    # -f: do not fail if the temp file was never created; quoted against
    # word-splitting.
    rm -f "$SCHEMA_SQL_TMP"
}
# Main: rewrite the dump with the target DB name, load it, then remove the
# temporary copy.
rename_schema
create_schema
cleanup
|
/*
*
*/
package net.community.chest.io.encode;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
 * <P>Copyright GPLv2</P>
 *
 * Contract for decoding/encoding a single element from/to a stream.
 *
 * @param <V> Type of element being encoded/decoded
 * @author <NAME>.
 * @since Jun 15, 2009 1:54:42 PM
 */
public interface ElementEncoder<V> {
	/**
	 * Decodes one element from the stream.
	 * @param in stream to read from
	 * @return the decoded element
	 * @throws IOException on read/decode failure
	 */
	V read (InputStream in) throws IOException;
	/**
	 * Encodes to the stream.
	 * NOTE(review): this method takes no {@code V} argument, so the value to
	 * encode presumably comes from the implementing object's own state --
	 * confirm the intended contract with implementations.
	 * @param out stream to write to
	 * @throws IOException on write/encode failure
	 */
	void write (OutputStream out) throws IOException;
}
|
package com.yoga.operator;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/** Spring Boot entry point for the operator service. */
@SpringBootApplication
public class OperatorApplication {
	/**
	 * Boots the Spring application context.
	 * @param args command-line arguments forwarded to Spring
	 */
	public static void main(String[] args) {
		SpringApplication.run(OperatorApplication.class, args);
	}
}
|
// Vue component options: a presentational "Quote" component that receives
// its content through the `data` prop.
export default {
  name: 'Quote',
  props: ['data']
}
|
#!/bin/bash
# Sublime Text packages
# Ensure the settings directory exists before copying into it.
mkdir -p ~/Library/Application\ Support/Sublime\ Text\ 3/Packages/User/
cp config/Package\ Control.sublime-settings ~/Library/Application\ Support/Sublime\ Text\ 3/Packages/User/
# create simlink and open Sublime to install packages; skip the link when a
# `subl` entry already exists so re-running the script does not abort on ln.
if [ ! -e /usr/local/bin/subl ]; then
    ln -s /Applications/Sublime\ Text.app/Contents/SharedSupport/bin/subl /usr/local/bin/subl
fi
subl
|
<filename>GO/create_tables.sql
call log('create_tables.sql', 'begin');
-- file: http://www.geneontology.org/GO.evidence.shtml
-- GO Evidence Codes
call log('evidence_codes', 'refresh');
drop table if exists GO.evidence_codes;
create table GO.evidence_codes
(
  EvidenceProvider text,
  EvidenceCode varchar(3),
  EvidenceText text
);
call utf8_unicode('evidence_codes');
-- Populate evidence_codes first: the original script derived
-- evidence_provider from this table BEFORE the inserts ran, so the
-- derived table was always built from an empty source.
select 'Experimental Evidence Codes' into @experimental;
select 'Computational Analysis Evidence Codes' into @computational;
select 'Automatically-assigned Evidence Codes' into @automatic;
select 'Author Statement Evidence Codes' into @author;
select 'Curator Statement Evidence Codes' into @curator;
insert into GO.evidence_codes values
(@experimental,'EXP', 'Inferred from Experiment'),
(@experimental,'IDA', 'Inferred from Direct Assay'),
(@experimental,'IPI', 'Inferred from Physical Interaction'),
(@experimental,'IMP', 'Inferred from Mutant Phenotype'),
(@experimental,'IGI', 'Inferred from Genetic Interaction'),
(@experimental,'IEP', 'Inferred from Expression Pattern');
insert into GO.evidence_codes values
(@computational,'ISS', 'Inferred from Sequence or Structural Similarity'),
(@computational,'ISO', 'Inferred from Sequence Orthology'),
(@computational,'ISA', 'Inferred from Sequence Alignment'),
(@computational,'ISM', 'Inferred from Sequence Model'),
(@computational,'IGC', 'Inferred from Genomic Context'),
(@computational,'IBA', 'Inferred from Biological aspect of Ancestor'),
(@computational,'IBD', 'Inferred from Biological aspect of Descendant'),
(@computational,'IKR', 'Inferred from Key Residues'),
(@computational,'IRD', 'Inferred from Rapid Divergence'),
(@computational,'RCA', 'inferred from Reviewed Computational Analysis');
insert into GO.evidence_codes values
(@author,'TAS','Traceable Author Statement'),
(@author,'NAS','Non-traceable Author Statement');
insert into GO.evidence_codes values
(@curator,'IC', 'Inferred by Curator'),
(@curator,'ND', 'No biological Data available');
insert into GO.evidence_codes values
(@automatic,'IEA', 'Inferred from Electronic Annotation');
call log('evidence_codes', 'done');
--
-- Distinct providers, derived after the data exists; the source table is
-- now schema-qualified (GO.evidence_codes) like every other reference.
call log('evidence_provider', 'refresh');
drop table if exists GO.evidence_provider;
create table GO.evidence_provider
as select distinct EvidenceProvider from GO.evidence_codes;
call utf8_unicode('evidence_provider');
call log('evidence_provider', 'done');
--
call log('create_tables.sql', 'done');
|
package io.movieflix.entity;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.hibernate.annotations.GenericGenerator;
/**
 * JPA entity holding one user registration record.
 * Rows are looked up by email via the named query {@code Register.findByEmail}.
 */
@Entity
@Table
@NamedQueries(
		@NamedQuery(name="Register.findByEmail", query="SELECT r FROM Register r WHERE r.email=:pEmail"))
public class Register {
	// Primary key: UUID string produced by Hibernate's uuid2 generator.
	@Id
	@GenericGenerator(strategy="uuid2", name="myuuid")
	@GeneratedValue(generator="myuuid")
	private String id;
	@NotNull
	private String firstName;
	@NotNull
	private String lastName;
	// NOTE(review): email has no @NotNull constraint even though the named
	// query looks records up by it -- confirm whether null emails are valid.
	private String email;
	// NOTE(review): a 2-8 character bound suggests the raw password is
	// persisted rather than a hash -- verify before relying on this field.
	@NotNull
	@Size(min = 2, max = 8)
	private String password;
	private String role;
	/** Default constructor required by JPA. */
	public Register(){
	}
	/** Convenience constructor populating all fields. */
	public Register(String id, String firstName, String lastName, String email, String password, String role) {
		super();
		this.id = id;
		this.firstName = firstName;
		this.lastName = lastName;
		this.email = email;
		this.password = password;
		this.role = role;
	}
	public String getId() {
		return id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public String getFirstName() {
		return firstName;
	}
	public void setFirstName(String firstName) {
		this.firstName = firstName;
	}
	public String getLastName() {
		return lastName;
	}
	public void setLastName(String lastName) {
		this.lastName = lastName;
	}
	public String getEmail() {
		return email;
	}
	public void setEmail(String email) {
		this.email = email;
	}
	public String getPassword() {
		return password;
	}
	public void setPassword(String password) {
		this.password = password;
	}
	public String getRole() {
		return role;
	}
	public void setRole(String role) {
		this.role = role;
	}
}
def find_shortest_path(matrix, a, b):
    """Breadth-first reachability test on a grid.

    Args:
        matrix: 2-D grid; a cell is passable when its value is non-zero.
        a: (row, col) start cell; assumed in bounds.
        b: (row, col) target cell.

    Returns:
        True if ``b`` is reachable from ``a`` through 4-connected non-zero
        cells, otherwise False. (Despite the name, only reachability is
        reported, not the path itself -- the original behaved the same way.)
    """
    from collections import deque  # O(1) popleft vs O(n) list.pop(0)

    rows, columns = len(matrix), len(matrix[0])
    visited = [[False] * columns for _ in range(rows)]
    queue = deque([a])
    visited[a[0]][a[1]] = True
    # Hoisted out of the loop; 4-connected neighborhood.
    directions = ((0, 1), (1, 0), (-1, 0), (0, -1))
    while queue:
        i, j = queue.popleft()
        if i == b[0] and j == b[1]:
            return True
        for di, dj in directions:
            ni, nj = i + di, j + dj
            if 0 <= ni < rows and 0 <= nj < columns:
                # Fixes two defects in the original: a misplaced bracket
                # (matrix[i+step[0][j+step[1]] -- a syntax error) and
                # 'visited[...] == True' (a no-op comparison instead of an
                # assignment, so cells were re-enqueued indefinitely).
                if not visited[ni][nj] and matrix[ni][nj] != 0:
                    queue.append((ni, nj))
                    visited[ni][nj] = True
    return False
#!/usr/bin/env bash
# Launch the packaged application; assumes gp.jar is in the current working
# directory and a Java runtime is on PATH.
java -jar gp.jar
<reponame>bukinr/mdepx<filename>kernel/net/if_llatbl.h
/*-
* Copyright (c) 2018 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef _NET_IF_LLATBL_H_
#define _NET_IF_LLATBL_H_
#include <sys/socket.h>
#include <net/route.h>
#include <net/if_types.h>
#include <netinet/in.h>
/*
 * Link-layer address table entry: caches the link-layer header for an
 * IPv4 neighbor so it can be prepended to outgoing packets.
 */
struct llentry {
	LIST_ENTRY(llentry) lle_next;		/* linkage on the owning list */
	char r_linkdata[LLE_MAX_LINKHDR];	/* cached link-layer header bytes */
	uint8_t r_hdrlen;			/* valid length of r_linkdata */
	struct in_addr addr4;			/* neighbor IPv4 address */
	struct lltable *lle_tbl;		/* back-pointer to owning table */
	struct llentries *lle_head;		/* list this entry is linked on */
	char *ll_addr;				/* raw link-layer address */
};
LIST_HEAD(llentries, llentry);
/*
 * One link-layer address table per network interface.
 */
struct lltable {
	SLIST_ENTRY(lltable) llt_link;		/* linkage in the global table list */
	struct llentries lle_head;		/* entries owned by this table */
	struct ifnet *llt_ifp;			/* interface this table serves */
};
#endif /* !_NET_IF_LLATBL_H_ */
|
<gh_stars>0
import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core';
import * as moment from 'moment';
import CalendarEvent from '../CalendarEvent';
import { Day, EventInWeek, WeekSlots } from '../util';
@Component({
  selector: 'app-month-view',
  templateUrl: './month-view.component.html',
  styleUrls: ['./month-view.component.css']
})
// Month calendar grid: lays out days into weeks and packs events into
// non-overlapping display rows per week.
export class MonthViewComponent implements OnInit {
  _date: moment.Moment;
  _dateAsDate: Date;
  _events: CalendarEvent[];
  @Input() height: number = 900;
  @Output() newDate = new EventEmitter<moment.Moment>();
  @Output() daySelected = new EventEmitter<moment.Moment>();
  @Output() weekSelected = new EventEmitter<moment.Moment>();
  @Output() monthSelected = new EventEmitter<moment.Moment>();
  @Output() eventMoved = new EventEmitter<[CalendarEvent, moment.Moment]>();
  @Output() eventRemoved = new EventEmitter<[CalendarEvent, moment.Moment]>();
  weeks: Day[][] = [];
  eventsInWeek: EventInWeek[][] = [];
  ngOnInit() {
  }
  // Recomputes the per-week event layout whenever the event list changes.
  @Input() set events(e: CalendarEvent[]) {
    this._events = e;
    console.log('new events ', e)
    if (this.date && e) {
      let start = this.date.clone().startOf('month').startOf('week');
      let end = this.date.clone().endOf('month').endOf('week');
      let duration = moment.duration(end.diff(start));
      let temp = start.clone();
      let eventsInWeek: EventInWeek[][] = [];
      for (let i = 0; i < duration.asWeeks(); i++) {
        eventsInWeek.push([]);
        let weekEvents = this.events.filter(ev => ev.isInWeek(temp));
        weekEvents.sort((a, b) => {
          // FIX: order events by start time. The original compared a.start
          // against a.end -- it ignored `b` entirely, which is an invalid
          // comparator and produced arbitrary ordering.
          let first = a.start.valueOf();
          let second = b.start.valueOf();
          return first - second;
        });
        weekEvents.forEach(ev => eventsInWeek[i].push({
          offset: ev.getWeekIndex(temp),
          length: ev.getWeekLength(temp),
          event: ev,
          row: 0
        }));
        temp.add(1, 'week');
      }
      // Pack each week's events into display rows ("slots").
      let weekSlots: WeekSlots[] = [];
      for (let i = 0; i < duration.asWeeks(); i++) {
        weekSlots.push(new WeekSlots());
        // Sort by offset ascending, then by length descending.
        let copy = eventsInWeek[i].slice().sort((a, b) => {
          if (a.offset < b.offset) {
            return -1;
          } else if (a.offset > b.offset) {
            return 1;
          } else if (a.length > b.length) {
            return -1;
          } else if (a.length < b.length) {
            return 1;
          } else {
            return 0;
          }
        });
        // Picks the longest event that starts after `master` ends, so the
        // two can share one display row.
        let findCompanion = (master: EventInWeek, all: EventInWeek[]): EventInWeek | undefined => {
          let candidates = all.filter(ev => ev.offset > (master.offset + master.length - 1));
          candidates.sort((a, b) => {
            return b.length - a.length;
          });
          if (candidates.length > 0) {
            return candidates[0];
          } else {
            return undefined;
          }
        };
        while (copy.length > 0) {
          let event = copy.splice(0, 1)[0];
          weekSlots[i].addEvent(event);
          for (let next = findCompanion(event, copy); copy.length > 0 && next !== undefined; next = findCompanion(next, copy)) {
            let index = copy.indexOf(next);
            let tmp = copy.splice(index, 1)[0];
            weekSlots[i].addEvent(tmp);
          }
        }
      }
      this.eventsInWeek = eventsInWeek;
    }
  }
  get events() {
    return this._events;
  }
  // Rebuilds the week/day grid for the month containing `d`.
  @Input() set date(d: moment.Moment) {
    console.log('new date', d.toDate());
    this._date = d;
    this._dateAsDate = d.toDate();
    let weeks = [];
    let eventsInWeek = [];
    let start = d.clone().startOf('month').startOf('week');
    let end = d.clone().endOf('month').endOf('week');
    let weeksInYear = start.weeksInYear();
    let startWeek = start.week();
    let duration = moment.duration(end.diff(start));
    for (let i = 0; i < duration.asWeeks(); i++) {
      weeks.push([]);
      eventsInWeek.push([]);
    }
    for (let temp = start.clone(); temp.isBefore(end); temp = temp.add(1, 'day')) {
      let day = {
        date: temp.clone(),
      };
      let curWeek = temp.week();
      let weekIndex = -1;
      // Handle months that cross the year boundary, where week numbers wrap.
      if (curWeek < startWeek) {
        weekIndex = curWeek + weeksInYear - startWeek;
      } else {
        weekIndex = curWeek - startWeek;
      }
      weeks[weekIndex].push(day);
    }
    this.eventsInWeek = eventsInWeek;
    if (weeks[weeks.length - 1].length === 0) {
      weeks.splice(weeks.length - 1, 1);
    }
    this.weeks = weeks;
    // FIX: guard against the `date` input arriving before the `events`
    // input; the original called slice() on an undefined list and threw.
    if (this._events) {
      this.events = this._events.slice();
    }
  }
  get date() {
    return this._date;
  }
  asMoment(date: Date) {
    return moment(date);
  }
}
|
<filename>api/index.js
// ============================================================
// Globals
// ============================================================
// URL prefix for the key/value store endpoints.
var ROUTE_STORE = "/v0/store";
// User-facing error messages.
var ERROR_MISSING_ID = "Please specify an ID to retrieve.",
	ERROR_INVALID_ENDPOINT = "Invalid endpoint",
	ERROR_INVALID_JSON = "Must specify valid JSON";
var STORE_TTL = 31556952; // Auto-deleted after 1 year
// Origins allowed to store data, with the key prefix used for each.
var ALLOWED_HOSTS = {
	"genomeribbon.com": {
		store: {
			prefix: "ribbon"
		}
	}
};
// CORS configuration shared by all JSON responses.
var CORS_ORIGIN = "*",
	CORS_HEADERS = {
		"content-type": "application/json",
		"Access-Control-Allow-Origin": CORS_ORIGIN,
		"Access-Control-Allow-Methods": "HEAD, GET, POST, OPTIONS",
		"Access-Control-Allow-Headers": "Content-Type",
	};
// ============================================================
// Utility functions
// ============================================================
// Generate a UUID (source: https://stackoverflow.com/a/2117523)
// Builds the template "10000000-1000-4000-8000-100000000000" via numeric
// coercion, then replaces each 0/1/8 digit with a crypto-grade random hex
// digit to produce an RFC 4122 version-4 UUID.
function uuidv4() {
	return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, c =>
		(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
	);
}
// ============================================================
// Success/error functions
// ============================================================
// Build a 400 JSON response using the standard payload envelope
// ({success, message, error_code, data}) and the shared CORS headers.
function error_400(message) {
	let payload = {
		success: false,
		message: message,
		error_code: 400,
		data: {}
	};
	return new Response(JSON.stringify(payload), {
		headers: CORS_HEADERS,
		status: 400
	});
}
// Build a 200 JSON response wrapping `data` in the standard payload
// envelope; `message` defaults to "OK".
function success(data, message="OK") {
	let payload = {
		success: true,
		message: message,
		error_code: 200,
		data: data
	};
	return new Response(JSON.stringify(payload), {
		headers: CORS_HEADERS,
		status: 200
	});
}
// ============================================================
// Handle requests
// ============================================================
// Route an API request. Supports GET/POST/DELETE on the /v0/store
// endpoints backed by the STORE KV namespace; everything else is a 400.
async function handleRequest(request)
{
	// Fetch user parameters
	const url = new URL(request.url),
		method = request.method,
		endpoint = url.pathname;
	// Only accept expected hosts (origin = null outside browser)
	let origin = request.headers.get('Origin');
	if(origin == null)
		origin = "null";
	else
		origin = origin.replace(/https?:\/\/(www\.)?/, "");
	// ----------------------------------------------------------
	// /store/<some/path>
	// ----------------------------------------------------------
	if(endpoint.startsWith(ROUTE_STORE))
	{
		// Parse token, if any
		let url = new URL(request.url),
			token = url.searchParams.get("token"),
			ttl = url.searchParams.get("ttl");
		// Sanitize the endpoint (trim whitespace and slashes)
		let uuid = endpoint
			.replace(ROUTE_STORE, "")
			.trim()
			.replace(/\/$/, "")  // remove trailing slash
			.replace(/^\//, "")  // remove leading slash
		// --------------------------------------------------------
		// GET /store/<some/path>
		// --------------------------------------------------------
		if(method == "GET") {
			// Must specify an ID
			if(uuid == null || uuid == "")
				return error_400(ERROR_MISSING_ID);
			// Fetch the ID of interest
			return success(await STORE.get(uuid, "json"));
		// --------------------------------------------------------
		// POST /store (body = contents)
		// --------------------------------------------------------
		} else if(method == "POST") {
			// Parse out the JSON
			let value = null, key = null, time_to_live;
			try {
				value = JSON.stringify(await request.json());
			} catch(e) {
				return error_400(ERROR_INVALID_JSON);
			}
			// If no uuid, generate a random one
			if(uuid == null || uuid == "" || token != TOKEN_STORE)
				uuid = uuidv4();
			if(token == TOKEN_STORE) {
				// Privileged path: custom key, optionally custom TTL.
				key = uuid;
				time_to_live = (ttl != null) ? ttl : STORE_TTL;
			} else {
				// FIX: an origin missing from ALLOWED_HOSTS previously crashed
				// with a TypeError when dereferencing .store.prefix, surfacing
				// as a 500; reject such requests cleanly instead.
				const hostConfig = ALLOWED_HOSTS[origin];
				if(hostConfig == null)
					return error_400(ERROR_INVALID_ENDPOINT);
				key = `${hostConfig.store.prefix}/${uuid}`;
				time_to_live = STORE_TTL;
			}
			// Store the data under the computed key
			await STORE.put(key, value, { expirationTtl: time_to_live });
			return success(key);
		// --------------------------------------------------------
		// DELETE /store/<some/path>
		// --------------------------------------------------------
		} else if(method == "DELETE") {
			if(token != TOKEN_STORE || uuid.startsWith("_") || uuid == null || uuid == "")
				return error_400(ERROR_INVALID_ENDPOINT);
			return success(await STORE.delete(uuid));
		}
	}
	return error_400(ERROR_INVALID_ENDPOINT);
}
// ============================================================
// CORS Support
// ============================================================
// Respond to an OPTIONS request: full CORS pre-flight headers when the
// request carries the pre-flight trio of headers, a bare Allow otherwise.
function handleOptions(request) {
	// Make sure the necesssary headers are present
	// for this to be a valid pre-flight request
	if (
		request.headers.get('Origin') !== null &&
		request.headers.get('Access-Control-Request-Method') !== null &&
		request.headers.get('Access-Control-Request-Headers') !== null
	) {
		// Handle CORS pre-flight request.
		return new Response(null, {
			// We support the GET, POST, HEAD, and OPTIONS methods from any origin,
			// and accept the Content-Type header on requests. These headers must be
			// present on all responses to all CORS requests. In practice, this means
			// all responses to OPTIONS requests.
			headers: {
				'Access-Control-Allow-Origin': CORS_ORIGIN,
				'Access-Control-Allow-Methods': 'GET, HEAD, OPTIONS',
				'Access-Control-Allow-Headers': 'Content-Type, Range',
				'Access-Control-Max-Age': 600 // Cache OPTIONS queries for 10 mins
			},
		});
	} else {
		// Handle standard OPTIONS request.
		return new Response(null, {
			headers: { Allow: 'GET, HEAD, OPTIONS' },
		});
	}
}
// ============================================================
// On function load
// ============================================================
// Worker entry point: route OPTIONS pre-flights to the CORS handler and
// everything else to the API handler.
addEventListener('fetch', event => {
	let request = event.request;
	if (request.method === 'OPTIONS')
		event.respondWith(handleOptions(request));
	else
		event.respondWith(handleRequest(request));
});
|
<gh_stars>0
import Ux from 'ux';
import * as U from 'underscore';
// Executes the deferred success promise, then runs the user callback and
// (optionally) a post-success dialog or message, resolving the outer
// Promise with the callback's result.
const thenCallback = (promise, {
    isDialog, isWindow, isReduxSubmit,
    ref, postDialog, callback, postKey
}, resolve) => {
    // `promise` must be a zero-argument function returning a real Promise,
    // so execution is delayed until this point.
    const executed = promise();
    Ux.E.fxTerminal(!(executed instanceof Promise), 10088, executed);
    if (executed instanceof Promise) {
        return executed.then(data => {
            const fnCallback = () => {
                // Clear the redux "submitting" flag, run the user callback,
                // and resolve with its (possibly defaulted) result.
                if (isReduxSubmit) Ux.rdxSubmitting(ref, false);
                let ret = callback(data);
                if (!ret) ret = {};
                resolve(ret);
            };
            // isWindow: show a post-success window (dialog or message).
            if (isWindow) {
                // Dialog mode: the dialog itself triggers the callback.
                if (isDialog) {
                    postDialog(ref, postKey, fnCallback, data)
                } else {
                    // Message mode: show the message, then run the callback.
                    postDialog(ref, postKey);
                    fnCallback();
                }
            } else {
                fnCallback();
            }
        });
    }
};
/**
 * Fluent builder wrapping a submit-style action with optional validation,
 * a confirm dialog, a post-success dialog/message, and redux "submitting"
 * bookkeeping. Build with RxOp.from(ref)....to(callback).
 */
class RxOp {
    private validation: any = []; // collected {cond, key} validation items
    private _success; // zero-arg function returning the success Promise
    private _failure; // optional error handler
    private _confirmKey; // dialog key for the confirm step
    private _postKey; // dialog/message key shown after success
    private isPromiseReturn: Boolean = false; // by default do not use the Promise reject flow
    private isDialog: Boolean = true;
    private isWindowUse: Boolean = false; // post-success window disabled by default
    private isReduxSubmit: Boolean = true; // enable the redux submitting flag
    private reference;
    private constructor(reference: any) {
        this.reference = reference;
    }
    // Entry point: start a chain bound to the given component reference.
    static from(reference: any) {
        return new RxOp(reference);
    }
    // Toggle redux submitting-flag management.
    submitting(on: Boolean = true) {
        this.isReduxSubmit = on;
        return this;
    }
    // Validation failures show a dialog instead of rejecting.
    direct() {
        this.isPromiseReturn = false;
        return this;
    }
    // Validation failures reject the returned Promise.
    reject() {
        this.isPromiseReturn = true;
        return this;
    }
    // Accepts (condition, dialogKey) pairs; truthy conditions trigger the
    // corresponding dialog in to(). Functions are evaluated immediately.
    // NOTE(review): the parameter name "inputes" is a typo for "inputs";
    // kept as-is since it is purely local.
    verify(...inputes) {
        if (inputes) {
            const reference = this.validation;
            for (let idx = 0; idx < inputes.length - 1; idx = idx + 2) {
                const cond = inputes[idx];
                const key = inputes[idx + 1];
                if (undefined !== key) {
                    const item: any = {};
                    item.cond = U.isFunction(cond) ? cond() : cond;
                    item.key = key;
                    reference.push(item);
                }
            }
        }
        return this;
    }
    // Require a confirm dialog (by key) before running the action.
    confirm(key) {
        if (key) {
            this._confirmKey = key;
        }
        return this;
    }
    // Set the deferred action: a function returning a Promise.
    success(promise) {
        this._success = promise;
        return this;
    }
    // Attach an error handler for the action's rejection path.
    failure(fnFailure) {
        if (U.isFunction(fnFailure)) {
            this._failure = fnFailure;
        }
        return this;
    }
    // Show a dialog (by key) after the action succeeds.
    dialog(key) {
        this.isWindowUse = true;
        if (key) {
            this._postKey = key;
            this.isDialog = true;
        }
        return this;
    }
    // Show a message (by key) after the action succeeds.
    message(key) {
        this.isWindowUse = true;
        if (key) {
            this._postKey = key;
            this.isDialog = false;
        }
        return this;
    }
    // Terminal operation: run validations, optional confirm dialog, then
    // the action; `callback` receives the action's resolved data.
    to(callback) {
        const ref = this.reference;
        const validation = this.validation;
        const confirmKey = this._confirmKey;
        const promise = this._success;
        const isPromiseReturn = this.isPromiseReturn;
        const isReduxSubmit = this.isReduxSubmit;
        // Guard against duplicate submission.
        if (isReduxSubmit) {
            Ux.rdxSubmitting(ref, true);
        }
        // Run validations.
        for (let idx = 0; idx < validation.length; idx++) {
            const item = validation[idx];
            if (item.cond) {
                if (isPromiseReturn) {
                    const message = Ux.fromPath(ref, "modal", "error", item.key);
                    // The loading effect must be shut off via the reject flow.
                    return Ux.rdxReject(message);
                } else {
                    // For now isPromiseReturn is only honored on the reject path.
                    // NOTE(review): this Promise never resolves or rejects, so a
                    // caller chaining .then() after a validation failure hangs —
                    // confirm whether that is intended.
                    return new Promise((resolve, reject) => {
                        Ux.showDialog(ref, item.key, () => {
                            if (isReduxSubmit) {
                                Ux.rdxSubmitting(ref, false);
                            } else {
                                // Otherwise $loading must be reset to false.
                                ref.setState({$loading: false});
                            }
                        });
                    })
                }
            }
        }// Was a postKey configured?
        const postDialog = this.isDialog ? Ux.showDialog : Ux.showMessage;
        const isDialog = this.isDialog;
        const postKey = this._postKey;
        const isWindow = this.isWindowUse;
        // Validation passed; run the confirm flow when configured.
        let fnPromise;
        if (confirmKey) {
            fnPromise = new Promise((resolve) => Ux.showDialog(ref, confirmKey,
                () => thenCallback(promise, {
                    postDialog, postKey,
                    isDialog, isWindow, isReduxSubmit,
                    ref, callback
                }, resolve), {},
                () => {
                    if (isReduxSubmit) Ux.rdxSubmitting(ref, false);
                    resolve({})
                }));
        } else {
            fnPromise = new Promise((resolve) => thenCallback(promise, {
                postDialog, postKey,
                isDialog, isWindow, isReduxSubmit,
                ref, callback,
            }, resolve));
        }
        // Is a failure handler attached?
        if (!this._failure) {
            return fnPromise;
        } else {
            // Error path.
            const fnFailure = this._failure;
            return fnPromise.catch(errors => fnFailure(errors));
        }
    }
}
public static int sumOfDigits(int number)
{
int sum = 0;
while (number != 0)
{
sum = sum + number % 10;
number = number/10;
}
return sum;
} |
#!/bin/bash
###################################################################
#Script Name : compressData.sh
#Description : Compress output global VIIRS geotiffs
#Args : Directory path with geotiffs
#Author : Leonidas Liakos
#Email : lliakos@uth.gr
#Date : 05/2019
###################################################################
# Require a target directory and move into it; "$1" is quoted so paths
# containing spaces work, and a failed cd aborts instead of compressing
# whatever happens to be in the current directory.
if [ "$1" != "" ]; then
    cd "$1" || exit 1
else
    echo "Please provide a directory with monthly VIIRS masked tiles"
    exit 1
fi
# Iterate with a glob instead of parsing `ls` output (safe for unusual
# file names); each expansion is quoted.
for file in *_vcmslcfg.avg_rade9h.Uint16.masked.tif; do
    tar -czvf "$file.tar.gz" "$file"
done
|
#!/bin/bash
source config.sh
source utils.sh
# API_KEY=$TF_VAR_ibmcloud_api_key
# ENV=$TF_VAR_environment
# ORG=$TF_VAR_node
# REGION=$TF_VAR_region
# RESOURCE_GROUP="rg-$ENV-$ORG"
# # # Login to IBM Cloud
# ibmcloud login --apikey $API_KEY -g $RESOURCE_GROUP
# IBP_SERVICE_INSTANCE="$ENV-$ORG-ibp"
# IBP_SERVICE_KEY="$ENV-$ORG-IBP-ServiceCredential"
# # #Create a Service credential for IBP
# ibmcloud resource service-key-create $IBP_SERVICE_KEY Manager --instance-name $IBP_SERVICE_INSTANCE
# # ## Debug the output of each element in Service Credntial
# ORDERER_IBP_API_ENDPOINT=$(ibmcloud resource service-keys --instance-name $IBP_SERVICE_INSTANCE --output json | jq '.[0].credentials.api_endpoint' |sed 's/https\?:\/\///' | tr -d '"')
# ORDERER_IBP_API_KEY=$(ibmcloud resource service-keys --instance-name $IBP_SERVICE_INSTANCE --output json | jq '.[0].credentials.apikey' | tr -d '"')
# echo $ORDERER_IBP_API_ENDPOINT
# echo $ORDERER_IBP_API_KEY
# # ## Replacing $variables in .yml files
# if [ "$1" == "initsetup" ]
# then
# cp $PWD/metadata/ordering-org-vars.yml $PWD/ansible/ordering-org-vars.yml
# cp $PWD/metadata/org1-vars.yml $PWD/ansible/org1-vars.yml
# #Use same for both Orderer and Org1 for now
# sed -i 's/${ORDERER_IBP_API_ENDPOINT}/'$ORDERER_IBP_API_ENDPOINT'/g' ansible/ordering-org-vars.yml
# sed -i 's/${ORDERER_IBP_API_KEY}/'$ORDERER_IBP_API_KEY'/g' ansible/ordering-org-vars.yml
# #Use same for both Orderer and Org1 for now
# sed -i 's/${ORDERER_IBP_API_ENDPOINT}/'$ORDERER_IBP_API_ENDPOINT'/g' ansible/org1-vars.yml
# sed -i 's/${ORDERER_IBP_API_KEY}/'$ORDERER_IBP_API_KEY'/g' ansible/org1-vars.yml
# fi
# if [ "$1" == "neworg" ]
# then
# cp $PWD/metadata/org2-vars.yml $PWD/ansible/org2-vars.yml
# sed -i 's/${ORDERER_IBP_API_ENDPOINT}/'$ORDERER_IBP_API_ENDPOINT'/g' ansible/org2-vars.yml
# sed -i 's/${ORDERER_IBP_API_KEY}/'$ORDERER_IBP_API_KEY'/g' ansible/org2-vars.yml
# fi
# chmod -R 777 ansible/
pwd
cd ansible
ORG=$TF_VAR_node
# Execute ansible playbooks for network setup (the "aais" org bootstraps
# the network; any other org joins an existing one).
if [ "$ORG" == "aais" ]
then
    # Create AAIS Network
    ./build_network.sh build | tee "$AAIS_RESULT"
    cat "$AAIS_RESULT"
    # Compare the number of 'failed=0' recap lines with the number of
    # 'failed=' recap lines: equal counts mean no ansible play failed.
    EXCE_RESULT=$(grep -o -i 'failed=0' "$AAIS_RESULT" | wc -l)
    EXCE_RESULT1=$(grep -o -i 'failed=' "$AAIS_RESULT" | wc -l)
    if [ "$EXCE_RESULT" -eq "$EXCE_RESULT1" ]
    then
        echo "IBP built successfully. Log is available in $AAIS_RESULT file"
    else
        error_exit "Failed IBP build network. Log is available in $AAIS_RESULT file"
    fi
fi
# Execute ansible playbooks for new org setup
if [ "$ORG" != "aais" ]
then
    # Join the existing network as a carrier org
    ./join_network.sh -i join | tee "$CARRIER_RESULT"
    cat "$CARRIER_RESULT"
    EXCE_RESULT=$(grep -o -i 'failed=0' "$CARRIER_RESULT" | wc -l)
    EXCE_RESULT1=$(grep -o -i 'failed=' "$CARRIER_RESULT" | wc -l)
    if [ "$EXCE_RESULT" -eq "$EXCE_RESULT1" ]
    then
        echo "New $1 built successfully. Log is available in $CARRIER_RESULT file"
    else
        error_exit "Failed IBP build network. Log is available in $CARRIER_RESULT file"
    fi
fi
|
// On DOM ready: fade in the career section over one second, then fade in
// the people list with the same duration once the first animation ends.
$(function () {
    var fadeDuration = 1000;
    $("#career").fadeIn(fadeDuration, function () {
        $(".people").fadeIn(fadeDuration);
    });
});
#!/bin/bash
set -xeu
set -o pipefail
trap "exit" INT TERM
trap 'kill $(jobs -pr) ||:' EXIT
# This script is separated into two stages, cloning and everything else, so
# that we can run the "everything else" stage from the cloned source.
stage=${stage:-}
# A variable to pass additional flags to CMake.
# Here we explicitly default it to nothing so that bash doesn't complain about
# it being undefined. Also read it as array so that we can pass an empty list
# of additional variable to cmake properly, and it doesn't generate an extra
# empty parameter.
read -ra FASTTEST_CMAKE_FLAGS <<< "${FASTTEST_CMAKE_FLAGS:-}"
# Run only matching tests.
FASTTEST_FOCUS=${FASTTEST_FOCUS:-""}
FASTTEST_WORKSPACE=$(readlink -f "${FASTTEST_WORKSPACE:-.}")
FASTTEST_SOURCE=$(readlink -f "${FASTTEST_SOURCE:-$FASTTEST_WORKSPACE/ch}")
FASTTEST_BUILD=$(readlink -f "${FASTTEST_BUILD:-${BUILD:-$FASTTEST_WORKSPACE/build}}")
FASTTEST_DATA=$(readlink -f "${FASTTEST_DATA:-$FASTTEST_WORKSPACE/db-fasttest}")
FASTTEST_OUTPUT=$(readlink -f "${FASTTEST_OUTPUT:-$FASTTEST_WORKSPACE}")
PATH="$FASTTEST_BUILD/programs:$FASTTEST_SOURCE/tests:$PATH"
# Export these variables, so that all subsequent invocations of the script
# use them, and not try to guess them anew, which leads to weird effects.
export FASTTEST_WORKSPACE
export FASTTEST_SOURCE
export FASTTEST_BUILD
export FASTTEST_DATA
# Fix: this previously exported FASTTEST_OUT, a variable that is never
# defined anywhere; the variable actually used for logs throughout the
# script is FASTTEST_OUTPUT, which was consequently never exported.
export FASTTEST_OUTPUT
export PATH
# PID of the clickhouse-server we spawn; "none" means not running.
server_pid=none
# Stop the ClickHouse server whose PID is held in the global $server_pid.
# Retries pkill/kill for up to ~60 seconds; returns 0 when the server is
# gone (or was never running), 1 if it survives all attempts.
function stop_server
{
if ! kill -0 -- "$server_pid"
then
echo "ClickHouse server pid '$server_pid' is not running"
return 0
fi
for _ in {1..60}
do
# Keep signalling until both pkill and kill report nothing left to kill.
if ! pkill -f "clickhouse-server" && ! kill -- "$server_pid" ; then break ; fi
sleep 1
done
if kill -0 -- "$server_pid"
then
# Still alive: dump the process tree and job table to aid debugging.
pstree -apgT
jobs
echo "Failed to kill the ClickHouse server pid '$server_pid'"
return 1
fi
server_pid=none
}
# Start clickhouse-server in the background using $FASTTEST_DATA for its
# configuration and state, appending output to $FASTTEST_OUTPUT/server.log.
# Sets the global $server_pid and waits up to ~60s for the server to answer
# a "select 1" query. Returns 1 if startup or the readiness check fails.
function start_server
{
set -m # Spawn server in its own process groups
local opts=(
--config-file "$FASTTEST_DATA/config.xml"
--
--path "$FASTTEST_DATA"
--user_files_path "$FASTTEST_DATA/user_files"
--top_level_domains_path "$FASTTEST_DATA/top_level_domains"
)
clickhouse-server "${opts[@]}" &>> "$FASTTEST_OUTPUT/server.log" &
server_pid=$!
set +m
if [ "$server_pid" == "0" ]
then
echo "Failed to start ClickHouse server"
# Avoid zero PID because `kill` treats it as our process group PID.
server_pid="none"
return 1
fi
for _ in {1..60}
do
# Leave the wait loop early if the server responds, or if it died.
if clickhouse-client --query "select 1" || ! kill -0 -- "$server_pid"
then
break
fi
sleep 1
done
if ! clickhouse-client --query "select 1"
then
echo "Failed to wait until ClickHouse server starts."
server_pid="none"
return 1
fi
# Responded, but our child is dead: another server instance answered.
if ! kill -0 -- "$server_pid"
then
echo "Wrong clickhouse server started: PID '$server_pid' we started is not running, but '$(pgrep -f clickhouse-server)' is running"
server_pid="none"
return 1
fi
echo "ClickHouse server pid '$server_pid' started and responded"
}
# Clone the ClickHouse repository into $FASTTEST_SOURCE and check out the
# revision under test: the PR merge ref when PULL_REQUEST_NUMBER is set and
# non-zero, otherwise COMMIT_SHA (when that variable is defined).
function clone_root
{
git clone https://github.com/ClickHouse/ClickHouse.git -- "$FASTTEST_SOURCE" | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/clone_log.txt"
(
cd "$FASTTEST_SOURCE"
if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
if git fetch origin "+refs/pull/$PULL_REQUEST_NUMBER/merge"; then
git checkout FETCH_HEAD
echo 'Clonned merge head'
else
# No merge ref available -- fall back to the PR head at COMMIT_SHA.
git fetch origin "+refs/pull/$PULL_REQUEST_NUMBER/head"
git checkout "$COMMIT_SHA"
echo 'Checked out to commit'
fi
else
if [ -v COMMIT_SHA ]; then
git checkout "$COMMIT_SHA"
fi
fi
)
}
# Initialize only the submodules the fast test build needs, then hard-reset
# and clean each one so the build starts from pristine sources.
function clone_submodules
{
(
cd "$FASTTEST_SOURCE"
SUBMODULES_TO_UPDATE=(
contrib/antlr4-runtime
contrib/boost
contrib/zlib-ng
contrib/libxml2
contrib/poco
contrib/libunwind
contrib/fmtlib
contrib/base64
contrib/cctz
contrib/libcpuid
contrib/double-conversion
contrib/libcxx
contrib/libcxxabi
contrib/libc-headers
contrib/lz4
contrib/zstd
contrib/fastops
contrib/rapidjson
contrib/re2
contrib/sparsehash-c11
contrib/croaring
contrib/miniselect
contrib/xz
contrib/dragonbox
contrib/fast_float
contrib/NuRaft
)
git submodule sync
git submodule update --init --recursive "${SUBMODULES_TO_UPDATE[@]}"
# Discard any local state inside the submodules (tracked and untracked).
git submodule foreach git reset --hard
git submodule foreach git checkout @ -f
git submodule foreach git clean -xfd
)
}
# Configure the build in $FASTTEST_BUILD: minimal feature set for a fast
# build, clang-10 toolchain, and a ccache rooted in the workspace.
function run_cmake
{
CMAKE_LIBS_CONFIG=(
"-DENABLE_LIBRARIES=0"
"-DENABLE_TESTS=0"
"-DENABLE_UTILS=0"
"-DENABLE_EMBEDDED_COMPILER=0"
"-DENABLE_THINLTO=0"
"-DUSE_UNWIND=1"
"-DENABLE_NURAFT=1"
)
# TODO remove this? we don't use ccache anyway. An option would be to download it
# from S3 simultaneously with cloning.
export CCACHE_DIR="$FASTTEST_WORKSPACE/ccache"
export CCACHE_BASEDIR="$FASTTEST_SOURCE"
export CCACHE_NOHASHDIR=true
export CCACHE_COMPILERCHECK=content
export CCACHE_MAXSIZE=15G
ccache --show-stats ||:
ccache --zero-stats ||:
# mkdir may fail if the directory already exists; that's fine (||:).
mkdir "$FASTTEST_BUILD" ||:
(
cd "$FASTTEST_BUILD"
cmake "$FASTTEST_SOURCE" -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 "${CMAKE_LIBS_CONFIG[@]}" "${FASTTEST_CMAKE_FLAGS[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/cmake_log.txt"
)
}
# Build the clickhouse-bundle target with ninja, teeing a timestamped log,
# and optionally copy the resulting binary into $FASTTEST_OUTPUT.
function build
{
(
cd "$FASTTEST_BUILD"
time ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/build_log.txt"
if [ "$COPY_CLICKHOUSE_BINARY_TO_OUTPUT" -eq "1" ]; then
cp programs/clickhouse "$FASTTEST_OUTPUT/clickhouse"
fi
ccache --show-stats ||:
)
}
# Populate $FASTTEST_DATA with server/client configuration copied from the
# source tree, then remove config the fasttest build cannot support.
function configure
{
clickhouse-client --version
clickhouse-test --help
mkdir -p "$FASTTEST_DATA"{,/client-config}
cp -a "$FASTTEST_SOURCE/programs/server/"{config,users}.xml "$FASTTEST_DATA"
"$FASTTEST_SOURCE/tests/config/install.sh" "$FASTTEST_DATA" "$FASTTEST_DATA/client-config"
cp -a "$FASTTEST_SOURCE/programs/server/config.d/log_to_console.xml" "$FASTTEST_DATA/config.d"
# doesn't support SSL
rm -f "$FASTTEST_DATA/config.d/secure_ports.xml"
}
# Run the functional test suite in parallel against a freshly (re)started
# server, skipping tests that need features disabled in the fasttest build
# or that cannot run concurrently. Any tests that fail in the parallel run
# are retried sequentially on a clean data directory before being reported.
function run_tests
{
clickhouse-server --version
clickhouse-test --help
# Kill the server in case we are running locally and not in docker
stop_server ||:
start_server
# Entries are test-name prefixes/substrings; inline comments explain why a
# test is excluded (missing feature, external dependency, parallel unsafe).
TESTS_TO_SKIP=(
00105_shard_collations
00109_shard_totals_after_having
00110_external_sort
00302_http_compression
00417_kill_query
00436_convert_charset
00490_special_line_separators_and_characters_outside_of_bmp
00652_replicated_mutations_zookeeper
00682_empty_parts_merge
00701_rollup
00834_cancel_http_readonly_queries_on_client_close
00911_tautological_compare
# Hyperscan
00926_multimatch
00929_multi_match_edit_distance
01681_hyperscan_debug_assertion
01031_mutations_interpreter_and_context
01053_ssd_dictionary # this test mistakenly requires acces to /var/lib/clickhouse -- can't run this locally, disabled
01083_expressions_in_engine_arguments
01092_memory_profiler
01098_msgpack_format
01098_temporary_and_external_tables
01103_check_cpu_instructions_at_startup # avoid dependency on qemu -- invonvenient when running locally
01193_metadata_loading
01238_http_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently
01251_dict_is_in_infinite_loop
01259_dictionary_custom_settings_ddl
01268_dictionary_direct_layout
01280_ssd_complex_key_dictionary
01281_group_by_limit_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently
01318_encrypt # Depends on OpenSSL
01318_decrypt # Depends on OpenSSL
01663_aes_msan # Depends on OpenSSL
01667_aes_args_check # Depends on OpenSSL
01281_unsucceeded_insert_select_queries_counter
01292_create_user
01294_lazy_database_concurrent
01305_replica_create_drop_zookeeper
01354_order_by_tuple_collate_const
01355_ilike
01411_bayesian_ab_testing
01532_collate_in_low_cardinality
01533_collate_in_nullable
01542_collate_in_array
01543_collate_in_tuple
_orc_
arrow
avro
base64
brotli
capnproto
client
ddl_dictionaries
h3
hashing
hdfs
java_hash
json
limit_memory
live_view
memory_leak
memory_limit
mysql
odbc
parallel_alter
parquet
protobuf
secure
sha256
xz
# Not sure why these two fail even in sequential mode. Disabled for now
# to make some progress.
00646_url_engine
00974_query_profiler
# In fasttest, ENABLE_LIBRARIES=0, so rocksdb engine is not enabled by default
01504_rocksdb
01686_rocksdb
# Look at DistributedFilesToInsert, so cannot run in parallel.
01460_DistributedFilesToInsert
01541_max_memory_usage_for_user
# Require python libraries like scipy, pandas and numpy
01322_ttest_scipy
01561_mann_whitney_scipy
01545_system_errors
# Checks system.errors
01563_distributed_query_finish
# nc - command not found
01601_proxy_protocol
01622_defaults_for_url_engine
# JSON functions
01666_blns
01674_htm_xml_coarse_parse
)
(time clickhouse-test --hung-check -j 8 --order=random --use-skip-list --no-long --testname --shard --zookeeper --skip "${TESTS_TO_SKIP[@]}" -- "$FASTTEST_FOCUS" 2>&1 ||:) | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/test_log.txt"
# substr is to remove semicolon after test name
readarray -t FAILED_TESTS < <(awk '/\[ FAIL|TIMEOUT|ERROR \]/ { print substr($3, 1, length($3)-1) }' "$FASTTEST_OUTPUT/test_log.txt" | tee "$FASTTEST_OUTPUT/failed-parallel-tests.txt")
# We will rerun sequentially any tests that have failed during parallel run.
# They might have failed because there was some interference from other tests
# running concurrently. If they fail even in seqential mode, we will report them.
# FIXME All tests that require exclusive access to the server must be
# explicitly marked as `sequential`, and `clickhouse-test` must detect them and
# run them in a separate group after all other tests. This is faster and also
# explicit instead of guessing.
if [[ -n "${FAILED_TESTS[*]}" ]]
then
stop_server ||:
# Clean the data so that there is no interference from the previous test run.
rm -rf "$FASTTEST_DATA"/{{meta,}data,user_files} ||:
start_server
echo "Going to run again: ${FAILED_TESTS[*]}"
clickhouse-test --hung-check --order=random --no-long --testname --shard --zookeeper "${FAILED_TESTS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a "$FASTTEST_OUTPUT/test_log.txt"
else
echo "No failed tests"
fi
}
# Stage dispatcher. The `;&` terminators deliberately fall through, so an
# empty $stage runs every stage in order, while stage=X resumes from X.
case "$stage" in
"")
ls -la
;&
"clone_root")
clone_root
# Pass control to the script from cloned sources, unless asked otherwise.
if ! [ -v FASTTEST_LOCAL_SCRIPT ]
then
# 'run' stage is deprecated, used for compatibility with old scripts.
# Replace with 'clone_submodules' after Nov 1, 2020.
# cd and CLICKHOUSE_DIR are also a setup for old scripts, remove as well.
# In modern script we undo it by changing back into workspace dir right
# away, see below. Remove that as well.
cd "$FASTTEST_SOURCE"
CLICKHOUSE_DIR=$(pwd)
export CLICKHOUSE_DIR
stage=run "$FASTTEST_SOURCE/docker/test/fasttest/run.sh"
exit $?
fi
;&
"run")
# A deprecated stage that is called by old script and equivalent to everything
# after cloning root, starting with cloning submodules.
;&
"clone_submodules")
# Recover after being called from the old script that changes into source directory.
# See the compatibility hacks in `clone_root` stage above. Remove at the same time,
# after Nov 1, 2020.
cd "$FASTTEST_WORKSPACE"
clone_submodules | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/submodule_log.txt"
;&
"run_cmake")
run_cmake
;&
"build")
build
;&
"configure")
# The `install_log.txt` is also needed for compatibility with old CI task --
# if there is no log, it will decide that build failed.
configure | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/install_log.txt"
;&
"run_tests")
run_tests
;;
*)
echo "Unknown test stage '$stage'"
exit 1
esac
# Final diagnostics: show the process tree and background jobs.
pstree -apgT
jobs
|
//
// Created by yuan on 8/15/19.
//
#include "XpathQueryTextFormatter.h"
#include <iostream>
using namespace std;
// Returns the immediate text content of the first node matched by query_,
// via pugixml's text() accessor (empty string when nothing matches).
std::string XpathSingleQueryDefaultInnerText::operator()(pugi::xml_node& root)
{
pugi::xml_node resNode = root.select_node(query_.pugiQuery()).node();
// printf("%s: %s\n", resNode.name(), resNode.text().get());
return resNode.text().get();
}
// Collects text from the matched node's entire subtree by traversing it
// with walker_; the walker is reset first so the formatter can be reused
// across calls.
std::string XpathSingleQueryGreedyInnerText::operator()(pugi::xml_node& root)
{
pugi::xml_node resNode = root.select_node(query_.pugiQuery()).node();
walker_.reset();
resNode.traverse(walker_);
return walker_.getInnerText();
}
// Same as the greedy variant, but with extra whitespace removed from the
// collected text via RemoveExtraWhitespace.
std::string XpathSingleQueryGreedyNoExtraSpaceInnerText::operator()(pugi::xml_node& root)
{
return RemoveExtraWhitespace(Base::operator()(root));
}
|
/**
 * A simple three-state traffic light. The light starts red and can be
 * switched directly to any colour via the changeTo* methods.
 */
public class TrafficLight {
    private String currentColor;

    /** Creates a light in its initial "red" state. */
    public TrafficLight() {
        this.currentColor = "red";
    }

    /** Switches the light to red. */
    public void changeToRed() {
        setColor("red");
    }

    /** Switches the light to yellow. */
    public void changeToYellow() {
        setColor("yellow");
    }

    /** Switches the light to green. */
    public void changeToGreen() {
        setColor("green");
    }

    /** @return the current colour: "red", "yellow" or "green". */
    public String getCurrentColor() {
        return this.currentColor;
    }

    // Single mutation point shared by the public state-change methods.
    private void setColor(String color) {
        this.currentColor = color;
    }
}
def minimum_sum_required(arr):
    """Return the smallest positive integer that cannot be represented as a
    sum of any subset of ``arr`` (assumed non-negative integers).

    Fix: the classic greedy scan is only correct when values are visited in
    ascending order, so we iterate over ``sorted(arr)``; the original
    silently assumed the caller passed a pre-sorted array. (The original
    comments also described the condition backwards.)
    """
    s = 1  # Smallest value not yet representable.
    for value in sorted(arr):
        if value <= s:
            # Every integer in [1, s) is representable, so adding `value`
            # extends that range to [1, s + value).
            s += value
        else:
            # Gap found: `s` cannot be formed by any remaining (larger) values.
            break
    return s
def letter_frequency(s):
    """Return a dict mapping each character of ``s`` to its occurrence count."""
    freq = {}
    for ch in s:
        # dict.get supplies 0 the first time a character is seen.
        freq[ch] = freq.get(ch, 0) + 1
    return freq

# Call the function and print the result
print(letter_frequency('Hello World'))
# Output: {'H': 1, 'e': 1, 'l': 3, 'o': 2, ' ': 1, 'W': 1, 'r': 1, 'd': 1}
#!/bin/bash
# CI hook for st2chatops packages: installs the built .deb, points the
# bundled hubot at Slack, and verifies it connects and loads commands.
set -eu
set -o pipefail
# First argument selects the operation; defaults to "test".
operation="${1:-test}"
case "$operation" in
pull)
;;
test)
# NOTE(review): relies on apt accepting local file paths; assumes
# $ARTIFACT_DIR holds the freshly built packages -- confirm in CI env.
apt-get install -y $ARTIFACT_DIR/*.deb
cd /opt/stackstorm/chatops
# Enable the Slack adapter and inject credentials into st2chatops.env.
sed -i.bak -r "s/^# (export HUBOT_ADAPTER=slack)/\1/" st2chatops.env
sed -i.bak -r "s/^# (export HUBOT_SLACK_TOKEN.).*/\1$SLACK_TOKEN/" st2chatops.env
sed -i.bak -r "s/^(export ST2_AUTH_USERNAME.).*/\1$ST2_USERNAME/" st2chatops.env
sed -i.bak -r "s/^(export ST2_AUTH_PASSWORD.).*/\1$ST2_PASSWORD/" st2chatops.env
# Start hubot in the background and give it time to connect.
bin/hubot &> /tmp/hubot.log &
sleep 15
cat /tmp/hubot.log
# Success == hubot reached Slack RTM AND loaded at least one command.
grep -rq "INFO Connected to Slack RTM" /tmp/hubot.log && \
grep -rq "INFO [[:digit:]]\+ commands are loaded" /tmp/hubot.log
exit $?
;;
*)
# Any other invocation: exec the given command (entrypoint-style).
[[ $# -gt 0 ]] && exec "$@"
;;
esac
|
# Preprocessing pipeline: load data.csv, drop incomplete records, one-hot
# encode the categorical columns, and write processed_data.csv.
import pandas as pd
# Read in dataset
df = pd.read_csv('data.csv')
# Drop records with missing values
df.dropna(inplace=True)
# Convert Gender, Age, State, Country, Hobby to categorical values
df['Gender'] = df.Gender.astype("category")
df['Age'] = df.Age.astype("category")
df['State'] = df.State.astype("category")
df['Country'] = df.Country.astype("category")
df['Hobby'] = df.Hobby.astype("category")
# One-hot encode categorical values
df = pd.get_dummies(df, columns=['Gender', 'Age', 'State', 'Country', 'Hobby'])
# Convert ID to a string
# NOTE(review): assumes data.csv contains an ID column -- confirm schema.
df['ID'] = df.ID.astype('str')
# Output dataset
df.to_csv('processed_data.csv', index=False)
import numpy as np
def max_pooling(x, k):
    """Apply non-overlapping k x k max pooling over the spatial dimensions
    of a (N, C, H, W) array.

    Returns a float64 array of shape (N, C, H // k, W // k); trailing rows
    and columns that do not fill a complete window are ignored.
    """
    N, C, H, W = x.shape
    out_h, out_w = H // k, W // k
    pooled = np.zeros((N, C, out_h, out_w))
    for row in range(out_h):
        rows = slice(row * k, (row + 1) * k)
        for col in range(out_w):
            cols = slice(col * k, (col + 1) * k)
            # Reduce each k x k window across both spatial axes at once.
            pooled[:, :, row, col] = x[:, :, rows, cols].max(axis=(2, 3))
    return pooled
if __name__ == '__main__':
# Test case
# 2x2 pooling over a single 4x4 channel keeps each window's maximum.
x = np.array([[[[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
[13, 14, 15, 16]]]])
k = 2
pooled_result = max_pooling(x, k)
print(pooled_result)
# Output: array([[[[ 6., 8.],
# [14., 16.]]]])
# Print the last 20 lines of the file given as the first argument.
# Quote "$1" so paths containing spaces survive word splitting, and use
# `--` so filenames beginning with a dash are not parsed as options.
tail -n 20 -- "$1"
|
#!/bin/bash
# Cross-compile huron with gox for the OS/arch matrix below and zip each
# per-platform build directory under build/pkg/.
set -e
# Get the parent directory of where this script is.
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ] ; do SOURCE="$(readlink "$SOURCE")"; done
DIR="$( cd -P "$( dirname "$SOURCE" )/.." && pwd )"
# Change into that dir because we expect that.
cd "$DIR"
# Get the git commit
GIT_COMMIT="$(git rev-parse --short HEAD)"
GIT_DESCRIBE="$(git describe --tags --always)"
GIT_IMPORT="github.com/abassian/huron/src/version"
# Determine the arch/os combos we're building for
XC_ARCH=${XC_ARCH:-"386 amd64 arm"}
XC_OS=${XC_OS:-"solaris darwin freebsd linux windows"}
# Get Go deps
echo "USER: `id -u $USER`"
# Use a throwaway glide cache so the build neither depends on nor pollutes
# the invoking user's home directory.
mkdir -p glide_cache
glide --home "glide_cache" install
rm -rf glide_cache
# Build!
echo "==> Building..."
# Bake the git commit/describe into the version package via -ldflags -X.
"$(which gox)" \
-os="${XC_OS}" \
-arch="${XC_ARCH}" \
-osarch="!darwin/arm !solaris/amd64 !freebsd/amd64" \
-ldflags "-X ${GIT_IMPORT}.GitCommit='${GIT_COMMIT}' -X ${GIT_IMPORT}.GitDescribe='${GIT_DESCRIBE}'" \
-output "build/pkg/{{.OS}}_{{.Arch}}/huron" \
-tags="${BUILD_TAGS}" \
github.com/abassian/huron/cmd/huron
# Zip all the files.
echo "==> Packaging..."
for PLATFORM in $(find ./build/pkg -mindepth 1 -maxdepth 1 -type d); do
OSARCH=$(basename "${PLATFORM}")
echo "--> ${OSARCH}"
pushd "$PLATFORM" >/dev/null 2>&1
zip "../${OSARCH}.zip" ./*
popd >/dev/null 2>&1
done
exit 0
|
package com.flash3388.frc.nt.ntp;
import com.flash3388.flashlib.time.Clock;
import com.flash3388.flashlib.time.Time;
import java.util.concurrent.atomic.AtomicLong;
/**
 * A {@link Clock} decorator that applies an NTP-derived millisecond offset
 * on top of a base clock. The offset lives in an AtomicLong so reads and
 * updates need no locking.
 */
public class NtpClock implements Clock {
// Clock supplying the raw local time.
private final Clock mBaseClock;
// Accumulated correction, in milliseconds, added to the base clock's time.
private final AtomicLong mOffsetMillis;
public NtpClock(Clock baseClock) {
mBaseClock = baseClock;
mOffsetMillis = new AtomicLong(0);
}
@Override
public Time currentTime() {
long currentTimeMillis = mBaseClock.currentTime().valueAsMillis();
return Time.milliseconds(currentTimeMillis + mOffsetMillis.get());
}
// NOTE(review): addAndGet ACCUMULATES successive offsets rather than
// replacing the previous one -- confirm callers pass deltas, not
// absolute offsets.
void updateOffset(long offsetMillis) {
mOffsetMillis.addAndGet(offsetMillis);
}
}
|
#!/bin/sh
#
# Copyright (c) 2008 Peter Holm <pho@FreeBSD.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $FreeBSD$
#
[ `id -u ` -ne 0 ] && echo "Must be root!" && exit 1
# Test with snapshot file unlinked before unmount
# Clean up any leftovers from a previous run before starting.
mount | grep "/dev/md0 on /mnt" > /dev/null && umount /mnt
mdconfig -l | grep -q md0 && mdconfig -d -u 0
rm -f /tmp/.snap/pho
trap "rm -f /tmp/.snap/pho" 0
# 64 iterations: snapshot /tmp, attach the snapshot read-only via md(4),
# mount it, unlink the backing file WHILE still mounted, then unmount and
# detach -- exercising the unlinked-snapshot unmount path.
for i in `jot 64`; do
mksnap_ffs /tmp /tmp/.snap/pho
mdconfig -a -t vnode -f /tmp/.snap/pho -u 0 -o readonly
mount -o ro /dev/md0 /mnt
ls -l /mnt > /dev/null
rm -f /tmp/.snap/pho
sleep 1
umount /mnt
mdconfig -d -u 0
rm -f /tmp/.snap/pho
done
|
import * as React from "react";
import FactaButton from '@facta/FactaButton';
import { History } from 'history';
import FactaArticleRegion from '@facta/FactaArticleRegion';
import { setJPImage } from "@util/set-bg-image";
import { jpBasePath } from "@paths/jp/_base";
import FactaMainPage from "@facta/FactaMainPage";
/**
 * Static "registration closed" page for the Junior Program: shows a notice
 * and a back button that navigates to the JP base path.
 */
export default class JpRegistrationClosedPage extends React.PureComponent<{history: History<any>}> {
render() {
return <FactaMainPage setBGImage={setJPImage}>
<FactaArticleRegion title="Registration is closed.">
Registration is closed for the 2021 season. Please keep an eye on our <a target="_blank" href="https://www.community-boating.org">website</a> for when registration will open for 2022.
</FactaArticleRegion>
{/* NOTE(review): push() is wrapped in Promise.resolve, presumably because
FactaButton's onClick expects a Promise -- confirm against FactaButton. */}
<FactaButton text="< Back" onClick={() => Promise.resolve(this.props.history.push(jpBasePath.getPathFromArgs({})))}/>
</FactaMainPage>
}
}
|
#!/bin/bash
# Kubespray CI driver: provisions the test platform, deploys a cluster,
# optionally exercises the upgrade / control-plane-recovery paths, then
# runs the functional test playbooks and idempotency checks.
set -euxo pipefail
echo "CI_JOB_NAME is $CI_JOB_NAME"
# Sanity check: the job name and UPGRADE_TEST must agree.
if [[ "$CI_JOB_NAME" =~ "upgrade" ]]; then
  if [ "${UPGRADE_TEST}" == "false" ]; then
    echo "Job name contains 'upgrade', but UPGRADE_TEST='false'"
    exit 1
  fi
else
  if [ "${UPGRADE_TEST}" != "false" ]; then
    # Fix: message previously read "job names does not contain".
    echo "UPGRADE_TEST!='false', but job name does not contain 'upgrade'"
    exit 1
  fi
fi
export ANSIBLE_REMOTE_USER=$SSH_USER
export ANSIBLE_BECOME=true
export ANSIBLE_BECOME_USER=root
cd tests && make create-${CI_PLATFORM} -s ; cd -
ansible-playbook tests/cloud_playbooks/wait-for-ssh.yml
# CoreOS needs auto update disabled
if [[ "$CI_JOB_NAME" =~ "coreos" ]]; then
  ansible all -m raw -a 'systemctl disable locksmithd'
  ansible all -m raw -a 'systemctl stop locksmithd'
  mkdir -p /opt/bin && ln -s /usr/bin/python /opt/bin/python
fi
if [[ "$CI_JOB_NAME" =~ "opensuse" ]]; then
  # OpenSUSE needs netconfig update to get correct resolv.conf
  # See https://goinggnu.wordpress.com/2013/10/14/how-to-fix-the-dns-in-opensuse-13-1/
  ansible all -m raw -a 'netconfig update -f'
  # Auto import repo keys
  ansible all -m raw -a 'zypper --gpg-auto-import-keys refresh'
fi
# Check out latest tag if testing upgrade
test "${UPGRADE_TEST}" != "false" && git fetch --all && git checkout "$KUBESPRAY_VERSION"
# Checkout the CI vars file so it is available
test "${UPGRADE_TEST}" != "false" && git checkout "${CI_BUILD_REF}" tests/files/${CI_JOB_NAME}.yml tests/testcases/*.yml
# Install mitogen ansible plugin
if [ "${MITOGEN_ENABLE}" = "true" ]; then
  ansible-playbook ${ANSIBLE_LOG_LEVEL} mitogen.yml
  export ANSIBLE_STRATEGY=mitogen_linear
  export ANSIBLE_STRATEGY_PLUGINS=plugins/mitogen/ansible_mitogen/plugins/strategy
fi
# Create cluster
ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" cluster.yml
# Repeat deployment if testing upgrade
if [ "${UPGRADE_TEST}" != "false" ]; then
  test "${UPGRADE_TEST}" == "basic" && PLAYBOOK="cluster.yml"
  test "${UPGRADE_TEST}" == "graceful" && PLAYBOOK="upgrade-cluster.yml"
  git checkout "${CI_BUILD_REF}"
  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" $PLAYBOOK
fi
# Test control plane recovery
if [ "${RECOVER_CONTROL_PLANE_TEST}" != "false" ]; then
  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "${RECOVER_CONTROL_PLANE_TEST_GROUPS}:!fake_hosts" -e reset_confirmation=yes reset.yml
  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads -e etcd_retries=10 --limit etcd,kube-master:!fake_hosts recover-control-plane.yml
fi
# Tests Cases
## Test Master API
ansible-playbook --limit "all:!fake_hosts" -e @${CI_TEST_VARS} tests/testcases/010_check-apiserver.yml $ANSIBLE_LOG_LEVEL
## Test that all pods are Running
ansible-playbook --limit "all:!fake_hosts" -e @${CI_TEST_VARS} tests/testcases/015_check-pods-running.yml $ANSIBLE_LOG_LEVEL
## Test that all nodes are Ready
ansible-playbook --limit "all:!fake_hosts" -e @${CI_TEST_VARS} tests/testcases/020_check-nodes-ready.yml $ANSIBLE_LOG_LEVEL
## Test pod creation and ping between them
ansible-playbook --limit "all:!fake_hosts" -e @${CI_TEST_VARS} tests/testcases/030_check-network.yml $ANSIBLE_LOG_LEVEL
## Advanced DNS checks
ansible-playbook --limit "all:!fake_hosts" -e @${CI_TEST_VARS} tests/testcases/040_check-network-adv.yml $ANSIBLE_LOG_LEVEL
## Kubernetes conformance tests
ansible-playbook -i ${ANSIBLE_INVENTORY} -e @${CI_TEST_VARS} --limit "all:!fake_hosts" tests/testcases/100_check-k8s-conformance.yml $ANSIBLE_LOG_LEVEL
## Idempotency checks 1/5 (repeat deployment)
if [ "${IDEMPOT_CHECK}" = "true" ]; then
  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" cluster.yml
fi
## Idempotency checks 2/5 (Advanced DNS checks)
if [ "${IDEMPOT_CHECK}" = "true" ]; then
  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} --limit "all:!fake_hosts" tests/testcases/040_check-network-adv.yml
fi
## Idempotency checks 3/5 (reset deployment)
if [ "${IDEMPOT_CHECK}" = "true" -a "${RESET_CHECK}" = "true" ]; then
  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e reset_confirmation=yes --limit "all:!fake_hosts" reset.yml
fi
## Idempotency checks 4/5 (redeploy after reset)
if [ "${IDEMPOT_CHECK}" = "true" -a "${RESET_CHECK}" = "true" ]; then
  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" cluster.yml
fi
## Idempotency checks 5/5 (Advanced DNS checks)
if [ "${IDEMPOT_CHECK}" = "true" -a "${RESET_CHECK}" = "true" ]; then
  ansible-playbook --limit "all:!fake_hosts" tests/testcases/040_check-network-adv.yml $ANSIBLE_LOG_LEVEL
fi
|
<gh_stars>1-10
import { Box } from '@chakra-ui/core';
import PropTypes from 'prop-types';
// Muted, width-capped subheading text used beneath page titles.
const SubHeadingText = ({ children }) => (
<Box color="gray.700" mb={4} fontSize="lg" maxW={400}>
{children}
</Box>
);
SubHeadingText.propTypes = {
children: PropTypes.node,
};
export default SubHeadingText;
|
import Vue from 'vue'
import VueRouter from 'vue-router'
import store from '../store'
Vue.use(VueRouter)
// Authenticated route
// Navigation guard: continue when the Vuex store reports a logged-in user,
// otherwise redirect to /login.
const ifAuthenticated = (to, from, next) => {
if (store.getters.isLoggedIn) {
next()
return
}
next('/login')
}
// Route table. Components are lazy-loaded via dynamic import() so each view
// is split into its own chunk. Ticket views are guarded by ifAuthenticated;
// home, department, login and secondaryTicket are not.
const routes = [
{
path: '/home',
name: 'home',
component: () =>
import('@/components/Home')
},
{
path: '/department',
name: 'department',
component: () =>
import('@/components/Department')
},
{
path: '/unassigned_tickets',
name: 'unassigned_tickets',
component: () =>
import('@/components/DepartmentIT/UnassignedTickets'),
beforeEnter: ifAuthenticated
},
{
path: '/my_tickets',
name: 'my_tickets',
component: () =>
import('@/components/DepartmentIT/MyTickets'),
beforeEnter: ifAuthenticated
},
{
path: '/my_secondary_tickets',
name: 'my_secondary_tickets',
component: () =>
import('@/components/DepartmentIT/SecondaryTickets'),
beforeEnter: ifAuthenticated
},
{
path: '/login',
name: 'login',
component: () =>
import('@/components/Auth/Login')
},
{
path: '/secondaryTicket/:id',
name: 'secondaryTicket',
component: () =>
import('@/components/SecondaryTicket')
},
{
path: '/',
redirect: '/home'
},
// Catch-all: unknown paths fall back to home.
{
path: '*',
redirect: '/home'
}
]
export const router = new VueRouter({
routes,
mode: 'hash'
})
// NOTE(review): attaching the router to the Vue constructor -- presumably
// consumed by an auth plugin elsewhere; confirm before removing.
Vue.router = router
export default {
router
}
|
package com.microsoft.kiota.serialization;
import java.util.Map;
import javax.annotation.Nonnull;
/** Defines a contract for models that can hold additional data besides the described properties. */
/** Defines a contract for models that can hold additional data besides the described properties. */
public interface AdditionalDataHolder {
/**
* Gets the additional data for this object that did not belong to the properties.
* The map is keyed by the wire property name.
* @return The additional data for this object.
*/
@Nonnull
Map<String, Object> getAdditionalData();
}
|
import plotly.graph_objects as go
# NOTE(review): the values look unit-inconsistent -- 1386 (China) and 329
# (US) read as millions, but 130.6 for India does not; confirm the data.
country = ['India', 'China', 'US']
population = [130.6, 1386, 329]
# One bar per country; fig.show() opens the interactive chart.
fig = go.Figure([go.Bar(x=country, y=population)])
fig.update_layout(title_text='Country Population Distribution')
fig.show()
package parse
import (
"github.com/c0nscience/yastgt/pkg/parse/svg"
"github.com/c0nscience/yastgt/pkg/reader/xml"
)
// Line converts an XML line element into its two-point SVG representation:
// a MoveTo point at (X1, Y1) followed by a plain point at (X2, Y2).
func Line(l xml.Line) []svg.PointI {
return []svg.PointI{
&svg.Point{
X: l.X1,
Y: l.Y1,
MoveTo: true,
},
&svg.Point{
X: l.X2,
Y: l.Y2,
},
}
}
|
# Simple dense classifier: flatten 28x28 inputs, one 128-unit ReLU hidden
# layer, softmax over 10 classes (MNIST-style shapes).
# NOTE(review): `keras`, `x_train` and `y_train` are not defined in this
# snippet -- assumes the surrounding context provides them.
model = keras.models.Sequential([
keras.layers.Flatten(input_shape=(28,28)),
keras.layers.Dense(128,activation='relu'),
keras.layers.Dense(10,activation='softmax')
])
# sparse_categorical_crossentropy expects integer class labels in y_train.
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(x_train,y_train)
<gh_stars>0
import React from 'react';
import { Address, OrganizationCategory } from '../types';
import { ContactPoint } from './corporateContact';
/**
 * Props for OrganizationJsonLd, a component that emits schema.org
 * Organization structured data. Only `name` and `url` are required.
 */
export interface OrganizationJsonLdProps {
organizationType?: OrganizationCategory;
id?: string;
name: string;
logo?: string;
url: string;
legalName?: string;
sameAs?: string[];
address?: Address;
contactPoints?: ContactPoint[];
}
// Type-only declaration; the component implementation lives elsewhere.
declare const OrganizationJsonLd: React.FC<OrganizationJsonLdProps>;
export default OrganizationJsonLd;
|
#!/bin/bash
# Download and unpack the robust04 deep-relevance-ranking dataset into
# ./robust04_data. wget -c resumes a partial download on re-runs.
# Fail fast: without this, a failed cd/wget would let later commands run
# in the wrong directory or on a missing archive.
set -euo pipefail
mkdir -p robust04_data
cd robust04_data
wget -c https://archive.org/download/deep_relevance_ranking_data/robust04_data.tar.gz
tar -zxvf robust04_data.tar.gz
|
import execjs
# Assume js_code contains the JavaScript RSA encryption function
js_code = """
function strEnc(content, param1, param2, param3) {
// JavaScript RSA encryption logic
// ...
return encryptedContent;
}
"""
# Compile the JS once at import time so each call only invokes strEnc.
js_complied = execjs.compile(js_code)
def rsa_enc(content):
# Delegate encryption to the compiled JavaScript strEnc function.
# NOTE(review): the "1", "2", "3" arguments look like placeholder key
# parameters -- confirm the real values before use.
js_func = js_complied.call("strEnc", content, "1", "2", "3")
return js_func
def rsa_dec(encrypted_content):
# Your implementation of the decryption logic here
# Decrypt the encrypted_content using the corresponding decryption algorithm
# Return the decrypted message
# Since the exact details of the encryption algorithm are not provided, the decryption logic would depend on the specific encryption algorithm used in the JavaScript function.
# This solution assumes a hypothetical decryption algorithm for demonstration purposes.
# WARNING: identity placeholder -- does NOT actually decrypt anything.
decrypted_content = encrypted_content # Placeholder for decryption logic
return decrypted_content
#!/bin/bash
# Deploy the project's built jar to the Heroku app "itetenosuke".
set -o errexit
set -o nounset
set -o pipefail
set -o xtrace

# Absolute path of the directory containing this script.
readonly SCRIPT_DIR="$(
  cd "$(dirname "$0")"
  pwd
)"
# The project root is one level above the script directory.
readonly PROJECT_HOME="${SCRIPT_DIR}/.."

cd "${PROJECT_HOME}"

# The heroku-deploy plugin provides the "deploy:jar" command used below.
heroku plugins:install java@3.1.1
heroku deploy:jar ./build/libs/*.jar --app itetenosuke
|
<reponame>glennjin/cas<filename>webapp-mgmt/cas-management-webapp/src/domain/attribute-repo.ts
/**
 * Base shape of a principal attributes repository as serialized by CAS.
 * Fix: the fields used the boxed `String` object type; TypeScript code
 * should use the primitive `string` type instead (the boxed form is almost
 * never intended and breaks assignability with string literals in strict
 * settings).
 */
export abstract class PrincipalAttributesRepository {
  // Expiration amount, interpreted in units of timeUnit.
  expiration: number;
  timeUnit: string;
  mergingStrategy: string;
}
// Marker base class: concrete repositories below extend this to share the
// fields declared on PrincipalAttributesRepository without adding any of
// their own.
export abstract class AbstractPrincipalAttributesRepository extends PrincipalAttributesRepository {
}
export class DefaultPrincipalAttributesRepository extends AbstractPrincipalAttributesRepository {
  // Fully qualified Java class name written into the @class marker so the
  // CAS backend can deserialize this object to the right type.
  static cName = "org.apereo.cas.authentication.principal.DefaultPrincipalAttributesRepository";

  constructor() {
    super();
    this["@class"] = DefaultPrincipalAttributesRepository.cName;
  }

  // True when the serialized object's @class marker names this type.
  static instanceOf(obj: any): boolean {
    return DefaultPrincipalAttributesRepository.cName === obj["@class"];
  }
}
export class CachingPrincipalAttributesRepository extends AbstractPrincipalAttributesRepository {
  // Fully qualified Java class name written into the @class marker so the
  // CAS backend can deserialize this object to the right type.
  static cName = "org.apereo.cas.authentication.principal.cache.CachingPrincipalAttributesRepository";

  constructor() {
    super();
    this["@class"] = CachingPrincipalAttributesRepository.cName;
  }

  // True when the serialized object's @class marker names this type.
  static instanceOf(obj: any): boolean {
    return CachingPrincipalAttributesRepository.cName === obj["@class"];
  }
}
|
<gh_stars>0
package colfer
import (
"os"
"path/filepath"
"strings"
"text/template"
"github.com/pascaldekloe/name"
)
// cKeywords lists every reserved word in C, space-separated so it can be
// split with strings.Fields.
const cKeywords = "auto break case char const continue default do double else enum extern float for goto if int long register return short signed sizeof static struct switch typedef union unsigned void volatile while"

// IsCKeyword returns whether s is a reserved word in C code.
func IsCKeyword(s string) bool {
	for _, keyword := range strings.Fields(cKeywords) {
		if s == keyword {
			return true
		}
	}
	return false
}
// GenerateC writes the generated code into the files "Colfer.h" and
// "Colfer.c" under basedir.
//
// As a side effect it fills in the NameNative and TypeNative fields on the
// structs in packages, mapping Colfer types to their C equivalents and
// escaping identifiers that collide with C reserved words.
func GenerateC(basedir string, packages Packages) error {
	for _, p := range packages {
		for _, s := range p.Structs {
			s.NameNative = name.SnakeCase(p.Name + "_" + s.Name)
			for _, f := range s.Fields {
				f.NameNative = name.SnakeCase(f.Name)
				if IsCKeyword(f.NameNative) {
					// Suffix with an underscore to avoid clashing with a C reserved word.
					f.NameNative += "_"
				}
				switch f.Type {
				case "bool":
					f.TypeNative = "char"
				case "uint8", "uint16", "uint32", "uint64", "int32", "int64":
					f.TypeNative = f.Type + "_t"
				case "float32":
					f.TypeNative = "float"
				case "float64":
					f.TypeNative = "double"
				case "timestamp":
					f.TypeNative = "timespec"
				case "binary", "text":
					f.TypeNative = "colfer_" + f.Type
				}
			}
		}
	}

	if err := os.MkdirAll(basedir, os.ModeDir|os.ModePerm); err != nil {
		return err
	}

	if err := writeCTemplate(filepath.Join(basedir, "Colfer.h"), "C-header", cHeaderTemplate, packages); err != nil {
		return err
	}
	return writeCTemplate(filepath.Join(basedir, "Colfer.c"), "C", cTemplate, packages)
}

// writeCTemplate renders tmpl with packages as data into a newly created
// file at path. The file handle is closed on every path; the previous code
// leaked the handle when template execution failed.
func writeCTemplate(path, tmplName, tmpl string, packages Packages) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	if err := template.Must(template.New(tmplName).Parse(tmpl)).Execute(f, packages); err != nil {
		f.Close() // best effort; the Execute error takes precedence
		return err
	}
	return f.Close()
}
// cHeaderTemplate is the text/template source for the generated Colfer.h,
// executed with the Packages slice as data.
const cHeaderTemplate = `// Code generated by colf(1); DO NOT EDIT.
{{- range .}}
// The compiler used schema file {{.SchemaFileList}} for package {{.Name}}.
{{- end}}
#ifndef COLFER_H
#define COLFER_H
#include <limits.h>
#include <stdint.h>
#include <string.h>
{{- if .HasTimestamp}}
#include <time.h>
{{end}}
#if CHAR_BIT != 8
#error "octet byte size"
#endif
#ifdef __cplusplus
extern "C" {
#endif
// colfer_size_max is the upper limit for serial octet sizes.
extern size_t colfer_size_max;
// colfer_list_max is the upper limit for the number of elements in a list.
extern size_t colfer_list_max;
// colfer_text is a UTF-8 CLOB.
typedef struct {
const char* utf8;
size_t len;
} colfer_text;
// colfer_binary is a BLOB.
typedef struct {
uint8_t* octets;
size_t len;
} colfer_binary;
{{range .}}{{range .Structs}}
typedef struct {{.NameNative}} {{.NameNative}};
{{end}}{{end}}
{{range .}}{{range .Structs}}
{{.DocText "// "}}
struct {{.NameNative}} {
{{- range .Fields}}
{{.DocText "\t// "}}{{- if .TypeList}}
{{- if eq .Type "float32"}}
struct {
float* list;
size_t len;
}
{{- else if eq .Type "float64"}}
struct {
double* list;
size_t len;
}
{{- else if eq .Type "binary" "text"}}
struct {
colfer_{{.Type}}* list;
size_t len;
}
{{- else}}
struct {
struct {{.TypeRef.NameNative}}* list;
size_t len;
}
{{- end}}
{{- else}}
{{- if eq .Type "timestamp"}}
struct {{.TypeNative}}
{{- else if .TypeRef}}
{{.TypeRef.NameNative}}*
{{- else}}
{{.TypeNative}}
{{- end}}
{{- end}} {{.NameNative}};
{{- end}}
};
// {{.NameNative}}_marshal_len returns the Colfer serial octet size.
// When the return is zero then errno is set to EFBIG to indicate a breach of
// either colfer_size_max or colfer_list_max.
size_t {{.NameNative}}_marshal_len(const {{.NameNative}}* o);
// {{.NameNative}}_marshal encodes o as Colfer into buf and returns the number
// of octets written.
size_t {{.NameNative}}_marshal(const {{.NameNative}}* o, void* buf);
// {{.NameNative}}_unmarshal decodes data as Colfer into o and returns the
// number of octets read. The data is read up to a maximum of datalen or
// colfer_size_max, whichever occurs first.
// When the return is zero then errno is set to one of the following 3 values:
// EWOULDBLOCK on incomplete data, EFBIG on a breach of either colfer_size_max
// or colfer_list_max and EILSEQ on schema mismatch.
size_t {{.NameNative}}_unmarshal({{.NameNative}}* o, const void* data, size_t datalen);
{{end}}{{end}}
#ifdef __cplusplus
} // extern "C"
#endif
#endif
`
// cTemplate is the text/template source for the generated Colfer.c
// (marshal_len/marshal/unmarshal per struct), executed with the Packages
// slice as data.
const cTemplate = `// Code generated by colf(1); DO NOT EDIT.
{{- range .}}
// The compiler used schema file {{.SchemaFileList}} for package {{.Name}}.
{{- end}}
#include "Colfer.h"
#include <errno.h>
#include <stdlib.h>
{{- if .HasTimestamp}}
#include <time.h>
{{end}}
#if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN || \
defined(__BIG_ENDIAN__) || \
defined(__ARMEB__) || \
defined(__AARCH64EB__) || \
defined(_MIPSEB) || defined(__MIPSEB) || defined(__MIPSEB__) || \
defined(__SYSC_ZARCH__)
#define COLFER_ENDIAN
#endif
{{with index . 0}}
size_t colfer_size_max = {{.SizeMax}};
size_t colfer_list_max = {{.ListMax}};
{{end}}
{{range .}}{{range .Structs}}
size_t {{.NameNative}}_marshal_len(const {{.NameNative}}* o) {
size_t l = 1;
{{range .Fields}}{{if eq .Type "bool"}}
if (o->{{.NameNative}}) l++;
{{else if eq .Type "uint8"}}
if (o->{{.NameNative}}) l += 2;
{{else if eq .Type "uint16"}}
{
uint_fast16_t x = o->{{.NameNative}};
if (x) l += x < 256 ? 2 : 3;
}
{{else if eq .Type "uint32"}}
{
uint_fast32_t x = o->{{.NameNative}};
if (x) {
if (x >= (uint_fast32_t) 1 << 21) l += 5;
else for (l += 2; x > 127; x >>= 7, ++l);
}
}
{{else if eq .Type "uint64"}}
{
uint_fast64_t x = o->{{.NameNative}};
if (x) {
if (x >= (uint_fast64_t) 1 << 49) l += 9;
else for (l += 2; x > 127; x >>= 7, ++l);
}
}
{{else if eq .Type "int32"}}
{
uint_fast32_t x = o->{{.NameNative}};
if (x) {
if (x & (uint_fast32_t) 1 << 31) {
x = ~x;
++x;
}
for (l += 2; x > 127; x >>= 7, ++l);
}
}
{{else if eq .Type "int64"}}
{
uint_fast64_t x = o->{{.NameNative}};
if (x) {
if (x & (uint_fast64_t) 1 << 63) {
x = ~x;
++x;
}
size_t max = l + 10;
for (l += 2; x > 127 && l < max; x >>= 7, ++l);
}
}
{{else if eq .Type "float32"}}
{{- if not .TypeList}}
if (o->{{.NameNative}} != 0.0f) l += 5;
{{- else}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
for (l += n * 4 + 2; n > 127; n >>= 7, ++l);
}
}
{{- end}}
{{else if eq .Type "float64"}}
{{- if not .TypeList}}
if (o->{{.NameNative}} != 0.0) l += 9;
{{- else}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
for (l += n * 8 + 2; n > 127; n >>= 7, ++l);
}
}
{{- end}}
{{else if eq .Type "timestamp"}}
{
time_t s = o->{{.NameNative}}.tv_sec;
long ns = o->{{.NameNative}}.tv_nsec;
if (s || ns) {
s += ns / 1000000000;
l += s >= (time_t) 1 << 32 || s < 0 ? 13 : 9;
}
}
{{else if eq .Type "text"}}
{{- if not .TypeList}}
{
size_t n = o->{{.NameNative}}.len;
if (n > colfer_size_max) {
errno = EFBIG;
return 0;
}
if (n) for (l += 2 + n; n > 127; n >>= 7, ++l);
}
{{- else}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
colfer_text* a = o->{{.NameNative}}.list;
for (size_t i = 0; i < n; ++i) {
size_t len = a[i].len;
if (len > colfer_size_max) {
errno = EFBIG;
return 0;
}
for (l += len + 1; len > 127; len >>= 7, ++l);
}
for (l += 2; n > 127; n >>= 7, ++l);
if (l > colfer_size_max) {
errno = EFBIG;
return 0;
}
}
}
{{- end}}
{{else if eq .Type "binary"}}
{{- if not .TypeList}}
{
size_t n = o->{{.NameNative}}.len;
if (n > colfer_size_max) {
errno = EFBIG;
return 0;
}
if (n) for (l += 2 + n; n > 127; n >>= 7, ++l);
}
{{- else}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
colfer_binary* a = o->{{.NameNative}}.list;
for (size_t i = 0; i < n; ++i) {
size_t len = a[i].len;
if (len > colfer_size_max) {
errno = EFBIG;
return 0;
}
for (l += len + 1; len > 127; len >>= 7, ++l);
}
for (l += 2; n > 127; n >>= 7, ++l);
if (l > colfer_size_max) {
errno = EFBIG;
return 0;
}
}
}
{{- end}}
{{else}}
{{- if not .TypeList}}
{
if (o->{{.NameNative}}) l += 1 + {{.TypeRef.NameNative}}_marshal_len(o->{{.NameNative}});
}
{{- else}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
{{.TypeRef.NameNative}}* a = o->{{.NameNative}}.list;
for (size_t i = 0; i < n; ++i) l += {{.TypeRef.NameNative}}_marshal_len(&a[i]);
for (l += 2; n > 127; n >>= 7, ++l);
if (l > colfer_size_max) {
errno = EFBIG;
return 0;
}
}
}
{{- end}}
{{end}}{{end}}
if (l > colfer_size_max) {
errno = EFBIG;
return 0;
}
return l;
}
size_t {{.NameNative}}_marshal(const {{.NameNative}}* o, void* buf) {
// octet pointer navigation
uint8_t* p = buf;
{{range .Fields}}{{if eq .Type "bool"}}
if (o->{{.NameNative}}) *p++ = {{.Index}};
{{else if eq .Type "uint8"}}
if (o->{{.NameNative}}) {
*p++ = {{.Index}};
*p++ = o->{{.NameNative}};
}
{{else if eq .Type "uint16"}}
{
uint_fast16_t x = o->{{.NameNative}};
if (x) {
if (x < 256) {
*p++ = {{.Index}} | 0x80;
*p++ = x;
} else {
*p++ = {{.Index}};
*p++ = x >> 8;
*p++ = x;
}
}
}
{{else if eq .Type "uint32"}}
{
uint_fast32_t x = o->{{.NameNative}};
if (x) {
if (x < (uint_fast32_t) 1 << 21) {
*p++ = {{.Index}};
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
} else {
*p++ = {{.Index}} | 128;
#ifdef COLFER_ENDIAN
memcpy(p, &o->{{.NameNative}}, 4);
p += 4;
#else
*p++ = x >> 24;
*p++ = x >> 16;
*p++ = x >> 8;
*p++ = x;
#endif
}
}
}
{{else if eq .Type "uint64"}}
{
uint_fast64_t x = o->{{.NameNative}};
if (x) {
if (x < (uint_fast64_t) 1 << 49) {
*p++ = {{.Index}};
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
} else {
*p++ = {{.Index}} | 128;
#ifdef COLFER_ENDIAN
memcpy(p, &o->{{.NameNative}}, 8);
p += 8;
#else
*p++ = x >> 56;
*p++ = x >> 48;
*p++ = x >> 40;
*p++ = x >> 32;
*p++ = x >> 24;
*p++ = x >> 16;
*p++ = x >> 8;
*p++ = x;
#endif
}
}
}
{{else if eq .Type "int32"}}
{
uint_fast32_t x = o->{{.NameNative}};
if (x) {
if (x & (uint_fast32_t) 1 << 31) {
*p++ = {{.Index}} | 128;
x = ~x + 1;
} else *p++ = {{.Index}};
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
}
}
{{else if eq .Type "int64"}}
{
uint_fast64_t x = o->{{.NameNative}};
if (x) {
if (x & (uint_fast64_t) 1 << 63) {
*p++ = {{.Index}} | 128;
x = ~x + 1;
} else *p++ = {{.Index}};
uint8_t* max = p + 8;
for (; x >= 128 && p < max; x >>= 7) *p++ = x | 128;
*p++ = x;
}
}
{{else if eq .Type "float32"}}
{{- if not .TypeList}}
if (o->{{.NameNative}} != 0.0f) {
*p++ = {{.Index}};
#ifdef COLFER_ENDIAN
memcpy(p, &o->{{.NameNative}}, 4);
p += 4;
#else
uint_fast32_t x;
memcpy(&x, &o->{{.NameNative}}, 4);
*p++ = x >> 24;
*p++ = x >> 16;
*p++ = x >> 8;
*p++ = x;
#endif
}
{{- else}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
*p++ = {{.Index}};
uint_fast32_t x = n;
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
#ifdef COLFER_ENDIAN
memcpy(p, o->{{.NameNative}}.list, n * 4);
p += n * 4;
#else
uint32_t* fp = (uint32_t*) o->{{.NameNative}}.list;
for (;;) {
memcpy(&x, fp, 4);
*p++ = x >> 24;
*p++ = x >> 16;
*p++ = x >> 8;
*p++ = x;
if (--n == 0) break;
++fp;
}
#endif
}
}
{{- end}}
{{else if eq .Type "float64"}}
{{- if not .TypeList}}
if (o->{{.NameNative}} != 0.0) {
*p++ = {{.Index}};
#ifdef COLFER_ENDIAN
memcpy(p, &o->{{.NameNative}}, 8);
p += 8;
#else
uint_fast64_t x;
memcpy(&x, &o->{{.NameNative}}, 8);
*p++ = x >> 56;
*p++ = x >> 48;
*p++ = x >> 40;
*p++ = x >> 32;
*p++ = x >> 24;
*p++ = x >> 16;
*p++ = x >> 8;
*p++ = x;
#endif
}
{{- else}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
*p++ = {{.Index}};
uint_fast32_t x = n;
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
#ifdef COLFER_ENDIAN
memcpy(p, o->{{.NameNative}}.list, n * 8);
p += n * 8;
#else
uint64_t* fp = (uint64_t*) o->{{.NameNative}}.list;
for (;;) {
uint_fast64_t x;
memcpy(&x, fp, 8);
*p++ = x >> 56;
*p++ = x >> 48;
*p++ = x >> 40;
*p++ = x >> 32;
*p++ = x >> 24;
*p++ = x >> 16;
*p++ = x >> 8;
*p++ = x;
if (--n == 0) break;
++fp;
}
#endif
}
}
{{- end}}
{{else if eq .Type "timestamp"}}
{
time_t s = o->{{.NameNative}}.tv_sec;
long ns = o->{{.NameNative}}.tv_nsec;
if (s || ns) {
static const int_fast64_t nano = 1000000000;
s += ns / nano;
ns %= nano;
if (ns < 0) {
--s;
ns += nano;
}
uint_fast64_t x = s;
if (x < (uint_fast64_t) 1 << 32)
*p++ = {{.Index}};
else {
*p++ = {{.Index}} | 128;
*p++ = x >> 56;
*p++ = x >> 48;
*p++ = x >> 40;
*p++ = x >> 32;
}
*p++ = x >> 24;
*p++ = x >> 16;
*p++ = x >> 8;
*p++ = x;
x = ns;
*p++ = x >> 24;
*p++ = x >> 16;
*p++ = x >> 8;
*p++ = x;
}
}
{{else if eq .Type "text"}}
{{- if not .TypeList}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
*p++ = {{.Index}};
uint_fast32_t x = n;
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
memcpy(p, o->{{.NameNative}}.utf8, n);
p += n;
}
}
{{- else}}
{
size_t count = o->{{.NameNative}}.len;
if (count) {
*p++ = {{.Index}};
uint_fast32_t x = count;
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
colfer_text* text = o->{{.NameNative}}.list;
do {
size_t n = text->len;
for (x = n; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
memcpy(p, text->utf8, n);
p += n;
++text;
} while (--count != 0);
}
}
{{- end}}
{{else if eq .Type "binary"}}
{{- if not .TypeList}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
*p++ = {{.Index}};
uint_fast32_t x = n;
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
memcpy(p, o->{{.NameNative}}.octets, n);
p += n;
}
}
{{- else}}
{
size_t count = o->{{.NameNative}}.len;
if (count) {
*p++ = {{.Index}};
uint_fast32_t x = count;
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
colfer_binary* binary = o->{{.NameNative}}.list;
do {
size_t n = binary->len;
for (x = n; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
memcpy(p, binary->octets, n);
p += n;
++binary;
} while (--count != 0);
}
}
{{- end}}
{{else}}
{{- if not .TypeList}}
{
if (o->{{.NameNative}}) {
*p++ = {{.Index}};
p += {{.TypeRef.NameNative}}_marshal(o->{{.NameNative}}, p);
}
}
{{- else}}
{
size_t n = o->{{.NameNative}}.len;
if (n) {
*p++ = {{.Index}};
uint_fast32_t x = n;
for (; x >= 128; x >>= 7) *p++ = x | 128;
*p++ = x;
{{.TypeRef.NameNative}}* a = o->{{.NameNative}}.list;
for (size_t i = 0; i < n; ++i) p += {{.TypeRef.NameNative}}_marshal(&a[i], p);
}
}
{{- end}}
{{end}}{{end}}
*p++ = 127;
return p - (uint8_t*) buf;
}
size_t {{.NameNative}}_unmarshal({{.NameNative}}* o, const void* data, size_t datalen) {
// octet pointer navigation
const uint8_t* p = data;
const uint8_t* end;
int enderr;
if (datalen < colfer_size_max) {
end = p + datalen;
enderr = EWOULDBLOCK;
} else {
end = p + colfer_size_max;
enderr = EFBIG;
}
if (p >= end) {
errno = enderr;
return 0;
}
uint_fast8_t header = *p++;
{{range .Fields}}{{if eq .Type "bool"}}
if (header == {{.Index}}) {
o->{{.NameNative}} = 1;
if (p >= end) {
errno = enderr;
return 0;
}
header = *p++;
}
{{else if eq .Type "uint8"}}
if (header == {{.Index}}) {
if (p+1 >= end) {
errno = enderr;
return 0;
}
o->{{.NameNative}} = *p++;
header = *p++;
}
{{else if eq .Type "uint16"}}
if (header == {{.Index}}) {
if (p+2 >= end) {
errno = enderr;
return 0;
}
uint_fast16_t x = *p++;
x <<= 8;
o->{{.NameNative}} = x | *p++;
header = *p++;
} else if (header == ({{.Index}} | 128)) {
if (p+1 >= end) {
errno = enderr;
return 0;
}
o->{{.NameNative}} = *p++;
header = *p++;
}
{{else if eq .Type "uint32"}}
if (header == {{.Index}}) {
if (p+1 >= end) {
errno = enderr;
return 0;
}
uint_fast32_t x = *p++;
if (x > 127) {
x &= 127;
for (int shift = 7; ; shift += 7) {
uint_fast32_t b = *p++;
if (p >= end) {
errno = enderr;
return 0;
}
if (b <= 127) {
x |= b << shift;
break;
}
x |= (b & 127) << shift;
}
}
o->{{.NameNative}} = x;
header = *p++;
} else if (header == ({{.Index}} | 128)) {
if (p+4 >= end) {
errno = enderr;
return 0;
}
uint_fast32_t x = *p++;
x <<= 24;
x |= (uint_fast32_t) *p++ << 16;
x |= (uint_fast32_t) *p++ << 8;
x |= (uint_fast32_t) *p++;
o->{{.NameNative}} = x;
header = *p++;
}
{{else if eq .Type "uint64"}}
if (header == {{.Index}}) {
if (p+1 >= end) {
errno = enderr;
return 0;
}
uint_fast64_t x = *p++;
if (x > 127) {
x &= 127;
for (int shift = 7; ; shift += 7) {
uint_fast64_t b = *p++;
if (p >= end) {
errno = enderr;
return 0;
}
if (b <= 127) {
x |= b << shift;
break;
}
x |= (b & 127) << shift;
}
}
o->{{.NameNative}} = x;
header = *p++;
} else if (header == ({{.Index}} | 128)) {
if (p+8 >= end) {
errno = enderr;
return 0;
}
uint_fast64_t x = *p++;
x <<= 56;
x |= (uint_fast64_t) *p++ << 48;
x |= (uint_fast64_t) *p++ << 40;
x |= (uint_fast64_t) *p++ << 32;
x |= (uint_fast64_t) *p++ << 24;
x |= (uint_fast64_t) *p++ << 16;
x |= (uint_fast64_t) *p++ << 8;
x |= (uint_fast64_t) *p++;
o->{{.NameNative}} = x;
header = *p++;
}
{{else if eq .Type "int32"}}
if ((header & 127) == {{.Index}}) {
if (p+1 >= end) {
errno = enderr;
return 0;
}
uint_fast32_t x = *p++;
if (x > 127) {
x &= 127;
for (int shift = 7; shift < 35; shift += 7) {
uint_fast32_t b = *p++;
if (p >= end) {
errno = enderr;
return 0;
}
if (b <= 127) {
x |= b << shift;
break;
}
x |= (b & 127) << shift;
}
}
if (header & 128) x = ~x + 1;
o->{{.NameNative}} = x;
header = *p++;
}
{{else if eq .Type "int64"}}
if ((header & 127) == {{.Index}}) {
if (p+1 >= end) {
errno = enderr;
return 0;
}
uint_fast64_t x = *p++;
if (x > 127) {
x &= 127;
for (int shift = 7; ; shift += 7) {
uint_fast64_t b = *p++;
if (p >= end) {
errno = enderr;
return 0;
}
if (b <= 127 || shift == 56) {
x |= b << shift;
break;
}
x |= (b & 127) << shift;
}
}
if (header & 128) x = ~x + 1;
o->{{.NameNative}} = x;
header = *p++;
}
{{else if eq .Type "float32"}}
{{- if not .TypeList}}
if (header == {{.Index}}) {
if (p+4 >= end) {
errno = enderr;
return 0;
}
#ifdef COLFER_ENDIAN
memcpy(&o->{{.NameNative}}, p, 4);
p += 4;
#else
uint_fast32_t x = *p++;
x <<= 24;
x |= (uint_fast32_t) *p++ << 16;
x |= (uint_fast32_t) *p++ << 8;
x |= (uint_fast32_t) *p++;
memcpy(&o->{{.NameNative}}, &x, 4);
#endif
header = *p++;
}
{{- else}}
if (header == {{.Index}}) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t n = *p++;
if (n > 127) {
n &= 127;
for (int shift = 7; ; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
n |= c << shift;
break;
}
n |= (c & 127) << shift;
}
}
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
if (p+n*4 >= end) {
errno = enderr;
return 0;
}
o->{{.NameNative}}.len = n;
float* fp = malloc(n * 4);
o->{{.NameNative}}.list = fp;
#ifdef COLFER_ENDIAN
memcpy(fp, p, n * 4);
p += n * 4;
#else
for (; n; --n, ++fp) {
uint_fast32_t x = *p++;
x <<= 24;
x |= (uint_fast32_t) *p++ << 16;
x |= (uint_fast32_t) *p++ << 8;
x |= (uint_fast32_t) *p++;
memcpy(fp, &x, 4);
}
#endif
header = *p++;
}
{{- end}}
{{else if eq .Type "float64"}}
{{- if not .TypeList}}
if (header == {{.Index}}) {
if (p+8 >= end) {
errno = enderr;
return 0;
}
#ifdef COLFER_ENDIAN
memcpy(&o->{{.NameNative}}, p, 8);
p += 8;
#else
uint_fast64_t x = *p++;
x <<= 56;
x |= (uint_fast64_t) *p++ << 48;
x |= (uint_fast64_t) *p++ << 40;
x |= (uint_fast64_t) *p++ << 32;
x |= (uint_fast64_t) *p++ << 24;
x |= (uint_fast64_t) *p++ << 16;
x |= (uint_fast64_t) *p++ << 8;
x |= (uint_fast64_t) *p++;
memcpy(&o->{{.NameNative}}, &x, 8);
#endif
header = *p++;
}
{{- else}}
if (header == {{.Index}}) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t n = *p++;
if (n > 127) {
n &= 127;
for (int shift = 7; ; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
n |= c << shift;
break;
}
n |= (c & 127) << shift;
}
}
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
if (p+n*8 >= end) {
errno = enderr;
return 0;
}
o->{{.NameNative}}.len = n;
double* fp = malloc(n * 8);
o->{{.NameNative}}.list = fp;
#ifdef COLFER_ENDIAN
memcpy(fp, p, n * 8);
p += n * 8;
#else
for (; n; --n, ++fp) {
uint_fast64_t x = *p++;
x <<= 56;
x |= (uint_fast64_t) *p++ << 48;
x |= (uint_fast64_t) *p++ << 40;
x |= (uint_fast64_t) *p++ << 32;
x |= (uint_fast64_t) *p++ << 24;
x |= (uint_fast64_t) *p++ << 16;
x |= (uint_fast64_t) *p++ << 8;
x |= (uint_fast64_t) *p++;
memcpy(fp, &x, 8);
}
#endif
header = *p++;
}
{{- end}}
{{else if eq .Type "timestamp"}}
if ((header & 127) == {{.Index}}) {
if (header & 128) {
if (p+12 >= end) {
errno = enderr;
return 0;
}
uint64_t x = *p++;
x <<= 56;
x |= (uint64_t) *p++ << 48;
x |= (uint64_t) *p++ << 40;
x |= (uint64_t) *p++ << 32;
x |= (uint64_t) *p++ << 24;
x |= (uint64_t) *p++ << 16;
x |= (uint64_t) *p++ << 8;
x |= (uint64_t) *p++;
o->{{.NameNative}}.tv_sec = (time_t)(int64_t) x;
} else {
if (p+8 >= end) {
errno = enderr;
return 0;
}
uint_fast32_t x = *p++;
x <<= 24;
x |= (uint_fast32_t) *p++ << 16;
x |= (uint_fast32_t) *p++ << 8;
x |= (uint_fast32_t) *p++;
o->{{.NameNative}}.tv_sec = (time_t) x;
}
uint_fast32_t x = *p++;
x <<= 24;
x |= (uint_fast32_t) *p++ << 16;
x |= (uint_fast32_t) *p++ << 8;
x |= (uint_fast32_t) *p++;
o->{{.NameNative}}.tv_nsec = (long) x;
header = *p++;
}
{{else if eq .Type "text"}}
{{- if not .TypeList}}
if (header == {{.Index}}) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t n = *p++;
if (n > 127) {
n &= 127;
for (int shift = 7; shift < sizeof(size_t) * CHAR_BIT; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
n |= c << shift;
break;
}
n |= (c & 127) << shift;
}
}
if (n > colfer_size_max) {
errno = EFBIG;
return 0;
}
if (p+n >= end) {
errno = enderr;
return 0;
}
o->{{.NameNative}}.len = n;
void* a = malloc(n);
o->{{.NameNative}}.utf8 = (char*) a;
if (n) {
memcpy(a, p, n);
p += n;
}
header = *p++;
}
{{- else}}
if (header == {{.Index}}) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t n = *p++;
if (n > 127) {
n &= 127;
for (int shift = 7; ; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
n |= c << shift;
break;
}
n |= (c & 127) << shift;
}
}
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
o->{{.NameNative}}.len = n;
colfer_text* text = malloc(n * sizeof(colfer_text));
o->{{.NameNative}}.list = text;
for (; n; --n, ++text) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t len = *p++;
if (len > 127) {
len &= 127;
for (int shift = 7; ; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
len |= c << shift;
break;
}
len |= (c & 127) << shift;
}
}
if (len > colfer_size_max) {
errno = EFBIG;
return 0;
}
if (p+len >= end) {
errno = enderr;
return 0;
}
text->len = len;
char* a = malloc(len);
text->utf8 = a;
if (len) {
memcpy(a, p, len);
p += len;
}
}
if (p >= end) {
errno = enderr;
return 0;
}
header = *p++;
}
{{- end}}
{{else if eq .Type "binary"}}
{{- if not .TypeList}}
if (header == {{.Index}}) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t n = *p++;
if (n > 127) {
n &= 127;
for (int shift = 7; ; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
n |= c << shift;
break;
}
n |= (c & 127) << shift;
}
}
if (n > colfer_size_max) {
errno = EFBIG;
return 0;
}
if (p+n >= end) {
errno = enderr;
return 0;
}
o->{{.NameNative}}.len = n;
void* a = malloc(n);
o->{{.NameNative}}.octets = (uint8_t*) a;
if (n) {
memcpy(a, p, n);
p += n;
}
header = *p++;
}
{{- else}}
if (header == {{.Index}}) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t n = *p++;
if (n > 127) {
n &= 127;
for (int shift = 7; ; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
n |= c << shift;
break;
}
n |= (c & 127) << shift;
}
}
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
o->{{.NameNative}}.len = n;
colfer_binary* binary = malloc(n * sizeof(colfer_binary));
o->{{.NameNative}}.list = binary;
for (; n; --n, ++binary) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t len = *p++;
if (len > 127) {
len &= 127;
for (int shift = 7; ; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
len |= c << shift;
break;
}
len |= (c & 127) << shift;
}
}
if (len > colfer_size_max) {
errno = EFBIG;
return 0;
}
if (p+len >= end) {
errno = enderr;
return 0;
}
binary->len = len;
uint8_t* a = malloc(len);
binary->octets = a;
if (len) {
memcpy(a, p, len);
p += len;
}
}
if (p >= end) {
errno = enderr;
return 0;
}
header = *p++;
}
{{- end}}
{{else}}
{{- if not .TypeList}}
if (header == {{.Index}}) {
o->{{.NameNative}} = calloc(1, sizeof({{.TypeRef.NameNative}}));
size_t read = {{.TypeRef.NameNative}}_unmarshal(o->{{.NameNative}}, p, (size_t) (end - p));
if (!read) {
if (errno == EWOULDBLOCK) errno = enderr;
return read;
}
p += read;
if (p >= end) {
errno = enderr;
return 0;
}
header = *p++;
}
{{- else}}
if (header == {{.Index}}) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t n = *p++;
if (n > 127) {
n &= 127;
for (int shift = 7; ; shift += 7) {
if (p >= end) {
errno = enderr;
return 0;
}
size_t c = *p++;
if (c <= 127) {
n |= c << shift;
break;
}
n |= (c & 127) << shift;
}
}
if (n > colfer_list_max) {
errno = EFBIG;
return 0;
}
{{.TypeRef.NameNative}}* a = calloc(n, sizeof({{.TypeRef.NameNative}}));
for (size_t i = 0; i < n; ++i) {
size_t read = {{.TypeRef.NameNative}}_unmarshal(&a[i], p, (size_t) (end - p));
if (!read) {
if (errno == EWOULDBLOCK) errno = enderr;
return read;
}
p += read;
}
o->{{.NameNative}}.len = n;
o->{{.NameNative}}.list = a;
if (p >= end) {
errno = enderr;
return 0;
}
header = *p++;
}
{{- end}}
{{end}}{{end}}
if (header != 127) {
errno = EILSEQ;
return 0;
}
return (size_t) (p - (const uint8_t*) data);
}
{{end}}{{end}}`
|
#!/usr/bin/env bash
# This file:
#
#  - Injects markdown files into the ./website directory
#  - Changes them a little to make them more suitable for Jekyll building
#
# Usage:
#
#  ./inject.sh
#
# Based on a template by BASH3 Boilerplate v2.0.0
# http://bash3boilerplate.sh/#authors
#
# The MIT License (MIT)
# Copyright (c) 2013 Kevin van Zonneveld and contributors
# You are not obligated to bundle the LICENSE file with your b3bp projects as long
# as you leave these references intact in the header comments of your source files.

# Exit on error. Append || true if you expect an error.
set -o errexit
# Exit on error inside any functions or subshells.
set -o errtrace
# Do not allow use of undefined vars. Use ${VAR:-} to use an undefined VAR
set -o nounset
# Catch the error in case mysqldump fails (but gzip succeeds) in `mysqldump |gzip`
set -o pipefail
# Turn on traces, useful while debugging but commented out by default
# set -o xtrace

# Set magic variables for current file, directory, os, etc.
__dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
__file="${__dir}/$(basename "${BASH_SOURCE[0]}")"
__base="$(basename "${__file}" .sh)"
__root="$(cd "$(dirname "$(dirname "${__dir}")")" && pwd)"

pushd "${__root}"

# Offer the main template for download as http://bash3boilerplate.sh/main.sh
cp -v main.sh website/

for doc in "README" "FAQ" "CHANGELOG"; do
  targetName="$(echo "${doc}" | awk '{print tolower($0)}')"
  permalink="/${targetName}/"
  subtitle="$(tr '[:lower:]' '[:upper:]' <<< "${targetName:0:1}")${targetName:1} | "
  redirectFrom="/${doc}.md/"
  backLink='\n\n<a href="/">« Home</a>'

  # The README becomes the site's front page: no subtitle, no back link.
  if [[ "${doc}" = "README" ]]; then
    targetName="index"
    permalink="/"
    subtitle=""
    redirectFrom="nothing"
    backLink=""
  fi

  cat <<EOF > "website/${targetName}.md"
---
layout: default
permalink: ${permalink}
redirect_from: ${redirectFrom}
title: ${subtitle}BASH3 Boilerplate – Template for writing better Bash scripts
warning: This page is generated by ${__base}.sh based on ${doc}.md, please don't edit ${targetName}.md directly.
---
EOF

  # If '<!--more-->' exists, only inject what comes after it, so you can have e.g. a ToC or buildbuttons
  # on GitHub, without that also rendering in the site (site may have its own ToC rendering for instance)
  # Fix: use grep -q so the matched line is not dumped to the script's stdout.
  if grep -q '<!--more-->' "${doc}.md"; then
    sed -n -e '/<!--more-->/,$p' "${doc}.md" | tail -n +2 >> "website/${targetName}.md"
  else
    cat "${doc}.md" >> "website/${targetName}.md"
  fi

  # Add a "<- Back Home" link, if any
  echo -e "${backLink}" >> "website/${targetName}.md"
  echo "--> written website/${targetName}.md"
done

popd
|
import {percentage} from 'adaptone-front/helpers/percentage';
import {expect} from 'chai';
import {describe, it} from 'mocha';

// Fixtures: one ratio that needs two decimals, one whose decimals are all zeros.
const twoDecimalRatio = 0.0442;
const zeroDecimalRatio = 0.04;

describe('Unit | Helper | percentage', () => {
  it('should return a percentage with 2 decimals', () => {
    expect(percentage(twoDecimalRatio, 2)).to.equal('4.42%');
  });

  it('should truncate the decimals on zeros', () => {
    expect(percentage(zeroDecimalRatio, 2)).to.equal('4%');
  });
});
|
<reponame>Thomas161/Morty<filename>src/App.js
import React from 'react';
import CardsParent from './layout/CardsParent';
function App() {
return (
<React.Fragment>
<CardsParent />
</React.Fragment>
);
}
export default App;
|
<filename>routes/api/index.js
/**
* Routing module for handling all routes under /api
*/
/**
* Import core modules
*/
var express = require('express');
var router = express.Router();
var authenticationHelpers = require('../authenticationHelpers');
var users = require('./users');
var bookings = require('./bookings');
router.use('/users', users);
router.use('/bookings', bookings);
router.get('/authenticated', authenticationHelpers.isAuth, function(req, res, next) {
res.json({"authenticated": true});
});
router.get('/', function(request, response) {
response.json({"made it": "ok"});
});
module.exports = router; |
import bytes from 'bytes';
import { doesItemFail } from './util';
import { Table } from '@/typings/components/Table';
import { Config, LastReportItem } from '@/typings/plugin/last-value';
import { PluginOptions } from '@/typings/config/plugin';
// Renderer produces the display string for the "Last Value" table cell of a
// report item.
type Renderer = (lastValue: number | string | void, item: LastReportItem) => Promise<string>;

// Shared options for the `bytes` formatter: a space between number and unit.
const bytesConfig = {
  unitSeparator: ' ',
};
// Format a number with two decimals when it has a fractional part, otherwise
// as a plain integer string.
const decimalize = (value: number): string => {
  const hasFraction = value % 1 !== 0;
  return hasFraction ? value.toFixed(2) : `${value}`;
};
// Append the percentage change (signed, two decimals max) to the rendered
// last value. Falls back to the bare value when no change data is present.
export const renderDiffPercentage = (lastValue: string, item: LastReportItem): string => {
  const { lastValueChange, lastValueDiff } = item;

  if (!lastValueChange || !lastValueDiff) {
    return lastValue;
  }

  const sign = lastValueDiff > 0 ? '+' : '';

  return `${lastValue}
${sign}${decimalize(lastValueChange)}%`;
};
// Append the absolute byte difference (signed, human-readable) to the
// rendered last value. Falls back to the bare value when there is no diff.
export const renderDifference = (lastValue: number, item: LastReportItem): string => {
  const { lastValueDiff } = item;

  if (!lastValueDiff) {
    return `${lastValue}`;
  }

  const sign = lastValueDiff > 0 ? '+' : '';

  return `${lastValue}
${sign}${bytes(lastValueDiff, bytesConfig)}`;
};
// Builds the async cell renderer for the "Last Value" column. Depending on
// which budget check the item fails, the value is rendered with its byte
// diff, with its percentage diff, or as-is.
export const createRenderer = (pluginOptions: PluginOptions, config: Config): Renderer => async (
  lastValue: number | string | void,
  item: LastReportItem,
): Promise<string> => {
  const failure = await doesItemFail(item, config, pluginOptions);

  if (failure === 'number' || failure === 'percentage' || failure === 'size') {
    return renderDifference(lastValue as number, item);
  }

  if (
    failure === 'numberDiffPercentage'
    || failure === 'percentageDiffPercentage'
    || failure === 'sizeDiffPercentage'
  ) {
    return renderDiffPercentage(lastValue as string, item);
  }

  // `== null` intentionally catches both null and undefined (void).
  return lastValue == null ? '' : `${lastValue}`;
};
// Inserts a "Last Value" column immediately after the "Value" column.
// No-op when no table is provided.
export const addColumn = (table: Table | void, pluginOptions: PluginOptions, config: Config): void => {
  if (!table) {
    return;
  }

  const valueColumnIndex = table.findColumn((column): boolean => column.header === 'Value', true) as number;

  table.addColumn(
    {
      header: 'Last Value',
      key: 'lastValue',
      align: 'center',
      renderer: createRenderer(pluginOptions, config),
    },
    valueColumnIndex + 1,
  );
};
|
// Copyright ©2017 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package spectral
import (
"math"
"github.com/ArkaGPL/gonum/graph"
"github.com/ArkaGPL/gonum/mat"
)
// Laplacian is a graph Laplacian matrix.
type Laplacian struct {
	// Matrix holds the Laplacian matrix. Row/column i corresponds to
	// Nodes[i].
	mat.Matrix

	// Nodes holds the input graph nodes.
	Nodes []graph.Node

	// Index is a mapping from the graph
	// node IDs to row and column indices.
	Index map[int64]int
}
// NewLaplacian returns a Laplacian matrix for the simple undirected graph g.
// The Laplacian is defined as D-A where D is a diagonal matrix holding the
// degree of each node and A is the graph adjacency matrix of the input graph.
// If g contains self edges, NewLaplacian will panic.
func NewLaplacian(g graph.Undirected) Laplacian {
	nodes := graph.NodesOf(g.Nodes())
	index := make(map[int64]int, len(nodes))
	for i, n := range nodes {
		index[n.ID()] = i
	}

	m := mat.NewSymDense(len(nodes), nil)
	for col, u := range nodes {
		uid := u.ID()
		neighbors := graph.NodesOf(g.From(uid))
		// Diagonal: the node degree.
		m.SetSym(col, col, float64(len(neighbors)))
		for _, v := range neighbors {
			vid := v.ID()
			switch {
			case uid == vid:
				panic("network: self edge in graph")
			case uid < vid:
				// Off-diagonal: -1 per edge; only set once per pair since
				// the matrix is symmetric.
				m.SetSym(index[vid], col, -1)
			}
		}
	}

	return Laplacian{Matrix: m, Nodes: nodes, Index: index}
}
// NewSymNormLaplacian returns a symmetric normalized Laplacian matrix for the
// simple undirected graph g.
// The normalized Laplacian is defined as I-D^(-1/2)AD^(-1/2) where D is a
// diagonal matrix holding the degree of each node and A is the graph adjacency
// matrix of the input graph.
// If g contains self edges, NewSymNormLaplacian will panic.
func NewSymNormLaplacian(g graph.Undirected) Laplacian {
	nodes := graph.NodesOf(g.Nodes())
	index := make(map[int64]int, len(nodes))
	for i, n := range nodes {
		index[n.ID()] = i
	}

	m := mat.NewSymDense(len(nodes), nil)
	for col, u := range nodes {
		uid := u.ID()
		neighbors := graph.NodesOf(g.From(uid))
		if len(neighbors) == 0 {
			// Isolated node: leave the whole row/column zero.
			continue
		}
		m.SetSym(col, col, 1)
		sqrtDeg := math.Sqrt(float64(len(neighbors)))
		for _, v := range neighbors {
			vid := v.ID()
			if uid == vid {
				panic("network: self edge in graph")
			}
			if uid >= vid {
				// Each undirected pair is written once (symmetric storage).
				continue
			}
			it := g.From(vid)
			deg := it.Len()
			if deg < 0 {
				// Iterator does not know its length up front; count it.
				deg = len(graph.NodesOf(it))
			}
			m.SetSym(index[vid], col, -1/(sqrtDeg*math.Sqrt(float64(deg))))
		}
	}

	return Laplacian{Matrix: m, Nodes: nodes, Index: index}
}
// NewRandomWalkLaplacian returns a damp-scaled random walk Laplacian matrix for
// the simple graph g.
// The random walk Laplacian is defined as I-D^(-1)A where D is a diagonal matrix
// holding the degree of each node and A is the graph adjacency matrix of the input
// graph.
// If g contains self edges, NewRandomWalkLaplacian will panic.
func NewRandomWalkLaplacian(g graph.Graph, damp float64) Laplacian {
	nodes := graph.NodesOf(g.Nodes())
	index := make(map[int64]int, len(nodes))
	for i, n := range nodes {
		index[n.ID()] = i
	}

	m := mat.NewDense(len(nodes), len(nodes), nil)
	for col, u := range nodes {
		uid := u.ID()
		neighbors := graph.NodesOf(g.From(uid))
		if len(neighbors) == 0 {
			// Isolated node: leave the whole row/column zero.
			continue
		}
		m.Set(col, col, 1-damp)
		w := (damp - 1) / float64(len(neighbors))
		for _, v := range neighbors {
			vid := v.ID()
			if uid == vid {
				panic("network: self edge in graph")
			}
			m.Set(index[vid], col, w)
		}
	}

	return Laplacian{Matrix: m, Nodes: nodes, Index: index}
}
|
<reponame>ShubhamModi004/Educrate
// Supported UI locales; the entry with `default: true` is the fallback.
module.exports = [
  {
    // NOTE(review): locale code 'pl' (Polish) is paired with a Hindi label —
    // this looks like it should be 'hi'; confirm against the translation files.
    locale: 'pl',
    label: 'हिंदी',
  },
  {
    locale: 'en',
    label: 'En',
    default: true,
  },
];
|
<reponame>sachaservan/pir
package pir
import (
"argsort"
"math"
"math/rand"
"sort"
"strconv"
"testing"
)
// NumTrials bounds how many times the randomized tests below repeat.
const NumTrials int = 10 // number of times to run some of the tests
// generateStringsInSequence returns the decimal string representations of
// 0..n-1, in order.
func generateStringsInSequence(n int) []string {
	out := make([]string, 0, n)
	for i := 0; i < n; i++ {
		out = append(out, strconv.Itoa(i))
	}
	return out
}
// TestKeywordQuerySqrtST builds a PrivateSqrtST over sorted, padded,
// reverse-ordered data and, for every element, issues a two-share private
// query against the second layer, checking that the recovered row/column
// locate the original element.
func TestKeywordQuerySqrtST(t *testing.T) {
	setup()
	for trial := 0; trial < NumTrials; trial++ {
		// Random data size in [100, 100+2^10).
		numStrings := rand.Intn(1<<10) + 100
		data := generateStringsInSequence(numStrings)
		data = PadToSqrt(data)
		sort.Strings(data)
		argsort.ReverseStrings(data)
		t.Logf("[Test]: data size %v\n", len(data))

		sqst := NewPrivateSqrtST()
		err := sqst.BuildForData(data)
		if err != nil {
			t.Fatal(err)
		}

		var res []*Slot
		for i := 0; i < len(data); i++ {
			query := NewSlotFromString(data[i], sqst.SlotBytes)

			// The first layer must hold ceil(sqrt(n)) boundary entries.
			if int(math.Ceil(math.Sqrt(float64(len(data))))) != len(sqst.FirstLayer) {
				t.Fatalf("First layer does not have the correct size. Expected: %v Actual %v\n",
					int(math.Sqrt(float64(len(data)))),
					len(sqst.FirstLayer),
				)
			}

			// Scan the (descending) boundaries for the row containing data[i].
			boundry := ""
			rowIndex := 0
			for rowIndex, boundry = range sqst.FirstLayer {
				if data[i] > boundry {
					break
				}
			}

			// Split the row query into two secret shares and run each.
			shares := sqst.SecondLayer.NewIndexQueryShares(rowIndex, sqst.Height, 2)
			resA, err := sqst.PrivateQuery(shares[0], NumProcsForQuery)
			if err != nil {
				t.Fail()
			}
			resB, err := sqst.PrivateQuery(shares[1], NumProcsForQuery)
			if err != nil {
				t.Fail()
			}

			resultShares := [...]*SecretSharedQueryResult{resA, resB}
			res = Recover(resultShares[:])

			if len(res) != len(sqst.FirstLayer) {
				t.Fatalf("Second layer does not have the correct size. Expected: %v Actual %v\n",
					len(res),
					len(sqst.FirstLayer),
				)
			}

			// Locate the matching column within the recovered row.
			colIndex := 0
			var slot *Slot
			for colIndex, slot = range res {
				if slot.Compare(query) <= 0 {
					break
				}
			}

			index := rowIndex*sqst.Width + colIndex
			if index != i && data[index] != data[i] {
				t.Fatalf("Incorrect index %v, expected %v; Data at index %v, expected data %v\n", index, i, data[index], data[i])
			}
		}
	}
}
|
#!/bin/sh
set -e
# Install LibIPC headers and the static library into the SerenityOS
# target root filesystem.
SERENITY_ROOT=../../
# Quote the root variable so the script survives paths containing spaces.
mkdir -p "$SERENITY_ROOT"/Root/usr/include/LibIPC/
cp ./*.h "$SERENITY_ROOT"/Root/usr/include/LibIPC/
cp libipc.a "$SERENITY_ROOT"/Root/usr/lib/
|
#!/bin/bash
# Update Script
# Script created by @collinstechadmin

# Verify required external tools exist before touching anything.
dependencies() {
	command -v git > /dev/null 2>&1 || { echo >&2 "Package GIT is not installed ... Unable to update ..."; exit 1; }
}

# Replace the current checkout with a fresh clone of the upstream repository.
script() {
	clear
	printf "\n \e[1;92mUpdating \e[1;94mShellPhish\e[1;92m directory ...\n\n"
	sleep 1.5
	# Guard the directory changes: without these, a failed cd would let the
	# destructive rm -rf / clone below run in the wrong directory (SC2164).
	cd .. || exit 1
	rm -rf ShellPhish
	# NOTE(review): this clone creates a directory named after the repo
	# ("Shellphish-v.2.5"), yet the script cds into "ShellPhish" — confirm
	# whether the clone should pass an explicit target directory.
	git clone https://github.com/collinstech256/Shellphish-v.2.5.git
	cd ShellPhish || exit 1
	# ./* instead of * so filenames starting with '-' are not parsed as options.
	chmod +x ./*
	printf "\n\e[1;92m Update Complete ...\n\e[0m"
}

dependencies
script
|
#!/usr/bin/env bash
# ONE LIGHT
# --- ----
# Gnome Terminal color scheme install script
# Based on:
# https://github.com/chriskempson/base16-gnome-terminal/
# Profile identity and tool locations; each is overridable via the environment.
[[ -z "$PROFILE_NAME" ]] && PROFILE_NAME="One Light"
[[ -z "$PROFILE_SLUG" ]] && PROFILE_SLUG="one-light"
[[ -z "$DCONF" ]] && DCONF=dconf
[[ -z "$UUIDGEN" ]] && UUIDGEN=uuidgen
# dset KEY VALUE — write VALUE to KEY under the current dconf profile.
# NOTE(review): this tests "$type", which dset itself never sets; it only
# takes effect if a global `type` happens to be set. Callers below pass
# already-quoted values, so the branch is normally inert — confirm intent.
dset() {
	local key="$1"; shift
	local val="$1"; shift
	if [[ "$type" == "string" ]]; then
		val="'$val'"
	fi
	"$DCONF" write "$PROFILE_KEY/$key" "$val"
}
# because dconf still doesn't have "append"
dlist_append() {
local key="$1"; shift
local val="$1"; shift
local entries="$(
{
"$DCONF" read "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
echo "'$val'"
} | head -c-1 | tr "\n" ,
)"
"$DCONF" write "$key" "[$entries]"
}
# Newest versions of gnome-terminal use dconf
if which "$DCONF" > /dev/null 2>&1; then
	[[ -z "$BASE_KEY_NEW" ]] && BASE_KEY_NEW=/org/gnome/terminal/legacy/profiles:
	# Only proceed when at least one profile already exists under the key.
	if [[ -n "`$DCONF list $BASE_KEY_NEW/`" ]]; then
		if which "$UUIDGEN" > /dev/null 2>&1; then
			# Modern gnome-terminal identifies profiles by UUID, not slug.
			PROFILE_SLUG=`uuidgen`
		fi
		# Determine the default profile's id, falling back to the first listed.
		if [[ -n "`$DCONF read $BASE_KEY_NEW/default`" ]]; then
			DEFAULT_SLUG=`$DCONF read $BASE_KEY_NEW/default | tr -d \'`
		else
			DEFAULT_SLUG=`$DCONF list $BASE_KEY_NEW/ | grep '^:' | head -n1 | tr -d :/`
		fi
		DEFAULT_KEY="$BASE_KEY_NEW/:$DEFAULT_SLUG"
		PROFILE_KEY="$BASE_KEY_NEW/:$PROFILE_SLUG"
		# copy existing settings from default profile
		$DCONF dump "$DEFAULT_KEY/" | $DCONF load "$PROFILE_KEY/"
		# add new copy to list of profiles
		dlist_append $BASE_KEY_NEW/list "$PROFILE_SLUG"
		# update profile values with theme options
		dset visible-name "'$PROFILE_NAME'"
		dset palette "['#000000', '#E45649', '#50A14F', '#986801', '#4078F2', '#A626A4', '#0184BC', '#A0A1A7', '#5c6370', '#E45649', '#50A14F', '#986801', '#4078F2', '#A626A4', '#0184BC', '#ffffff']"
		dset background-color "'#F9F9F9'"
		dset foreground-color "'#383A42'"
		dset bold-color "'#383A42'"
		dset bold-color-same-as-fg "true"
		dset use-theme-colors "false"
		dset use-theme-background "false"
		unset PROFILE_NAME
		unset PROFILE_SLUG
		unset DCONF
		unset UUIDGEN
		# dconf path succeeded; skip the gconf fallback below.
		exit 0
	fi
fi
# Fallback for Gnome 2 and early Gnome 3
[[ -z "$GCONFTOOL" ]] && GCONFTOOL=gconftool
[[ -z "$BASE_KEY" ]] && BASE_KEY=/apps/gnome-terminal/profiles
PROFILE_KEY="$BASE_KEY/$PROFILE_SLUG"

# gset TYPE KEY VALUE — write a typed value under the profile key.
gset() {
	local type="$1"; shift
	local key="$1"; shift
	local val="$1"; shift
	"$GCONFTOOL" --set --type "$type" "$PROFILE_KEY/$key" -- "$val"
}

# Because gconftool doesn't have "append"
# glist_append TYPE KEY VALUE — rewrite the gconf list at KEY with VALUE
# appended, first dropping any existing occurrence of VALUE (dedupe).
glist_append() {
	local type="$1"; shift
	local key="$1"; shift
	local val="$1"; shift
	local entries="$(
	{
	"$GCONFTOOL" --get "$key" | tr -d '[]' | tr , "\n" | fgrep -v "$val"
	echo "$val"
	} | head -c-1 | tr "\n" ,
	)"
	"$GCONFTOOL" --set --type list --list-type $type "$key" "[$entries]"
}

# Append profile to the profile list
glist_append string /apps/gnome-terminal/global/profile_list "$PROFILE_SLUG"
gset string visible_name "$PROFILE_NAME"
gset string palette "#000000:#E45649:#50A14F:#986801:#4078F2:#A626A4:#0184BC:#A0A1A7:#5c6370:#e06c75:#50A14F:#986801:#4078F2:#A626A4:#0184BC:#ffffff"
gset string background_color "#F9F9F9"
gset string foreground_color "#383A42"
gset string bold_color "#383A42"
gset bool bold_color_same_as_fg "true"
gset bool use_theme_colors "false"
gset bool use_theme_background "false"
unset PROFILE_NAME
unset PROFILE_SLUG
unset DCONF
unset UUIDGEN
|
package org.museautomation.ui.extend.actions;

import java.util.*;

/**
 * An UndoableAction composed of several child actions that execute as a
 * group and are undone as a group via a private child undo stack.
 *
 * @author <NAME> (see LICENSE.txt for license details)
 */
public class CompoundAction extends UndoableAction
{
    /** Child actions executed by this compound action, in order. */
    private final List<BaseEditAction> _actions = new ArrayList<>();
    /** Undo stack shared by the children so they can be undone together. */
    private UndoStack _child_undo_stack = new UndoStack();

    public CompoundAction(List<BaseEditAction> actions)
    {
        _actions.addAll(actions);
    }

    public CompoundAction()
    {
    }

    /** Appends another child action to the group. */
    public void addAction(BaseEditAction action)
    {
        _actions.add(action);
    }

    /**
     * Executes every child action (all of them run, even after a failure)
     * and reports whether all succeeded.
     */
    @Override
    protected boolean executeImplementation()
    {
        boolean succeeded = true;
        for (BaseEditAction action : _actions)
            succeeded &= action.execute(_child_undo_stack);
        return succeeded;
    }

    /** Undoes all children via the shared child undo stack. */
    @Override
    protected boolean undoImplementation()
    {
        return _child_undo_stack.undoAll();
    }

    /** @return the number of child actions currently in the group. */
    public int getSize()
    {
        return _actions.size();
    }
}
|
<reponame>hector23rp/alba-node
import { Song } from "./song";
import { Artist } from "./artist";
import { Entity, Column, ObjectID, ObjectIdColumn } from "typeorm";

/**
 * TypeORM entity describing a music album.
 */
@Entity()
export class Album {
    // MongoDB ObjectID primary key.
    @ObjectIdColumn()
    id: ObjectID;

    @Column()
    title: string;

    // The album's artist.
    @Column()
    author: Artist;

    // The album's track listing.
    @Column()
    tracks: Song[];

    @Column()
    genre: string;
}
|
def getUnixConformName(file_name: str) -> str:
    """Return a unix-friendly version of ``file_name``.

    Leading/trailing whitespace is stripped, remaining spaces become
    underscores, and the result is lower-cased. A blank or whitespace-only
    name yields the placeholder ``"unnamed_file"``.
    """
    cleaned = file_name.strip()
    if not cleaned:
        return "unnamed_file"
    return cleaned.replace(' ', '_').lower()
def check_python_version(major, minor):
    """Validate that the given Python version is supported by plexmediafixup.

    :param major: major version number (2 or 3)
    :param minor: minor version number
    :raises RuntimeError: for Python < 2.7 on the 2.x line, or < 3.5 on the
        3.x line. Returns None otherwise.
    """
    if major == 2 and minor < 7:
        # Plain strings: the originals carried f-prefixes with no placeholders.
        raise RuntimeError("On Python 2, plexmediafixup requires Python 2.7 or higher")
    elif major == 3 and minor < 5:
        raise RuntimeError("On Python 3, plexmediafixup requires Python 3.5 or higher")
package network_flow;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 17481번: 최애 정하기
*
* @see https://www.acmicpc.net/problem/17481/
*
*/
public class Boj17481 {
    // connected[i]: indices of the members that fan i can pick (bipartite edges).
    private static ArrayList<Integer>[] connected;
    // aMatch[fan] = matched member, bMatch[member] = matched fan; -1 = unmatched.
    private static int[] aMatch, bMatch, visit;
    // Visit "timestamp": bumping it invalidates previous marks, so the visit
    // array never needs clearing between augmenting passes.
    private static int vcount;
    // Member name -> column index in the bipartite graph.
    private static HashMap<String, Integer> member = new HashMap<>();

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());

        int N = Integer.parseInt(st.nextToken());
        int M = Integer.parseInt(st.nextToken());
        connected = new ArrayList[N];

        // M member names, assigned sequential indices.
        for(int i = 0; i < M; i++) {
            member.put(br.readLine(), i);
        }
        // For each fan, read the count followed by the liked member names.
        for(int i = 0; i < N; i++) {
            connected[i] = new ArrayList<>();
            st = new StringTokenizer(br.readLine());
            int loop = Integer.parseInt(st.nextToken());
            while(loop-- > 0) { // set graph
                String name = st.nextToken();
                connected[i].add(member.get(name));
            }
        }
        System.out.println(bipartiteMatch(N, M));
    }

    // Kuhn's augmenting-path bipartite matching over the fan->member graph.
    // Returns "YES" when every fan is matched, otherwise "NO\n<matched count>".
    private static String bipartiteMatch(int n, int m) {
        int count = 0;
        aMatch = new int[n];
        bMatch = new int[m];
        visit = new int[n];
        Arrays.fill(aMatch, -1);
        Arrays.fill(bMatch, -1);

        for(int i = 0; i < n; i++) {
            vcount++;
            count += recursion(i);
        }
        if(count == n) return "YES"; // all friends have favorite

        StringBuilder sb = new StringBuilder();
        sb.append("NO\n").append(count);
        return sb.toString();
    }

    // Tries to extend the matching with an augmenting path starting at fan
    // `current`; returns 1 on success, 0 otherwise.
    private static int recursion(int current) {
        if(visit[current] == vcount) return 0;
        visit[current] = vcount;

        for(int next: connected[current]) {
            if(bMatch[next] == -1 || recursion(bMatch[next]) == 1) { // match
                bMatch[next] = current;
                aMatch[current] = next;
                return 1;
            }
        }
        return 0;
    }
}
|
<filename>js/sykepengesoknad-gammel-plattform/fravar-og-friskmelding/validerFravaerOgFriskmelding.js<gh_stars>1-10
import { toDatePrettyPrint, tidligsteFom, fraInputdatoTilJSDato } from '@navikt/digisyfo-npm';
import validerFoerDuBegynner from '../for-du-begynner/validerFoerDuBegynner';
import * as valideringUtils from '../utils/valideringUtils';
import { getTomDato } from '../utils/sykepengesoknadUtils';
import { visSoktOmSykepengerUtenlandsoppholdsporsmal } from './SoktOmSykepengerIUtenlandsopphold';
// Redux-form validator for the "fravær og friskmelding" (absence /
// back-to-work) step of the sickness-benefit claim. Returns an object of
// field-name -> error-message entries; an empty object means the step is valid.
export const validate = (values, props) => {
    const { sykepengesoknad } = props;
    const feilmeldinger = {};
    let gjenopptattArbeidFulltUtDato;
    // Date window (fra/til) that the ferie/permisjon/utenlandsopphold
    // periods below are validated against.
    const periodealternativer = {};
    if (values.harGjenopptattArbeidFulltUt) {
        try {
            gjenopptattArbeidFulltUtDato = fraInputdatoTilJSDato(values.gjenopptattArbeidFulltUtDato);
            const perioder = sykepengesoknad.aktiviteter.map((a) => {
                return a.periode;
            });
            periodealternativer.fra = sykepengesoknad.del === 1 && sykepengesoknad.forrigeSykeforloepTom ? sykepengesoknad.forrigeSykeforloepTom : tidligsteFom(perioder);
            periodealternativer.til = getTomDato({
                ...sykepengesoknad,
                gjenopptattArbeidFulltUtDato,
            });
        } catch (e) {
            // Unparseable date input: leave the window empty and let the
            // field-level checks below report the missing/invalid date.
            gjenopptattArbeidFulltUtDato = null;
        }
    }
    // If the earlier wizard step no longer validates, send the user back there.
    if (Object.keys(validerFoerDuBegynner(values)).length !== 0) {
        props.sendTilFoerDuBegynner(sykepengesoknad);
    }
    if (values.bruktEgenmeldingsdagerFoerLegemeldtFravaer === undefined) {
        feilmeldinger.bruktEgenmeldingsdagerFoerLegemeldtFravaer = 'Du må svare om du brukte egenmeldingsdager før det legemeldte fraværet startet';
    }
    if (values.harGjenopptattArbeidFulltUt === undefined) {
        feilmeldinger.harGjenopptattArbeidFulltUt = 'Vennligst oppgi om du var tilbake i arbeid før sykmeldingsperioden utløp';
    } else if (values.harGjenopptattArbeidFulltUt) {
        // A "yes" requires a date, and that date may not predate the sick leave.
        if (!values.gjenopptattArbeidFulltUtDato) {
            feilmeldinger.gjenopptattArbeidFulltUtDato = 'Vennligst oppgi når du gjenopptok arbeidet';
        } else if (!valideringUtils.datoErFoersteSykmeldingsdagEllerSenere(values.gjenopptattArbeidFulltUtDato, sykepengesoknad)) {
            feilmeldinger.gjenopptattArbeidFulltUtDato = `Datoen kan ikke være før du ble sykmeldt ${toDatePrettyPrint(sykepengesoknad.identdato)}`;
        }
    }
    if (values.bruktEgenmeldingsdagerFoerLegemeldtFravaer) {
        const egenmeldingsperioderFeil = valideringUtils.validerPerioder(values.egenmeldingsperioder);
        if (egenmeldingsperioderFeil) {
            feilmeldinger.egenmeldingsperioder = egenmeldingsperioderFeil;
        }
    }
    if (values.harHattFeriePermisjonEllerUtenlandsopphold === undefined) {
        feilmeldinger.harHattFeriePermisjonEllerUtenlandsopphold = 'Vennligst svar på om du har hatt ferie, permisjon eller utenlandsopphold';
    } else if (values.harHattFeriePermisjonEllerUtenlandsopphold) {
        // "Yes" requires at least one of the three sub-checkboxes.
        if (([values.harHattFerie, values.harHattPermisjon, values.harHattUtenlandsopphold]).filter((a) => {
            return a;
        }).length === 0) {
            feilmeldinger.feriePermisjonEllerUtenlandsopphold = {
                _error: 'Vennligst kryss av ett av alternativene',
            };
        }
        if (values.harHattFerie) {
            const feriefeilmeldinger = valideringUtils.validerPerioder(values.ferie, periodealternativer);
            if (feriefeilmeldinger) {
                feilmeldinger.ferie = feriefeilmeldinger;
            }
        }
        if (values.harHattUtenlandsopphold) {
            // Validate both the stay periods and (when shown) the
            // "applied for benefits abroad" question.
            const utenlandsoppholdPeriodefeilmeldinger = valideringUtils.validerPerioder(values.utenlandsopphold.perioder, periodealternativer);
            const utenlandsoppholdfeilmeldinger = {};
            if (utenlandsoppholdPeriodefeilmeldinger) {
                utenlandsoppholdfeilmeldinger.perioder = utenlandsoppholdPeriodefeilmeldinger;
            }
            if (visSoktOmSykepengerUtenlandsoppholdsporsmal(values)
                && (values.utenlandsopphold.soektOmSykepengerIPerioden === undefined || values.utenlandsopphold.soektOmSykepengerIPerioden === null)) {
                utenlandsoppholdfeilmeldinger.soektOmSykepengerIPerioden = 'Vennligst oppgi om du har søkt på sykepenger under oppholdet utenfor Norge';
            }
            if (Object.keys(utenlandsoppholdfeilmeldinger).length > 0) {
                feilmeldinger.utenlandsopphold = utenlandsoppholdfeilmeldinger;
            }
        }
        if (values.harHattPermisjon) {
            const permisjonfeilmeldinger = valideringUtils.validerPerioder(values.permisjon, periodealternativer);
            if (permisjonfeilmeldinger) {
                feilmeldinger.permisjon = permisjonfeilmeldinger;
            }
        }
    }
    return feilmeldinger;
};
export default validate;
|
<filename>renderer/reducers/index.js<gh_stars>0
import { combineReducers } from 'redux';
import player from './player';
import browser from './browser';

// Root reducer: each feature reducer lives under its own state key.
export default combineReducers({
  player,
  browser
});
|
<reponame>LamiumAmplexicaule/RayTracing
package net.henbit.raytracing.nextweek.material;

import net.henbit.raytracing.nextweek.HitRecord;
import net.henbit.raytracing.nextweek.Ray;
import net.henbit.raytracing.nextweek.Vector3;

/**
 * Base class for surface materials in the ray tracer.
 */
public abstract class Material
{

    /**
     * Scatter an incoming ray at a hit point, writing the attenuation and
     * the scattered ray into the supplied out-parameters.
     *
     * @return true if the ray was scattered, false if it was absorbed
     */
    public abstract boolean scatter(final Ray ray, final HitRecord hitRecord, Vector3 attenuation, Ray scattered);

    /**
     * Light emitted by the material at texture coordinates (u, v) / point.
     * Non-emissive materials (the default) emit black.
     */
    public Vector3 emitted(double u, double v, final Vector3 point)
    {
        return new Vector3(0, 0, 0);
    }

}
|
#
# build config
#
PACKAGES="sys-apps/busybox"
EMERGE_BIN="emerge-x86_64-pc-linux-uclibc"

#
# this method runs in the bb builder container just before starting the build of the rootfs
#
configure_rootfs_build()
{
	export CHOST=x86_64-pc-linux-uclibc
	# Pin busybox below 1.24.
	# NOTE(review): the mask is applied twice — via mask_package and by writing
	# the package.mask file directly; confirm whether both are required.
	mask_package '>=sys-apps/busybox-1.24'
	echo ">=sys-apps/busybox-1.24" > /usr/x86_64-pc-linux-uclibc/etc/portage/package.mask/busybox
	# Build a static busybox that installs its applet symlinks.
	echo "sys-apps/busybox make-symlinks static" > /usr/x86_64-pc-linux-uclibc/etc/portage/package.use/busybox
}

#
# this method runs in the bb builder container just before tar'ing the rootfs
#
finish_rootfs_build()
{
	# log dir, root home dir
	mkdir -p $EMERGE_ROOT/var/log $EMERGE_ROOT/root
	# busybox crond setup
	mkdir -p $EMERGE_ROOT/var/spool/cron/crontabs
	# NOTE(review): 0600 on a directory removes the execute bit needed to
	# traverse into it; confirm whether 0700 was intended here.
	chmod 0600 $EMERGE_ROOT/var/spool/cron/crontabs
}
|
'use strict';

(function() {
    /**
     * Main controller: holds the list of analysis requests and exposes
     * executeAnalyze() to queue a new one, skipping duplicate terms.
     */
    angular.module("webAnalyzer.controllers").controller("MainController",
        function($scope, $http) {
            // Queued/processed analysis requests, newest first.
            $scope.requests = [];

            $scope.executeAnalyze = function($term) {
                var $request = { term: $term, showTop: true };
                if (containsAnalysisRequest($request)) {
                    return;
                }
                $scope.requests.unshift($request);
            };

            // True when a request with the same term is already queued.
            var containsAnalysisRequest = function($request) {
                return Enumerable.From($scope.requests).Any(function($item) {
                    return $item.term === $request.term;
                });
            };
        }
    );
}());
#!/bin/bash
# Copyright Ben Southall (github.com/stellarpower) 2015.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENCE.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

# Prints the target of an NTFS junction using Sysinternals junction.exe,
# intended for use from an MSYS/Cygwin-style shell on Windows.
JUNCTION='/c/Program Files/junction.exe' #Change this to match your setup or simply replace with junction.exe if it's in your PATH.

# chompPath PATH — strip any trailing slashes from PATH.
chompPath(){
	echo "$1" | sed 's/\(.*[^\/]\)\/*$/\1/'
}

thePath="$(chompPath "$1")"
# Extract the "Substitute Name:" value from junction.exe's output.
"$JUNCTION" "$thePath" | grep Substitute | sed 's/.*Name: // ; s/(.*) *^/\1/'
|
<filename>quiz_20_03_23.py
# A palindrome is a sequence of characters that reads the same backwards and forwards.
# Given a string, s, find the longest palindromic substring in s.
#
# Example:
# Input: "banana"
# Output: "anana"
#
# Input: "million"
# Output: "illi"
import unittest
from hamcrest import assert_that, equal_to
from functools import reduce
class Solution:
    """Brute-force longest-palindromic-substring solver."""

    @staticmethod
    def get_substrings(s):
        """Return the set of all non-empty substrings of ``s``."""
        # Set comprehension replaces the build-a-list-then-set() pattern.
        return {s[i:j + 1] for i in range(len(s)) for j in range(i, len(s))}

    @staticmethod
    def is_palindrome(s):
        """True if ``s`` reads the same forwards and backwards."""
        # Idiomatic slice reversal instead of list/reverse/join.
        return s == s[::-1]

    def longest_palindrome(self, s):
        """Return the longest palindromic substring of ``s``.

        Enumerates all substrings and keeps the longest palindrome.
        Ties are broken arbitrarily (substrings come from a set), matching
        the original behavior; an empty input yields "".
        """
        palindromes = (sub for sub in self.get_substrings(s) if self.is_palindrome(sub))
        # max(..., key=len, default="") replaces the manual reduce().
        return max(palindromes, key=len, default="")
class Test(unittest.TestCase):
    """Checks longest_palindrome against a known answer."""

    def test(self):
        # "tracecars" contains exactly one longest palindrome: "racecar".
        result = str(Solution().longest_palindrome("tracecars"))
        assert_that(result, equal_to("racecar"))
|
#!/bin/bash -
# Usage: ./jib-cli/scripts/update_gcs_latest.sh <release version>
# Publishes {"latest":"<version>"} to gs://jib-versions/jib-cli and then
# verifies that the public URL serves the new version string.

set -o errexit

EchoRed() {
	echo "$(tput setaf 1; tput bold)$1$(tput sgr0)"
}
EchoGreen() {
	echo "$(tput setaf 2; tput bold)$1$(tput sgr0)"
}

# Print an error in red and abort.
Die() {
	EchoRed "$1"
	exit 1
}

# Usage: CheckVersion <version>
CheckVersion() {
	[[ $1 =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z]+)?$ ]] || Die "Version: $1 not in ###.###.###[-XXX] format."
}

[ $# -ne 1 ] && Die "Usage: ./jib-cli/scripts/update_gcs_latest.sh <release version>"
# Quote the argument so whitespace/globs in a malformed argument cannot expand.
CheckVersion "$1"

versionString="{\"latest\":\"$1\"}"
destination="gs://jib-versions/jib-cli"

# Quote expansions throughout (previously unquoted).
echo "$versionString" > jib-cli-temp
gsutil cp jib-cli-temp "$destination"
gsutil acl ch -u allUsers:READ "$destination"
rm jib-cli-temp

gcsResult=$(curl https://storage.googleapis.com/jib-versions/jib-cli)
if [ "$gcsResult" == "$versionString" ]
then
	EchoGreen "Version updated successfully"
else
	Die "Version update failed"
fi
# Deploy the current directory to the zeppelinos.org S3 bucket, excluding
# repo/deploy metadata. Extra script arguments (e.g. --delete, --dryrun)
# pass straight through to `aws s3 sync`; "$@" is quoted so arguments
# containing spaces survive word splitting.
aws s3 sync --exclude "deploy.sh" --exclude ".git/*" --exclude ".gitignore" --exclude "*/.gitkeep" "$@" . s3://zeppelinos.org
|
package com.mari05lim.covidinfo.fragment;
import android.annotation.SuppressLint;
import android.app.ProgressDialog;
import android.graphics.Color;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.ViewModelProvider;
import com.mari05lim.covidinfo.R;
import com.mari05lim.covidinfo.viewmodel.WorldViewModel;
import com.github.mikephil.charting.charts.PieChart;
import com.github.mikephil.charting.components.Description;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.data.PieData;
import com.github.mikephil.charting.data.PieDataSet;
import com.github.mikephil.charting.data.PieEntry;
import com.github.mikephil.charting.utils.ColorTemplate;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
 * Fragment showing worldwide COVID totals (confirmed/recovered/deaths)
 * as a pie chart, with the data's last-update timestamp as description.
 */
public class WorldFragment extends Fragment {

    private ProgressDialog mProgressApp;

    @SuppressLint("SimpleDateFormat")
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_world, container, false);

        // Block the UI with a progress dialog until the data arrives.
        mProgressApp = new ProgressDialog(getActivity());
        mProgressApp.setTitle(getResources().getString(R.string.loading_title));
        mProgressApp.setCancelable(false);
        mProgressApp.setMessage(getResources().getString(R.string.loading_message));
        mProgressApp.show();

        PieChart pieChart = view.findViewById(R.id.worldSummaryPie);
        WorldViewModel viewModel = new ViewModelProvider(this,
                new ViewModelProvider.NewInstanceFactory()).get(WorldViewModel.class);
        viewModel.setWorldData();
        viewModel.getWorldData().observe(this, worldModel -> {
            mProgressApp.dismiss();

            // One slice per statistic.
            List<PieEntry> entries = new ArrayList<>();
            entries.add(new PieEntry(worldModel.getConfirmed().getValue(), getResources().getString(R.string.confirmed)));
            entries.add(new PieEntry(worldModel.getRecovered().getValue(), getResources().getString(R.string.recovered)));
            entries.add(new PieEntry(worldModel.getDeaths().getValue(), getResources().getString(R.string.deaths)));

            PieDataSet pieDataSet = new PieDataSet(entries, getResources().getString(R.string.corona));
            pieDataSet.setColors(ColorTemplate.MATERIAL_COLORS);
            pieDataSet.setValueTextColor(Color.WHITE);
            pieDataSet.setValueTextSize(14);

            Description description = new Description();
            Date data = null;
            try {
                data = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").parse(worldModel.getLastUpdate());
            } catch (ParseException e) {
                e.printStackTrace();
            }
            // Bug fix: format(data) was previously called unconditionally and
            // threw a NullPointerException whenever the timestamp failed to
            // parse. Fall back to the raw timestamp string in that case.
            String formattedDate = data != null
                    ? new SimpleDateFormat("dd/MM/yyyy HH:mm").format(data)
                    : worldModel.getLastUpdate();
            description.setText(getResources().getString(R.string.last_update) + " : " + formattedDate);
            description.setTextColor(Color.BLACK);
            description.setTextSize(12);

            Legend legend = pieChart.getLegend();
            legend.setTextColor(Color.BLACK);
            legend.setTextSize(12);
            legend.setForm(Legend.LegendForm.SQUARE);

            PieData pieData = new PieData(pieDataSet);
            pieChart.setVisibility(View.VISIBLE);
            pieChart.animateXY(2000, 2000);
            pieChart.setDescription(description);
            pieChart.setHoleColor(Color.TRANSPARENT);
            pieChart.setHoleRadius(60);
            pieChart.setRotationAngle(320);
            pieChart.setData(pieData);
        });

        return view;
    }
}
|
from __future__ import unicode_literals
from django.test import override_settings
from rest_framework import status
from documents.models import DocumentType
from documents.tests.literals import (
TEST_DOCUMENT_TYPE_LABEL, TEST_SMALL_DOCUMENT_PATH
)
from rest_api.tests import BaseAPITestCase
from ..models import Comment
from ..permissions import (
permission_comment_create, permission_comment_delete,
permission_comment_view
)
from .literals import TEST_COMMENT_TEXT
@override_settings(OCR_AUTO_OCR=False)
class CommentAPITestCase(BaseAPITestCase):
    """REST API tests for document comments: create, delete, detail and list
    views, each exercised both without and with the required object-level
    permission on the document."""

    def setUp(self):
        # Create a document type and upload one small document to attach
        # comments to.
        super(CommentAPITestCase, self).setUp()
        self.login_user()
        self.document_type = DocumentType.objects.create(
            label=TEST_DOCUMENT_TYPE_LABEL
        )
        with open(TEST_SMALL_DOCUMENT_PATH) as file_object:
            self.document = self.document_type.new_document(
                file_object=file_object
            )

    def tearDown(self):
        if hasattr(self, 'document_type'):
            self.document_type.delete()
        super(CommentAPITestCase, self).tearDown()

    def _create_comment(self):
        # Create a comment directly in the database (bypassing the API),
        # authored by the admin user.
        return self.document.comments.create(
            comment=TEST_COMMENT_TEXT, user=self.admin_user
        )

    def _request_comment_create_view(self):
        return self.post(
            viewname='rest_api:comment-list', args=(self.document.pk,),
            data={
                'comment': TEST_COMMENT_TEXT
            }
        )

    def test_comment_create_view_no_access(self):
        # Without the create permission, the API must refuse and create nothing.
        response = self._request_comment_create_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(Comment.objects.count(), 0)

    def test_comment_create_view_with_access(self):
        self.grant_access(permission=permission_comment_create, obj=self.document)
        response = self._request_comment_create_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        comment = Comment.objects.first()
        self.assertEqual(Comment.objects.count(), 1)
        self.assertEqual(response.data['id'], comment.pk)

    def _request_comment_delete_view(self):
        return self.delete(
            viewname='rest_api:comment-detail', args=(
                self.document.pk, self.comment.pk,
            )
        )

    def test_comment_delete_view_no_access(self):
        self.comment = self._create_comment()
        response = self._request_comment_delete_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertTrue(self.comment in Comment.objects.all())

    def test_comment_delete_view_with_access(self):
        self.comment = self._create_comment()
        self.grant_access(
            permission=permission_comment_delete, obj=self.document
        )
        response = self._request_comment_delete_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertFalse(self.comment in Comment.objects.all())

    def _request_comment_view(self):
        return self.get(
            viewname='rest_api:comment-detail', args=(
                self.document.pk, self.comment.pk,
            )
        )

    def test_comment_detail_view_no_access(self):
        self.comment = self._create_comment()
        response = self._request_comment_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_comment_detail_view_with_access(self):
        self.comment = self._create_comment()
        self.grant_access(
            permission=permission_comment_view, obj=self.document
        )
        response = self._request_comment_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['comment'], self.comment.comment)

    def _request_comment_list_view(self):
        return self.get(
            viewname='rest_api:comment-list', args=(self.document.pk,)
        )

    def test_comment_list_view_no_access(self):
        self.comment = self._create_comment()
        response = self._request_comment_list_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_comment_list_view_with_access(self):
        self.comment = self._create_comment()
        self.grant_access(
            permission=permission_comment_view, obj=self.document
        )
        response = self._request_comment_list_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['comment'], self.comment.comment
        )
|
#!/bin/bash -e
# pi-gen stage hook: when USE_PIJUICE=1, install the PiJuice configuration
# into the image and enable its service (its RTC replaces fake-hwclock).
if [ "${USE_PIJUICE}" = "1" ]; then
	# echo "enabling PiJuice"
	install -v -d "${ROOTFS_DIR}/var/lib/pijuice"
	install -v -m 600 files/pijuice_config.JSON "${ROOTFS_DIR}/var/lib/pijuice/"
	# Run the service toggles inside the image chroot.
on_chroot <<EOF
systemctl enable pijuice
systemctl disable fake-hwclock
EOF
fi
"""
Model
"""
import os
import argparse
import csv
import math
import numpy as np
import cv2
import tensorflow as tf
from keras.models import Sequential, load_model
from keras.layers import Dense, Input, Activation, Dropout, Conv2D, Flatten, MaxPooling2D, Convolution2D
from keras.layers.advanced_activations import ELU
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
import matplotlib.pyplot as plt
import socketio
import eventlet
import eventlet.wsgi
import time
import base64
import json
from PIL import Image
from PIL import ImageOps
from flask import Flask, render_template
from io import BytesIO
from model import *
'''
Globals
'''
model = None  # Keras model currently being served/retrained
dst_model_file = None  # path the updated model is saved to after each train()
grayscale=False  # whether images are grayscale-normalized before use
imgCache=[]  # normalized images accumulated since the last train()/scratch()
steeringCache=[]  # steering angles paired 1:1 with imgCache entries

sio = socketio.Server()
app = Flask(__name__)
@sio.on('connect')
def connect(sid, environ):
    """Log each new socket.io client connection."""
    print("connect ", sid)
@sio.on('predict')
def predict(sid, imgString):
    """Return the model's steering-angle prediction for a base64-encoded image."""
    global model
    image = Image.open(BytesIO(base64.b64decode(imgString)))
    image_array = np.asarray(image)
    normImg = normalizeImage(image_array, grayscale)
    steering_angle = float(model.predict(normImg, batch_size=1))
    return steering_angle
@sio.on("update")
def update(sid, imgString, steering):
global model, imgCache, steeringCache
print('saving img: %.3f' % steering)
image = Image.open(BytesIO(base64.b64decode(imgString)))
image = np.asarray(image)
normalizedImage = normalizeImage(image, grayscale)
normalizedImage = normalizedImage.reshape(normalizedImage.shape[1:])
imgCache.append(normalizedImage)
steeringCache.append(steering)
@sio.on("train")
def train(sid):
global model, imgCache, steeringCache
print('training: %d' % len(imgCache))
X = np.array(imgCache, dtype=np.float32)
y = np.array(steeringCache, dtype=np.float32)
imgCache=[]
steeringCache=[]
try:
history = model.fit(X, y, batch_size=y.shape[0], nb_epoch=1, verbose=2)
model.save(dst_model_file)
print('Done')
except:
e = sys.exc_info()[0]
print( "Error: %s" % e )
@sio.on("scratch")
def scratch(sid):
global imgCache, steeringCache
print('scratch')
imgCache=[]
steeringCache=[]
if __name__ == '__main__':
    # CLI: -m base model file, -i input snapshot number (-1 = base file),
    # -o output snapshot number, -g grayscale normalization.
    parser = argparse.ArgumentParser(description='Improve a model')
    parser.add_argument('-m', dest='model', help='model file',required=True)
    parser.add_argument('-i', dest='input', help='input model number', type=int, default=-1)
    parser.add_argument('-o', dest='output', help='output model number', type=int,required=True)
    parser.add_argument('-g', dest='gray', help='grayscale', action='store_true')
    args = parser.parse_args()
    # -i omitted means start from the base model file itself; otherwise load
    # the numbered snapshot model.<n>.h5.
    if args.input == -1:
        src_model_file = args.model
    else:
        src_model_file = args.model.replace('.h5', '.{0}.h5'.format(args.input))
    grayscale = args.gray
    # Updated weights are always written to a new numbered snapshot.
    dst_model_file = args.model.replace('.h5', '.{0}.h5'.format(args.output))
    model = load_model(src_model_file)
    # wrap Flask application with engineio's middleware
    app = socketio.Middleware(sio, app)
    # deploy as an eventlet WSGI server
    eventlet.wsgi.server(eventlet.listen(('', 5678)), app)
|
/**
 * Simple mutable data holder for an employee's name and salary history.
 */
public class Employee {
    private String firstName;
    private String lastName;
    // Fixed capacity of 3 entries, allocated by the constructor.
    private int[] salaryHistory;

    /**
     * Creates an employee with the given names and an empty (zeroed)
     * three-entry salary history.
     */
    public Employee(String firstName, String lastName) {
        this.firstName = firstName;
        this.lastName = lastName;
        this.salaryHistory = new int[3];
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    /**
     * NOTE(review): returns the internal array itself, so callers can mutate
     * this object's state through it (and setSalaryHistory stores the caller's
     * array by reference). If that sharing is unintended, defensive copies
     * should be added -- left unchanged here to preserve existing behavior.
     */
    public int[] getSalaryHistory() {
        return salaryHistory;
    }

    public void setSalaryHistory(int[] salaryHistory) {
        this.salaryHistory = salaryHistory;
    }
}
#!/bin/bash
# Build one example for the wasm32 target and generate its JS bindings.
# Usage: build-wasm32.sh <example-name> [extra cargo args...]
set -e
if [ -z "$1" ]
then
    echo "Usage: build-wasm32.sh <example-name>"
    # BUG FIX: misuse previously exited 0, hiding failures from callers.
    exit 1
else
    # BUG FIX: $@, $1 and $0 were unquoted, breaking on paths/names with
    # spaces; "$@" also preserves each extra cargo argument intact.
    cargo build --target=wasm32-unknown-unknown --bin "$@"
    wasm-bindgen --target web --out-dir "$(dirname "$0")/generated" --debug --no-typescript "$(dirname "$0")/../target/wasm32-unknown-unknown/debug/$1.wasm"
    # wasm-opt "$(dirname "$0")/generated/$1_bg.wasm" -o "$(dirname "$0")/generated/$1.wasm" -O2 --disable-threads
fi
<filename>test/convertNumberToRomanAPITest.js
"use strict";
const request = require('supertest');
// Happy-path tests: each supported value maps to its roman numeral.
describe('Responds to valid routes', function () {
    var server;
    beforeEach(function () {
        // Re-require the server so every test gets a fresh instance.
        delete require.cache[require.resolve('./../server')];
        server = require('./../server.js');
    });
    afterEach(function (done) {
        server.close(done);
    });
    // Input -> expected roman numeral. Values above 3999 use the
    // combining-overline (vinculum) notation.
    [
        { query: 50, roman: 'L' },
        { query: 149, roman: 'CXLIX' },
        { query: 249, roman: 'CCXLIX' },
        { query: 1606, roman: 'MDCVI' },
        { query: 3999, roman: 'MMMCMXCIX' },
        { query: 2200000000, roman: 'M\u0305\u0305M\u0305\u0305C\u0305\u0305C\u0305\u0305' },
        { query: 400000000, roman: 'C\u0305\u0305D\u0305\u0305' },
        { query: 1900400003, roman: 'M\u0305\u0305C\u0305\u0305M\u0305\u0305C\u0305D\u0305III' }
    ].forEach(function (tc) {
        it('responds to /romannumeral (query = ' + tc.query + ')', function testRoute(done) {
            request(server)
                .get('/romannumeral')
                .query({ query: tc.query })
                .set('Accept', 'application/json')
                .expect('Content-Type', /json/)
                .expect(200, {
                    'roman': tc.roman
                },
                done);
        });
    });
});
// Unknown routes must return a JSON 404, with or without a query string.
describe('Responds to non existing routes with 404', function () {
    var server;
    beforeEach(function () {
        delete require.cache[require.resolve('./../server')];
        server = require('./../server.js');
    });
    afterEach(function (done) {
        server.close(done);
    });
    [
        { path: '/foobar' },
        { path: '/foobarquery', query: { query: 1900400003 } },
        { path: '/' },                               // root route is not left visible
        { path: '/', query: { query: 1900400003 } }  // root route is not left visible
    ].forEach(function (tc) {
        it('responds to ' + tc.path + ' ', function testRoute(done) {
            var req = request(server).get(tc.path);
            if (tc.query) {
                req = req.query(tc.query);
            }
            req.set('Accept', 'application/json')
                .expect('Content-Type', /json/)
                .expect(404, done);
        });
    });
});
// Inputs at the edge of what the parser accepts: numeric strings, leading
// zeros, fractions, and injection-style strings.
describe('Responds to edge cases without error', function () {
    var server;
    let apiVersion = process.env.API_VERSION || '1.0.0';
    beforeEach(function () {
        // Re-require the server so every test gets a fresh instance.
        delete require.cache[require.resolve('./../server')];
        server = require('./../server.js');
    });
    afterEach(function (done) {
        server.close(done);
    });
    it('Parameter sent as string, but still capable to return a response', function testRoute(done) {
        request(server)
            .get('/romannumeral')
            .query({ query: '249' })
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(200, {
                'roman': 'CCXLIX'
            },
            done);
    });
    // Leading zero must not trigger octal parsing.
    it('Parameter sent as 0249, should be parsed to radix 10', function testRoute(done) {
        request(server)
            .get('/romannumeral')
            .query({ query: '0249' })
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(200, {
                'roman': 'CCXLIX'
            },
            done);
    });
    it('Parameter sent as 4/2, truncates the fraction to 4', function testRoute(done) {
        request(server)
            .get('/romannumeral')
            .query({ query: '4/2' })
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(200, {
                'roman': 'IV'
            },
            done);
    });
    // NOTE(review): the title says "ignore everything except first number"
    // but the expectation is a 422 rejection -- presumably the server treats
    // the whole quoted string as non-numeric; confirm against server.js.
    it('Parameter sent as string, ignore everything except first number', function testRoute(done) {
        request(server)
            .get('/romannumeral')
            .query({ query: '"1;DROP TABLE users"' })
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(422, {
                'error': 'NOT_AN_INTEGER',
                'message': 'Parameter is not an integer',
                'apiVersion': apiVersion
            },
            done);
    });
    // SQL-injection-style payloads are rejected, not partially parsed.
    it('Parameter sent as string, fail to process', function testRoute(done) {
        request(server)
            .get('/romannumeral')
            .query({ query: '"1\'; DROP TABLE users-- 1"' })
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(422, {
                'error': 'NOT_AN_INTEGER',
                'message': 'Parameter is not an integer',
                'apiVersion': apiVersion
            },
            done);
    });
});
// Route matching should be case-insensitive.
describe('Uncapitalizes parts of URL if needed', function () {
    var server;
    beforeEach(function () {
        // Re-require the server so every test gets a fresh instance.
        delete require.cache[require.resolve('./../server')];
        server = require('./../server.js');
    });
    afterEach(function (done) {
        server.close(done);
    });
    it('responds to /ROMANNUMERAL (query = 249)', function testRoute(done) {
        request(server)
            .get('/ROMANNUMERAL')
            .query({ query: 249 })
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(200, {
                'roman': 'CCXLIX'
            },
            done);
    });
    it('responds to /RoMaNnUmErAl (query = 249)', function testRoute(done) {
        // BUG FIX: this test claimed to exercise a mixed-case path but
        // requested the all-uppercase /ROMANNUMERAL, duplicating the test
        // above and never covering mixed case. Request the route it names.
        request(server)
            .get('/RoMaNnUmErAl')
            .query({ query: 249 })
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(200, {
                'roman': 'CCXLIX'
            },
            done);
    });
});
// Error-path coverage: parameter pollution, non-integer input, out-of-range
// values, and zero (which has no roman representation).
describe('Responds to invalid routes with error', function () {
    var server;
    let apiVersion = process.env.API_VERSION || '1.0.0';
    beforeEach(function () {
        // Re-require the server so every test gets a fresh instance.
        delete require.cache[require.resolve('./../server')];
        server = require('./../server.js');
    });
    afterEach(function (done) {
        server.close(done);
    });
    it('HPP: Keeps only last parameter of the parameters with same name in query', function testRoute(done) {
        // BUG FIX: the original passed { query: 104, query: 249 }; duplicate
        // keys collapse in a JS object literal, so only one parameter was
        // ever sent and HPP was never exercised. A raw query string is
        // required to actually send the same parameter twice.
        request(server)
            .get('/romannumeral')
            .query('query=104&query=249')
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(200, {
                'roman': 'CCXLIX'
            },
            done);
    });
    // Inputs that cannot be parsed as integers -> 422 NOT_AN_INTEGER.
    // (The '[a-z]' case was misleadingly titled "empty parameter" upstream.)
    [
        { title: 'Responds with an error for empty parameter', value: ' ' },
        { title: 'Responds with an error for regex parameter', value: '[a-z]' },
        { title: 'Responds with an error for string parameter', value: 'a-string' }
    ].forEach(function (tc) {
        it(tc.title, function testRoute(done) {
            request(server)
                .get('/romannumeral')
                .query({ query: tc.value })
                .set('Accept', 'application/json')
                .expect('Content-Type', /json/)
                .expect(422, {
                    'error': 'NOT_AN_INTEGER',
                    'message': 'Parameter is not an integer',
                    'apiVersion': apiVersion
                },
                done);
        });
    });
    // Integers outside [1, 2200000000] -> 422 OUT_OF_RANGE.
    [99999999999999, -1, -255, -256, -3999, -4000, -2200000001].forEach(function (value) {
        it('Responds with an error for value out of range (' + value + ')', function testRoute(done) {
            request(server)
                .get('/romannumeral')
                .query({ query: value })
                .set('Accept', 'application/json')
                .expect('Content-Type', /json/)
                .expect(422, {
                    'error': 'OUT_OF_RANGE',
                    'message': 'Parameter is not within range',
                    'apiVersion': apiVersion,
                    'details': { lowerLimit: 1, upperLimit: 2200000000 }
                },
                done);
        });
    });
    // Zero gets a dedicated error, whether sent as a number or a string.
    [0, '0'].forEach(function (value) {
        it('Responds with an error for 0 (no 0 in roman notation)', function testRoute(done) {
            request(server)
                .get('/romannumeral')
                .query({ query: value })
                .set('Accept', 'application/json')
                .expect('Content-Type', /json/)
                .expect(422, {
                    'error': 'VALUE_IS_ZERO',
                    'message': 'Parameter value is 0, roman numbers do not have a 0. Zero is out of supported range for conversions. Smallest supported value is 1.',
                    'details': { lowerLimit: 1},
                    'apiVersion': apiVersion
                },
                done);
        });
    });
});
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.