text
stringlengths 1
1.05M
|
|---|
#!/bin/bash
set -ue
# Copy all the local pcap files to the remote machine.
#
# Usage: $0 <target machine> <max pcap size>
# Generates 1..max_size .cap files locally (if missing), then copies them
# all in parallel to /root/jcw78/pcap_files on the target host.
if [[ "$#" -ne 2 ]]; then
    echo "Usage: $0 <target machine> <max pcap size>"
    # Fix: previously fell through with bad/missing arguments and died later
    # on an unbound-variable error from `set -u`.
    exit 1
fi
host=$1
max_size=$2
# Generate all sizes of PCAP file up to max size:
pushd ../pcap_files/
for i in $(seq 1 "${max_size}"); do
    if [[ ! -f "$i.cap" ]]; then
        python ../general/generate_pcap.py "$i"
        mv variable_length.pcap "$i.cap"
    fi
done
popd
echo "PCAP file generated! Copying them to ${host}..."
# Now, copy them all.
source ../general/remote_scp.sh
source ../general/remote_exists.sh
files=( ../pcap_files/*.cap )
# Exported so the `parallel` sub-shells below can read it.
export host=$1
ssh "$host" 'mkdir -p /root/jcw78/pcap_files'
# NOTE(review): existing_files is collected but never used — presumably meant
# to skip files already present on the remote; confirm intent before removing.
existing_files=$(ssh "$host" 'cd /root/jcw78/pcap_files; find . -name "*"')
parallel -j 32 --retries 5 --progress 'source ../general/remote_scp.sh; remote_scp $host {} /root/jcw78/pcap_files/$(basename {})' ::: "${files[@]}"
wait
|
#!/bin/sh
# Build both the "complete" and "initial" projects with Maven and Gradle,
# cleaning build artifacts between steps. Exits with the failing tool's
# status code on the first error.
cd "$(dirname "$0")" || exit 1

# run_step CMD... — run a build command and abort with its exit code on failure.
# Replaces the four copies of the `ret=$?; if ...; exit $ret` boilerplate.
run_step() {
    "$@"
    ret=$?
    if [ $ret -ne 0 ]; then
        exit $ret
    fi
}

cd ../complete || exit 1
run_step ./mvnw clean package
rm -rf target
run_step ./gradlew build
rm -rf build

cd ../initial || exit 1
run_step ./mvnw clean compile
rm -rf target
run_step ./gradlew compileJava
rm -rf build

# Explicit success status (bare `exit` returned the status of `rm`).
exit 0
|
from typing import List
def extract_html_comments(html_file_path: str) -> List[str]:
    """Return the text of every ``<!-- -->`` comment in an HTML file.

    Each comment's inner text is stripped of surrounding whitespace. A
    trailing unterminated ``<!--`` (no closing ``-->``) is ignored.

    Args:
        html_file_path: Path to the HTML file (read as UTF-8).

    Returns:
        List of comment bodies in document order.
    """
    with open(html_file_path, 'r', encoding='utf-8') as handle:
        content = handle.read()
    found: List[str] = []
    cursor = content.find("<!--")
    while cursor != -1:
        closing = content.find("-->", cursor)
        if closing == -1:
            # Unterminated comment: stop scanning, same as the original.
            break
        found.append(content[cursor + 4:closing].strip())
        cursor = content.find("<!--", closing)
    return found
|
// Repository: schinmayee/nimbus
//#####################################################################
// Copyright 2002-2004, <NAME>, <NAME>, <NAME>
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class SHALLOW_WATER_1D_SPECIALIZED
//#####################################################################
#include <PhysBAM_Tools/Grids_Uniform_Arrays/FACE_ARRAYS.h>
#include <PhysBAM_Fluids/PhysBAM_Compressible/Shallow_Water_Equations/SHALLOW_WATER_1D_SPECIALIZED.h>
using namespace PhysBAM;
//#####################################################################
// Function Euler_Step
//#####################################################################
template<class T> void SHALLOW_WATER_1D_SPECIALIZED<T>::
Euler_Step(const T dt,const T time)
{
    int m=grid.counts.x;
    int ghost_cells=3;
    // Ghost-cell copy of the conserved state U; filled by the boundary object.
    ARRAY<TV_DIMENSION,VECTOR<int,1> > U_ghost(1-ghost_cells,m+ghost_cells);boundary->Fill_Ghost_Cells(grid,U,U_ghost,dt,time,ghost_cells);
    ARRAY<bool,VECTOR<int,1> > psi(1,m,false);psi.Fill(true); // no cut out grids
    // Flag "dry" cells: height component (1) at or below min_height.
    static ARRAY<bool,VECTOR<int,1> > zero_height(1-ghost_cells,m+ghost_cells);zero_height.Fill(false);
    for(int i=1-ghost_cells;i<=grid.counts.x+ghost_cells;i++) if(U_ghost(i)(1)<=min_height) zero_height(i)=true;
    // Ground elevation with ghost cells (zeros when no ground array is set).
    ARRAY<T,VECTOR<int,1> > ground_ghost(1-ghost_cells,grid.counts.x+ghost_cells);
    if(ground) BOUNDARY_UNIFORM<GRID<TV>,T>().Fill_Ghost_Cells(grid,*ground,ground_ghost,dt,time,ghost_cells);
    // Free surface elevation eta = water height + ground (eta_ghost is a member).
    for(int i=1-ghost_cells;i<=grid.counts.x+ghost_cells;i++) eta_ghost(i)=U_ghost(i)(1)+ground_ghost(i);
    T_FACE_ARRAYS_BOOL psi_N(grid.Get_MAC_Grid_At_Regular_Positions());
    T_FACE_ARRAYS_SCALAR face_velocities(grid.Get_MAC_Grid_At_Regular_Positions());
    // Standard conservative update; fluxes saved so they can be corrected below.
    conservation->Save_Fluxes();
    VECTOR<EIGENSYSTEM<T,VECTOR<T,2> >*,1> eigensystem(&eigensystem_F);
    conservation->Update_Conservation_Law(grid,U,U_ghost,psi,dt,eigensystem,eigensystem,psi_N,face_velocities);
    ARRAY_VIEW<TV_DIMENSION,VECTOR<int,1> >& old_flux=conservation->fluxes.Component(1);
    // Post-process faces adjacent to dry cells or "cliffs" (ground steps higher
    // than the neighboring free surface), replacing the solver's flux and
    // undoing/redoing the corresponding part of the conservative update.
    for(int i=0;i<=grid.counts.x;i++){
        bool zero_left=zero_height(i),zero_right=zero_height(i+1),update_flux=false;
        // Start from the solver's flux at face i+1; components: (1)=mass, (2)=momentum.
        ARRAY<T,VECTOR<int,1> > new_flux_left(1,2),new_flux_right(1,2);new_flux_left(1)=new_flux_right(1)=old_flux(i+1)(1);new_flux_left(2)=new_flux_right(2)=old_flux(i+1)(2);
        if(zero_left && zero_right){ // no mass flux between empty cells
            new_flux_left(1)=new_flux_right(1)=0;
            update_flux=true;}
        else if(eta_ghost(i) < ground_ghost(i+1) || (zero_left && !zero_right)){ // cliff to right OR only flow to left via wetting
            T extra_momentum_flux=0;
            if(U_ghost(i+1)(2)<0 && !zero_right){ // water flowing off of cliff
                new_flux_left(1)=new_flux_right(1)=U_ghost(i+1)(1)*U_ghost(i+1)(2);
                extra_momentum_flux=(T).5*sqr(U_ghost(i+1)(2));}
            else new_flux_left(1)=new_flux_right(1)=0;
            new_flux_left(2)=gravity*eta_ghost(i)+extra_momentum_flux;
            new_flux_right(2)=gravity*(T).5*(eta_ghost(i)+eta_ghost(i+1))+extra_momentum_flux;
            update_flux=true;}
        else if(ground_ghost(i) > eta_ghost(i+1) || (!zero_left && zero_right)){ // cliff to left OR only flow right via wetting
            T extra_momentum_flux=0;
            if(U_ghost(i)(2)>0 && !zero_left){ // water flowing off of cliff
                new_flux_left(1)=new_flux_right(1)=U_ghost(i)(1)*U_ghost(i)(2);
                extra_momentum_flux=(T).5*sqr(U_ghost(i)(2));}
            else new_flux_left(1)=new_flux_right(1)=0;
            new_flux_right(2)=gravity*eta_ghost(i+1)+extra_momentum_flux;
            new_flux_left(2)=gravity*(T).5*(eta_ghost(i)+eta_ghost(i+1))+extra_momentum_flux;
            update_flux=true;}
        if(update_flux){
            // Replace the old flux contribution with the corrected one on each side.
            if(i>0) for(int k=1;k<=d;k++) U(i)(k)+=dt*(old_flux(i+1)(k)-new_flux_left(k))*grid.one_over_dX.x; // update cell on left
            if(i<grid.counts.x) for(int k=1;k<=d;k++) U(i+1)(k)-=dt*(old_flux(i+1)(k)-new_flux_right(k))*grid.one_over_dX.x;} // update cell on right
        // for debugging
        postprocessed_flux(i)(1)=new_flux_left(1);postprocessed_flux(i)(2)=new_flux_left(2);postprocessed_flux(i)(3)=new_flux_right(1);postprocessed_flux(i)(4)=new_flux_right(2);}
    // Zero out velocity in nearly-dry cells and clamp negative heights.
    T two_min_height=(T)2.01*min_height;
    for(int i=1;i<=grid.counts.x;i++){
        if(U_ghost(i)(1)<=two_min_height) U(i)(2)=0; // correct for where we have zero fluxes due to small average h
        if(U(i)(1)<0) U(i)(1)=0;}
    boundary->Apply_Boundary_Condition(grid,U,time+dt);
}
//#####################################################################
// Function CFL
//#####################################################################
template<class T> T SHALLOW_WATER_1D_SPECIALIZED<T>::
CFL()
{
    // Largest |u| + gravity-wave celerity over all interior cells.
    T fastest=0;
    for(int cell=1;cell<=grid.counts.x;cell++){
        T velocity=U(cell)(2);
        T wave_speed=sqrt(gravity*U(cell)(1));
        fastest=max(fastest,abs(velocity)+wave_speed);}
    // dt limit: one cell width per step at the fastest signal speed.
    return 1/(fastest*grid.one_over_dX.x);
}
//#####################################################################
// Explicit template instantiations (double only when the build supports it).
template class SHALLOW_WATER_1D_SPECIALIZED<float>;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class SHALLOW_WATER_1D_SPECIALIZED<double>;
#endif
|
from .const import CLI_STATUS_STARTLINE, DOWNPOSTFIX, DOWN_CHUNK_SIZE, DEFAULT_CONN_TIMEOUT
from . import utils
from .torrunner import TorRunner
from .segfile import SegFileLoader, SegFileMonitor
from .page import Page
import colors
import requests
import os
import sys
import multiprocessing as mp
import time
from datetime import timedelta
from types import FunctionType
class Downloader:
    """Parallel multi-part downloader with a terminal status UI and CAPTCHA support."""
    cli_initialized: bool           # True while the terminal is in status-table mode
    terminating: bool               # set by terminate() to stop the start-up loop
    # Annotation fix: was `slice`, but download() assigns a list of mp.Process here.
    processes: list
    captcha_process: mp.Process     # CAPTCHA/link solver subprocess
    monitor: mp.Process             # progress-monitor subprocess
    captcha_solve_func: FunctionType
    download_url_queue: mp.Queue    # recycled download URLs shared across parts
    parts: int                      # number of parallel parts

    def __init__(self, captcha_solve_func):
        # Callable used to solve CAPTCHA challenges when the page requires them.
        self.captcha_solve_func = captcha_solve_func
        self.cli_initialized = False
        self.monitor = None
        self.conn_timeout = None
    def terminate(self):
        """Stop all child processes (captcha solver, part downloads, monitor)
        and restore the terminal cursor if the status UI was active."""
        self.terminating = True
        if self.cli_initialized:
            # Move the cursor below the status table before printing messages.
            sys.stdout.write("\033[{};{}H".format(
                self.parts + CLI_STATUS_STARTLINE + 2, 0))
            sys.stdout.write("\033[?25h")  # show cursor
            self.cli_initialized = False
        print('Terminating download. Please wait for stopping all processes.')
        # hasattr guards: terminate() may run before download() set these up.
        if hasattr(self, "captcha_process") and self.captcha_process is not None:
            self.captcha_process.terminate()
        print('Terminate download processes')
        if hasattr(self, "processes") and self.processes is not None:
            for p in self.processes:
                p.terminate()
        print('Download terminated.')
        if hasattr(self, "monitor") and self.monitor is not None:
            self.monitor.terminate()
        print('End download monitor')
    def _captcha_print_func_wrapper(self, text):
        # Route CAPTCHA-solver messages: a plain single overwritten line before
        # the CLI table exists, or the table's captcha-status row afterwards.
        if not self.cli_initialized:
            sys.stdout.write(colors.blue(
                "[Link solve]\t") + text + "\033[K\r")
        else:
            utils.print_captcha_status(text, self.parts)
def _captcha_breaker(self, page, parts):
msg = ""
if page.isDirectDownload:
msg = "Solve direct dlink .."
else:
msg = "Solve CAPTCHA dlink .."
# utils.print_captcha_status(msg, parts)
for url in self.captcha_download_links_generator:
utils.print_captcha_status(msg, parts)
self.download_url_queue.put(url)
    @staticmethod
    def _save_progress(filename, parts, size, interval_sec):
        """Progress monitor loop (runs in its own process until terminated).

        Polls the on-disk size of the target file every interval_sec and
        prints total %, average speed, current speed and ETA.

        Arguments:
            filename (str): path of the file being written
            parts (int): number of parts (status-line placement)
            size (int): total expected size in bytes
            interval_sec (float): polling period in seconds
        """
        m = SegFileMonitor(filename, utils.print_saved_status, interval_sec)
        t_start = time.time()
        s_start = m.size()
        # Sliding window of (size, time) samples for the "current speed" estimate.
        last_bps = [(s_start, t_start)]
        while True:  # terminated externally by Downloader.terminate()
            time.sleep(interval_sec)
            s = m.size()
            t = time.time()
            total_bps = (s - s_start) / (t - t_start)
            # Average now bps for last 10 measurements
            if len(last_bps) >= 10:
                last_bps = last_bps[1:]
            (s_last, t_last) = last_bps[0]
            now_bps = (s - s_last) / (t - t_last)
            last_bps.append((s, t))
            remaining = (size - s) / total_bps if total_bps > 0 else 0
            utils.print_saved_status(
                f"{(s / 1024 ** 2):.2f} MB"
                f" ({(s / size * 100):.2f} %)"
                f"\tavg. speed: {(total_bps / 1024 ** 2):.2f} MB/s"
                f"\tcurr. speed: {(now_bps / 1024 ** 2):.2f} MB/s"
                f"\tremaining: {timedelta(seconds=round(remaining))}",
                parts
            )
@staticmethod
def _download_part(part, download_url_queue):
"""Download given part of the download.
Arguments:
part (dict): Specification of the part to download
"""
id = part.id
utils.print_part_status(id, "Starting download")
part.started = time.time()
part.now_downloaded = 0
# Note the stream=True parameter
r = requests.get(part.download_url, stream=True, allow_redirects=True, headers={
"Range": "bytes={}-{}".format(part.pfrom + part.downloaded, part.pto),
"Connection": "close",
})
if r.status_code == 429:
utils.print_part_status(id, colors.yellow(
"Status code 429 Too Many Requests returned... will try again in few seconds"))
time.sleep(5)
return Downloader._download_part(part, download_url_queue)
if r.status_code != 206 and r.status_code != 200:
utils.print_part_status(id, colors.red(
f"Status code {r.status_code} returned: {part.pfrom + part.downloaded}/{part.pto}"))
sys.exit(1)
# reimplement as multisegment write file class
for chunk in r.iter_content(chunk_size=DOWN_CHUNK_SIZE):
if chunk: # filter out keep-alive new chunks
part.write(chunk)
part.now_downloaded += len(chunk)
elapsed = time.time() - part.started
# Print status line downloaded and speed
# speed in bytes per second:
speed = part.now_downloaded / elapsed if elapsed > 0 else 0
# remaining time in seconds:
remaining = (part.size - part.downloaded) / speed if speed > 0 else 0
utils.print_part_status(id, "{:.2f}%\t{:.2f}/{:.2f} MB\tspeed: {:.2f} KB/s\telapsed: {}\tremaining: {}".format(
round(part.downloaded / part.size * 100, 2),
round(part.downloaded / 1024**2,
2), round(part.size / 1024**2, 2),
round(speed / 1024, 2),
str(timedelta(seconds=round(elapsed))),
str(timedelta(seconds=round(remaining))),
))
# download end status
r.close()
part.elapsed = time.time() - part.started
utils.print_part_status(id, colors.green("Successfully downloaded {}{} MB in {} (speed {} KB/s)".format(
round(part.now_downloaded / 1024**2, 2),
"" if part.now_downloaded == part.downloaded else (
"/"+str(round(part.downloaded / 1024**2, 2))
),
str(timedelta(seconds=round(part.elapsed))),
round(part.now_downloaded / part.elapsed / 1024, 2) if part.elapsed > 0 else 0
)))
# close part file files
part.close()
# reuse download link if need
download_url_queue.put(part.download_url)
    def download(self, url, parts=10, target_dir="", conn_timeout=DEFAULT_CONN_TIMEOUT):
        """Download file from Uloz.to using multiple parallel downloads.
        Arguments:
            url (str): URL of the Uloz.to file to download
            parts (int): Number of parts that will be downloaded in parallel (default: 10)
            target_dir (str): Directory where the download should be saved (default: current directory)
            conn_timeout: connection timeout passed through to the page parser
        """
        self.url = url
        self.parts = parts
        self.target_dir = target_dir
        self.conn_timeout = conn_timeout
        self.processes = []
        self.captcha_process = None
        self.terminating = False
        self.isLimited = False   # True when only the slow (rate-limited) route exists
        self.isCaptcha = False   # True when each link requires a CAPTCHA solve
        started = time.time()
        previously_downloaded = 0
        # 1. Prepare downloads
        print("Starting downloading for url '{}'".format(url))
        # 1.1 Get all needed information
        print("Getting info (filename, filesize, ...)")
        try:
            tor = TorRunner()
            page = Page(url, target_dir, parts, tor, self.conn_timeout)
            page.parse()
        except RuntimeError as e:
            print(colors.red('Cannot download file: ' + str(e)))
            sys.exit(1)
        # Do check - only if .udown status file not exists get question
        output_filename = os.path.join(target_dir, page.filename)
        if os.path.isfile(output_filename) and not os.path.isfile(output_filename+DOWNPOSTFIX):
            print(colors.yellow(
                "WARNING: File '{}' already exists, overwrite it? [y/n] ".format(output_filename)), end="")
            if input().strip() != 'y':
                sys.exit(1)
        if page.quickDownloadURL is not None:
            print("You are VERY lucky, this is QUICK direct download without CAPTCHA, downloading as 1 quick part :)")
            self.download_type = "fullspeed direct download (without CAPTCHA)"
            download_url = page.quickDownloadURL
            self.captcha_solve_func = None
        if page.slowDownloadURL is not None:
            self.isLimited = True
            if page.isDirectDownload:
                print("You are lucky, this is slow direct download without CAPTCHA :)")
                self.download_type = "slow direct download (without CAPTCHA)"
            else:
                self.isCaptcha = True
                print(
                    "CAPTCHA protected download - CAPTCHA challenges will be displayed\n")
                self.download_type = "CAPTCHA protected download"
            # The generator serves both direct and CAPTCHA slow links; the
            # first solved link seeds download_url below.
            # NOTE(review): indentation reconstructed from mangled source —
            # confirm the generator is created for both slow branches.
            self.captcha_download_links_generator = page.captcha_download_links_generator(
                captcha_solve_func=self.captcha_solve_func,
                print_func=self._captcha_print_func_wrapper
            )
            download_url = next(self.captcha_download_links_generator)
        head = requests.head(download_url, allow_redirects=True)
        total_size = int(head.headers['Content-Length'])
        try:
            file_data = SegFileLoader(output_filename, total_size, parts)
            downloads = file_data.make_writers()
        except Exception as e:
            print(colors.red(
                f"Failed: Can not create '{output_filename}' error: {e} "))
            self.terminate()
            sys.exit()
        # 2. Initialize cli status table interface
        # if windows, use 'cls', otherwise use 'clear'
        os.system('cls' if os.name == 'nt' else 'clear')
        sys.stdout.write("\033[?25l")  # hide cursor
        self.cli_initialized = True
        page.cli_initialized = True  # for tor in Page
        print(colors.blue("File:\t\t") + colors.bold(page.filename))
        print(colors.blue("URL:\t\t") + page.url)
        print(colors.blue("Download type:\t") + self.download_type)
        print(colors.blue("Size / parts: \t") +
              colors.bold(f"{round(total_size / 1024**2, 2)}MB => " +
                          f"{file_data.parts} x {round(file_data.part_size / 1024**2, 2)}MB"))
        # fill placeholder before download started
        for part in downloads:
            if page.isDirectDownload:
                utils.print_part_status(part.id, "Waiting for direct link...")
            else:
                utils.print_part_status(part.id, "Waiting for CAPTCHA...")
        # Prepare queue for recycling download URLs
        self.download_url_queue = mp.Queue(maxsize=0)
        # limited must use TOR and solve links or captcha
        if self.isLimited:
            # Reuse already solved links
            self.download_url_queue.put(download_url)
            # Start CAPTCHA breaker in separate process
            self.captcha_process = mp.Process(
                target=self._captcha_breaker, args=(page, self.parts)
            )
        cpb_started = False
        page.alreadyDownloaded = 0
        # save status monitor
        self.monitor = mp.Process(target=Downloader._save_progress, args=(
            file_data.filename, file_data.parts, file_data.size, 1/3))
        self.monitor.start()
        # 3. Start all downloads fill self.processes
        for part in downloads:
            if self.terminating:
                return
            id = part.id
            if part.downloaded == part.size:
                utils.print_part_status(id, colors.green(
                    "Already downloaded from previous run, skipping"))
                page.alreadyDownloaded += 1
                continue
            if self.isLimited:
                # Lazily start the solver only when a part actually needs a link.
                if not cpb_started:
                    self.captcha_process.start()
                    cpb_started = True
                part.download_url = self.download_url_queue.get()
            else:
                part.download_url = download_url
            # Start download process in another process (parallel):
            p = mp.Process(target=Downloader._download_part,
                           args=(part, self.download_url_queue))
            p.start()
            self.processes.append(p)
        if self.isLimited:
            # no need for another CAPTCHAs
            self.captcha_process.terminate()
            if self.isCaptcha:
                utils.print_captcha_status(
                    "All downloads started, no need to solve another CAPTCHAs..", self.parts)
            else:
                utils.print_captcha_status(
                    "All downloads started, no need to solve another direct links..", self.parts)
        # 4. Wait for all downloads to finish
        success = True
        for p in self.processes:
            p.join()
            if p.exitcode != 0:
                success = False
        # clear cli
        sys.stdout.write("\033[{};{}H".format(
            parts + CLI_STATUS_STARTLINE + 2, 0))
        sys.stdout.write("\033[K")
        sys.stdout.write("\033[?25h")  # show cursor
        self.cli_initialized = False
        # result end status
        if not success:
            print(colors.red("Failure of one or more downloads, exiting"))
            sys.exit(1)
        elapsed = time.time() - started
        # speed in bytes per second:
        speed = (total_size - previously_downloaded) / elapsed if elapsed > 0 else 0
        print(colors.green("All downloads finished"))
        print("Stats: Downloaded {}{} MB in {} (average speed {} MB/s)".format(
            round((total_size - previously_downloaded) / 1024**2, 2),
            "" if previously_downloaded == 0 else (
                "/"+str(round(total_size / 1024**2, 2))
            ),
            str(timedelta(seconds=round(elapsed))),
            round(speed / 1024**2, 2)
        ))
        # remove resume .udown file
        udown_file = output_filename + DOWNPOSTFIX
        if os.path.exists(udown_file):
            print(f"Delete file: {udown_file}")
            os.remove(udown_file)
|
// src/app/send-pw/send-pw.component.spec.ts
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { SendPwComponent } from './send-pw.component';
// Boilerplate unit test: verifies SendPwComponent can be constructed.
// NOTE(review): `async` from @angular/core/testing is deprecated in newer
// Angular versions in favor of `waitForAsync` — confirm the project's version.
describe('SendPwComponent', () => {
  let component: SendPwComponent;
  let fixture: ComponentFixture<SendPwComponent>;

  // Compile the component's template and styles asynchronously.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ SendPwComponent ]
    })
    .compileComponents();
  }));

  // Fresh fixture + initial change detection before every spec.
  beforeEach(() => {
    fixture = TestBed.createComponent(SendPwComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
// ArtemShchepets/src/test/java/week1/controller/ProxyITerminalControllerImplTest.java
package week1.controller;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import week1.model.Bill;
import week1.model.Product;
import week1.model.Seller;
import java.time.LocalDateTime;
/**
 * Smoke tests for ProxyITerminalControllerImpl.
 *
 * NOTE(review): every test ends with {@code Assert.assertNull(null)}, which
 * always passes — these specs only verify that the calls complete without
 * throwing. Consider asserting on the actual return values instead.
 */
public class ProxyITerminalControllerImplTest {

    // Controller under test; recreated before each test, cleared after.
    private ITerminalController terminalController;

    @Before
    public void setUp() throws Exception {
        // The proxy wraps the real controller produced by the factory.
        terminalController = new ProxyITerminalControllerImpl(ITerminalControllerFactory.create(new Seller()));
    }

    @After
    public void tearDown() throws Exception {
        terminalController.getAllBills().clear();
        terminalController = null;
    }

    @Test
    public void login() throws Exception {
        terminalController.login("login", "password");
        Assert.assertNull(null); // vacuous — smoke test only
    }

    @Test
    public void createBill() throws Exception {
        terminalController.createBill();
        Assert.assertNull(null);
    }

    @Test
    public void addProduct() throws Exception {
        Bill bill = terminalController.createBill();
        terminalController.addProduct(bill.getId(), new Product());
        Assert.assertNull(null);
    }

    @Test
    public void getAllBills() throws Exception {
        terminalController.getAllBills();
        Assert.assertNull(null);
    }

    @Test
    public void closeBill() throws Exception {
        Bill bill = terminalController.createBill();
        terminalController.closeBill(bill.getId());
        Assert.assertNull(null);
    }

    @Test
    public void findBillById() throws Exception {
        Bill bill = terminalController.createBill();
        terminalController.findBillById(bill.getId());
        Assert.assertNull(null);
    }

    @Test
    public void findSellerByLoginOrFullName() throws Exception {
        terminalController.findSellerByLoginOrFullName("worker1");
        Assert.assertNull(null);
    }

    @Test
    public void getTopOfSalesman() throws Exception {
        // Fixture: three bills under three logins; per-bill totals noted inline.
        terminalController.createBill();
        terminalController.addProduct(0, new Product("Milk", 2.220));
        terminalController.addProduct(0, new Product("Cheese", 0.30));
        terminalController.addProduct(0, new Product("Milk", 15.50));
        terminalController.addProduct(0, new Product("Milk", 2.220));
        terminalController.addProduct(0, new Product("Milk", 0.010));
        terminalController.addProduct(0, new Product("Milk", 0.010));
        terminalController.addProduct(0, new Product("Milk", 0.010));// 20.27
        terminalController.login("worker123", "<PASSWORD>");
        terminalController.createBill();
        terminalController.addProduct(1, new Product("Cake", 27.170));
        terminalController.addProduct(1, new Product("Cheese", 33.30));
        terminalController.addProduct(1, new Product("Water", 15.50));
        terminalController.addProduct(1, new Product("Milk", 2.220)); // 78.19
        terminalController.login("worker", "password");
        terminalController.createBill();
        terminalController.addProduct(2, new Product("Watermelon", 3.990));
        terminalController.addProduct(2, new Product("Cheese", 0.30));
        terminalController.addProduct(2, new Product("Milk", 15.50));
        terminalController.addProduct(2, new Product("Juice", 9.0)); // 28.79
        terminalController.getTopOfSalesman();
        Assert.assertNull(null);
    }

    @Test
    public void doSomeStatisticStuff() throws Exception {
        // Same fixture as getTopOfSalesman().
        terminalController.createBill();
        terminalController.addProduct(0, new Product("Milk", 2.220));
        terminalController.addProduct(0, new Product("Cheese", 0.30));
        terminalController.addProduct(0, new Product("Milk", 15.50));
        terminalController.addProduct(0, new Product("Milk", 2.220));
        terminalController.addProduct(0, new Product("Milk", 0.010));
        terminalController.addProduct(0, new Product("Milk", 0.010));
        terminalController.addProduct(0, new Product("Milk", 0.010));// 20.27
        terminalController.login("worker123", "<PASSWORD>");
        terminalController.createBill();
        terminalController.addProduct(1, new Product("Cake", 27.170));
        terminalController.addProduct(1, new Product("Cheese", 33.30));
        terminalController.addProduct(1, new Product("Water", 15.50));
        terminalController.addProduct(1, new Product("Milk", 2.220)); // 78.19
        terminalController.login("worker", "password");
        terminalController.createBill();
        terminalController.addProduct(2, new Product("Watermelon", 3.990));
        terminalController.addProduct(2, new Product("Cheese", 0.30));
        terminalController.addProduct(2, new Product("Milk", 15.50));
        terminalController.addProduct(2, new Product("Juice", 9.0)); // 28.79
        terminalController.doSomeStatisticStuff();
        Assert.assertNull(null);
    }

    @Test
    public void filter() throws Exception {
        LocalDateTime startTime = LocalDateTime.parse("2014-01-01T00:00:00");
        LocalDateTime endTime = LocalDateTime.parse("2016-12-31T23:59:59");
        terminalController.filter(startTime, endTime, new Bill.CreationDateComparator());
        Assert.assertNull(null);
    }

    @Test
    public void turnOnTerminalLogger() throws Exception {
        terminalController.turnOnTerminalLogger();
        Assert.assertNull(null);
    }

    @Test
    public void turnOnDatabaseLogger() throws Exception {
        terminalController.turnOnDatabaseLogger();
        Assert.assertNull(null);
    }
}
|
#!/bin/bash
# -*- coding: UTF8 -*-
##
# Drupal simple Shell install script for rapid dev start
# Requires drush
#
# @see http://drush.ws
# @see http://drushmake.me
#
# @version 2014/11/26 01:39:08
# @author Paulmicha
#

# Site + database settings — all placeholder values; edit before running.
# NOTE(review): credentials live in plain text in this script; keep it out
# of version control or source them from a protected file.
SITE_NAME="This is your site name"
DRUPAL_PROFILE="minimal"
DB_HOST="localhost"
DB_NAME="my_database_name"
DB_USERNAME="my_database_user"
DB_PASSWORD="my_database_password"
DB_ADMIN_USERNAME="my_database_admin_user"
DB_ADMIN_PASSWORD="my_database_admin_password"
DRUPAL_ADMIN_USERNAME="drupal_admin_user"
DRUPAL_ADMIN_PASSWORD="drupal_admin_password"
DRUPAL_ADMIN_EMAIL="drupal.admin-user@email.com"
DRUPAL_SITE_EMAIL="drupal.site@email.com"
# Paths relative to the Drupal docroot.
DRUPAL_FILES_FOLDER="sites/default/files"
DRUPAL_TMP_FOLDER="sites/default/tmp"
DRUPAL_PRIVATE_FILES_FOLDER="sites/default/private"
# Unix ownership / permission modes applied further below.
DEFAULT_UNIX_OWNER="www-data"
DEFAULT_UNIX_GROUP="www-data"
DEFAULT_UNIX_MOD="770"
WRITEABLE_UNIX_OWNER="www-data"
WRITEABLE_UNIX_GROUP="www-data"
WRITEABLE_UNIX_MOD="770"
PROTECTED_CFG_UNIX_MOD="550"
#--------------------------------------
# DB installation
# NOTE(review): passing the password as -p$DB_ADMIN_PASSWORD exposes it in the
# process list; prefer ~/.my.cnf or mysql --defaults-extra-file.
echo "CREATE DATABASE IF NOT EXISTS $DB_NAME;
GRANT ALL PRIVILEGES ON $DB_NAME.* TO '$DB_USERNAME'@'localhost' IDENTIFIED BY '$DB_PASSWORD';" | mysql -u "$DB_ADMIN_USERNAME" -p"$DB_ADMIN_PASSWORD"

#--------------------------------------
# Drupal installation
chown "$DEFAULT_UNIX_OWNER:$DEFAULT_UNIX_GROUP" . -R
chmod "$DEFAULT_UNIX_MOD" . -R

# Drupal download + initialisation (drush make with a throwaway makefile).
echo -n "api = 2
core = 7.x
projects[] = drupal" > tmp.make
drush make tmp.make -y
rm tmp.make
drush si "$DRUPAL_PROFILE" --db-url="mysql://$DB_USERNAME:$DB_PASSWORD@$DB_HOST/$DB_NAME" --site-name="$SITE_NAME" --account-name="$DRUPAL_ADMIN_USERNAME" --account-pass="$DRUPAL_ADMIN_PASSWORD" --account-mail="$DRUPAL_ADMIN_EMAIL" --site-mail="$DRUPAL_SITE_EMAIL" -y

# Drupal modules folder structure setup
# Fix: -p so re-running the script does not abort on existing folders.
mkdir -p sites/all/libraries
mkdir -p sites/all/modules/custom
mkdir -p sites/all/modules/contrib
#mkdir sites/all/modules/features
chown "$DEFAULT_UNIX_OWNER:$DEFAULT_UNIX_GROUP" . -R
chmod "$DEFAULT_UNIX_MOD" . -R

# Drupal File System folders setup
mkdir -p "$DRUPAL_TMP_FOLDER"
mkdir -p "$DRUPAL_PRIVATE_FILES_FOLDER"
chown "$WRITEABLE_UNIX_OWNER:$WRITEABLE_UNIX_GROUP" "$DRUPAL_FILES_FOLDER" -R
chmod "$WRITEABLE_UNIX_MOD" "$DRUPAL_FILES_FOLDER" -R
chown "$WRITEABLE_UNIX_OWNER:$WRITEABLE_UNIX_GROUP" "$DRUPAL_TMP_FOLDER" -R
chmod "$WRITEABLE_UNIX_MOD" "$DRUPAL_TMP_FOLDER" -R
chown "$WRITEABLE_UNIX_OWNER:$WRITEABLE_UNIX_GROUP" "$DRUPAL_PRIVATE_FILES_FOLDER" -R
chmod "$WRITEABLE_UNIX_MOD" "$DRUPAL_PRIVATE_FILES_FOLDER" -R
drush vset --yes file_public_path "$DRUPAL_FILES_FOLDER"
drush vset --yes file_private_path "$DRUPAL_PRIVATE_FILES_FOLDER"
drush vset --yes file_temporary_path "$DRUPAL_TMP_FOLDER"

# Make config write-protected
chmod "$PROTECTED_CFG_UNIX_MOD" sites/default
chmod "$PROTECTED_CFG_UNIX_MOD" sites/default/settings.php
#-----------------------------------------
# From minimal install
# to my "standard" dev setup

# Minimal install : missing needed Drupal core modules
drush en taxonomy field_ui -y

# Manual updates
drush dis update -y

# Drupal modules : minimum + dev + admin
drush dl admin_menu devel backup_migrate libraries transliteration
drush en admin_menu admin_menu_toolbar devel backup_migrate libraries transliteration -y

# Theme for admin menu
drush dl adminimal_admin_menu
drush en adminimal_admin_menu -y

# DB dump 1 : "standard" install restore point
# (drush bb comes from the backup_migrate module enabled above)
drush bb

#-----------------------------------------
# Usual modules

# Basic functions
drush dl token
drush en token -y
drush dl ctools views
drush en ctools views views_ui -y
drush dl date
drush en date -y
drush en date_all_day date_popup date_repeat date_repeat_field date_views -y
drush dl dates
drush en dates -y

# Content architecture
drush dl entity
drush en entity -y
drush dl entityreference
drush en entityreference -y

# Admin views
drush dl admin_views
drush en admin_views -y

# The CCK of Entities (UI for creating custom Entities)
#drush dl eck
#drush en eck -y

# Relations
#drush dl relation relation_add
#drush en relation relation_ui relation_add -y

# User profiles
#drush dl profile2
#drush en profile2 -y
# Note 2013/02/21 18:50:57 - when using Profile2, the "label" column always gets the value of bundle title
# -> using this auto_entitylabel is required when using an entity_reference field to target the profile entites directly using autocomplete
#drush dl auto_entitylabel
#drush en auto_entitylabel -y

# File storage
#drush dl storage_api
#drush en storage_api -y

# Linked Data
#drush dl schemaorg
#drush en schemaorg -y

# SEO
drush dl metatag
drush en metatag -y
drush dl pathauto
drush en pathauto -y
drush dl pathologic
drush en pathologic -y
drush dl redirect
drush en redirect -y
#drush dl xmlsitemap
#drush en xmlsitemap -y
#drush dl subpathauto
#drush en subpathauto -y
#drush dl menu_attributes
#drush en menu_attributes -y

# Other
#drush dl webform
#drush en webform -y
# Nice to-do list worthy to look at before going live
#drush dl prod_check
#drush en prod_check -y

# Media
#drush dl imagestyleflush
#drush en imagestyleflush -y

#-----------------------------------------
# Email
drush dl mailsystem
drush en mailsystem -y
cd sites/all/libraries
# Check link for latest version (current latest: 5.3.1 - as of 2014/12/26 14:02:14)
# @see http://swiftmailer.org/
wget https://github.com/swiftmailer/swiftmailer/archive/v5.3.1.tar.gz --quiet --no-check-certificate
tar -zxf v5.3.1.tar.gz
mv swiftmailer-5.3.1 swiftmailer
rm v5.3.1.tar.gz
chown $DEFAULT_UNIX_OWNER:$DEFAULT_UNIX_GROUP . -R
chmod $DEFAULT_UNIX_MOD . -R
cd ../../../
drush dl swiftmailer
drush en swiftmailer -y

# Emails "throttling"
#drush dl queue_mail
#drush en queue_mail -y

# DB dump 2 : "usual" install restore point
drush bb
#-----------------------------------------
# Useful field types
# (Core)
drush en number -y
drush dl telephone
drush en telephone -y
#drush dl email
#drush en email -y
#drush dl invisimail
#drush en invisimail -y
drush dl url
drush en url -y
# untested 2014/06/05 02:55:21 - module monday entry
#drush dl tablefield
#drush en tablefield -y
#-----------------------------------------
# UX / Redaction helpers
# Input filters
# @todo : custom module for custom token
#drush dl token_filter
#drush en token_filter -y
# Prevent Simultaneous Edits
#drush dl content_lock
#drush en content_lock -y
# Marquer / Linker of word occurrences
#drush dl word_link
#drush en word_link -y
# Collapse input format description
drush dl hide_formats
drush en hide_formats -y
# Prevent double submit click
#drush dl hide_submit
#drush en hide_submit -y
# UI helpers
#drush dl content_menu
#drush en content_menu -y
#drush dl options_element
#drush en options_element -y
#drush dl select_or_other
#drush en select_or_other -y
#drush dl term_reference_tree
#drush en term_reference_tree -y
# Entity reference helpers
#drush dl inline_entity_form
#drush en inline_entity_form -y
# Node publishing options visibility
#drush dl override_node_options
#drush en override_node_options -y
# Adds a publish and unpublish button for a simpler editorial workflow
# @see http://www.lullabot.com/articles/module-monday-publish-button
#drush dl publish_button
#drush en publish_button -y
# Alternative
#drush dl publishcontent
#drush en publishcontent -y
# Adding custom node publishing options
#drush dl custom_pub
#drush en custom_pub -y
# Breadcrumbs
#drush dl crumbs
#drush en crumbs -y
# Alternative :
#drush dl path_breadcrumbs
#drush en path_breadcrumbs -y
# Complement to Crumbs & alternative to menu_block
# (apparently, no admin UI though)
#drush dl menupoly
#drush en menupoly -y
# Replace anything that's passed through t()
#drush dl stringoverrides
#drush en stringoverrides -y
# Modal Forms (using CTools)
#drush dl modal_forms
#drush en modal_forms -y
# jQuery update
drush dl jquery_update
drush en jquery_update -y
# Colorbox
#cd sites/all/libraries
#wget https://github.com/jackmoore/colorbox/archive/master.zip --quiet --no-check-certificate
#unzip master.zip
#mv colorbox-master colorbox
#rm master.zip
#chown $DEFAULT_UNIX_OWNER:$DEFAULT_UNIX_GROUP . -R
#chmod $DEFAULT_UNIX_MOD . -R
#cd ../../../
#drush dl colorbox
#drush en colorbox -y
#-----------------------------------------
# Multilingual
# Install another language
drush en locale -y
drush dl l10n_update
drush en l10n_update -y
# Handle content translation
drush en translation -y
drush dl i18n
drush en i18n i18n_node i18n_select i18n_redirect i18n_user -y
#drush en i18n_variable -y
drush en i18n_field -y
#drush en i18n_sync -y
#drush en i18n_path -y
#drush en i18n_menu -y
#drush en i18n_block -y
drush en i18n_taxonomy -y
# Translation overview
drush dl translation_overview
drush en translation_overview -y
# Language detection : cookie
# @see https://www.drupal.org/node/2398959
drush dl language_cookie
drush en language_cookie -y
#-----------------------------------------
# Security
#drush dl seckit
#drush en seckit -y
#-----------------------------------------
# Workflow & content moderation
#drush dl revisioning
#drush en revisioning -y
#drush dl workflow
#drush en workflow workflow_admin_ui workflow_access -y
#drush dl workbench
#drush en workbench -y
#-----------------------------------------
# Access management
#drush dl acl
#drush en acl -y
#drush dl content_access
#drush en content_access -y
#drush dl field_permissions
#drush en field_permissions -y
#drush dl restrict_node_page_view
#drush en restrict_node_page_view -y
#drush dl nodeaccess_nodereference
#drush en nodeaccess_nodereference -y
#drush dl node_access_relation
#drush en node_access_relation -y
#drush dl node_access_rebuild_bonus
#drush en node_access_rebuild_bonus -y
# Scheduled field access
#drush dl fieldscheduler
#drush en fieldscheduler -y
#-----------------------------------------
# Site building / Content Architecture
# Helper modules
# Export/import support for : Node types, Taxonomy, User, Fields, Field Groups
#drush dl bundle_copy
#drush en bundle_copy -y
# Collection of useful UI tools for working with fields (untested) :
# Apply a vocabulary to multiple entities and bundles at once
# Clone any field instance to multiple entities and bundles
# Clone all field instance of a bundle to multiple entities and bundles
# Delete multiple instances of a field
#drush dl field_tools
#drush en field_tools -y
# Structure
#drush dl eva
#drush en eva -y
#drush dl nodequeue
#drush en nodequeue -y
#drush dl field_group
#drush en field_group -y
#drush dl field_collection
#drush en field_collection -y
#drush dl draggableviews
#drush en draggableviews -y
#drush dl skyfield
#drush en skyfield -y
#drush dl content_type_groups
#drush en content_type_groups -y
# Layout "presets" for use inside body / wysiwyg
#drush dl article_templater
#drush en article_templater -y
# Display term and its parents
#drush dl hierarchical_term_formatter
#drush en hierarchical_term_formatter -y
# Flag
#drush dl flag
#drush en flag -y
#drush dl flag_weights
#drush en flag_weights -y
# Rules
#drush dl rules
#drush en rules rules_admin -y
# Migrate
#drush dl migrate
#drush en migrate migrate_ui -y
# Import / Export
#drush dl feeds
#drush en feeds -y
#drush dl phpexcel
#drush en phpexcel -y
# Other
#drush dl Droogle
#drush en Droogle -y
#drush dl splashify
#drush en splashify -y
#-----------------------------------------
# Social stuff
# Realname
# choose fields from the user profile that will be used to add a "real name" element (method) to a user object.
# It will also optionally set all nodes and comments to show this name.
#drush dl realname
#drush en realname -y
# OAuth alternative (Doesn't depend on any external service)
drush dl hybridauth
cd sites/all/libraries
wget https://github.com/hybridauth/hybridauth/archive/master.zip --quiet --no-check-certificate
unzip master.zip
mv hybridauth-master hybridauth
rm master.zip
chown $DEFAULT_UNIX_OWNER:$DEFAULT_UNIX_GROUP . -R
chmod $DEFAULT_UNIX_MOD . -R
cd ../../../
drush en hybridauth -y
# Twitter module : supports 3rd-party login (with oauth), tweets aggregation (import), tweets publication (push)
#drush dl oauth twitter
#drush en oauth_common twitter -y
# Simpler, read-only Twitter module
#drush dl twitter_pull
#drush en twitter_pull -y
# Twitter profile infos
#drush dl twitter_profile
#drush en twitter_profile -y
# Force users to complete their profile
#drush dl pfff
#drush en pfff -y
# Social Aggregation (twitter + rss feeds)
#drush dl activitystream
#drush en activitystream -y
# Notifications / Subscription / Journaling - Framework (Gizra inside)
#drush dl message
#drush en message -y
#drush dl message_subscribe
#drush en message_subscribe message_subscribe_ui -y
#drush dl message_notify
#drush en message_notify -y
#-----------------------------------------
# Geolocalization
# Minimalistic solution (most basic)
# this module only uses a plain text field for entering an address
#drush dl simple_gmap
#drush en simple_gmap -y
# Untested, might check it out later
#drush dl ip_geoloc
#drush en ip_geoloc -y
#drush dl getlocations
#drush en getlocations -y
# Geofield (stores complex coordinates)
# Note : also contains a simple display formatter using GMap (module "geofield_map")
drush dl geofield
drush en geofield geofield_map -y
# Addressfield (implements xNAL standard)
drush dl addressfield
drush en addressfield -y
# Geocoding (make "geofield" points from "addressfield", "geolocation", or "location")
drush dl geocoder
drush en geocoder -y
#drush dl geocoder_autocomplete
#drush en geocoder_autocomplete -y
# Leaflet (light map display)
#drush dl leaflet
#drush en leaflet -y
# OpenLayers (heavier, more sophisticated map display)
#drush dl openlayers
#drush en openlayers -y
# Location field (7.x not ready yet, medium sophistication, wait for branch 7.x-5.x - will be using proper entities)
#drush dl location
#drush en location -y
#drush dl locationmap
#drush en locationmap -y
# Others (untested)
#drush dl geolocation
#drush en geolocation -y
# Geo-search
#drush dl search_api_location
#drush en search_api_location -y
#drush dl openlayers_solr
#drush en openlayers_solr -y
#-----------------------------------------
# Dev utils
# Dummy content
#drush dl realistic_dummy_content
#drush en realistic_dummy_content -y
# Configuration in code
#drush dl features strongarm
#drush en features strongarm -y
#drush dl ftools
#drush en ftools -y
#drush dl features_override
#drush en features_override -y
# Configuration in code - experimental alternative (Drupal 8 inspired)
#drush dl configuration
#drush en configuration -y
# Emails (sandbox-like behaviour : sends all emails from Drupal to a single address)
#drush dl reroute_email
#drush en reroute_email -y
# Drush extensions
# Remove unnecessary files
drush dl drush_cleanup -n
drush cleanup
# Cron enhancement
drush dl elysia_cron
drush en elysia_cron -y
# alternative :
# Cron enhancement (untested)
#drush dl ultimate_cron
#drush en ultimate_cron -y
# Session-related utils
#drush dl session_cache
#drush en session_cache -y
# Modules introspection / overview
#drush dl moduleinfo
#drush en moduleinfo -y
# Monitoring
#drush dl performance
#drush en performance -y
# Batch
#drush dl better_batch
#drush en better_batch -y
# Maintenance
#drush dl watchdog_digest
#drush en watchdog_digest -y
# DB-related utils
#drush dl schema
#drush en schema -y
#-----------------------------------------
# Theming & Front-end
# Conditional stylesheets for IE
#drush dl conditional_styles
#drush en conditional_styles -y
# Front-end Utils
drush dl magic
drush en magic -y
# Base theme
#drush dl mothership
#drush en mothership -y
# HTML 5 helpers
drush dl elements html5_tools
drush en elements html5_tools -y
drush dl fences
drush en fences -y
# Layout management - Panels
#drush dl panels
#drush en page_manager -y
# Per-node Panels layout selection
#drush dl panelizer
#drush en panelizer -y
# Layout management - Display Suite
#drush dl ds
#drush en ds ds_ui ds_forms -y
#drush en ds ds_devel ds_extras ds_forms ds_format ds_ui ds_search -y
# Layout management - Custom Entity view modes
drush dl entity_view_mode
drush en entity_view_mode -y
# "Poorman's Panels" - Good to know for anything simple enough
#drush dl fieldblock
#drush en fieldblock -y
# Layout management - Context
#drush dl context
#drush en context context_ui -y
#drush en context context_layouts context_ui -y
# Layout management - Theme Key (untested)
#drush dl themekey
#drush en themekey -y
# Layout management - Delta (untested)
#drush dl delta
#drush en delta delta_ui delta_blocks -y
#drush en delta delta_ui delta_color delta_blocks -y
# Utils / Formatters
#drush dl css_injector
#drush en css_injector -y
#drush dl js_injector
#drush en js_injector -y
#drush dl token_formatters
#drush en token_formatters -y
# Front-end app architecture (untested)
#drush dl backbone
#drush en backbone -y
# Typography helpers
#drush dl typogrify
#drush en typogrify -y
# CSS / Styling (theme building)
#drush dl styleguide
#drush en styleguide -y
#drush dl design
#drush en design_test -y
#-----------------------------------------
# Commerce
drush dl commerce
drush en commerce commerce_ui -y
drush en commerce_customer commerce_customer_ui -y
drush en commerce_price -y
drush en commerce_line_item commerce_line_item_ui -y
drush en commerce_order commerce_order_ui -y
drush en commerce_checkout commerce_payment commerce_product -y
drush en commerce_cart commerce_product_pricing -y
#drush en commerce_tax -y
drush en commerce_product_ui -y
#drush en commerce_tax_ui -y
# Stock
drush dl commerce_stock
drush en commerce_stock -y
# Payment
drush dl commerce_cheque
drush en commerce_cheque -y
drush dl commerce_pay_in_person
drush en commerce_pay_in_person -y
drush dl commerce_bank_transfer
drush en commerce_bank_transfer -y
# Invoice (untested)
#drush dl commerce_billy
#drush en commerce_billy -y
#-----------------------------------------
# Performance
# Faster 404
# @see http://drupal.org/node/1500092
# -> to re-test as of 7.x-1.4
#drush dl fast_404
#drush en fast_404 -y
# Image cache utils
#drush dl imageinfo_cache
#drush en imageinfo_cache -y
# Faster callbacks (ajax)
#drush dl js
#drush en js -y
# (Legacy module)
#drush dl js_callback
#drush en js_callback -y
# Replacement for Drupal Core's default cache implementation
# @see http://www.metaltoad.com/blog/how-drupals-cron-killing-you-your-sleep-simple-cache-warmer
# In your settings.php file, you'll need to add the following lines to force Drupal to use the ADBC backend :
# <?php
# $conf['cache_backends'][] = 'sites/all/modules/adbc/adbc.cache.inc';
# $conf['cache_default_class'] = 'AlternativeDrupalDatabaseCache';
# ?>
#drush dl adbc
#drush en adbc -y
# File Caches
#drush dl boost
#drush en boost -y
# OR
#drush dl filecache
#drush en filecache -y
# Drupal core JS optimization
#drush dl speedy
#drush en speedy -y
# Drupal core JS / CSS aggregation optimization
#drush dl agrcache
#drush en agrcache -y
#drush dl core_library
#drush en core_library -y
#drush dl advagg
#drush en advagg -y
# Pjax navigation
#drush dl pjax
#drush en pjax -y
# Memory usage
#drush dl role_memory_limit
#drush en role_memory_limit -y
# More cache backends / utilities
#drush dl entitycache
#drush en entitycache -y
#drush dl expire
#drush en expire -y
#drush dl cache_graceful
#drush en cache_graceful -y
#drush dl cache_lifetime_options
#drush en cache_lifetime_options -y
#drush dl cdn
#drush en cdn -y
#drush dl apc
#drush en apc -y
#drush dl memcache
#drush en memcache -y
#drush dl varnish
#drush en varnish -y
#drush dl purge
#drush en purge -y
#drush dl esi
#drush en esi -y
# Other
#drush dl httprl
#drush en httprl -y
# DB dump 3 : "start" restore point
#drush bb
|
<gh_stars>0
package com.corsair.sparrow.pirate.oauth.service.impl;
import com.corsair.sparrow.pirate.oauth.domain.bean.SysUserRole;
import com.corsair.sparrow.pirate.oauth.mapper.SysUserRoleMapper;
import com.corsair.sparrow.pirate.oauth.service.ISysUserRoleService;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.conditions.Wrapper;
import com.corsair.sparrow.pirate.core.base.PagingRequest;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
/**
 * <p>
 * Service implementation for the user-role association table.
 * </p>
 *
 * @author jack
 * @since 2019-03-22
 */
@Service
public class SysUserRoleServiceImpl extends ServiceImpl<SysUserRoleMapper, SysUserRole> implements ISysUserRoleService {

    /**
     * Runs the given query as a paginated query.
     *
     * @param pagingRequest carries the requested page number and page size
     * @param queryWrapper  MyBatis-Plus query conditions to apply
     * @return a {@link PageInfo} wrapping the matching rows of the requested page
     */
    @Override
    public PageInfo<SysUserRole> getPageInfo(PagingRequest pagingRequest, Wrapper<SysUserRole> queryWrapper) {
        // PageHelper paginates the next MyBatis query issued on this thread.
        PageHelper.startPage(pagingRequest.getPageNum(), pagingRequest.getPageSize());
        final PageInfo<SysUserRole> page = new PageInfo<>(super.list(queryWrapper));
        return page;
    }
}
|
(function() {
    // Collect device/OS/browser/GPU information and render it into #card1.
    var parser = new UAParser();

    // Bug fix: UAParser's getDevice() result exposes `vendor`/`model`/`type`
    // (there is no `name` field), so the old `if (...getDevice().name)` guard
    // never passed and `device` stayed undefined in the rendered output.
    // Serialise the result as-is; on desktops it may legitimately be empty.
    var device = JSON.stringify(parser.getDevice());

    // Same bug for getCPU(): its result only carries `architecture`.
    var cpu = JSON.stringify(parser.getCPU());

    // hardwareConcurrency is missing in some browsers; fall back to ''.
    var cores = (navigator.hardwareConcurrency ? navigator.hardwareConcurrency + ' Cores' : '');
    var os = parser.getOS().name + ' ' + parser.getOS().version;
    var browser = parser.getBrowser().name + ' ' + parser.getBrowser().version;

    $('#card1 #cardcontent').html(
        `
        <div style="text-align: left; font-family: sans-serif; font-weight: 500; padding: 10px;">
        <b>Device Name</b>: ${device} <br/>
        <b>CPU</b>: ${cpu} <br/>
        <b>Cores</b>: ${cores} <br/>
        <b>OS</b>: ${os} <br/>
        <b>Browser</b>: ${browser} <br/>
        </div>
        `
    )

    // Currently a stub: the DOM elements it used to update are gone from the
    // markup; kept so battery event wiring below remains in place.
    function updateBatteryStatus(battery) {
        if (!battery.charging) {
            // e.g. remaining discharge time: battery.dischargingTime
        } else {
            // e.g. remaining charging time: battery.chargingTime
        }
    }

    // Bug fix: navigator.getBattery is not implemented in every browser
    // (e.g. Firefox/Safari); calling it unconditionally threw a TypeError
    // and aborted the rest of this script.
    if (typeof navigator.getBattery === 'function') {
        navigator.getBattery().then(function(battery) {
            // Update the battery status initially when the promise resolves ...
            updateBatteryStatus(battery);
            // .. and for any subsequent updates.
            battery.onchargingchange = function() {
                updateBatteryStatus(battery);
            };
            battery.onlevelchange = function() {
                updateBatteryStatus(battery);
            };
            battery.ondischargingtimechange = function() {
                updateBatteryStatus(battery);
            };
        });
    }

    /* GPU */
    // Bug fix: `gl` was assigned without declaration, so when getContext threw
    // (or #glcanvas was missing) the later `if (gl)` raised a ReferenceError.
    var canvas = document.getElementById("glcanvas");
    var gl;
    try {
        // Prefer the standard context name, fall back to the legacy prefix.
        gl = canvas.getContext("webgl") || canvas.getContext("experimental-webgl");
        gl.viewportWidth = canvas.width;
        gl.viewportHeight = canvas.height;
    } catch (e) {}
    if (gl) {
        var extension = gl.getExtension('WEBGL_debug_renderer_info');
        if (extension != undefined) {
            // Unmasked strings name the real vendor/renderer when allowed.
            console.log(gl.getParameter(extension.UNMASKED_VENDOR_WEBGL));
            console.log(gl.getParameter(extension.UNMASKED_RENDERER_WEBGL));
        } else {
            console.log(gl.getParameter(gl.VENDOR));
            console.log(gl.getParameter(gl.RENDERER));
        }
    }
    console.log(window.screen.width + 'x' + window.screen.height);
    /* Plugins */
}())
|
<filename>app/src/main/java/com/ap/stephen/videodrawerplayer/VideoListActivity.java
package com.ap.stephen.videodrawerplayer;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.ap.stephen.videodrawerplayer.repositories.SdCardVideoRepository;
import com.ap.stephen.videodrawerplayer.content.VideoItem;
import java.util.List;
/**
* An activity representing a list of Videos. This activity
* has different presentations for handset and tablet-size devices. On
* handsets, the activity presents a list of items, which when touched,
* lead to a {@link VideoDetailActivity} representing
* item details. On tablets, the activity presents the list of items and
* item details side-by-side using two vertical panes.
*/
public class VideoListActivity extends AppCompatActivity {
/**
 * Whether or not the activity is in two-pane mode, i.e. running on a tablet
 * device.
 */
private boolean mTwoPane;
// Shared singleton repository of the videos found on the SD card.
private static SdCardVideoRepository videoRepository = SdCardVideoRepository.getInstance();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_list);
// Use the layout's Toolbar as the action bar and tint it red.
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
toolbar.setTitle(getTitle());
getSupportActionBar().setBackgroundDrawable(new ColorDrawable(getResources().getColor(R.color.red)));
if (findViewById(R.id.video_detail_container) != null) {
// The detail container view will be present only in the
// large-screen layouts (res/values-w900dp).
// If this view is present, then the
// activity should be in two-pane mode.
mTwoPane = true;
}
setupRecyclerView();
}
// (Re)build the list: reshuffle the repository and attach a fresh adapter.
private void setupRecyclerView() {
RecyclerView recyclerView = findViewById(R.id.video_list);
assert recyclerView != null;
videoRepository.randomizeItems();
recyclerView.setAdapter(new SimpleItemRecyclerViewAdapter(this, videoRepository.getItems(), mTwoPane));
}
// Click guard so a double-tap doesn't open two detail screens. Static so it
// is shared across adapter instances. NOTE(review): not synchronized --
// presumably only ever touched on the UI thread; confirm.
private static boolean isVideoLoading;
// Clears the click guard once navigation to the detail view has started.
public void resetVideoLoadingState() {
isVideoLoading = false;
}
// Adapter rendering one row (name + thumbnail) per VideoItem; clicking a row
// opens the detail fragment (tablet) or detail activity (phone).
public static class SimpleItemRecyclerViewAdapter
extends RecyclerView.Adapter<SimpleItemRecyclerViewAdapter.ViewHolder> {
private final VideoListActivity mParentActivity;
private final List<VideoItem> mValues;
private final boolean mTwoPane;
private final View.OnClickListener mOnClickListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
// Ignore clicks while a previous click is still being handled.
if (isVideoLoading) {
return;
}
isVideoLoading = true;
// Reshuffle the list for when the user returns to it.
mParentActivity.setupRecyclerView();
// The bound VideoItem was stashed on the row view in onBindViewHolder.
VideoItem item = (VideoItem) view.getTag();
if (mTwoPane) {
// Tablet: swap the detail fragment into the side pane.
Bundle arguments = new Bundle();
arguments.putString(VideoDetailFragment.ARG_ITEM_PATH, item.getPath());
VideoDetailFragment fragment = new VideoDetailFragment();
fragment.setArguments(arguments);
mParentActivity.getSupportFragmentManager().beginTransaction()
.replace(R.id.video_detail_container, fragment)
.commit();
mParentActivity.resetVideoLoadingState();
} else {
// Phone: launch the full-screen detail activity.
Context context = view.getContext();
Intent intent = new Intent(context, VideoDetailActivity.class);
intent.putExtra(VideoDetailFragment.ARG_ITEM_PATH, item.getPath());
context.startActivity(intent);
mParentActivity.resetVideoLoadingState();
}
}
};
SimpleItemRecyclerViewAdapter(VideoListActivity parent,
List<VideoItem> items,
boolean twoPane) {
mValues = items;
mParentActivity = parent;
mTwoPane = twoPane;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext())
.inflate(R.layout.video_list_content, parent, false);
return new ViewHolder(view);
}
@Override
public void onBindViewHolder(final ViewHolder holder, int position) {
// Bind name + thumbnail and tag the row with its item for the click handler.
VideoItem item = mValues.get(position);
holder.mIdView.setText(item.getName());
holder.mContentView.setImageBitmap(item.getBitmap());
holder.itemView.setTag(item);
holder.itemView.setOnClickListener(mOnClickListener);
}
@Override
public int getItemCount() {
return mValues.size();
}
// Row view holder caching the name TextView and thumbnail ImageView.
class ViewHolder extends RecyclerView.ViewHolder {
final TextView mIdView;
final ImageView mContentView;
ViewHolder(View view) {
super(view);
mIdView = view.findViewById(R.id.id_text);
mContentView = view.findViewById(R.id.content);
}
}
}
}
|
package com.iflytek.cyber.resolver.speechrecognizer;
import android.content.Context;
import com.google.gson.JsonObject;
import com.iflytek.cyber.CyberDelegate;
import com.iflytek.cyber.SpeechController;
import com.iflytek.cyber.resolver.ResolverModule;
import okio.Source;
/**
 * Resolver backing the SpeechRecognizer interface: posts Recognize events
 * from captured audio and reacts to the platform's speech directives
 * (StopCapture, IntermediateText, ExpectSpeech).
 */
public class SpeechRecognizerResolver extends ResolverModule implements SpeechController {
    private static final String TAG = "SpeechRecognizer";

    // Optional listener that receives intermediate recognition text.
    private IATCallback iatCallback;

    public SpeechRecognizerResolver(Context context, CyberDelegate delegate) {
        super(context, delegate);
        // Register this resolver as the delegate's active speech controller.
        delegate.registerSpeechController(this);
    }

    @Override
    public void start(Source source, JsonObject initiator) {
        // Describe the audio capture profile/format for the Recognize event.
        final JsonObject payload = new JsonObject();
        payload.addProperty("profile", "FAR_FIELD");
        payload.addProperty("format", "AUDIO_L16_RATE_16000_CHANNELS_1");
        if (initiator != null) {
            payload.add("initiator", initiator);
        }
        delegate.postEvent("Recognize", payload, source);
    }

    @Override
    public void finish() {
        // Intentionally empty in this implementation.
    }

    @Override
    public void cancel() {
        // Intentionally empty in this implementation.
    }

    @Override
    public void resolve(JsonObject header, JsonObject payload, Callback callback) {
        final String directive = header.get("name").getAsString();
        if ("StopCapture".equals(directive)) {
            delegate.stopCapture();
            callback.next();
        } else if ("IntermediateText".equals(directive)) {
            // Forward partial ASR text to the listener, if one is attached.
            if (iatCallback != null) {
                iatCallback.onReceiveIAT(payload.get("text").getAsString());
            }
            callback.next();
        } else if ("ExpectSpeech".equals(directive)) {
            delegate.expectSpeech(payload.getAsJsonObject("initiator"));
            callback.next();
        } else {
            // Unknown directive: let another resolver handle it.
            callback.skip();
        }
    }

    public void setIatCallback(IATCallback iatCallback) {
        this.iatCallback = iatCallback;
    }

    /** Receives intermediate recognition ("IAT") text as it arrives. */
    public interface IATCallback {
        void onReceiveIAT(String text);
    }
}
|
import * as R from 'ramda'
import { Style, Icon } from 'ol/style'
import ms from 'milsymbol'
import { K } from '../../../shared/combinators'
import { defaultStyle, styleFactory } from './default-style'
// SIDC text-modifier keys -> milsymbol option names.
const MODIFIERS = {
  c: 'quantity',
  f: 'reinforcedReduced',
  g: 'staffComments',
  h: 'additionalInformation',
  m: 'higherFormation',
  q: 'direction',
  t: 'uniqueDesignation',
  v: 'type',
  z: 'speed',
  aa: 'specialHeadquarters',
  w: 'dtg'
}

// Collect the feature properties that map to symbol modifiers, dropping
// empty values and a few placeholder strings written by default.
const modifiers = properties => {
  const isPlaceholder = (key, value) =>
    (key === 't' && (value === '[NO FORMALABBREVIATEDNAME]' || value === 'Untitled')) ||
    (key === 'v' && (value === 'Not otherwise specified' || value === 'Not Specified'))

  const result = {}
  for (const [key, value] of Object.entries(properties)) {
    if (!MODIFIERS[key] || !value) continue
    if (isPlaceholder(key, value)) continue
    result[MODIFIERS[key]] = value
  }
  return result
}
// Build an OpenLayers Icon from a rendered milsymbol, anchored at the
// symbol's own anchor point and scaled down for map display.
const icon = symbol => {
  const { x, y } = symbol.getAnchor()
  const size = symbol.getSize()
  return new Icon({
    anchor: [x, y],
    scale: 0.4,
    anchorXUnits: 'pixels',
    anchorYUnits: 'pixels',
    imgSize: [Math.floor(size.width), Math.floor(size.height)],
    img: symbol.asCanvas()
  })
}
// Point geometry, aka symbol.
// Style-function factory for point features rendered as military symbols
// via milsymbol; `mode` selects rendering variants ('selected', 'multi', ...).
export const symbolStyle = mode => (feature, resolution) => {
const factory = styleFactory({ mode, feature, resolution })(R.identity)
const { sidc, ...properties } = feature.getProperties()
// Show text modifiers when the feature is (multi-)selected or the style
// factory has labels enabled.
const infoFields = mode === 'selected' ||
mode === 'multi' ||
factory.showLabels()
// Selected symbols get a thicker white outline.
const outlineWidth = mode === 'selected' ? 6 : 4
const symbol = new ms.Symbol(sidc, {
...modifiers(properties),
outlineWidth,
outlineColor: 'white',
infoFields
})
// Invalid SIDC: fall back to the generic default style. In 'multi' mode,
// also render the factory's edit handles for the geometry.
return symbol.isValid()
? [
new Style({ image: icon(symbol) }),
mode === 'multi' ? factory.handles(feature.getGeometry()) : []
].flat()
: defaultStyle(feature)
}
|
# configure astyle
# Build and install astyle 3.1 from source once; the install dir acts as a
# "done" marker so re-provisioning skips the build.
# Abort on any failed command so a bad download or build doesn't get
# silently "installed".
set -e

if [ ! -d /home/vagrant/astyle-install ]; then
    cd /home/vagrant
    mkdir astyle-install
    cd astyle-install
    wget https://jaist.dl.sourceforge.net/project/astyle/astyle/astyle%203.1/astyle_3.1_linux.tar.gz -O astyle.tar.gz
    tar -xzf astyle.tar.gz
    cd astyle
    cmake .
    make
    make install
fi

# configure pre-commit hook (runs the indent helper on staged sources).
# -O keeps the download idempotent: without it a re-run would create
# pre-commit.1 instead of refreshing the hook.
mkdir -p /home/vagrant/.git/hooks
cd /home/vagrant/.git/hooks
wget https://github.com/hgryoo/cubrid-vagrant-dev/raw/master/shell/pre-commit -O pre-commit
chmod +x pre-commit
chown vagrant pre-commit

# configure indent helper referenced by the hook via git config
mkdir -p /home/vagrant/bin
cd /home/vagrant/bin
wget https://github.com/hgryoo/cubrid-vagrant-dev/raw/master/shell/indent -O indent
chmod +x indent
chown vagrant indent
git config --global hooks.indent /home/vagrant/bin/indent

# install vscode from Microsoft's signed yum repository
cd /vagrant
rpm --import https://packages.microsoft.com/keys/microsoft.asc
cp ./vscode.repo /etc/yum.repos.d/vscode.repo
yum -y install code
|
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
public class RemoveDuplicates {
public static void main(String[] args) {
List<Integer> list = new ArrayList<>();
list.add(5);
list.add(2);
list.add(12);
list.add(9);
list.add(2);
list.add(7);
list.add(12);
Set<Integer> set = new HashSet<>(list);
List<Integer> newList = new ArrayList<>(set);
System.out.println(newList); // [5, 2, 12, 9, 7]
}
}
|
# MinIO (local S3-compatible store) credentials; the AWS_* pair mirrors them
# so AWS SDK clients pick up the same keys.
export MINIO_ACCESS_KEY=minio
export MINIO_SECRET_KEY=miniokey
export AWS_ACCESS_KEY_ID=minio
export AWS_SECRET_ACCESS_KEY=miniokey
# Simulation world and (dummy) region settings for local training.
export WORLD_NAME=New_York_Track
export ROS_AWS_REGION=us-east-1
export AWS_REGION=us-east-1
export AWS_DEFAULT_REGION=us-east-1
# Where the trained model is written in the local MinIO bucket.
export MODEL_S3_PREFIX=rl-deepracer-sagemaker
export MODEL_S3_BUCKET=bucket
export LOCAL=True
# NOTE(review): this grep pattern matches inet addresses whose first two
# characters are one of [197] then [970] (e.g. 19., 17., 97., 10., ...) --
# presumably meant to pick the machine's LAN/private address; verify it
# actually matches your network interface before relying on it.
export MYIP=`ifconfig | grep -e 'inet [197][970]' | awk '{print $2}'`
# Point S3 clients at the local MinIO server on port 9000.
export S3_ENDPOINT_URL=http://$MYIP:9000
export MARKOV_PRESET_FILE=deepracer.py
# greadlink is GNU readlink from coreutils on macOS (brew install coreutils).
export LOCAL_ENV_VAR_JSON_PATH=$(greadlink -f ./source/rl_coach/env_vars.json)
#export LOCAL_EXTRA_DOCKER_COMPOSE_PATH=$(readlink -f ./docker_compose_extra.json)
|
require File.expand_path('../../../spec_helper', __FILE__)
describe "File.chown" do
before :each do
@fname = tmp('file_chown_test')
touch @fname
end
after :each do
rm_r @fname
end
as_superuser do
platform_is :windows do
it "does not modify the owner id of the file" do
File.chown 0, nil, @fname
File.stat(@fname).uid.should == 0
File.chown 501, nil, @fname
File.stat(@fname).uid.should == 0
end
it "does not modify the group id of the file" do
File.chown nil, 0, @fname
File.stat(@fname).gid.should == 0
File.chown nil, 501, @fname
File.stat(@fname).gid.should == 0
end
end
platform_is_not :windows do
it "changes the owner id of the file" do
File.chown 501, nil, @fname
File.stat(@fname).uid.should == 501
File.chown 0, nil, @fname
File.stat(@fname).uid.should == 0
end
it "changes the group id of the file" do
File.chown nil, 501, @fname
File.stat(@fname).gid.should == 501
File.chown nil, 0, @fname
# Bug fix: this example is about the group id, but the original asserted
# `.uid` here, so a wrong gid would not have been caught.
File.stat(@fname).gid.should == 0
end
it "does not modify the owner id of the file if passed nil or -1" do
File.chown 501, nil, @fname
File.chown nil, nil, @fname
File.stat(@fname).uid.should == 501
File.chown nil, -1, @fname
File.stat(@fname).uid.should == 501
end
it "does not modify the group id of the file if passed nil or -1" do
File.chown nil, 501, @fname
File.chown nil, nil, @fname
File.stat(@fname).gid.should == 501
File.chown nil, -1, @fname
File.stat(@fname).gid.should == 501
end
end
end
it "returns the number of files processed" do
File.chown(nil, nil, @fname, @fname).should == 2
end
platform_is_not :windows do
it "raises an error for a non existent path" do
lambda {
File.chown(nil, nil, "#{@fname}_not_existing")
}.should raise_error(Errno::ENOENT)
end
end
it "accepts an object that has a #to_path method" do
File.chown(nil, nil, mock_to_path(@fname)).should == 1
end
end
describe "File#chown" do
before :each do
@fname = tmp('file_chown_test')
@file = File.open(@fname, 'w')
end
after :each do
@file.close unless @file.closed?
rm_r @fname
end
as_superuser do
platform_is :windows do
it "does not modify the owner id of the file" do
@file.chown 0, nil
@file.stat.uid.should == 0
@file.chown 501, nil
@file.stat.uid.should == 0
end
it "does not modify the group id of the file" do
@file.chown nil, 0
@file.stat.gid.should == 0
@file.chown nil, 501
@file.stat.gid.should == 0
end
end
platform_is_not :windows do
it "changes the owner id of the file" do
@file.chown 501, nil
@file.stat.uid.should == 501
@file.chown 0, nil
@file.stat.uid.should == 0
end
it "changes the group id of the file" do
@file.chown nil, 501
@file.stat.gid.should == 501
@file.chown nil, 0
# Bug fix: this example is about the group id, but the original asserted
# `.uid` here, so a wrong gid would not have been caught.
@file.stat.gid.should == 0
end
it "does not modify the owner id of the file if passed nil or -1" do
@file.chown 501, nil
@file.chown nil, nil
@file.stat.uid.should == 501
@file.chown nil, -1
@file.stat.uid.should == 501
end
it "does not modify the group id of the file if passed nil or -1" do
@file.chown nil, 501
@file.chown nil, nil
@file.stat.gid.should == 501
@file.chown nil, -1
@file.stat.gid.should == 501
end
end
end
it "returns 0" do
@file.chown(nil, nil).should == 0
end
end
describe "File.chown" do
it "needs to be reviewed for spec completeness"
end
describe "File#chown" do
it "needs to be reviewed for spec completeness"
end
|
<gh_stars>0
/* **** Notes
Exchange the DI and the SI
*/
# define CAR
# include "../../../incl/config.h"

/* Swap the two pointers addressed by di and si.
   Returns 0x01 on success, 0x00 when either argument is NULL. */
signed(__cdecl sw_p(void(**di),void(**si))) {
    void *tmp;

    if (di == 0 || si == 0) return(0x00);

    tmp = (*di);
    *di = (*si);
    *si = (tmp);
    return(0x01);
}
|
#!/bin/bash
root_folder=$(cd $(dirname $0); cd ..; pwd)
readonly ENV_FILE="${root_folder}/local.env"
exec 3>&1
# Print all arguments on one line, prefixed with a "YYYY-MM-DD HH:MM:SS" stamp.
function _out() {
  local stamp
  stamp=$(date +'%F %H:%M:%S')
  echo "$stamp $*"
}
# Deploy v1 of web-api-java-jee to minikube, optionally protected by Istio.
function setup() {
_out Deploying web-api-java-jee v1
cd ${root_folder}/istio
# Remove an existing Istio protection policy before redeploying.
protectyaml="${root_folder}/web-api-java-jee/istio/protect-web-api.yaml"
if [ -f "$protectyaml" ]
then
kubectl delete -f protect-web-api.yaml --ignore-not-found
fi
# Tear down any previous deployments/services of either version.
cd ${root_folder}/web-api-java-jee
kubectl delete -f deployment/kubernetes-service.yaml --ignore-not-found
kubectl delete -f deployment/kubernetes-deployment-v1.yaml --ignore-not-found
kubectl delete -f deployment/kubernetes-deployment-v2.yaml --ignore-not-found
kubectl delete -f deployment/istio-service-v2.yaml --ignore-not-found
# Download the Zipkin tracer sample for Open Liberty once; reuse afterwards.
file="${root_folder}/web-api-java-jee/liberty-opentracing-zipkintracer-1.2-sample.zip"
if [ -f "$file" ]
then
echo "$file found"
else
curl -L -o $file https://github.com/WASdev/sample.opentracing.zipkintracer/releases/download/1.2/liberty-opentracing-zipkintracer-1.2-sample.zip
fi
unzip -o liberty-opentracing-zipkintracer-1.2-sample.zip -d liberty-opentracing-zipkintracer/
# Patch Service.java, replacing the first '10' on each line with '5' via a
# temp file (presumably the marker distinguishing v1 behavior -- TODO confirm).
sed 's/10/5/' src/main/java/com/ibm/webapi/business/Service.java > src/main/java/com/ibm/webapi/business/Service2.java
rm src/main/java/com/ibm/webapi/business/Service.java
mv src/main/java/com/ibm/webapi/business/Service2.java src/main/java/com/ibm/webapi/business/Service.java
# If App ID is configured, substitute its issuer into Liberty's server.xml.
if [ -z "$APPID_ISSUER" ]
then
_out App ID has NOT been configured
else
_out App ID has been configured
_out ${APPID_ISSUER}
_out ${APPID_JWKS_URI}
cd ${root_folder}/web-api-java-jee
sed "s+https://us-south.appid.cloud.ibm.com/oauth/v4/xxx+$APPID_ISSUER+g" liberty/server.xml > liberty/server2.xml
rm liberty/server.xml
mv liberty/server2.xml liberty/server.xml
fi
# Build straight into minikube's Docker daemon and apply the v1 manifests.
eval $(minikube docker-env)
docker build -f Dockerfile.nojava -t web-api:1 .
kubectl apply -f deployment/kubernetes-service.yaml
kubectl apply -f deployment/kubernetes-deployment-v1.yaml
kubectl apply -f deployment/istio-service-v1.yaml
# Re-apply the Istio protection policy if it exists in the repo.
if [ -f "$protectyaml" ]
then
cd ${root_folder}/istio
kubectl apply -f protect-web-api.yaml
fi
# Revert the issuer substitution so server.xml stays generic on disk.
if [ -z "$APPID_ISSUER" ]
then
_out App ID has NOT been configured
else
cd ${root_folder}/web-api-java-jee
sed "s+$APPID_ISSUER+https://us-south.appid.cloud.ibm.com/oauth/v4/xxx+g" liberty/server.xml > liberty/server2.xml
rm liberty/server.xml
mv liberty/server2.xml liberty/server.xml
fi
# Print how to reach the service through minikube's NodePort.
minikubeip=$(minikube ip)
nodeport=$(kubectl get svc web-api --output 'jsonpath={.spec.ports[*].nodePort}')
_out Minikube IP: ${minikubeip}
_out NodePort: ${nodeport}
_out Done deploying web-api-java-jee v1
_out Wait until the pod has been started: "kubectl get pod --watch | grep web-api"
_out Open the OpenAPI explorer: http://${minikubeip}:${nodeport}/openapi/ui/
}
# Source the optional local.env file (pointed to by $ENV_FILE) when present,
# making settings such as the App ID configuration available to setup().
function readEnv() {
  if [[ -f "$ENV_FILE" ]]; then
    source "$ENV_FILE"
  fi
}
readEnv
setup
|
#!/bin/bash
#SBATCH --account=hacc
#SBATCH --qos=regular
#SBATCH --constraint=knl
#SBATCH --time=24:00:00
#SBATCH --nodes=8
#SBATCH --tasks-per-node=1
#SBATCH --job-name=deltasigma
#SBATCH --output=M001-%j.out
# SLURM batch job: 8 KNL nodes, one task each, 24h wall time; stdout/stderr
# go to M001-<jobid>.out.
# Work from scratch space where the repo and data live.
cd /global/cscratch1/sd/asv13/
# Activate the conda environment holding the job's Python dependencies.
source /global/homes/a/asv13/miniconda3/bin/activate chopperds
# -u: unbuffered stdout so progress appears in the SLURM log immediately.
# Args: walker index 0 and the worklist JSON describing the tasks to run.
srun python -u /global/cscratch1/sd/asv13/repos/deltasigma/chopper_ds/run_walker.py 0 '/global/cscratch1/sd/asv13/repos/deltasigma/M001_worklist.json'
|
import { RANGES, SUPPORTED_PROTOCOLS } from "../constants";
// True when the given protocol string is one of the supported protocols.
export const checkProtocolSupport = protocol =>
    SUPPORTED_PROTOCOLS.includes(protocol);
// Maps a numeric score onto one of the configured buckets:
// below `poor` -> "poor", below `ok` -> "ok", otherwise "good".
export const findScoreRange = score => {
    const { poor, ok } = RANGES;
    if (score < poor) {
        return "poor";
    }
    if (score < ok) {
        return "ok";
    }
    return "good";
};
// Extracts the origin (scheme + host + port) from an absolute URL string.
// The URL constructor throws a TypeError for invalid URLs, as before.
export const fetchOrigin = url => {
    const { origin } = new URL(url);
    return origin;
};
// TODO: we should probably do this using userAgent
// Maps a protocol string (e.g. "chrome:") to a browser icon path.
export const fetchBrowserIcon = protocol => {
    const name = protocol.replace(":", "");
    if (name === "chrome") {
        return "/assets/images/chrome.png";
    }
    if (name === "about") {
        return "/assets/images/firefox.png";
    }
    // Unknown browsers have no icon; fall through returning undefined.
};
// Returns the (absolute, rounded) number of hours between two millisecond
// timestamps. Fix: the old local was named `diffInMs` but actually held
// seconds after the /1000 division — renamed so the units read correctly.
export const getTokenDuration = (createdAt, expiresAt) => {
    const diffInSeconds = (expiresAt - createdAt) / 1000;
    const diffInHours = diffInSeconds / 3600;
    return Math.abs(Math.round(diffInHours));
};
|
<reponame>sergeytkachenko/siesta-template<gh_stars>1-10
StartTest(function (t) {
    /*
        Reproduces a "click offset" bug triggered by a jQuery inconsistency:
        `el.offset()` and `el.scrollTop()` used different algorithms to compute
        the page scroll top (`offset` kept falling through when `pageYOffset`
        was 0, while `scrollTop` stopped as soon as `pageYOffset` existed in
        `window`), so clicks landed on the wrong node in scrolled documents.
    */
    var body = document.body

    body.style.padding = '15px'

    // A few plain content divs before the viewport, matching the original layout.
    var contentDiv = document.createElement('div')
    contentDiv.innerHTML = 'Some content'

    for (var c = 0; c < 4; c++) body.appendChild(contentDiv.cloneNode())

    // Build the same 86 leaf nodes the original listed out literally.
    var leafNodes = []
    for (var n = 0; n < 86; n++) leafNodes.push({ leaf : true })

    var viewport = new Ext.container.Viewport({
        layout : 'fit',
        items : [
            new Ext.tree.Panel({
                bufferedRenderer : false,
                store : new Ext.data.TreeStore({
                    root : {
                        expanded : true,
                        children : leafNodes
                    }
                })
            })
        ]
    })

    t.chain(
        { waitFor : 100 },

        function () {
            var tree = Ext.ComponentQuery.query('treepanel')[ 0 ]
            var nodes = tree.getView().getNodes()

            var middleIndex = Math.round(nodes.length / 2)
            var middleNode = nodes[ middleIndex ]

            // The click must land exactly on the middle node, not a neighbour.
            t.firesOk(nodes[ middleIndex - 1 ], 'click', 0, "Previous node has not been clicked")
            t.firesOk(middleNode, 'click', 1, "Middle node has been clicked")
            t.firesOk(nodes[ middleIndex + 1 ], 'click', 0, "Next node has not been clicked")

            t.chain(
                // some Ext classes delay their processing to the "idle" event
                // (TouchScroller for example); fire it before every action
                function (next) {
                    Ext.GlobalEvents.fireEvent('idle')

                    next()
                },
                {
                    click : function () {
                        return middleNode
                    }
                }
            )
        }
    )
});
|
def process_message(input_dict: dict) -> dict:
    """Replace the ``'message'`` entry of *input_dict* with a ``'response'``.

    The response text is the original message prefixed with
    ``'Received message: '``. The dictionary is mutated in place and also
    returned for call-chaining convenience.

    Args:
        input_dict: Dictionary that must contain a ``'message'`` key.

    Returns:
        The same dictionary object, with ``'message'`` removed and
        ``'response'`` added.

    Raises:
        KeyError: If ``'message'`` is not present (same as the original).
    """
    # pop() both reads and removes the incoming message in one step.
    input_dict['response'] = 'Received message: ' + input_dict.pop('message')
    return input_dict
|
#!/usr/bin/env bash
# Launch the igibson GUI docker image with a VNC server exposed on the host.
# Flags: -p/--vnc-port <port>   -pw/--vnc-password <password>
VNC_PORT=5900
VNC_PASSWORD=112358
PARAMS=
# Parse flags; positional arguments are collected into PARAMS (collected but
# not used further below).
while (( "$#" )); do
case "$1" in
-p|--vnc-port)
VNC_PORT=$2
shift 2
;;
-pw|--vnc-password)
VNC_PASSWORD=$2
shift 2
;;
--) # end argument parsing
shift
break
;;
-*|--*=) # unsupported flags
echo "Error: Unsupported flag $1" >&2
exit 1
;;
*) # preserve positional arguments
PARAMS="$PARAMS $1"
shift
;;
esac
done
echo Starting VNC server on port $VNC_PORT with password $VNC_PASSWORD
echo please run \"python simulator_example.py\" once you see the docker command prompt:
# Map host VNC_PORT to the container's VNC server on 5900; --rm removes the
# container on exit.
docker run --gpus all -ti -p $VNC_PORT:5900 -e VNC_PASSWORD=$VNC_PASSWORD --rm igibson/igibson-gui:latest bash
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build-phase script (vendor generated).
# NOTE(review): the script uses arrays, [[ ]] and `set -o pipefail`, which are
# not POSIX sh — verify the executing shell supports them before changing this.
set -e
set -u
set -o pipefail
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Default to serial code signing unless the caller opted in to parallel signing.
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# install_framework <path-or-name-of-.framework>
# Resolves the framework (full path under BUILT_PRODUCTS_DIR, basename under
# BUILT_PRODUCTS_DIR, or an absolute path), rsyncs it into the app's
# Frameworks folder, strips architectures not being built, and re-signs it
# when code signing is enabled.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow symlinks so rsync copies the real framework, not the link.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
# Fall back to a bare dylib/binary layout if the .framework layout is absent.
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# install_dsym <path-to-.framework.dSYM>
# Copies the dSYM into DERIVED_FILES_DIR, strips architectures not being built
# and moves the result to DWARF_DSYM_FOLDER_PATH. If nothing was stripped, a
# placeholder is touched so Xcode's input/output tracking stays satisfied.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 = stripped OK, 0 = no matching archs.
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
# code_sign_if_enabled <path>
# No-op unless EXPANDED_CODE_SIGN_IDENTITY is set and signing is both required
# and allowed by the build settings. With COCOAPODS_PARALLEL_CODE_SIGN=true the
# codesign call is backgrounded (the script's final `wait` collects them).
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
# Append '&' so the signing command runs in the background.
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# strip_invalid_archs <binary>
# Removes (via lipo) every architecture in the binary that is not in ARCHS.
# Communicates its outcome through the global STRIP_BINARY_RETVAL:
#   1 = binary processed (possibly stripped), 0 = no architectures matched.
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Install every Pods framework; the Debug and Release lists are identical, so
# the set is declared once and installed in the original order.
install_pods_frameworks() {
  local framework
  for framework in \
    "Alamofire/Alamofire.framework" \
    "BEMCheckBox/BEMCheckBox.framework" \
    "BTNavigationDropdownMenu/BTNavigationDropdownMenu.framework" \
    "CMPhotoCropEditor/CMPhotoCropEditor.framework" \
    "DCCommentView/DCCommentView.framework" \
    "DropDown/DropDown.framework" \
    "FLAnimatedImage/FLAnimatedImage.framework" \
    "Popover/Popover.framework" \
    "Realm/Realm.framework" \
    "RealmSwift/RealmSwift.framework" \
    "SCLAlertView/SCLAlertView.framework" \
    "SWRevealViewController/SWRevealViewController.framework" \
    "SmileLock/SmileLock.framework" \
    "SwiftMessages/SwiftMessages.framework" \
    "SwiftyJSON/SwiftyJSON.framework"
  do
    install_framework "${BUILT_PRODUCTS_DIR}/${framework}"
  done
}

if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_pods_frameworks
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_pods_frameworks
fi

# When parallel signing is enabled the codesign invocations were backgrounded;
# wait for all of them to finish before the build phase ends.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
import React, { useState } from 'react';
const App = () => {
const [num1, setNum1] = useState(0);
const [num2, setNum2] = useState(0);
const [result, setResult] = useState(0);
const add = () => {
setResult(num1 + num2);
};
return (
<div>
<input type="number" value={num1} onChange={e => setNum1(+e.target.value)} />
<input type="number" value={num2} onChange={e => setNum2(+e.target.value)} />
<button onClick={add}>Add</button>
<p>The result is {result}</p>
</div>
);
};
export default App;
|
#!/bin/bash
# Change to this script's own directory, then compile all CoffeeScript in the
# current directory to ../js/ and keep watching for changes (-w).
cd "$( cd "$( dirname "$0" )" && pwd )"
coffee -o ../js/ -cw .
|
#!/usr/bin/env bash
#
# Copyright 2020 Brian Smith.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
# OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
set -eux -o pipefail
IFS=$'\n\t'

# NULL is an empty trailing argument so every real flag line below can end
# with a line-continuation backslash.
export NULL=""

# Lint all features and all targets, denying warnings except the allowed lints.
# Fix: "---all-targets" (triple dash) was an unrecognised argument that broke
# the cargo invocation; it must be "--all-targets".
cargo clippy \
      --target-dir=target/clippy \
      --all-features --all-targets \
      -- \
      --deny warnings \
      --allow clippy::collapsible_if \
      --allow clippy::from_over_into \
      --allow clippy::identity_op \
      --allow clippy::len_without_is_empty \
      --allow clippy::len_zero \
      --allow clippy::ptr_arg \
      --allow clippy::let_unit_value \
      --allow clippy::many_single_char_names \
      --allow clippy::needless_range_loop \
      --allow clippy::new_without_default \
      --allow clippy::neg_cmp_op_on_partial_ord \
      --allow clippy::range_plus_one \
      --allow clippy::redundant_slicing \
      --allow clippy::too_many_arguments \
      --allow clippy::trivially_copy_pass_by_ref \
      --allow clippy::type_complexity \
      --allow clippy::unreadable_literal \
      --allow clippy::upper_case_acronyms \
      --allow clippy::vec_init_then_push \
      $NULL
|
<reponame>m-nakagawa/sample<gh_stars>0
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.syntax.syntaxtransform;
import org.apache.jena.atlas.junit.BaseTest ;
import org.apache.jena.query.Query ;
import org.apache.jena.query.QueryFactory ;
import org.apache.jena.query.Syntax ;
import org.apache.jena.sparql.algebra.Algebra ;
import org.apache.jena.sparql.algebra.Op ;
import org.apache.jena.sparql.syntax.syntaxtransform.ElementTransformCleanGroupsOfOne ;
import org.apache.jena.sparql.syntax.syntaxtransform.QueryTransformOps ;
import org.junit.Test ;
/**
 * Tests for {@link ElementTransformCleanGroupsOfOne}: flattening redundant
 * groups-of-one in SPARQL syntax must leave the query algebra unchanged, and
 * must modify the query syntax exactly when the expected (flattened) form
 * differs from the input. An expected value of {@code null} means "input is
 * already flat — no modification expected".
 */
public class TestFlattenSyntax extends BaseTest {
    static String PRE = "PREFIX : <http://example/>\n" ;

    @Test public void test_flatten_basic_01()
    { test(":s0 :p :o .", null) ; }

    @Test public void test_flatten_basic_02()
    { test("{ :s1 :p :o }", ":s1 :p :o") ; }

    @Test public void test_flatten_basic_03()
    { test("{{ :s2 :p :o }}", ":s2 :p :o") ; }

    @Test public void test_flatten_basic_04()
    { test("{{{ :s3 :p :o }}}", ":s3 :p :o") ; }

    @Test public void test_flatten_filter_01()
    { test(":s0 :p :o .{FILTER(?x)}", null) ; }

    // Fix: method was previously misspelled "test_flatten_fileter_02".
    @Test public void test_flatten_filter_02()
    { test("{ :s1 :p :o {FILTER(?x)} }", ":s1 :p :o {FILTER(?x)}") ; }

    @Test public void test_flatten_filter_03()
    { test("{{ :s1 :p :o {FILTER(?x)}}}", " :s1 :p :o {FILTER(?x)}") ; }

    @Test public void test_flatten_optional_01()
    { test("OPTIONAL{ ?s1 :q ?z }", null) ; }

    @Test public void test_flatten_optional_02()
    { test("OPTIONAL{{?s2 :q ?z}}", "OPTIONAL{?s2 :q ?z}") ; }

    @Test public void test_flatten_optional_03()
    { test("OPTIONAL{?s1f :q ?z FILTER(?z) }", null) ; }

    @Test public void test_flatten_optional_04()
    { test("OPTIONAL{{?S2 :q ?z FILTER(?z) }}", null); }

    @Test public void test_flatten_optional_05()
    { test("OPTIONAL{{{?S3 :q ?z FILTER(?z) }}}", "OPTIONAL{{?S3 :q ?z FILTER(?z) }}") ; }

    @Test public void test_flatten_optional_06()
    { test("OPTIONAL{?sx :q ?z {FILTER(?z)} }", null) ; }

    @Test public void test_flatten_pattern_01()
    { test("{?s :q ?z } UNION {?s :q ?z }", null) ; }

    @Test public void test_flatten_pattern_02()
    { test("{{?s :q ?z}} UNION {?s :q ?z }", "{?s :q ?z} UNION {?s :q ?z }") ; }

    @Test public void test_flatten_pattern_03()
    { test("{ ?s :q ?z} UNION {{?s :q ?z}}", "{?s :q ?z} UNION {?s :q ?z }") ; }

    @Test public void test_flatten_pattern_04()
    { test("{{ ?s :q ?z } UNION {{?s :q ?z}}}", "{?s :q ?z} UNION {?s :q ?z }") ; }

    @Test public void test_flatten_expr_01()
    { test("FILTER EXISTS { :s :p :o }", null) ; }

    @Test public void test_flatten_expr_02()
    { test("FILTER EXISTS {{ :s :p :o }}", "FILTER EXISTS { :s :p :o }") ; }

    @Test public void test_flatten_arq_01()
    { test("NOT EXISTS {{ :s :p :o FILTER(1) }}", "NOT EXISTS { :s :p :o FILTER(1)}") ; }

    @Test public void test_flatten_arq_02()
    { test("EXISTS {{ :s :p :o }}", "EXISTS { :s :p :o }") ; }

    /**
     * Applies the clean-groups-of-one transform to {@code input} and checks:
     * (1) the compiled algebra is unchanged by the transform, and
     * (2) the query syntax is modified iff {@code expected} differs from the
     * input ({@code expected == null} means no change expected).
     */
    private static void test(String input, String expected) {
        if ( expected == null )
            expected = input ;

        String qs = gen(PRE, input) ;
        String qsExpected = gen(PRE, expected) ;

        Query query = QueryFactory.create(qs, Syntax.syntaxARQ) ;
        Query query2 = QueryTransformOps.transform(query, new ElementTransformCleanGroupsOfOne()) ;
        Query queryExpected = QueryFactory.create(qsExpected, Syntax.syntaxARQ) ;

        Op op1 = Algebra.compile(query) ;
        Op op2 = Algebra.compile(query2) ;
        assertEquals("Algebra different", op1, op2) ;

        boolean modified = ! query.equals(query2) ;
        boolean expectModification = !queryExpected.equals(query) ;
        assertEquals("Expect query modifed?", expectModification, modified) ;
    }

    /** Wraps a group-graph-pattern fragment into a complete SELECT query. */
    private static String gen(String prefixes, String pattern) {
        return prefixes+"\nSELECT * { "+pattern+"\n}" ;
    }
}
|
Listeners::BrokerUpdatedListener.run
|
package com.stackroute.playerservice.repository;
import com.stackroute.playerservice.domain.Player;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.data.mongo.DataMongoTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.List;
import static org.junit.Assert.*;
// Repository-slice tests for PlayerRepository against an embedded MongoDB
// (@DataMongoTest boots only the Mongo data layer).
@RunWith(SpringRunner.class)
@DataMongoTest
public class PlayerRepositoryTest {
@Autowired
private PlayerRepository playerRepository;
// Shared fixture, re-created before each test in setUp().
private Player player;
@Before
public void setUp() throws Exception {
player = new Player("1", "Shaikh", 100);
}
@After
public void tearDown() throws Exception {
// Wipe the collection so tests stay independent of each other.
playerRepository.deleteAll();
}
// Saving then fetching by id returns a player with the same id.
@Test
public void testSavePlayer() {
playerRepository.save(player);
Player fetchUser = playerRepository.findById(player.getId()).get();
Assert.assertEquals("1", fetchUser.getId());
}
// Saving then fetching by id preserves the player's name.
@Test
public void testSavePlayerName() {
playerRepository.save(player);
Player fetchUser = playerRepository.findById(player.getId()).get();
Assert.assertEquals("Shaikh", fetchUser.getName());
}
// NOTE(review): fetchUser is unused here; the assertion only checks that two
// distinct Player objects are not the same reference.
@Test
public void testSaveUserFailure() {
Player testUser = new Player("2", "Farhaan", 200);
playerRepository.save(player);
Player fetchUser = playerRepository.findById(player.getId()).get();
Assert.assertNotSame(testUser, player);
}
// NOTE(review): this assertion relies on findAll() returning documents in
// insertion order — verify that assumption holds for the Mongo setup used.
@Test
public void testGetAllUser() {
Player p1 = new Player("1", "Johny", 1020);
Player p2 = new Player("2", "Harry", 1030);
playerRepository.save(p1);
playerRepository.save(p2);
List<Player> list = playerRepository.findAll();
Assert.assertEquals("Johny", list.get(0).getName());
}
}
|
<filename>app.py
from flask import Flask, render_template

app = Flask(__name__)


@app.route('/')
def hello_world():
    """Render the landing page."""
    return render_template('index.html')


@app.route('/page1')
def page1():
    """Render the first content page."""
    return render_template('page1.html')


@app.route('/page2')
def page2():
    """Render the second content page."""
    return render_template('page2.html')


if __name__ == '__main__':
    # Bug fix: `app.run` (without parentheses) merely referenced the method
    # and never started the development server.
    app.run()
|
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.credit.creditdefaultswap.pricing;
import javax.time.calendar.ZonedDateTime;
import com.opengamma.analytics.financial.credit.BuySellProtection;
import com.opengamma.analytics.financial.credit.PriceType;
import com.opengamma.analytics.financial.credit.cds.ISDACurve;
import com.opengamma.analytics.financial.credit.creditdefaultswap.definition.LegacyCreditDefaultSwapDefinition;
import com.opengamma.analytics.financial.credit.hazardratemodel.HazardRateCurve;
import com.opengamma.analytics.financial.credit.schedulegeneration.GenerateCreditDefaultSwapIntegrationSchedule;
import com.opengamma.analytics.financial.credit.schedulegeneration.GenerateCreditDefaultSwapPremiumLegSchedule;
import com.opengamma.analytics.util.time.TimeCalculator;
import com.opengamma.financial.convention.daycount.DayCount;
import com.opengamma.financial.convention.daycount.DayCountFactory;
import com.opengamma.util.ArgumentChecker;
/**
* Class containing methods for the valuation of a vanilla Legacy CDS
*/
public class PresentValueLegacyCreditDefaultSwap {
// -------------------------------------------------------------------------------------------------
// Day-count convention used throughout this pricer for time measurement.
private static final DayCount ACT_365 = DayCountFactory.INSTANCE.getDayCount("ACT/365");
// Set the number of partitions to divide the timeline up into for the valuation of the contingent leg
private static final int DEFAULT_N_POINTS = 30;
// Number of integration steps used by this pricer instance (fixed at construction).
private final int _numberOfIntegrationSteps;
// Default constructor: uses DEFAULT_N_POINTS integration steps for the contingent leg.
public PresentValueLegacyCreditDefaultSwap() {
this(DEFAULT_N_POINTS);
}
// Lets callers trade accuracy against speed for the contingent-leg integration.
public PresentValueLegacyCreditDefaultSwap(int numberOfIntegrationPoints) {
_numberOfIntegrationSteps = numberOfIntegrationPoints;
}
// -------------------------------------------------------------------------------------------------
// TODO : Lots of ongoing work to do in this class - Work In Progress
// TODO : Add a method to calc both the legs in one method (useful for performance reasons e.g. not computing survival probabilities and discount factors twice)
// TODO : If valuationDate = adjustedMatDate - 1day have to be more careful in how the contingent leg integral is calculated
// TODO : Fix the bug when val date is very close to mat date
// TODO : Need to add the code for when the settlement date > 0 business days (just a discount factor)
// TODO : Replace the while with a binary search function
// TODO : Should build the cashflow schedules outside of the leg valuation routines to avoid repetition of calculations
// TODO : Eventually replace the ISDACurve with a YieldCurve object (currently using ISDACurve built by RiskCare as this allows exact comparison with the ISDA model)
// TODO : Replace the accrued schedule double with a ZonedDateTime object to make it consistent with other calculations
// TODO : Tidy up the calculatePremiumLeg and valueFeeLegAccrualOnDefault methods
// TODO : Add the calculation for the settlement and stepin discount factors
// -------------------------------------------------------------------------------------------------
// Public method for computing the PV of a CDS based on an input CDS contract (with a hazard rate curve calibrated to market observed data)
/**
 * Computes the present value of a legacy CDS from the buyer-of-protection
 * perspective: PV = -(parSpread/10000) * premiumLegPV + contingentLegPV.
 * A clean price adds accrued interest; a SELL contract flips the sign.
 *
 * @param cds the CDS contract definition (not null)
 * @param yieldCurve the discounting curve (not null)
 * @param hazardRateCurve the calibrated hazard-rate (survival) curve (not null)
 * @return the present value of the CDS
 */
public double getPresentValueCreditDefaultSwap(LegacyCreditDefaultSwapDefinition cds, ISDACurve yieldCurve, HazardRateCurve hazardRateCurve) {
// -------------------------------------------------------------
// Check input CDS, YieldCurve and SurvivalCurve objects are not null
ArgumentChecker.notNull(cds, "LegacyCreditDefaultSwapDefinition");
ArgumentChecker.notNull(yieldCurve, "YieldCurve");
ArgumentChecker.notNull(hazardRateCurve, "HazardRateCurve");
// -------------------------------------------------------------
// Calculate the value of the premium leg (including accrued if required)
double presentValuePremiumLeg = calculatePremiumLeg(cds, yieldCurve, hazardRateCurve);
// Calculate the value of the contingent leg
double presentValueContingentLeg = calculateContingentLeg(cds, yieldCurve, hazardRateCurve);
// Calculate the PV of the CDS (assumes we are buying protection i.e. paying the premium leg, receiving the contingent leg)
// Par spread is quoted in basis points, hence the division by 10000.
double presentValue = -(cds.getParSpread() / 10000.0) * presentValuePremiumLeg + presentValueContingentLeg;
// -------------------------------------------------------------
// If we require the clean price, then calculate the accrued interest and add this to the PV
if (cds.getPriceType() == PriceType.CLEAN) {
presentValue += calculateAccruedInterest(cds, yieldCurve, hazardRateCurve);
}
// If we are selling protection, then reverse the direction of the premium and contingent leg cashflows
if (cds.getBuySellProtection() == BuySellProtection.SELL) {
presentValue = -1 * presentValue;
}
// -------------------------------------------------------------
return presentValue;
}
//-------------------------------------------------------------------------------------------------
// Public method to calculate the par spread of a CDS at contract inception (with a hazard rate curve calibrated to market observed data)
/**
 * Computes the par spread (in basis points) at contract inception:
 * 10000 * contingentLegPV / premiumLegPV. Requires the valuation date to
 * equal the adjusted effective date, and throws if the premium leg PV is zero.
 *
 * @param cds the CDS contract definition (not null)
 * @param yieldCurve the discounting curve (not null)
 * @param hazardRateCurve the calibrated hazard-rate (survival) curve (not null)
 * @return the par spread in basis points
 */
public double getParSpreadCreditDefaultSwap(LegacyCreditDefaultSwapDefinition cds, ISDACurve yieldCurve, HazardRateCurve hazardRateCurve) {
// -------------------------------------------------------------
// Check input CDS, YieldCurve and SurvivalCurve objects are not null
ArgumentChecker.notNull(cds, "CDS field");
ArgumentChecker.notNull(yieldCurve, "YieldCurve field");
ArgumentChecker.notNull(hazardRateCurve, "HazardRateCurve field");
// -------------------------------------------------------------
double parSpread = 0.0;
// -------------------------------------------------------------
// Construct a cashflow schedule object
final GenerateCreditDefaultSwapPremiumLegSchedule cashflowSchedule = new GenerateCreditDefaultSwapPremiumLegSchedule();
// Check if the valuationDate equals the adjusted effective date (have to do this after the schedule is constructed)
ArgumentChecker.isTrue(cds.getValuationDate().equals(cashflowSchedule.getAdjustedEffectiveDate(cds)), "Valuation Date should equal the adjusted effective date when computing par spreads");
// -------------------------------------------------------------
// Calculate the value of the premium leg
double presentValuePremiumLeg = calculatePremiumLeg(cds, yieldCurve, hazardRateCurve);
// Calculate the value of the contingent leg
double presentValueContingentLeg = calculateContingentLeg(cds, yieldCurve, hazardRateCurve);
// -------------------------------------------------------------
// Calculate the par spread (NOTE : Returned value is in bps)
// Guard against division by zero when the premium leg has no value.
if (Double.doubleToLongBits(presentValuePremiumLeg) == 0.0) {
throw new IllegalStateException("Warning : The premium leg has a PV of zero - par spread cannot be computed");
} else {
parSpread = 10000.0 * presentValueContingentLeg / presentValuePremiumLeg;
}
// -------------------------------------------------------------
return parSpread;
}
// -------------------------------------------------------------------------------------------------
// Method to calculate the value of the premium leg of a CDS (with a hazard rate curve calibrated to market observed data)
// The code for the accrued calc has just been lifted from RiskCare's implementation for now because it exactly reproduces the ISDA model - will replace with a better model in due course
// Present value of the premium (fee) leg of a CDS, priced off a hazard rate curve
// calibrated to market observed data. The accrued-on-default calculation was lifted
// from RiskCare's implementation because it exactly reproduces the ISDA model - to be
// replaced with a better model in due course.
//
// Returns cds.getNotional() * (PV of remaining coupons + PV of accrued-on-default),
// or 0.0 when the valuation date equals the (business-day) adjusted maturity date.
private double calculatePremiumLeg(LegacyCreditDefaultSwapDefinition cds, ISDACurve yieldCurve, HazardRateCurve hazardRateCurve) {
    // Running indices into the accrued-leg integration schedule (window of nodes per coupon period)
    int startIndex = 0;
    int endIndex = 0;
    double presentValuePremiumLeg = 0.0;
    double presentValueAccruedInterest = 0.0;
    // Construct a cashflow schedule object for the premium leg
    final GenerateCreditDefaultSwapPremiumLegSchedule cashflowSchedule = new GenerateCreditDefaultSwapPremiumLegSchedule();
    // Build the premium leg cashflow schedule from the contract specification
    ZonedDateTime[] premiumLegSchedule = cashflowSchedule.constructCreditDefaultSwapPremiumLegSchedule(cds);
    // Construct a schedule object for the accrued leg (not a cashflow schedule per se, but a set
    // of time nodes for evaluating the accrued payment integral)
    GenerateCreditDefaultSwapIntegrationSchedule accruedSchedule = new GenerateCreditDefaultSwapIntegrationSchedule();
    // Build the integration schedule for the calculation of the accrued leg
    double[] accruedLegIntegrationSchedule = accruedSchedule.constructCreditDefaultSwapAccruedLegIntegrationSchedule(cds, yieldCurve, hazardRateCurve);
    // Calculate the stepin time with the appropriate offset
    double offsetStepinTime = accruedSchedule.calculateCreditDefaultSwapOffsetStepinTime(cds, ACT_365);
    // Get the date on which we want to calculate the MtM
    ZonedDateTime valuationDate = cds.getValuationDate();
    // Get the (adjusted) maturity date of the trade
    ZonedDateTime adjustedMaturityDate = cashflowSchedule.getAdjustedMaturityDate(cds);
    // Differs from the check in the ctor because of the adjusted maturity date
    ArgumentChecker.isTrue(!valuationDate.isAfter(adjustedMaturityDate), "Valuation date {} must be on or before the adjusted maturity date {}", valuationDate, adjustedMaturityDate);
    // If the valuation date is exactly the adjusted maturity date then simply return zero
    if (valuationDate.equals(adjustedMaturityDate)) {
        return 0.0;
    }
    // Determine where in the cashflow schedule the valuationDate is
    int startCashflowIndex = getCashflowIndex(cds, premiumLegSchedule, 1, 1);
    // Calculate the value of the remaining premium and accrual payments (due after valuationDate)
    for (int i = startCashflowIndex; i < premiumLegSchedule.length; i++) {
        // Beginning and end dates of the current coupon
        ZonedDateTime accrualStart = premiumLegSchedule[i - 1];
        ZonedDateTime accrualEnd = premiumLegSchedule[i];
        // Time between the valuation date (at which survival probability is unity) and the current cashflow
        double t = TimeCalculator.getTimeBetween(valuationDate, accrualEnd, ACT_365);
        // Discount factor at time t
        double discountFactor = yieldCurve.getDiscountFactor(t);
        // If protection starts at the beginning of the period ...
        if (cds.getProtectionStart()) {
            // ... roll all but the last date back by the protection offset (1/365 of a year)
            if (i < premiumLegSchedule.length - 1) {
                t -= cds.getProtectionOffset();
            }
            // This is a bit of a hack - need a more elegant way of dealing with the timing nuances
            if (i == 1) {
                accrualStart = accrualStart.minusDays(1);
            }
            // ... and roll the final maturity date forward by one day
            if (i == premiumLegSchedule.length - 1) {
                accrualEnd = accrualEnd.plusDays(1);
            }
        }
        // Daycount fraction for the current accrual period
        double dcf = cds.getDayCountFractionConvention().getDayCountFraction(accrualStart, accrualEnd);
        // Survival probability at the (possibly offset) time t
        double survivalProbability = hazardRateCurve.getSurvivalProbability(t);
        // Discounted, survival-weighted coupon accrual added to the premium-leg running total
        presentValuePremiumLeg += dcf * discountFactor * survivalProbability;
        // Accrued-on-default component, if the contract pays accrued premium (needs re-writing)
        if (cds.getIncludeAccruedPremium()) {
            double stepinDiscountFactor = 1.0;
            // Slide the integration window forward to the nodes inside this coupon period
            startIndex = endIndex;
            while (accruedLegIntegrationSchedule[endIndex] < t) {
                ++endIndex;
            }
            presentValueAccruedInterest += valueFeeLegAccrualOnDefault(dcf, accruedLegIntegrationSchedule, yieldCurve, hazardRateCurve, startIndex, endIndex,
                offsetStepinTime, stepinDiscountFactor);
        }
    }
    return cds.getNotional() * (presentValuePremiumLeg + presentValueAccruedInterest);
}
//-------------------------------------------------------------------------------------------------
// Need to re-write this code completely!!
// Expected present value of the premium accrued between startIndex and endIndex of the
// integration timeline if the reference entity defaults mid-period (ISDA-style piecewise
// flat forward hazard/interest rates on each sub-interval). Need to re-write this code completely!!
//
// amount   - full-period accrual amount (daycount fraction); amortised at accrualRate per year
// timeline - integration node times (years); startIndex/endIndex bound the current coupon period
// stepinTime / stepinDiscountFactor - protection step-in time and the discount factor used for
//                                     nodes at or before it
private double valueFeeLegAccrualOnDefault(final double amount, final double[] timeline, final ISDACurve yieldCurve, final HazardRateCurve hazardRateCurve, final int startIndex,
    final int endIndex, final double stepinTime, final double stepinDiscountFactor) {
    final double[] timePoints = timeline; //timeline.getTimePoints();
    final double startTime = timePoints[startIndex];
    final double endTime = timePoints[endIndex];
    // Accrual cannot begin before the step-in time
    final double subStartTime = stepinTime > startTime ? stepinTime : startTime;
    final double accrualRate = amount / (endTime - startTime);
    double t0, t1, dt, survival0, survival1, discount0, discount1;
    double lambda, fwdRate, lambdaFwdRate, valueForTimeStep, value;
    // Half-day convention: default is assumed to occur mid-day on average
    t0 = subStartTime - startTime + 0.5 * (1.0 / 365.0); //HALF_DAY_ACT_365F;
    survival0 = hazardRateCurve.getSurvivalProbability(subStartTime);
    double PRICING_TIME = 0.0;
    // Before step-in (or before the pricing time) use the supplied step-in discount factor
    discount0 = startTime < stepinTime || startTime < PRICING_TIME ? stepinDiscountFactor : yieldCurve.getDiscountFactor(timePoints[startIndex]); //discountFactors[startIndex];
    value = 0.0;
    for (int i = startIndex + 1; i <= endIndex; ++i) {
        // Nodes at or before step-in contribute nothing
        if (timePoints[i] <= stepinTime) {
            continue;
        }
        t1 = timePoints[i] - startTime + 0.5 * (1.0 / 365.0); //HALF_DAY_ACT_365F;
        dt = t1 - t0;
        survival1 = hazardRateCurve.getSurvivalProbability(timePoints[i]);
        discount1 = yieldCurve.getDiscountFactor(timePoints[i]); //discountFactors[i];
        // Flat forward hazard and interest rates implied over [t0, t1]
        lambda = Math.log(survival0 / survival1) / dt;
        fwdRate = Math.log(discount0 / discount1) / dt;
        // Tiny offset guards against division by zero when lambda + fwdRate == 0
        lambdaFwdRate = lambda + fwdRate + 1.0e-50;
        // Closed-form integral of t * exp(-(lambda+r)t) over the sub-interval
        valueForTimeStep = lambda * accrualRate * survival0 * discount0
            * (((t0 + 1.0 / lambdaFwdRate) / lambdaFwdRate) - ((t1 + 1.0 / lambdaFwdRate) / lambdaFwdRate) * survival1 / survival0 * discount1 / discount0);
        value += valueForTimeStep;
        t0 = t1;
        survival0 = survival1;
        discount0 = discount1;
    }
    return value;
}
// -------------------------------------------------------------------------------------------------
// If the cleanPrice flag is TRUE then this function is called to calculate the accrued interest between valuationDate and the previous coupon date
// Accrued interest between the last coupon date before the valuation date and the
// step-in date (valuation date + 1 calendar day, not business-day adjusted).
// Called when the cleanPrice flag is TRUE. The yield and hazard rate curves are
// accepted for signature consistency with the other leg calculators but are not
// read here.
private double calculateAccruedInterest(LegacyCreditDefaultSwapDefinition cds, ISDACurve yieldCurve, HazardRateCurve hazardRateCurve) {
    // Regenerate the premium leg coupon schedule from the contract definition
    final GenerateCreditDefaultSwapPremiumLegSchedule scheduleGenerator = new GenerateCreditDefaultSwapPremiumLegSchedule();
    final ZonedDateTime[] couponDates = scheduleGenerator.constructCreditDefaultSwapPremiumLegSchedule(cds);
    // Step-in date: one calendar day after valuation (no business-day adjustment)
    final ZonedDateTime stepinDate = cds.getValuationDate().plusDays(1);
    // Last coupon date falling before the current valuation date
    final ZonedDateTime lastCouponDate = couponDates[getCashflowIndex(cds, couponDates, 0, 1) - 1];
    // Year fraction accrued so far within the current coupon period
    final double accrualFraction = cds.getDayCountFractionConvention().getDayCountFraction(lastCouponDate, stepinDate);
    // Par spread is quoted in basis points, hence the 10000 scaling
    return (cds.getParSpread() / 10000.0) * accrualFraction * cds.getNotional();
}
// -------------------------------------------------------------------------------------------------
// Method to determine where in the premium leg cashflow schedule the valuation date is
// Returns the index of the first date in premiumLegSchedule (scanning forward from
// startIndex) whose value, offset back by deltaDays, lies strictly after the CDS
// valuation date.
private int getCashflowIndex(LegacyCreditDefaultSwapDefinition cds, ZonedDateTime[] premiumLegSchedule, final int startIndex, final int deltaDays) {
    int index = startIndex;
    while (true) {
        // Offset the candidate schedule date back by deltaDays before comparing
        final ZonedDateTime offsetDate = premiumLegSchedule[index].minusDays(deltaDays);
        if (cds.getValuationDate().isBefore(offsetDate)) {
            break;
        }
        index++;
    }
    return index;
}
// -------------------------------------------------------------------------------------------------
// Method to calculate the contingent leg (replicates the calculation in the ISDA model)
// Present value of the contingent (protection) leg of a CDS, replicating the
// calculation in the ISDA model: the default-time integral is evaluated over a
// pre-built integration schedule assuming piecewise constant forward hazard and
// interest rates between adjacent nodes.
//
// Returns cds.getNotional() * (1 - recoveryRate) * integral.
private double calculateContingentLeg(LegacyCreditDefaultSwapDefinition cds, ISDACurve yieldCurve, HazardRateCurve hazardRateCurve) {
    double presentValueContingentLeg = 0.0;
    // Construct an integration schedule object for the contingent leg
    GenerateCreditDefaultSwapIntegrationSchedule contingentLegSchedule = new GenerateCreditDefaultSwapIntegrationSchedule();
    // Build the integration schedule for the calculation of the contingent leg
    double[] contingentLegIntegrationSchedule = contingentLegSchedule.constructCreditDefaultSwapContingentLegIntegrationSchedule(cds, yieldCurve, hazardRateCurve);
    // Survival probability at the first point in the integration schedule
    double survivalProbability = hazardRateCurve.getSurvivalProbability(contingentLegIntegrationSchedule[0]);
    // Discount factor at the first point in the integration schedule
    double discountFactor = yieldCurve.getDiscountFactor(contingentLegIntegrationSchedule[0]);
    // Loop over each of the points in the integration schedule
    for (int i = 1; i < contingentLegIntegrationSchedule.length; ++i) {
        // Time between adjacent points in the integration schedule
        double deltat = contingentLegIntegrationSchedule[i] - contingentLegIntegrationSchedule[i - 1];
        // Roll the survival probability and discount factor forward from the previous node
        double survivalProbabilityPrevious = survivalProbability;
        double discountFactorPrevious = discountFactor;
        survivalProbability = hazardRateCurve.getSurvivalProbability(contingentLegIntegrationSchedule[i]);
        discountFactor = yieldCurve.getDiscountFactor(contingentLegIntegrationSchedule[i]);
        // Forward hazard rate over the interval deltat (assumed constant over this period)
        double hazardRate = Math.log(survivalProbabilityPrevious / survivalProbability) / deltat;
        // Forward interest rate over the interval deltat (assumed constant over this period)
        double interestRate = Math.log(discountFactorPrevious / discountFactor) / deltat;
        // Closed-form contribution of the interval deltat to the contingent leg integral
        presentValueContingentLeg += (hazardRate / (hazardRate + interestRate)) * (1.0 - Math.exp(-(hazardRate + interestRate) * deltat)) * survivalProbabilityPrevious * discountFactorPrevious;
    }
    return cds.getNotional() * (1 - cds.getRecoveryRate()) * presentValueContingentLeg;
}
// -------------------------------------------------------------------------------------------------
// Method to calculate the value of the contingent leg of a CDS (with a hazard rate curve calibrated to market observed data) - Currently not used but this is a more elegant calc than ISDA
// Alternative contingent-leg valuation using a uniform partition of the protection
// period rather than the ISDA integration schedule. Currently not used, but a more
// elegant calculation than the ISDA replication.
private double calculateContingentLegOld(LegacyCreditDefaultSwapDefinition cds, ISDACurve yieldCurve, HazardRateCurve hazardRateCurve) {
    // Schedule generator is needed only to obtain the adjusted maturity date
    final GenerateCreditDefaultSwapPremiumLegSchedule cashflowSchedule = new GenerateCreditDefaultSwapPremiumLegSchedule();
    // Protection window: valuation date through the adjusted maturity date
    final ZonedDateTime valuationDate = cds.getValuationDate();
    final ZonedDateTime adjustedMaturityDate = cashflowSchedule.getAdjustedMaturityDate(cds);
    // Differs from the ctor check because of the adjusted maturity date
    ArgumentChecker.isTrue(!valuationDate.isAfter(adjustedMaturityDate), "Valuation date {} must be on or before the adjusted maturity date {}", valuationDate, adjustedMaturityDate);
    // Valuing exactly at adjusted maturity: no remaining protection
    if (valuationDate.equals(adjustedMaturityDate)) {
        return 0.0;
    }
    // Length (in years, ACT/365) of the period over which protection is provided
    final double protectionPeriod = TimeCalculator.getTimeBetween(valuationDate, adjustedMaturityDate.plusDays(1), ACT_365);
    // Uniform partition of the protection period for the default-time integral
    final int numberOfPartitions = (int) (_numberOfIntegrationSteps * protectionPeriod + 0.5);
    final double epsilon = protectionPeriod / numberOfPartitions;
    // Riemann sum: discounted probability of default in each sub-interval
    double presentValueContingentLeg = 0.0;
    for (int step = 1; step <= numberOfPartitions; step++) {
        final double time = step * epsilon;
        final double previousTime = (step - 1) * epsilon;
        final double defaultProbabilityMass = hazardRateCurve.getSurvivalProbability(previousTime) - hazardRateCurve.getSurvivalProbability(time);
        presentValueContingentLeg += yieldCurve.getDiscountFactor(time) * defaultProbabilityMass;
    }
    // Scale by notional and loss-given-default
    return cds.getNotional() * (1.0 - cds.getRecoveryRate()) * presentValueContingentLeg;
}
// -------------------------------------------------------------------------------------------------
}
|
from abc import ABC, abstractmethod
class AbstractRecognizer(ABC):
    """A base class for facial recognition service interfaces.

    Subclassing ``ABC`` is required for ``@abstractmethod`` to take effect:
    without it the decorator is inert and this "abstract" class could be
    instantiated directly without implementing ``recognize``.

    Args:
        recognition_endpoint (string): The url of the recognition service
            (theoretically could be something other than a string...maybe an object)
    """

    def __init__(self, recognition_endpoint):
        # Stored verbatim; concrete recognizers use it to reach the service.
        self.recognition_endpoint = recognition_endpoint

    @abstractmethod
    def recognize(self, image):
        """Implement this method to interface with the recognition service and add results to an image object.

        Args:
            image (RecognitionImage): An object containing an image file to be sent to the
                recognizer and which will be populated with the recognition information
        """
|
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Abort on the first failing command: every resource created below depends on
# the previous ones, so continuing after a failure would leave a broken,
# partially-provisioned load-balancer stack.
set -euo pipefail

## Reserve a static ip address for sumservice's load balancer
gcloud compute addresses create --global sumservice-ip

## Create a backend service
gcloud compute backend-services create --global sumservice-backend

## Create a urlmap routing all traffic to the backend service
gcloud compute url-maps create sumservice-urlmap \
  --default-service=sumservice-backend

## Create a TLS certificate from the local cert/key pair
gcloud compute ssl-certificates create sumservice-certificate \
  --certificate ./sumservice.crt --private-key ./private.key --global

## Create HTTPS proxy terminating TLS with the certificate above
gcloud compute target-https-proxies create sumservice-https \
  --ssl-certificates=sumservice-certificate \
  --url-map=sumservice-urlmap

## Create a forwarding-rule binding the reserved IP (port 443) to the proxy
gcloud compute forwarding-rules create --global sumservice-lb \
  --target-https-proxy=sumservice-https \
  --address=sumservice-ip \
  --ports=443
|
#!/bin/bash
# SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)

# Kselftest framework requirement - SKIP code is 4.
ksft_skip=4

# /dev/tpmrm0 is a character device node, so check for existence with -e.
# The previous -f (regular file) test can never succeed for a device node,
# which made the test skip unconditionally even when a TPM was present.
if [ -e /dev/tpmrm0 ] ; then
	python -m unittest -v tpm2_tests.SpaceTest
else
	exit $ksft_skip
fi
|
/*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "verification/debug/config_load.h"
#include "verification/debug/allowlist/allowlist.h"
#include "verification/job_queue/job_queue.h"
#include "verification/job_queue/job_fill.h"
#include "verification/cache/results_cache.h"
#include "verification/util/invalid_ref.h"
#include "events/events.h"
#include "runtime/bridge/bridge.h"
#include "runtime/entrypoints/entrypoints.h"
#include "runtime/jit/profiling_data.h"
#include "runtime/include/class_linker-inl.h"
#include "runtime/include/exceptions.h"
#include "runtime/include/locks.h"
#include "runtime/include/mem/panda_smart_pointers.h"
#include "runtime/include/method-inl.h"
#include "runtime/include/runtime.h"
#include "runtime/include/panda_vm.h"
#include "runtime/include/runtime_notification.h"
#include "runtime/include/value-inl.h"
#include "runtime/interpreter/frame.h"
#include "runtime/interpreter/interpreter.h"
#include "libpandabase/utils/hash.h"
#include "libpandabase/utils/span.h"
#include "libpandabase/utils/utf.h"
#include "libpandabase/os/mutex.h"
#include "libpandafile/code_data_accessor-inl.h"
#include "libpandafile/debug_data_accessor-inl.h"
#include "libpandafile/file-inl.h"
#include "libpandafile/line_program_state.h"
#include "libpandafile/method_data_accessor-inl.h"
#include "libpandafile/method_data_accessor.h"
#include "libpandafile/proto_data_accessor-inl.h"
#include "libpandafile/shorty_iterator.h"
#include "runtime/handle_base-inl.h"
#include "runtime/handle_scope-inl.h"
#include "libpandafile/type_helper.h"
namespace panda {
// Builds a method prototype from a proto entity in the panda file: records the
// shorty (compact type tags) and, for every reference-typed entry, resolves and
// stores the referenced class name as a string.
Method::Proto::Proto(const panda_file::File &pf, panda_file::File::EntityId proto_id)
{
    panda_file::ProtoDataAccessor pda(pf, proto_id);
    pda.EnumerateTypes([this](panda_file::Type type) { shorty_.push_back(type); });
    // Non-primitive shorty entries consume reference-type slots in declaration order.
    size_t ref_idx = 0;
    for (auto &t : shorty_) {
        if (t.IsPrimitive()) {
            continue;
        }
        auto id = pda.GetReferenceType(ref_idx++);
        ref_types_.emplace_back(utf::Mutf8AsCString(pf.GetStringData(id).data));
    }
}
// Returns the descriptor string for the prototype's return type: the class
// name for reference types (stored first in ref_types_), otherwise the
// single-character primitive type code used by the panda file format.
std::string_view Method::Proto::GetReturnTypeDescriptor() const
{
    auto ret_type = GetReturnType();
    if (!ret_type.IsPrimitive()) {
        // The return type is the first entry enumerated, so its class name is ref_types_[0].
        return ref_types_[0];
    }
    switch (ret_type.GetId()) {
        case panda_file::Type::TypeId::VOID:
            return "V";
        case panda_file::Type::TypeId::U1:
            return "Z";
        case panda_file::Type::TypeId::I8:
            return "B";
        case panda_file::Type::TypeId::U8:
            return "H";
        case panda_file::Type::TypeId::I16:
            return "S";
        case panda_file::Type::TypeId::U16:
            return "C";
        case panda_file::Type::TypeId::I32:
            return "I";
        case panda_file::Type::TypeId::U32:
            return "U";
        case panda_file::Type::TypeId::F32:
            return "F";
        case panda_file::Type::TypeId::I64:
            return "J";
        case panda_file::Type::TypeId::U64:
            return "Q";
        case panda_file::Type::TypeId::F64:
            return "D";
        case panda_file::Type::TypeId::TAGGED:
            return "A";
        default:
            UNREACHABLE();
    }
}
// 32-bit hash of a "Class::method" full-name string (thin wrapper over GetHash32String).
uint32_t Method::GetFullNameHashFromString(const uint8_t *str)
{
    return GetHash32String(str);
}

// 32-bit hash of a class-name string; same underlying hash as the full-name variant.
uint32_t Method::GetClassNameHashFromString(const uint8_t *str)
{
    return GetHash32String(str);
}
// Hash of this method's "Class::name" string, built from its panda-file entities.
uint32_t Method::GetFullNameHash() const
{
    // NB: this function cannot be used in current unit tests, because
    // some unit tests are using underdefined method objects
    ASSERT(panda_file_ != nullptr && file_id_.IsValid());
    PandaString full_name {ClassHelper::GetName(GetClassName().data)};
    full_name += "::";
    full_name += utf::Mutf8AsCString(GetName().data);
    auto hash = GetFullNameHashFromString(reinterpret_cast<const uint8_t *>(full_name.c_str()));
    return hash;
}
// Computes a 64-bit unique id for a method identified only by class descriptor and
// name (no backing panda file): the upper 32 bits are an all-ones "no file" marker,
// the lower bits a chained FNV-style hash of descriptor then name.
Method::UniqId Method::CalcUniqId(const uint8_t *class_descr, const uint8_t *name)
{
    auto constexpr HALF = 32ULL;
    constexpr uint64_t NO_FILE = 0xFFFFFFFFULL << HALF;
    uint64_t hash = PseudoFnvHashString(class_descr);
    hash = PseudoFnvHashString(name, hash);
    return NO_FILE | hash;
}
// Constructs a Method bound to its declaring class and panda-file entities.
// code_id may be invalid for methods without bytecode; the compilation state
// starts at NOT_COMPILED.
Method::Method(Class *klass, const panda_file::File *pf, panda_file::File::EntityId file_id,
               panda_file::File::EntityId code_id, uint32_t access_flags, uint32_t num_args, const uint16_t *shorty)
    : stor_32_ {{}, access_flags, 0, num_args, 0},
      stor_ptr_ {{}, klass, nullptr, nullptr},
      panda_file_(pf),
      file_id_(file_id),
      code_id_(code_id),
      shorty_(shorty)
{
    SetCompilationStatus(CompilationStage::NOT_COMPILED);
}
// Invokes this method with a statically-known argument count (taken from the
// method's own signature). Thin wrapper over InvokeImpl<false>.
Value Method::Invoke(ManagedThread *thread, Value *args, bool proxy_call)
{
    return InvokeImpl<false>(thread, GetNumArgs(), args, proxy_call);
}

// Dynamic-language invocation path: the caller supplies the actual argument
// count and optional per-call data. Thin wrapper over InvokeImpl<true>.
Value Method::InvokeDyn(ManagedThread *thread, uint32_t num_args, Value *args, bool proxy_call, void *data)
{
    return InvokeImpl<true>(thread, num_args, args, proxy_call, data);
}
// Interprets this method starting at bytecode address `pc` with an explicit
// accumulator value and caller-supplied arguments. Verifies the method first;
// on verification failure (or frame allocation failure) returns a null
// reference or integer zero depending on the declared return type.
Value Method::InvokeGen(ManagedThread *thread, const uint8_t *pc, Value acc, uint32_t num_actual_args, Value *args,
                        void *data)
{
    Frame *current_frame = thread->GetCurrentFrame();
    // CODECHECK-NOLINTNEXTLINE(C_RULE_ID_REDUNDANT_INIT)
    Value res(static_cast<int64_t>(0));
    panda_file::Type ret_type = GetReturnType();
    if (!Verify()) {
        auto ctx = Runtime::GetCurrent()->GetLanguageContext(*this);
        panda::ThrowVerificationException(ctx, GetFullName());
        // Failure sentinel matches the return kind: null for references, 0 otherwise.
        if (ret_type.IsReference()) {
            res = Value(nullptr);
        } else {
            res = Value(static_cast<int64_t>(0));
        }
    } else {
        Span<Value> args_span(args, num_actual_args);
        auto frame_deleter = [](Frame *frame) { FreeFrame(frame); };
        PandaUniquePtr<Frame, FrameDeleter> frame(
            CreateFrameWithActualArgs(num_actual_args, num_actual_args, this, current_frame), frame_deleter);
        // Copy caller arguments into the new frame's virtual registers,
        // preserving each value's representation (tagged / reference / primitive).
        for (size_t i = 0; i < num_actual_args; i++) {
            if (args_span[i].IsDecodedTaggedValue()) {
                DecodedTaggedValue decoded = args_span[i].GetDecodedTaggedValue();
                frame->GetVReg(i).SetValue(decoded.value);
                frame->GetVReg(i).SetTag(decoded.tag);
            } else if (args_span[i].IsReference()) {
                frame->GetVReg(i).SetReference(args_span[i].GetAs<ObjectHeader *>());
            } else {
                frame->GetVReg(i).SetPrimitive(args_span[i].GetAs<int64_t>());
            }
        }
        frame->GetAcc().SetValue(static_cast<uint64_t>(acc.GetAs<int64_t>()));
        frame->SetData(data);
        // NOTE(review): this null check runs after the frame has already been
        // written to above - presumably CreateFrameWithActualArgs cannot
        // partially fail, but confirm against its contract.
        if (UNLIKELY(frame.get() == nullptr)) {
            panda::ThrowOutOfMemoryError("CreateFrame failed: " + GetFullName());
            if (ret_type.IsReference()) {
                res = Value(nullptr);
            } else {
                res = Value(static_cast<int64_t>(0));
            }
            return res;
        }
        // Run the interpreter with entry/exit notifications, then restore the caller's frame.
        thread->SetCurrentFrame(frame.get());
        Runtime::GetCurrent()->GetNotificationManager()->MethodEntryEvent(thread, this);
        interpreter::Execute(thread, pc, frame.get());
        Runtime::GetCurrent()->GetNotificationManager()->MethodExitEvent(thread, this);
        thread->SetCurrentFrame(current_frame);
        res = GetReturnValueFromAcc(ret_type, thread->HasPendingException(), frame->GetAcc());
    }
    return res;
}
// The first shorty entry is the return type by panda-file convention.
panda_file::Type Method::GetReturnType() const
{
    panda_file::ShortyIterator it(shorty_);
    return *it;
}
// Type of argument `idx`. For instance methods argument 0 is the implicit
// `this` reference; the remaining indices are shifted into the proto's
// explicit argument list.
panda_file::Type Method::GetArgType(size_t idx) const
{
    size_t proto_idx = idx;
    if (!IsStatic()) {
        // Argument 0 of an instance method is the receiver reference.
        if (proto_idx == 0) {
            return panda_file::Type(panda_file::Type::TypeId::REFERENCE);
        }
        proto_idx -= 1;
    }
    panda_file::MethodDataAccessor method_accessor(*panda_file_, file_id_);
    panda_file::ProtoDataAccessor proto_accessor(*panda_file_, method_accessor.GetProtoId());
    return proto_accessor.GetArgType(proto_idx);
}
// Class-name string of reference argument `idx`. For instance methods index 0
// resolves to the declaring class (the implicit `this`); subsequent indices are
// shifted into the proto's reference-type list.
panda_file::File::StringData Method::GetRefArgType(size_t idx) const
{
    panda_file::MethodDataAccessor method_accessor(*panda_file_, file_id_);
    size_t ref_idx = idx;
    if (!IsStatic()) {
        // Reference argument 0 of an instance method is the declaring class itself.
        if (ref_idx == 0) {
            return panda_file_->GetStringData(method_accessor.GetClassId());
        }
        ref_idx -= 1;
    }
    panda_file::ProtoDataAccessor proto_accessor(*panda_file_, method_accessor.GetProtoId());
    const panda_file::File::EntityId class_id = proto_accessor.GetReferenceType(ref_idx);
    return panda_file_->GetStringData(class_id);
}
// The method's simple name as stored in the panda file.
panda_file::File::StringData Method::GetName() const
{
    panda_file::MethodDataAccessor mda(*panda_file_, file_id_);
    return panda_file_->GetStringData(mda.GetNameId());
}
// Human-readable "Class::name" string; when with_signature is true the return
// type is prepended and the parenthesised argument type list is appended.
PandaString Method::GetFullName(bool with_signature) const
{
    PandaOStringStream ss;
    // Running index into the method's reference-type list (shared by return and args).
    int ref_idx = 0;
    if (with_signature) {
        auto return_type = GetReturnType();
        if (return_type.IsReference()) {
            ss << ClassHelper::GetName(GetRefArgType(ref_idx++).data) << ' ';
        } else {
            ss << return_type << ' ';
        }
    }
    ss << PandaString(GetClass()->GetName()) << "::" << utf::Mutf8AsCString(Method::GetName().data);
    if (!with_signature) {
        return ss.str();
    }
    const char *sep = "";
    ss << '(';
    panda_file::MethodDataAccessor mda(*panda_file_, file_id_);
    panda_file::ProtoDataAccessor pda(*panda_file_, mda.GetProtoId());
    for (size_t i = 0; i < GetNumArgs(); i++) {
        auto type = GetEffectiveArgType(i);
        if (type.IsReference()) {
            ss << sep << ClassHelper::GetName(GetRefArgType(ref_idx++).data);
        } else {
            ss << sep << type;
        }
        sep = ", ";
    }
    ss << ')';
    return ss.str();
}
// Name string of this method's declaring class.
panda_file::File::StringData Method::GetClassName() const
{
    panda_file::MethodDataAccessor mda(*panda_file_, file_id_);
    return panda_file_->GetStringData(mda.GetClassId());
}

// Builds a Proto value for this method from its proto entity.
Method::Proto Method::GetProto() const
{
    panda_file::MethodDataAccessor mda(*panda_file_, file_id_);
    return Proto(*panda_file_, mda.GetProtoId());
}

// Numeric annotation value for the given field id, read via the data accessor.
uint32_t Method::GetNumericalAnnotation(AnnotationField field_id) const
{
    panda_file::MethodDataAccessor mda(*panda_file_, file_id_);
    return mda.GetNumericalAnnotation(field_id);
}
// String-valued annotation lookup: the numeric annotation holds an offset into
// the panda file's string section; offset 0 means the annotation is absent and
// an empty {0, nullptr} StringData is returned.
panda_file::File::StringData Method::GetStringDataAnnotation(AnnotationField field_id) const
{
    // Only the string-data range of annotation fields is valid here.
    ASSERT(field_id >= AnnotationField::STRING_DATA_BEGIN);
    ASSERT(field_id <= AnnotationField::STRING_DATA_END);
    panda_file::MethodDataAccessor accessor(*panda_file_, file_id_);
    const uint32_t string_offset = accessor.GetNumericalAnnotation(field_id);
    if (string_offset != 0) {
        return panda_file_->GetStringData(panda_file::File::EntityId(string_offset));
    }
    // Offset 0 encodes "annotation not present".
    return {0, nullptr};
}
// Finds the handler pc of the first catch block covering bytecode offset `pc`
// whose catch type is a superclass of `cls` (or a catch-all). Returns
// panda_file::INVALID_OFFSET when no handler matches. The pending exception is
// stashed and restored around the lookup because class resolution below may
// itself require a clean exception slot.
uint32_t Method::FindCatchBlock(Class *cls, uint32_t pc) const
{
    ASSERT(!IsAbstract());
    auto *thread = ManagedThread::GetCurrent();
    [[maybe_unused]] HandleScope<ObjectHeader *> scope(thread);
    // Keep the in-flight exception alive in a handle while it is cleared.
    VMHandle<ObjectHeader> exception(thread, thread->GetException());
    thread->ClearException();
    panda_file::MethodDataAccessor mda(*panda_file_, file_id_);
    panda_file::CodeDataAccessor cda(*panda_file_, mda.GetCodeId().value());
    uint32_t pc_offset = panda_file::INVALID_OFFSET;
    // Enumeration callbacks return false to stop once a handler is found.
    cda.EnumerateTryBlocks([&pc_offset, cls, pc, this](panda_file::CodeDataAccessor::TryBlock &try_block) {
        if ((try_block.GetStartPc() <= pc) && ((try_block.GetStartPc() + try_block.GetLength()) > pc)) {
            try_block.EnumerateCatchBlocks([&](panda_file::CodeDataAccessor::CatchBlock &catch_block) {
                auto type_idx = catch_block.GetTypeIdx();
                // INVALID_INDEX marks a catch-all handler.
                if (type_idx == panda_file::INVALID_INDEX) {
                    pc_offset = catch_block.GetHandlerPc();
                    return false;
                }
                auto type_id = GetClass()->ResolveClassIndex(type_idx);
                auto *handler_class = Runtime::GetCurrent()->GetClassLinker()->GetClass(*this, type_id);
                if (cls->IsSubClassOf(handler_class)) {
                    pc_offset = catch_block.GetHandlerPc();
                    return false;
                }
                return true;
            });
        }
        return pc_offset == panda_file::INVALID_OFFSET;
    });
    // Reinstate the exception that was pending on entry.
    thread->SetException(exception.GetPtr());
    return pc_offset;
}
// Argument type normalised through panda_file::GetEffectiveType.
panda_file::Type Method::GetEffectiveArgType(size_t idx) const
{
    return panda_file::GetEffectiveType(GetArgType(idx));
}

// Return type normalised through panda_file::GetEffectiveType.
panda_file::Type Method::GetEffectiveReturnType() const
{
    return panda_file::GetEffectiveType(GetReturnType());
}
// Maps a bytecode offset to a source line by replaying the method's line number
// program (a DWARF-style state machine of ADVANCE_LINE / ADVANCE_PC / special
// opcodes). Returns -1 when the method has no debug info; returns the line of
// the matching address, the previous line when the program steps past
// bc_offset, or the final line when the program ends first.
int32_t Method::GetLineNumFromBytecodeOffset(uint32_t bc_offset) const
{
    panda_file::MethodDataAccessor mda(*panda_file_, file_id_);
    auto debug_info_id = mda.GetDebugInfoId();
    if (!debug_info_id) {
        return -1;
    }
    using Opcode = panda_file::LineNumberProgramItem::Opcode;
    using EntityId = panda_file::File::EntityId;
    panda_file::DebugInfoDataAccessor dda(*panda_file_, debug_info_id.value());
    const uint8_t *program = dda.GetLineNumberProgram();
    auto size = panda_file_->GetSpanFromId(panda_file_->GetIdFromPointer(program)).size();
    auto opcode_sp = Span(reinterpret_cast<const Opcode *>(program), size);
    panda_file::LineProgramState state(*panda_file_, EntityId(0), dda.GetLineStart(), dda.GetConstantPool());
    size_t i = 0;
    Opcode opcode;
    size_t prev_line = state.GetLine();
    while ((opcode = opcode_sp[i++]) != Opcode::END_SEQUENCE) {
        switch (opcode) {
            case Opcode::ADVANCE_LINE: {
                auto line_diff = state.ReadSLeb128();
                state.AdvanceLine(line_diff);
                break;
            }
            case Opcode::ADVANCE_PC: {
                auto pc_diff = state.ReadULeb128();
                state.AdvancePc(pc_diff);
                break;
            }
            default: {
                // Special opcodes below OPCODE_BASE are no-ops here.
                // CODECHECK-NOLINTNEXTLINE(C_RULE_ID_REDUNDANT_INIT)
                auto opcode_value = static_cast<uint8_t>(opcode);
                if (opcode_value < panda_file::LineNumberProgramItem::OPCODE_BASE) {
                    break;
                }
                // Special opcode: encodes a combined pc delta and line delta.
                // CODECHECK-NOLINTNEXTLINE(C_RULE_ID_REDUNDANT_INIT)
                auto adjust_opcode = opcode_value - panda_file::LineNumberProgramItem::OPCODE_BASE;
                // CODECHECK-NOLINTNEXTLINE(C_RULE_ID_REDUNDANT_INIT)
                uint32_t pc_diff = adjust_opcode / panda_file::LineNumberProgramItem::LINE_RANGE;
                // CODECHECK-NOLINTNEXTLINE(C_RULE_ID_REDUNDANT_INIT)
                int32_t line_diff = adjust_opcode % panda_file::LineNumberProgramItem::LINE_RANGE +
                                    panda_file::LineNumberProgramItem::LINE_BASE;
                state.AdvancePc(pc_diff);
                state.AdvanceLine(line_diff);
                if (state.GetAddress() == bc_offset) {
                    return state.GetLine();
                }
                // Stepped past the target offset: the previous row owns it.
                if (state.GetAddress() > bc_offset) {
                    return prev_line;
                }
                prev_line = state.GetLine();
                break;
            }
        }
    }
    return state.GetLine();
}
// Source file name recorded for this method's declaring class, or an empty
// {0, nullptr} StringData when the class carries no source-file entry.
panda_file::File::StringData Method::GetClassSourceFile() const
{
    panda_file::ClassDataAccessor class_accessor(*panda_file_, GetClass()->GetFileId());
    auto file_string_id = class_accessor.GetSourceFileId();
    if (file_string_id.has_value()) {
        return panda_file_->GetStringData(file_string_id.value());
    }
    // No source-file annotation present for the class.
    return {0, nullptr};
}
// A method counts as "verified" once verification has reached a terminal
// stage - either success or failure. Intrinsics are trusted implicitly.
bool Method::IsVerified() const
{
    if (IsIntrinsic()) {
        return true;
    }
    switch (GetVerificationStage()) {
        case VerificationStage::VERIFIED_OK:
        case VerificationStage::VERIFIED_FAIL:
            // Both terminal outcomes mean verification has completed.
            return true;
        default:
            return false;
    }
}
// Blocks until this method leaves the WAITING verification stage. The second
// callback is the timeout/fallback action: it force-resolves the stage
// according to the VerifierDoesNotFail policy.
void Method::WaitForVerification()
{
    if (GetVerificationStage() == VerificationStage::WAITING) {
        LOG(DEBUG, VERIFIER) << "Method '" << GetFullName() << std::hex << "' ( 0x" << GetUniqId() << ", 0x"
                             << reinterpret_cast<uintptr_t>(this) << " ) is waiting to be verified";
        panda::verifier::JobQueue::WaitForVerification(
            [this] { return GetVerificationStage() == VerificationStage::WAITING; },
            [this] {
                auto &runtime = *Runtime::GetCurrent();
                auto &&verif_options = runtime.GetVerificationOptions();
                auto does_not_fail = verif_options.Mode.VerifierDoesNotFail;
                // Resolve the stall per policy: lenient mode treats it as success.
                SetVerificationStage(does_not_fail ? VerificationStage::VERIFIED_OK : VerificationStage::VERIFIED_FAIL);
            });
    }
}
// Records a verification outcome: persists it in the result cache, moves the
// method to a terminal stage, and wakes any threads blocked on verification.
void Method::SetVerified(bool result)
{
    verifier::VerificationResultCache::CacheResult(GetUniqId(), result);
    SetVerificationStage(result ? VerificationStage::VERIFIED_OK : VerificationStage::VERIFIED_FAIL);
    panda::verifier::JobQueue::SignalMethodVerified();
}
// Ensures this method is verified and reports the outcome. Intrinsics pass
// unconditionally; otherwise the method is enqueued, and in strict mode the
// caller blocks until a terminal stage is reached (the tail call re-reads the
// now-terminal stage). In VerifierDoesNotFail mode verification proceeds
// asynchronously and the method is optimistically treated as valid.
bool Method::Verify()
{
    if (IsIntrinsic()) {
        return true;
    }
    auto stage = GetVerificationStage();
    if (stage == VerificationStage::VERIFIED_OK) {
        return true;
    }
    if (stage == VerificationStage::VERIFIED_FAIL) {
        return false;
    }
    EnqueueForVerification();
    auto &runtime = *Runtime::GetCurrent();
    auto &&verif_options = runtime.GetVerificationOptions();
    if (verif_options.Mode.VerifierDoesNotFail) {
        // Lenient policy: don't block, assume success.
        return true;
    }
    WaitForVerification();
    // WaitForVerification guarantees a terminal stage, so this recursion terminates.
    return Verify();
}
// A method must not be destroyed while a verifier job may still reference it,
// so the destructor blocks until any in-flight verification completes.
Method::~Method()
{
    WaitForVerification();
}
// Tries to enqueue this method for background verification. Returns true when
// the method's verification state has been handled (already waiting, resolved
// from the result cache, job creation failed and the stage was force-resolved,
// or the job was enqueued); returns false only when the method has no valid
// code or is on the skip list.
bool Method::AddJobInQueue()
{
    if (code_id_.IsValid() && !SKIP_VERIFICATION(GetUniqId())) {
        // Atomically claim the WAITING stage; if it was already WAITING, another
        // caller has enqueued the method and there is nothing to do.
        if (ExchangeVerificationStage(VerificationStage::WAITING) == VerificationStage::WAITING) {
            return true;
        }
        // Fast path: reuse a cached verification verdict when available.
        if (verifier::VerificationResultCache::Enabled()) {
            auto status = verifier::VerificationResultCache::Check(GetUniqId());
            switch (status) {
                case verifier::VerificationResultCache::Status::OK:
                    SetVerificationStage(VerificationStage::VERIFIED_OK);
                    LOG(INFO, VERIFIER) << "Verification result of method '" << GetFullName() << "' was cached: OK";
                    return true;
                case verifier::VerificationResultCache::Status::FAILED:
                    SetVerificationStage(VerificationStage::VERIFIED_FAIL);
                    LOG(INFO, VERIFIER) << "Verification result of method '" << GetFullName() << "' was cached: FAIL";
                    return true;
                default:
                    break;
            }
        }
        auto &job = panda::verifier::JobQueue::NewJob(*this);
        // Job allocation failed: resolve the stage per the VerifierDoesNotFail policy.
        if (Invalid(job)) {
            LOG(INFO, VERIFIER) << "Method '" << GetFullName()
                                << "' cannot be enqueued for verification. Cannot create job object.";
            auto &runtime = *Runtime::GetCurrent();
            auto &&verif_options = runtime.GetVerificationOptions();
            auto does_not_fail = verif_options.Mode.VerifierDoesNotFail;
            SetVerificationStage(does_not_fail ? VerificationStage::VERIFIED_OK : VerificationStage::VERIFIED_FAIL);
            return true;
        }
        // Job could not be populated: resolve the stage per policy and release the job.
        if (!panda::verifier::FillJob(job)) {
            LOG(INFO, VERIFIER) << "Method '" << GetFullName() << "' cannot be enqueued for verification";
            auto &runtime = *Runtime::GetCurrent();
            auto &&verif_options = runtime.GetVerificationOptions();
            auto does_not_fail = verif_options.Mode.VerifierDoesNotFail;
            SetVerificationStage(does_not_fail ? VerificationStage::VERIFIED_OK : VerificationStage::VERIFIED_FAIL);
            panda::verifier::JobQueue::DisposeJob(&job);
            return true;
        }
        panda::verifier::JobQueue::AddJob(job);
        LOG(INFO, VERIFIER) << "Method '" << GetFullName() << std::hex << "' ( 0x" << GetUniqId() << ", 0x"
                            << reinterpret_cast<uintptr_t>(this) << " ) enqueued for verification";
        return true;
    }
    return false;
}
// Entry point for lazy verification: enqueues this method if verification is
// enabled and applicable; otherwise (disabled, system/library file, or queue
// refusal) marks the method verified so callers do not wait forever.
void Method::EnqueueForVerification()
{
    // Only the first transition away from NOT_VERIFIED does any work.
    if (GetVerificationStage() != VerificationStage::NOT_VERIFIED) {
        return;
    }
    auto &runtime = *Runtime::GetCurrent();
    auto &&verif_options = runtime.GetVerificationOptions();
    if (verif_options.Enable) {
        if (verif_options.Mode.DebugEnable) {
            // Debug mode: register this method's id with the verifier config
            // (class-name hash + method-name hash -> unique id).
            auto hash = GetFullNameHash();
            PandaString class_name {ClassHelper::GetName(GetClassName().data)};
            auto class_hash = GetFullNameHashFromString(reinterpret_cast<const uint8_t *>(class_name.c_str()));
            panda::verifier::config::MethodIdCalculationHandler(class_hash, hash, GetUniqId());
        }
        // Methods from system files may be assumed verified unless configured otherwise.
        bool is_system = false;
        if (!verif_options.Mode.DoNotAssumeLibraryMethodsVerified) {
            auto *klass = GetClass();
            if (klass != nullptr) {
                auto *file = klass->GetPandaFile();
                is_system = file != nullptr && verifier::JobQueue::IsSystemFile(file);
            }
        }
        if (!is_system && AddJobInQueue()) {
            return;
        }
    }
    if (verif_options.Show.Status) {
        LOG(INFO, VERIFIER) << "Verification result of method '" << GetFullName() << "': SKIP";
    }
    // Skipped methods are recorded as successfully verified.
    SetVerified(true);
}
// Decodes the current verification stage from the atomic access-flags word.
Method::VerificationStage Method::GetVerificationStage() const
{
    return BitsToVerificationStage(stor_32_.access_flags_.load());
}
// Advances the verification stage by OR-ing its bits into the access flags.
// Stages can only accumulate bits (fetch_or), never be cleared — the encoding
// (see BitsToVerificationStage) makes later stages supersets of WAITING.
void Method::SetVerificationStage(VerificationStage stage)
{
    stor_32_.access_flags_.fetch_or((static_cast<uint32_t>(stage) << VERIFICATION_STATUS_SHIFT));
}
// OR-s the new stage bits in and returns the stage encoded *before* the
// update. Not a true exchange: bits are only ever added, which is sufficient
// for the "did another thread already set WAITING?" check in AddJobInQueue.
Method::VerificationStage Method::ExchangeVerificationStage(VerificationStage stage)
{
    return BitsToVerificationStage(
        stor_32_.access_flags_.fetch_or(static_cast<uint32_t>(stage) << VERIFICATION_STATUS_SHIFT));
}
// Maps the 3-bit verification status field to the VerificationStage enum
// branch-free via find-first-set plus an integer 4x/3 correction.
Method::VerificationStage Method::BitsToVerificationStage(uint32_t bits)
{
    uint32_t val = (bits & VERIFICATION_STATUS_MASK) >> VERIFICATION_STATUS_SHIFT;
    // To avoid if - else for conversion set bit index to VerificationStage
    // y = 4x / 3 function for integers is used. It produces correct values for
    // all correct inputs:
    // state                 value  __builtin_ffs  4x/3  VerificationStage
    // not verified:         000    0              0     NOT_VERIFIED
    // waiting:              100    3              3     WAITING
    // verification success: 110    2              2     VERIFIED_OK
    // verification failed:  101    1              1     VERIFIED_FAIL
    return static_cast<VerificationStage>(4U * panda_bit_utils_ffs(val) / 3U);
}
// Lazily allocates per-method profiling data with one inline cache per
// virtual-call site in the bytecode. Thread-safe: the CAS race decides the
// single winner; losers free their allocation and reuse the winner's data.
void Method::StartProfiling()
{
    ASSERT(!ManagedThread::GetCurrent()->GetVM()->GetGC()->IsGCRunning() || Locks::mutator_lock->HasLock());
    // Some thread already started profiling
    if (IsProfilingWithoutLock()) {
        return;
    }
    mem::InternalAllocatorPtr allocator = Runtime::GetCurrent()->GetInternalAllocator();
    // Collect bytecode offsets (relative to the method start) of every
    // virtual-call instruction.
    PandaVector<uint32_t> vcalls;
    Span<const uint8_t> instructions(GetInstructions(), GetCodeSize());
    for (BytecodeInstruction inst(instructions.begin()); inst.GetAddress() < instructions.end();
         inst = inst.GetNext()) {
        if (inst.HasFlag(BytecodeInstruction::Flags::CALL_VIRT)) {
            vcalls.push_back(inst.GetAddress() - GetInstructions());
        }
    }
    if (vcalls.empty()) {
        return;
    }
    ASSERT(std::is_sorted(vcalls.begin(), vcalls.end()));
    // Single allocation: ProfilingData header (padded to the cache alignment)
    // followed by one CallSiteInlineCache per virtual call site.
    auto data = allocator->Alloc(RoundUp(sizeof(ProfilingData), alignof(CallSiteInlineCache)) +
                                 sizeof(CallSiteInlineCache) * vcalls.size());
    // CODECHECK-NOLINTNEXTLINE(CPP_RULE_ID_SMARTPOINTER_INSTEADOF_ORIGINPOINTER)
    auto profiling_data = new (data) ProfilingData(vcalls.size());
    auto ics = profiling_data->GetInlineCaches();
    for (size_t i = 0; i < vcalls.size(); i++) {
        ics[i].Init(vcalls[i]);
    }
    ProfilingData *old_value = nullptr;
    while (!profiling_data_.compare_exchange_weak(old_value, profiling_data)) {
        if (old_value != nullptr) {
            // We're late, some thread already started profiling.
            // NOTE(review): memory comes from Alloc here but is released with
            // Delete (and with Free in StopProfiling) — confirm the internal
            // allocator treats these as interchangeable for raw buffers.
            allocator->Delete(data);
            return;
        }
    }
    EVENT_INTERP_PROFILING(events::InterpProfilingAction::START, GetFullName(), vcalls.size());
}
// Releases the profiling data allocated by StartProfiling and clears the
// pointer. NOTE(review): the Free / reset pair is not atomic with respect to
// concurrent readers of profiling_data_ — confirm callers hold the expected
// lock (the ASSERT below only checks the GC/mutator-lock invariant).
void Method::StopProfiling()
{
    ASSERT(!ManagedThread::GetCurrent()->GetVM()->GetGC()->IsGCRunning() || Locks::mutator_lock->HasLock());
    if (!IsProfilingWithoutLock()) {
        return;
    }
    EVENT_INTERP_PROFILING(events::InterpProfilingAction::STOP, GetFullName(),
                           GetProfilingData()->GetInlineCaches().size());
    mem::InternalAllocatorPtr allocator = Runtime::GetCurrent()->GetInternalAllocator();
    allocator->Free(GetProfilingData());
    profiling_data_ = nullptr;
}
} // namespace panda
|
class Game:
    """Holds the per-session game state: window, ball and remaining lives."""

    def __init__(self, window, ball, lives, lives_label):
        # Collaborators are injected; lives_label mirrors the counter on screen.
        self.window = window
        self.ball = ball
        self.lives = lives
        self.lives_label = lives_label

    def update(self):
        """Deduct a life (and refresh the label) when the ball reaches the floor."""
        floor = self.window.height - self.ball.height
        if self.ball.y < floor:
            return  # ball still in play - nothing to do
        self.lives -= 1
        self.lives_label.text = 'Lives: ' + str(self.lives)
|
#!/bin/bash
# Append a startup timestamp to the local log file.
# Modern $(...) command substitution replaces the legacy backtick form.
echo "System started at: $(date)" >> nihit_log.txt
|
var Path = require('path');
var Promise = require('the-promise');
var uuid = require('uuid');
var _ = require('the-lodash');
/**
 * Mock data provider backing the UI: serves static regions, deployments,
 * clusters, definitions and diagrams, and simulates long-running actions.
 * Every method returns a promise (via Promise.timeout) to mimic an
 * asynchronous backend.
 */
class DataProvider {
    constructor() {
        // Tracks simulated long-running actions by id: { position, status }.
        this._actionTracker = {}
    }
    /**
     * Lists all known cloud regions (static mock data).
     * @returns {Promise<Array<{name: string, provider: string}>>}
     */
    getRegions() {
        console.log("[DataProvider][getRegions] begin")
        return Promise.timeout(1)
            .then(() => {
                console.log("[DataProvider][getRegions] inside begin")
                return [
                    {
                        name: 'us-central1',
                        provider: 'gcp'
                    },
                    {
                        name: 'us-east1',
                        provider: 'gcp'
                    },
                    {
                        name: 'us-east4',
                        provider: 'gcp'
                    },
                    {
                        name: 'us-west1',
                        provider: 'gcp'
                    },
                    {
                        name: 'us-west2',
                        provider: 'gcp'
                    },
                    {
                        name: 'us-east-1',
                        provider: 'aws'
                    },
                    {
                        name: 'us-east-2',
                        provider: 'aws'
                    },
                    {
                        name: 'us-west-1',
                        provider: 'aws'
                    },
                    {
                        name: 'us-west-2',
                        provider: 'aws'
                    }
                ];
            });
    }
    /**
     * Lists deployments for a region. The region argument is only logged;
     * the same static list is returned for every region.
     * @param {string} region
     * @returns {Promise<Array<{name: string}>>}
     */
    getDeployments(region) {
        console.log('[DataProvider][getDeployments] for : %s begin', region);
        return Promise.timeout(1)
            .then(() => {
                console.log('[DataProvider][getDeployments] for : %s inside begin', region);
                return [
                    {
                        name: "prod"
                    },
                    {
                        name: "pprod"
                    },
                    {
                        name: "test"
                    },
                    {
                        name: "dev"
                    }
                ];
            });
    }
    /**
     * Lists clusters for a region/deployment pair (static mock data; the
     * arguments are only logged). Each cluster carries its state, status and
     * the actions the UI may offer for it.
     * @param {string} region
     * @param {string} deployment
     * @returns {Promise<Array<object>>}
     */
    getClusters(region, deployment) {
        console.log('[DataProvider][getClusters] for region: %s deployment: %s begin', region, deployment);
        return Promise.timeout(1)
            .then(() => {
                console.log('[DataProvider][getClusters] for region: %s deployment: %s inside begin', region, deployment);
                return [
                    {
                        name: "hello",
                        state: "deploy",
                        status: "inprogres",
                        actions: [{
                            name: 'build'
                        }, {
                            name: 'push'
                        }, {
                            name: 'undeploy',
                            confirm: true
                        }]
                    },
                    {
                        name: "addr",
                        state: "deploy",
                        status: "completed",
                        actions: [{
                            name: 'build'
                        }, {
                            name: 'push'
                        }, {
                            name: 'undeploy',
                            confirm: true
                        }]
                    },
                    {
                        name: "img",
                        state: "undeploy",
                        status: "error",
                        actions: [{
                            name: 'build'
                        }, {
                            name: 'push'
                        }, {
                            name: 'deploy'
                        }]
                    },
                    {
                        name: "addrf",
                        state: "undeploy",
                        status: "completed",
                        actions: [{
                            name: 'build'
                        }, {
                            name: 'push'
                        }, {
                            name: 'deploy'
                        }]
                    }
                ];
            });
    }
    /**
     * Returns one cluster record.
     * @param {{region: string, deployment: string, cluster: string}} params
     *        Only params.cluster affects the result: 'hello' gets a special
     *        "processing" record, everything else a generic "completed" one.
     * @returns {Promise<object>}
     */
    getCluster(params) {
        console.log("[DataProvider][getCluster] begin")
        return Promise.timeout(1)
            .then(() => {
                console.log("[DataProvider][getCluster] inside begin")
                if (params.cluster == 'hello') {
                    return {
                        name: params.cluster,
                        state: "deploy",
                        status: "processing",
                        actions: [{
                            name: 'build'
                        }, {
                            name: 'push'
                        }, {
                            name: 'deploy'
                        }]
                    };
                }
                return {
                    name: params.cluster,
                    state: "undeploy",
                    status: "completed",
                    actions: [{
                        name: 'build'
                    }, {
                        name: 'push'
                    }, {
                        name: 'deploy'
                    }]
                };
            });
    }
    /**
     * Returns the static definition documents (services, databases, queues
     * and policies) for the "addr" sample cluster; params are only logged.
     * @param {object} params
     * @returns {Promise<Array<object>>}
     */
    getDefinitions(params) {
        console.log('[DataProvider][getDefinitions] : %s begin', params);
        return Promise.timeout(1)
            .then(() => {
                return [
                    {
                        "name": "addr",
                        "kind": "cluster",
                        "provides": {
                            "web": {
                                "public": true,
                                "service": "web"
                            }
                        }
                    },
                    {
                        "name": "proc",
                        "cluster": "addr",
                        "code": {
                            "kind": "docker"
                        },
                        "sector": "main",
                        "kind": "service",
                        "consumes": [
                            {
                                "actions": [
                                    "subscribe"
                                ],
                                "queue": "jobs"
                            },
                            {
                                "database": "book"
                            }
                        ]
                    },
                    {
                        "cluster": "addr",
                        "code": {
                            "kind": "docker"
                        },
                        "kind": "service",
                        "provides": {
                            "default": {
                                "port": 4000,
                                "protocol": "http"
                            }
                        },
                        "name": "app",
                        "sector": "main",
                        "consumes": [
                            {
                                "database": "book"
                            },
                            {
                                "actions": [
                                    "publish"
                                ],
                                "queue": "jobs"
                            },
                            {
                                "cluster": "phone"
                            }
                        ]
                    },
                    {
                        "cluster": "addr",
                        "code": {
                            "kind": "docker"
                        },
                        "kind": "service",
                        "provides": {
                            "default": {
                                "port": 3000,
                                "protocol": "http"
                            }
                        },
                        "name": "web",
                        "sector": "main",
                        "consumes": [
                            {
                                "service": "app"
                            }
                        ]
                    },
                    {
                        "cluster": "addr",
                        "init": "init.sql",
                        "subClass": "sql",
                        "kind": "database",
                        "name": "book",
                        "class": "sql",
                        "sector": "main"
                    },
                    {
                        "name": "jobs",
                        "cluster": "addr",
                        "class": "queue",
                        "subClass": "pubsub",
                        "sector": "main",
                        "kind": "queue"
                    },
                    {
                        "name": "memory",
                        "config": {
                            "max": 150,
                            "min": 100
                        },
                        "sector": "main",
                        "kind": "policy",
                        "target": {
                            "cluster": "addr"
                        }
                    },
                    {
                        "name": "memory",
                        "config": {
                            "max": null,
                            "min": 150
                        },
                        "sector": "main",
                        "kind": "policy",
                        "target": {
                            "cluster": "addr",
                            "deployment": "gprod"
                        }
                    },
                    {
                        "name": "memory",
                        "config": {
                            "max": 300,
                            "min": 200
                        },
                        "sector": "main",
                        "kind": "policy",
                        "target": {
                            "cluster": "addr",
                            "service": "proc",
                            "deployment": "gprod"
                        }
                    },
                    {
                        "name": "scale",
                        "config": {
                            "max": 10,
                            "min": 1,
                            "metrics": {
                                "cpu": {
                                    "targetAverage": 44
                                },
                                "current_connections": {
                                    "targetAverage": 200
                                },
                                "memory": {
                                    "targetAverage": "1G"
                                }
                            }
                        },
                        "sector": "main",
                        "kind": "policy",
                        "target": {
                            "cluster": "addr",
                            "service": "app",
                            "deployment": "gprod"
                        }
                    },
                    {
                        "name": "cpu",
                        "config": {
                            "max": 0.15,
                            "min": 0.1
                        },
                        "sector": "main",
                        "kind": "policy",
                        "target": {
                            "cluster": "addr"
                        }
                    },
                    {
                        "name": "cpu",
                        "config": {
                            "max": 0.2,
                            "min": 0.15
                        },
                        "sector": "main",
                        "kind": "policy",
                        "target": {
                            "cluster": "addr",
                            "deployment": "gprod"
                        }
                    },
                    {
                        "name": "cpu",
                        "config": {
                            "max": 0.25,
                            "min": 0.2
                        },
                        "sector": "main",
                        "kind": "policy",
                        "target": {
                            "cluster": "addr",
                            "service": "proc",
                            "deployment": "gprod"
                        }
                    }
                ];
            });
    }
    /**
     * Resolves the filesystem path of the cluster diagram image. Always the
     * bundled 'hello.png' regardless of params (region/deployment/cluster).
     * @param {object} params
     * @returns {Promise<string>}
     */
    getDiagram(params) {
        console.log("[DataProvider][getDiagram] begin")
        return Promise.timeout(1)
            .then(() => {
                console.log("[DataProvider][getDiagram] inside begin")
                var imgPath = Path.join(__dirname, 'static', 'diagrams', 'hello.png');
                return imgPath;
            });
    }
    /**
     * Starts a simulated action: registers a tracker entry and hands back the
     * generated action id for polling via getActionStatus/getScreenMessage.
     * @param {object} params
     * @returns {Promise<{id: string}>}
     */
    performAction(params) {
        console.log("[DataProvider][performAction] begin")
        return Promise.timeout(1)
            .then(() => {
                console.log("[DataProvider][performAction] inside begin")
                var id = uuid();
                this._actionTracker[id] = {
                    position: 1,
                    status: 'in-progress',
                }
                return {
                    id: id
                };
            });
    }
    /**
     * Reports the status of a previously started action ("Failed" for
     * unknown ids).
     * NOTE(review): the log statement mentions params.cluster, but the
     * lookup key is params.id — confirm which fields callers pass.
     * @param {{id: string}} params
     * @returns {Promise<{status: string}>}
     */
    getActionStatus(params) {
        console.log('[DataProvider][getActionStatus] for cluster: %s begin', params.cluster);
        var actionId = params.id;
        return Promise.timeout(1)
            .then(() => {
                var info = this._actionTracker[actionId];
                if (info) {
                    return {
                        status: info.status
                    }
                } else {
                    return {
                        status: "Failed"
                    }
                }
            });
    }
    /**
     * Streams simulated console output for an action. Each call advances the
     * action one message; once all messages are emitted the action is marked
     * "Completed". With query.next set, only messages after that index are
     * returned.
     * NOTE(review): query.next likely arrives as a string from the query
     * string; the numeric comparison below relies on JS coercion — confirm.
     * @param {{id: string}} params
     * @param {{next?: number}} query
     * @returns {Promise<Array<{kind: string, date: string, msg: string, index?: number}>>}
     */
    getScreenMessage(params, query) {
        console.log('[DataProvider][getScreenMessage] for cluster: %s begin', params.cluster);
        var actionId = params.id;
        var info = this._actionTracker[actionId];
        if (!info) {
            return Promise.timeout(5)
                .then(() => {
                    return [ { kind: "error", date: "", msg: "Action does not exist",}]
                });
        }
        info.position++;
        var screenMessages = [
            { kind: "info", date: "", msg: "Building docker image service://addr-main-proc..." },
            { kind: "info", date: "", msg: "$ docker build -t addr-main-proc /Users/diana/Documents/repos/samples-gcp.git/02.K8sAddr/proc" },
            { kind: "error", date: "", msg: "Sending build context to Docker daemon  125.4kB" },
            { kind: "info", date: "", msg: "Step 1/11 : FROM node:10-jessie" },
            { kind: "info", date: "", msg: "10-jessie: Pulling from library/node" },
            { kind: "info", date: "", msg: "bf295113f40d: Pulling fs layer" },
            { kind: "info", date: "", msg: "62fe5b9a5ae4: Pulling fs layer" },
            { kind: "info", date: "", msg: "6cc848917b0a: Pulling fs layer" },
        ];
        screenMessages.forEach((msg, index) =>
        {
            msg.index = index;
        });
        if (info.position >= screenMessages.length)
        {
            info.position = screenMessages.length;
            info.status = "Completed";
        }
        return Promise.timeout(5)
            .then(() => {
                var result = _.take(screenMessages, info.position);
                if (query.next) {
                    result = result.filter(x => x.index > query.next);
                }
                return result;
            });
    }
}
// Export the provider class; consumers instantiate it themselves.
module.exports = DataProvider;
|
# Configure llvm-test-suite (CTMark subset only) with Ninja for compile-time /
# code-size measurement; benchmarks themselves are disabled.
# Usage: <script> <cache-name>, where <cache-name>.cmake must exist under
# llvm-test-suite/cmake/caches/.
# Expansions are quoted so cache names / working dirs with spaces don't split.
cmake -GNinja -H./llvm-test-suite -B./llvm-test-suite-build \
    -C"./llvm-test-suite/cmake/caches/${1}.cmake" \
    -DCMAKE_C_COMPILER="${PWD}/llvm-project-build/bin/clang" \
    -DTEST_SUITE_USE_PERF=true \
    -DTEST_SUITE_SUBDIRS=CTMark \
    -DTEST_SUITE_RUN_BENCHMARKS=false \
    -DTEST_SUITE_COLLECT_CODE_SIZE=false
|
## Read the "damper" point — appears to be a Project Haystack /read request
## with a Zinc-encoded grid body (a filter row selecting the point by id plus
## a row limit) — confirm against the server's API.
## Update the id below for your point.
curl -X "POST" "http://localhost/api/read" \
-H 'Accept: application/json' \
-H 'Content-Type: text/zinc' \
-d $'ver:"2.0"
filter,limit
"id==@52e40666-a939-49ce-b403-ea08247b379d",1000
'
|
import { ComponentRendering, Field, HtmlElementRendering, Item } from './dataModels';
/**
 * Safely extracts a field value from either a flat fields map or a
 * ComponentRendering. Returns the default (undefined when none is given)
 * if the field cannot be found or has no value.
 */
export function getFieldValue<T>(
  renderingOrFields: ComponentRendering | { [name: string]: Field | Item[] },
  fieldName: string): T | undefined;
export function getFieldValue<T>(
  renderingOrFields: ComponentRendering | { [name: string]: Field | Item[] },
  fieldName: string,
  defaultValue: T): T;
export function getFieldValue<T>(
  renderingOrFields: ComponentRendering | { [name: string]: Field | Item[] },
  fieldName: string,
  defaultValue?: T) {
  if (!renderingOrFields || !fieldName) {
    return defaultValue;
  }
  // Case 1: the argument is itself a fields map.
  const directField = (renderingOrFields as any)[fieldName] as Field<T>;
  if (directField && typeof directField.value !== 'undefined') {
    return directField.value;
  }
  // Case 2: the argument is a ComponentRendering wrapping a fields map.
  const nestedFields = (renderingOrFields as ComponentRendering).fields;
  const nestedField = nestedFields && (nestedFields[fieldName] as Field<T>);
  if (nestedField && typeof nestedField.value !== 'undefined') {
    return nestedField.value;
  }
  return defaultValue;
}
/**
 * Returns the rendering definitions found in the named child placeholder of
 * the given rendering; an empty array when the placeholder is absent.
 */
export function getChildPlaceholder(rendering: ComponentRendering, placeholderName: string)
  : Array<ComponentRendering | HtmlElementRendering> {
  const placeholders = rendering && rendering.placeholders;
  if (!placeholderName || !placeholders) {
    return [];
  }
  return placeholders[placeholderName] || [];
}
|
package org.multibit.hd.ui.views.components;
import org.multibit.hd.ui.views.components.display_qrcode.DisplayQRCodeModel;
import org.multibit.hd.ui.views.components.display_qrcode.DisplayQRCodeView;
import org.multibit.hd.ui.views.components.display_environment_alert.DisplayEnvironmentAlertModel;
import org.multibit.hd.ui.views.components.display_environment_alert.DisplayEnvironmentAlertView;
import org.multibit.hd.ui.views.components.enter_yes_no.EnterYesNoModel;
import org.multibit.hd.ui.views.components.enter_yes_no.EnterYesNoView;
import org.multibit.hd.ui.views.fonts.AwesomeIcon;
/**
 * <p>Factory providing complex UI components (model + view pairs) intended
 * for presentation inside light box popovers.</p>
 *
 * @since 0.0.1
 *
 */
public class Popovers {

  /**
   * <p>Builds the "discard yes/no" popover:</p>
   * <ul>
   * <li>Button to close the light box popover</li>
   * <li>Label warning that choosing Yes discards the current data</li>
   * <li>No returns the user to safety</li>
   * </ul>
   *
   * @param panelName The underlying panel name for this popover
   *
   * @return A new "discard yes/no" model and view
   */
  public static ModelAndView<EnterYesNoModel, EnterYesNoView> newDiscardYesNoPopoverMaV(String panelName) {

    final EnterYesNoModel yesNoModel = new EnterYesNoModel(panelName);
    final EnterYesNoView yesNoView = new EnterYesNoView(yesNoModel, AwesomeIcon.TRASH, true);

    return new ModelAndView<>(yesNoModel, yesNoView);
  }

  /**
   * <p>Builds the "display QR" popover:</p>
   * <ul>
   * <li>Image field showing the QR code</li>
   * <li>Button copying the QR code image to the clipboard</li>
   * <li>Button to close the light box popover</li>
   * </ul>
   *
   * @param panelName The underlying panel name for this popover
   *
   * @return A new "display QR code" model and view
   */
  public static ModelAndView<DisplayQRCodeModel, DisplayQRCodeView> newDisplayQRCodePopoverMaV(String panelName) {

    final DisplayQRCodeModel qrModel = new DisplayQRCodeModel(panelName);
    final DisplayQRCodeView qrView = new DisplayQRCodeView(qrModel);

    return new ModelAndView<>(qrModel, qrView);
  }

  /**
   * <p>Builds the "display environment alert" popover:</p>
   * <ul>
   * <li>Appropriately themed message panel</li>
   * <li>Button to close the light box popover</li>
   * </ul>
   *
   * @param panelName The underlying panel name for this popover
   *
   * @return A new "display environment alert" model and view
   */
  public static ModelAndView<DisplayEnvironmentAlertModel, DisplayEnvironmentAlertView> newDisplayEnvironmentPopoverMaV(String panelName) {

    final DisplayEnvironmentAlertModel alertModel = new DisplayEnvironmentAlertModel(panelName);
    final DisplayEnvironmentAlertView alertView = new DisplayEnvironmentAlertView(alertModel);

    return new ModelAndView<>(alertModel, alertView);
  }
}
|
const importMap = require("./importMap");
// Names the visitors below look for, derived from the polyfill import map.
const MemberExpressionKeys = Object.keys(importMap.MemberExpression);
const NewExpressionKeys = Object.keys(importMap.NewExpression);
/**
 * Babel plugin: injects polyfill imports (from importMap) when a module uses
 * a member or constructor name the map knows about. Modules can opt out with
 * a `// @no-fbow` line comment; script-type files are never touched.
 */
module.exports = function main({ types: t }) {
  // Paths already imported, so each polyfill is added at most once per file.
  const addedFills = [];
  let addPolyfills = true;

  // Prepends `import "<polyfillPath>";` to the Program body (hoisted above
  // other statements via _blockHoist).
  function addImport(path, polyfillPath) {
    if (addedFills.includes(polyfillPath)) return;
    addedFills.push(polyfillPath);
    const importDec = t.importDeclaration([], t.stringLiteral(polyfillPath));
    importDec._blockHoist = 3; // eslint-disable-line
    const programPath = path.find(p => p.isProgram());
    programPath.unshiftContainer("body", importDec);
  }
  return {
    name: "for-better-or-worse",
    visitor: {
      Program(path, state) {
        if (state.file.opts.sourceType === "script") return;
        // Opt-out: any `@no-fbow` line comment disables the plugin for this file.
        const lineComments = state.file.ast.comments.filter(
          c => c.type === "CommentLine" && !!c.value.match(/@no-fbow/gi),
        );
        addPolyfills = !lineComments.length;
      },
      MemberExpression(path, state) {
        if (state.file.opts.sourceType === "script" || !addPolyfills) return;
        if (t.isIdentifier(path.node.property)) {
          if (MemberExpressionKeys.includes(path.node.property.name)) {
            addImport(
              path,
              importMap.MemberExpression[path.node.property.name],
            );
          }
        }
      },
      NewExpression(path, state) {
        if (state.file.opts.sourceType === "script" || !addPolyfills) return;
        if (t.isIdentifier(path.node.callee)) {
          if (NewExpressionKeys.includes(path.node.callee.name)) {
            // NOTE(review): always imports the CustomEvent polyfill even when
            // a different NewExpression key matched — confirm the map only
            // contains CustomEvent, otherwise this looks like a bug.
            addImport(path, importMap.NewExpression.CustomEvent);
          }
        }
      },
    },
  };
};
|
<reponame>openlibraryenvironment/ui-directory
import React from 'react';
import PropTypes from 'prop-types';
import { FormattedMessage } from 'react-intl';
import { Accordion } from '@folio/stripes/components';
import renderCustProps from '../components/custPropRenderer';
const CustomProperties = (props) => {
const { record } = props;
const custprops = Object.values(record?.customProperties ? record?.customProperties : {})
.map(cp => (cp?.[0] ? cp?.[0] : {}))
.filter(cp => cp.type?.defaultInternal === false);
return (
<Accordion
id={props.id}
label={<FormattedMessage id="ui-directory.information.heading.customProps" />}
open={props.open}
onToggle={props.onToggle}
>
{renderCustProps(custprops)}
</Accordion>
);
};
// Runtime prop validation for CustomProperties; all props are optional.
CustomProperties.propTypes = {
  record: PropTypes.object,
  id: PropTypes.string,
  onToggle: PropTypes.func,
  open: PropTypes.bool,
};
export default CustomProperties;
|
/**
 * Builds the HTML markup for a product carousel title, optionally preceded
 * by the icon selected in the theme settings.
 *
 * @param array  $settingsArray Settings array; the 'value' entry holds the selected icon.
 * @param string $title         Title text (optional; defaults to the previous hard-coded value,
 *                              so existing callers are unaffected).
 * @return string HTML for the title block.
 */
function generateProductCarouselTitle($settingsArray, $title = 'Product Carousel Title') {
    // Selected icon from the settings array; empty string when unset.
    $icon = isset($settingsArray['value']) ? $settingsArray['value'] : '';

    $html = '<div class="product-carousel-title">';
    if (!empty($icon)) {
        // NOTE(review): $icon is emitted unescaped. If icons are stored as
        // markup this is intentional, but if admin input can reach here,
        // escape it (htmlspecialchars) to avoid markup/script injection.
        $html .= '<span class="selected-icon">' . $icon . '</span>';
    }
    $html .= '<h2>' . $title . '</h2>';
    $html .= '</div>';

    return $html;
}
// Example usage: build the settings array the way the theme options
// framework would. NOTE(review): $theTemplateID, $theGroupID and
// $heading_icon are assumed to be defined by the enclosing options page —
// confirm before running this snippet standalone.
$settingsArray = array(
    "name" => __("Heading Icon",'rt_theme_admin'),
    "desc" => __('Select and set an icon to precede the product carousel title.','rt_theme_admin'),
    "id" => $theTemplateID.'_'.$theGroupID."_wcproduct_carousel[values][heading_icon]",
    "class" => "icon_selection",
    "value" => $heading_icon, // the currently selected icon
    "type" => "text"
);
$productCarouselTitleHTML = generateProductCarouselTitle($settingsArray);
echo $productCarouselTitleHTML;
|
#!/bin/bash
# Install OpenJDK 8 and NetBeans (snap) on Ubuntu 18.04, then launch it.
# Based on: https://linuxize.com/post/how-to-install-netbeans-on-ubuntu-18-04/
set -e  # abort on the first failed step instead of continuing blindly

sudo apt update
sudo apt install -y openjdk-8-jdk   # -y: keep the install non-interactive
java -version                        # sanity-check the JDK is on PATH
sudo snap install netbeans --classic
netbeans
|
# This file was auto-generated from 'constants.yaml'. Changes may be overridden.
# Pinned Go toolchain version — bump it in constants.yaml, not here.
GO_VERSION='1.15.8'
|
<reponame>shimpeiws/remote-patient-monitoring-api
import Auth from '@aws-amplify/auth';
import {config} from './config';
import APIGateway from 'aws-sdk/clients/apigateway';
import {ICredentials} from '@aws-amplify/core';
import {Spec, SwaggerUIBundle} from 'swagger-ui-dist'
/**
 * Bootstraps Swagger UI: resolves the signed-in user's AWS credentials,
 * downloads the exported OpenAPI spec from API Gateway and renders it.
 */
export const initSwagger = async (): Promise<void> => {
  const credentials = await Auth.currentCredentials();
  const spec = await getAPIGatewaySpec(createAPIGatewayClient(credentials));
  renderUI(spec);
};
const createAPIGatewayClient = (credentials: ICredentials): APIGateway => new APIGateway({
region: config.region,
accessKeyId: credentials.accessKeyId,
secretAccessKey: credentials.secretAccessKey,
sessionToken: <PASSWORD>.sessionToken,
});
/**
 * Exports the OpenAPI 3.0 spec of the configured API stage and normalizes
 * the server basePath entries for Swagger UI.
 *
 * @throws Error when API Gateway returns no documentation body.
 */
const getAPIGatewaySpec = async (apiGateway: APIGateway): Promise<Spec> => {
  const data = await apiGateway.getExport({
    restApiId: config.apiGateway.restApiId,
    stageName: config.apiGateway.stageName,
    exportType: 'oas30',
    accepts: 'application/json',
  }).promise();
  if (!data.body) {
    throw new Error('No documentation body received');
  }
  const spec = JSON.parse(data.body.toString()) as Spec;
  /** Remove leading "/" in base path that leads to incorrect URL with double "//". */
  spec.servers.forEach((server: { variables: { basePath: { default: string } } }) => {
    const basePath = server.variables.basePath.default;
    if (basePath.startsWith('/')) {
      // slice(1) replaces the deprecated String.prototype.substr.
      server.variables.basePath.default = basePath.slice(1);
    }
  });
  return spec;
};
/** Mounts Swagger UI into the #swagger element using the given spec. */
const renderUI = (spec?: Spec): void => {
  const options = {
    spec: spec,
    'dom_id': '#swagger',
    deepLinking: true,
  };
  SwaggerUIBundle(options);
};
|
// TypeScript compiler emit helper: wires up prototypal inheritance for the
// __extends(Child, Base) calls below. Generated code — edit the .ts source,
// not this file.
var __extends = (this && this.__extends) || function (d, b) {
    for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
    function __() { this.constructor = d; }
    d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
// TypeScript compiler emit helper: applies decorators to a target/key,
// preferring the native Reflect.decorate when present. Generated code.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// Compiled output of the codex.phpdoc TypeScript sources (PhpdocApi): a
// caching HTTP client for phpdoc entities, sources, docs and popovers.
// Generated code — make changes in the TypeScript source, not here.
var codex;
(function (codex) {
    var phpdoc;
    (function (phpdoc) {
        var defined = codex.util.defined;
        var create = codex.util.create;
        var PhpdocApi = (function (_super) {
            __extends(PhpdocApi, _super);
            // Per-project/ref API client; caches are keyed by entity name and
            // bypassed entirely when codex.config('debug') is true.
            function PhpdocApi(project, ref) {
                if (project === void 0) { project = ''; }
                if (ref === void 0) { ref = 'master'; }
                _super.call(this, codex.config('apiUrl'));
                this.ref = null;
                this.entities = {};
                this.sources = {};
                this.docs = {};
                this.popovers = {};
                this.project = project;
                this.ref = ref;
            }
            // Switching project or ref invalidates the entity cache.
            PhpdocApi.prototype.setProject = function (project) {
                this.project = project;
                this.entities = {};
            };
            PhpdocApi.prototype.setRef = function (ref) {
                this.ref = ref;
                this.entities = {};
            };
            // NOTE(review): 'entitiy' is misspelled in the TS source; the name
            // must be kept for compatibility with existing callers.
            PhpdocApi.prototype.entitiy = function (name) {
                var defer = create();
                if (codex.config('debug') === false && defined(this.entities[name])) {
                    defer.resolve(this.entities[name]);
                }
                else {
                    _super.prototype.get.call(this, ['phpdoc', this.project, this.ref, 'entity'].join('/'), { entity: name }).then(function (res) { return defer.resolve(phpdoc.Entity.make(res.data)); });
                }
                return defer.promise;
            };
            // NOTE(review): identical body to entitiy() above — confirm whether
            // this should hit a 'method' endpoint in the TS source.
            PhpdocApi.prototype.method = function (name) {
                var defer = create();
                if (codex.config('debug') === false && defined(this.entities[name])) {
                    defer.resolve(this.entities[name]);
                }
                else {
                    _super.prototype.get.call(this, ['phpdoc', this.project, this.ref, 'entity'].join('/'), { entity: name }).then(function (res) { return defer.resolve(phpdoc.Entity.make(res.data)); });
                }
                return defer.promise;
            };
            PhpdocApi.prototype.list = function (full) {
                if (full === void 0) { full = false; }
                var defer = create();
                _super.prototype.get.call(this, ['phpdoc', this.project, this.ref, 'list'].join('/'), { full: full }).then(function (data) { return defer.resolve(data.data); });
                return defer.promise;
            };
            PhpdocApi.prototype.tree = function (full) {
                if (full === void 0) { full = false; }
                var defer = create();
                _super.prototype.get.call(this, ['phpdoc', this.project, this.ref, 'tree'].join('/'), { full: full }).then(function (data) { return defer.resolve(data); });
                return defer.promise;
            };
            PhpdocApi.prototype.source = function (name) {
                var _this = this;
                var defer = create();
                if (codex.config('debug') === false && defined(this.sources[name])) {
                    defer.resolve(this.sources[name]);
                }
                else {
                    _super.prototype.get.call(this, ['phpdoc', this.project, this.ref, 'source'].join('/'), { entity: name }).then(function (res) {
                        _this.sources[name] = res.data.source;
                        defer.resolve(res.data.source);
                    });
                }
                return defer.promise;
            };
            PhpdocApi.prototype.doc = function (name) {
                var _this = this;
                var defer = create();
                if (codex.config('debug') === false && defined(this.docs[name])) {
                    defer.resolve(this.docs[name]);
                }
                else {
                    _super.prototype.get.call(this, ['phpdoc', this.project, this.ref, 'doc'].join('/'), { entity: name }).then(function (res) {
                        _this.docs[name] = res.data.doc;
                        defer.resolve(res.data.doc);
                    });
                }
                return defer.promise;
            };
            PhpdocApi.prototype.popover = function (name) {
                var _this = this;
                var defer = create();
                if (codex.config('debug') === false && defined(this.popovers[name])) {
                    defer.resolve(this.popovers[name]);
                }
                else {
                    _super.prototype.get.call(this, ['phpdoc', this.project, this.ref, 'popover'].join('/'), { name: name }).then(function (res) {
                        _this.popovers[name] = res.data;
                        defer.resolve(res.data);
                    });
                }
                return defer.promise;
            };
            return PhpdocApi;
        }(codex.Api));
        phpdoc.PhpdocApi = PhpdocApi;
    })(phpdoc = codex.phpdoc || (codex.phpdoc = {}));
})(codex || (codex = {}));
// Compiled output (Entity): thin wrapper around a raw phpdoc entity record
// that precomputes its hash-link and type icon. Generated code — edit the
// TypeScript source.
var codex;
(function (codex) {
    var phpdoc;
    (function (phpdoc) {
        var Entity = (function () {
            function Entity(e) {
                this.e = e;
                this.typeIcon = this.getTypeIcon();
                this.link = this.getLink();
            }
            // Hash-bang link to the entity on the current page.
            Entity.prototype.getLink = function () {
                return window.location.pathname + "#!/" + this.e.full_name;
            };
            // FontAwesome icon class per entity type; empty for unknown types.
            Entity.prototype.getTypeIcon = function () {
                switch (this.e.type) {
                    case 'class':
                        return 'fa fa-file-code-o color-green-500';
                    case 'interface':
                        return 'fa fa-code color-purple-800';
                    case 'trait':
                        return 'fa fa-terminal color-blue-500';
                }
                return '';
            };
            Entity.make = function (e) {
                return new Entity(e);
            };
            // NOTE(review): stub in the compiled output — confirm whether the
            // TS source intends to implement getClassLink().
            Entity.prototype.getClassLink = function () {
                return '';
            };
            return Entity;
        }());
        phpdoc.Entity = Entity;
    })(phpdoc = codex.phpdoc || (codex.phpdoc = {}));
})(codex || (codex = {}));
// Compiled output (PhpdocHelper): loads the phpdoc list and tree in parallel,
// exposes them plus link/tooltip helpers, and registers itself as $.phpdoc.
// Generated code — edit the TypeScript source, not this emit.
var codex;
(function (codex) {
    var phpdoc;
    (function (phpdoc) {
        var PhpdocHelper = (function () {
            function PhpdocHelper() {
                this._list = [];
                this._tree = {};
            }
            // Creates the API client and fetches list + tree concurrently;
            // the internal deferred resolves when both have arrived.
            PhpdocHelper.prototype.init = function (project, ref) {
                var _this = this;
                if (project === void 0) { project = ''; }
                if (ref === void 0) { ref = 'master'; }
                this._api = new phpdoc.PhpdocApi(project, ref);
                this.defer = codex.util.create();
                async.parallel([
                    function (cb) { return _this.api.list().then(function (res) {
                        _this._list = res;
                        cb();
                    }); },
                    function (cb) { return _this.api.tree().then(function (res) {
                        _this._tree = res.data;
                        cb();
                    }); }
                ], function () {
                    _this.defer.resolve();
                });
                return this;
            };
            // Runs the callback once init()'s parallel loads have completed.
            PhpdocHelper.prototype.ready = function (cb) {
                return this.defer.promise.then(cb);
            };
            Object.defineProperty(PhpdocHelper.prototype, "project", {
                get: function () {
                    return this._api.project;
                },
                enumerable: true,
                configurable: true
            });
            Object.defineProperty(PhpdocHelper.prototype, "ref", {
                get: function () {
                    return this._api.ref;
                },
                enumerable: true,
                configurable: true
            });
            Object.defineProperty(PhpdocHelper.prototype, "list", {
                get: function () {
                    return this._list;
                },
                enumerable: true,
                configurable: true
            });
            Object.defineProperty(PhpdocHelper.prototype, "tree", {
                get: function () {
                    return this._tree;
                },
                enumerable: true,
                configurable: true
            });
            Object.defineProperty(PhpdocHelper.prototype, "api", {
                get: function () {
                    return this._api;
                },
                enumerable: true,
                configurable: true
            });
            // Attaches Bootstrap tooltips/popovers to phpdoc link elements.
            PhpdocHelper.prototype.initLinks = function () {
                var attr = {
                    trigger: 'hover',
                    html: true,
                    viewport: 'body',
                    container: 'body',
                    placement: 'top'
                };
                var $link = $('.phpdoc-link');
                $link.tooltip(_.merge(attr, {
                    template: "<div class=\"tooltip tooltip-phpdoc\" role=\"tooltip\"><div class=\"tooltip-arrow\"></div><div class=\"tooltip-inner\"></div></div>"
                }));
                var $popoverLink = $('.phpdoc-popover-link');
                $popoverLink.popover(_.merge(attr, {
                    template: "<div class=\"popover popover-phpdoc\" role=\"tooltip\"><div class=\"arrow\"></div><h3 class=\"popover-title\"></h3><div class=\"popover-content\"></div></div>"
                }));
            };
            PhpdocHelper.prototype.classLink = function (fullName) {
                return window.location.pathname + "#!/" + fullName;
            };
            // Renders a "|"-separated type list as HTML: simple types become
            // spans, namespaced types become links (local when in the list).
            PhpdocHelper.prototype.makeTypeLink = function (types) {
                var _this = this;
                var els = [];
                types.toString().split('|').forEach(function (type) {
                    var isAdvancedtype = type.indexOf('\\') !== -1;
                    if (!isAdvancedtype) {
                        els.push($('<span>')
                            .text(type)
                            .addClass('simple-type simple-type-' + type.toLowerCase())
                            .get(0)
                            .outerHTML);
                    }
                    else {
                        var found = _.find(_this._list, { full_name: type });
                        var $a = $('<a>')
                            .text(type.split('\\').reverse()[0])
                            .addClass('type-link')
                            .attr('title', type);
                        if (codex.util.defined(found)) {
                            $a.addClass('local');
                            $a.attr('href', _this.classLink(type));
                        }
                        els.push($a.get(0).outerHTML);
                    }
                });
                return els.join(' | ');
            };
            // Builds the modifier string for a method signature
            // (e.g. "abstract public static").
            PhpdocHelper.prototype.methodCallsign = function (method) {
                var txt = method.visibility;
                if (method.abstract) {
                    txt = 'abstract ' + txt;
                }
                if (method.final) {
                    txt += ' final';
                }
                if (method.static) {
                    txt += ' static';
                }
                return txt;
            };
            // Removes every occurrence of each extra argument from arr, in place.
            PhpdocHelper.prototype.removeValues = function (arr) {
                var what, a = arguments, L = a.length, ax;
                while (L > 1 && arr.length) {
                    what = a[--L];
                    while ((ax = arr.indexOf(what)) !== -1) {
                        arr.splice(ax, 1);
                    }
                }
                return arr;
            };
            return PhpdocHelper;
        }());
        phpdoc.PhpdocHelper = PhpdocHelper;
        jQuery.extend({
            phpdoc: new PhpdocHelper
        });
    })(phpdoc = codex.phpdoc || (codex.phpdoc = {}));
})(codex || (codex = {}));
// Compiled TypeScript output (namespace codex.phpdoc).
// PhpdocWidget is a jQuery-UI-style widget rendering a phpdoc browser:
// a jstree class tree on the left, the selected class's docs on the right.
var codex;
(function (codex) {
    var phpdoc;
    (function (phpdoc) {
        var PhpdocWidget = (function (_super) {
            __extends(PhpdocWidget, _super);
            function PhpdocWidget() {
                _super.call(this);
                // Prefix for events this widget triggers.
                this.widgetEventPrefix = 'phpdoc';
                this.options = {
                    project: '',
                    ref: 'master',
                    defaultClass: null,
                    styleClasses: {
                        container: 'phpdoc',
                        tree: 'phpdoc-tree',
                        content: 'phpdoc-content',
                    },
                    // Passed straight through to jstree.
                    jstree: {
                        'plugins': ['types', 'search', 'wholerow'],
                        'core': {
                            'themes': {
                                'responsive': false,
                                'name': 'codex'
                            }
                        },
                        'types': {
                            'default': { 'icon': 'fa fa-file' },
                            'folder': { 'icon': 'fa fa-folder color-blue-grey-500' },
                            'class': { icon: 'fa fa-file-code-o color-green-500' },
                            'interface': { icon: 'fa fa-code color-purple-800' },
                            'trait': { icon: 'fa fa-terminal color-blue-500' }
                        }
                    }
                };
                this.history = [];
                // Guards against re-entrant select_node events while we
                // select tree nodes programmatically (see openTreeTo).
                this.ignoreTreeSelect = false;
            }
            // Scoped jQuery lookup within the widget's root element.
            PhpdocWidget.prototype.$ = function (sel) {
                return this.element.find(sel);
            };
            // Widget setup: build DOM skeleton, load the phpdoc list/tree via
            // $.phpdoc, then open the class from the URL hash / defaultClass /
            // first list entry, and wire history + tree listeners.
            PhpdocWidget.prototype._create = function () {
                var _this = this;
                if (codex.config('debug')) {
                    window['widget'] = this;
                }
                this.$el = this.element;
                this.data = { list: [], tree: {}, entities: [] };
                this.$el.html('');
                this.$el.ensureClass(this.options.styleClasses.container);
                this.$tree = $('<div>').addClass(this.options.styleClasses.tree).appendTo(this.$el);
                this.$treeRoot = $('<ul>').appendTo(this.$tree);
                this.$content = $('<div>').addClass(this.options.styleClasses.content).appendTo(this.$el);
                codex.startLoader(this.$content);
                $.phpdoc.init(this.options.project, this.options.ref);
                this.api = $.phpdoc.api;
                $.phpdoc.ready(function () {
                    _this.data.list = $.phpdoc.list;
                    _this.data.tree = $.phpdoc.tree;
                    codex.stopLoader(_this.$content);
                    _this._createTree();
                    // Resolve the class to show, by priority:
                    // URL "#!/..." hash, defaultClass option, first list entry.
                    var fullName;
                    if (location.hash.indexOf('#!/') !== -1) {
                        fullName = location.hash.replace(/\#\!\//, '');
                    }
                    else if (_this.options.defaultClass !== null) {
                        fullName = _this.options.defaultClass;
                    }
                    else {
                        fullName = _this.data.list[0].full_name;
                    }
                    window.history.replaceState(null, fullName, window.location.pathname + "#!/" + fullName);
                    // Back/forward buttons re-open the class from the hash.
                    window.addEventListener("popstate", function (event) {
                        console.log('popstate', window.location);
                        if (location.hash.indexOf('#!/') !== -1) {
                            _this.open(location.hash.replace(/\#\!\//, ''));
                        }
                    }, false);
                    _this.open(fullName);
                    _this._bindTreeListener();
                });
            };
            // Build the <ul> structure from the loaded tree and init jstree.
            PhpdocWidget.prototype._createTree = function () {
                this._traverseTree(this.data.tree, this.$treeRoot, 0);
                this.$tree.jstree(this.options.jstree);
                this.tree = this.$tree.jstree();
            };
            // Folders toggle open; leaf nodes open their class and push a
            // history entry.
            PhpdocWidget.prototype._bindTreeListener = function () {
                var _this = this;
                this.$tree.on('select_node.jstree', this, function (event, data) {
                    if (_this.ignoreTreeSelect)
                        return;
                    codex.debug.log('select_node.jstree', data);
                    codex.debug.log('Selected type', data.node.type);
                    if (data.node.type === 'folder') {
                        _this.tree.open_node(data.node);
                    }
                    else {
                        var fullName = data.node.data.fullName;
                        _this.open(fullName);
                        window.history.pushState(null, fullName, window.location.pathname + "#!/" + fullName);
                    }
                });
            };
            PhpdocWidget.prototype.scrollToBegin = function () {
                $('html, body').animate({ scrollTop: this.$content.offset().top }, 800);
            };
            // Fetch and render the documentation page for `name`, then (in
            // parallel) init tooltips, highlight code, sync the tree selection
            // and scroll to the top of the content pane.
            PhpdocWidget.prototype.open = function (name) {
                var _this = this;
                console.log('phpdoc open', name);
                codex.startLoader(this.$content);
                this.$('.type-link').tooltip('hide');
                codex.debug.profile('doc-request');
                this.api.doc(name).then(function (doc) {
                    codex.debug.profileEnd();
                    codex.stopLoader(_this.$content);
                    codex.debug.profile('doc-html');
                    _this.$content.html(doc);
                    codex.debug.profileEnd();
                    async.parallel([
                        function (cb) {
                            codex.debug.profile('tooltips');
                            _this.$('.type-link, .visibility-icon').tooltip({ viewport: 'body', container: 'body' });
                            cb();
                            codex.debug.profileEnd();
                        },
                        function (cb) {
                            codex.debug.profile('highlight');
                            Prism.highlightAll();
                            cb();
                            codex.debug.profileEnd();
                        },
                        function (cb) {
                            codex.debug.profile('tree');
                            _this.openTreeTo(name);
                            cb();
                            codex.debug.profileEnd();
                        },
                        function (cb) {
                            codex.debug.profile('scroll');
                            _this.scrollToBegin();
                            cb();
                            codex.debug.profileEnd();
                        }
                    ], function () {
                        console.log('cb done', arguments);
                    });
                }).otherwise(function (e) {
                    console.error(e);
                });
            };
            // Recursively turn the nested tree object into <ul>/<li> markup.
            // String keys are namespace folders; numeric keys are leaf entries.
            PhpdocWidget.prototype._traverseTree = function (items, $tree, level) {
                for (var k in items) {
                    var item = items[k];
                    if (isNaN(parseInt(k))) {
                        var $nel = $('<ul>');
                        var $nli = $('<li>').text(k).append($nel);
                        $nli.addClass('fs-12');
                        if (level == 0) {
                            $nli.attr('data-jstree', '{ "opened" : true, "type" : "folder" }');
                        }
                        else {
                            $nli.attr('data-jstree', '{ "type" : "folder" }');
                        }
                        // Accumulate the fully-qualified namespace path.
                        var namePath = "\\" + k;
                        if (typeof $tree.closest('li').attr('data-full-name') !== "undefined") {
                            namePath = $tree.closest('li').attr('data-full-name') + namePath;
                        }
                        $nli.attr('data-full-name', namePath);
                        $tree.prepend($nli);
                        // NOTE(review): `level++` passes the OLD value down,
                        // so every recursion level sees 0 — confirm intended.
                        this._traverseTree(item, $nel, level++);
                    }
                    else {
                        $tree.append($('<li>')
                            .text(item['name'])
                            .attr('data-jstree', '{ "type": "' + item['type'] + '" }')
                            .attr('data-full-name', item['full_name']));
                    }
                }
            };
            // Find the flat jstree node whose fullName ends with `fullName`;
            // returns the node object or false.
            PhpdocWidget.prototype.searchTree = function (fullName) {
                var items = this.tree.get_json(null, { flat: true });
                codex.debug.log('search for', fullName, 'in', items);
                var found = false;
                items.forEach(function (item) {
                    if (typeof item.data.fullName !== "undefined" && _.endsWith(item.data.fullName, fullName)) {
                        codex.debug.log('search for', fullName, 'found', item);
                        found = item;
                        return false;
                    }
                });
                return found;
            };
            // Expand/select the tree node for `fullName` without re-firing the
            // select handler (ignoreTreeSelect suppresses it).
            PhpdocWidget.prototype.openTreeTo = function (fullName) {
                var node = this.searchTree(fullName);
                if (node !== false) {
                    this.ignoreTreeSelect = true;
                    this.tree.close_all();
                    this.tree._open_to(node);
                    this.tree.deselect_all();
                    this.tree.select_node(node);
                    this.ignoreTreeSelect = false;
                }
            };
            PhpdocWidget.prototype._destroy = function () {
                codex.debug.log('destroy');
            };
            // Register as a jQuery widget named "phpdoc".
            PhpdocWidget = __decorate([
                codex.util.widget('phpdoc')
            ], PhpdocWidget);
            return PhpdocWidget;
        }(codex.util.Widget));
        phpdoc.PhpdocWidget = PhpdocWidget;
    })(phpdoc = codex.phpdoc || (codex.phpdoc = {}));
})(codex || (codex = {}));
var codex;
(function (codex) {
    var phpdoc;
    (function (phpdoc) {
        // Shared helper instance used by templates and the widget.
        phpdoc.helper = new phpdoc.PhpdocHelper;
        // Public entry point: attach the phpdoc widget to `selector`
        // once the DOM is ready.
        function init(selector, options) {
            if (options === void 0) { options = {}; }
            $(function () { return $(selector).phpdoc(options); });
        }
        phpdoc.init = init;
    })(phpdoc = codex.phpdoc || (codex.phpdoc = {}));
})(codex || (codex = {}));
//# sourceMappingURL=phpdoc.js.map
|
<filename>docs/52.bundle.js
// NOTE: webpack-generated chunk (52.bundle.js) registering the EUI "bolt"
// icon module. Do not edit by hand — regenerate from the icon sources.
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[52],{
/***/ "../../src/components/icon/assets/bolt.js":
/*!***************************************************************************!*\
  !*** /Users/chanderprall/projects/eui/src/components/icon/assets/bolt.js ***!
  \***************************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.icon = void 0;
__webpack_require__(/*! core-js/modules/es6.object.assign */ "../../node_modules/core-js/modules/es6.object.assign.js");
var _react = _interopRequireDefault(__webpack_require__(/*! react */ "../../node_modules/react/index.js"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
// Stateless 16x16 SVG icon component; extra props spread onto the <svg>.
var EuiIconBolt = function EuiIconBolt(props) {
  return _react.default.createElement("svg", _extends({
    width: 16,
    height: 16,
    viewBox: "0 0 16 16",
    xmlns: "http://www.w3.org/2000/svg"
  }, props), _react.default.createElement("path", {
    d: "M7.04 13.274a.5.5 0 1 0 .892.453l3.014-5.931a.5.5 0 0 0-.445-.727H5.316L8.03 1.727a.5.5 0 1 0-.892-.453L4.055 7.343a.5.5 0 0 0 .446.726h5.185L7.04 13.274z"
  }));
};
var icon = EuiIconBolt;
exports.icon = icon;
EuiIconBolt.__docgenInfo = {
  "description": "",
  "methods": [],
  "displayName": "EuiIconBolt"
};
/***/ })
}]);
//# sourceMappingURL=52.bundle.js.map
|
#!/bin/bash
# Build bound_bootload_0 from assembler source, pack it into tape-block
# format, and verify the result against the reference tape image
# (bound_bootload_0.dat).  On mismatch, unpack both images and open an
# interactive diff.
set -u

ORIGIN=024000
top=../../../
tu=${top}/src/tapeUtils
as8=${top}/src/as8+

# Preprocess
"${as8}/as8pp" < bound_bootload_0.as8 > bound_bootload_0.s.tmp
# Assemble
"${as8}/as8+" bound_bootload_0.s.tmp -o bound_bootload_0.oct
# Pack
"${tu}/pack" bound_bootload_0.oct bound_bootload_0.pck.tmp

# Compare original tape block to the packed output.
# BUG FIX: the previous `[ "$?" != "0" ] && ( ... )` form made the whole
# script exit with status 1 when cmp SUCCEEDED (the short-circuited `&&`
# list was the script's last command).  An if/fi keeps the success path
# exiting 0.
if ! cmp bound_bootload_0.dat bound_bootload_0.pck.tmp; then
    "${tu}/unpack" "$ORIGIN" bound_bootload_0.dat bound_bootload_0.oct.tmp
    "${tu}/unpack" "$ORIGIN" bound_bootload_0.pck.tmp bound_bootload_0.pck.oct.tmp
    xxdiff bound_bootload_0.oct.tmp bound_bootload_0.pck.oct.tmp
fi
|
from game.client.view.pad.pad import Pad
class LegendPad(Pad):
    """Bottom-of-screen legend showing the currently available hotkeys.

    Which buttons are drawn depends on game state (inventory open, item
    selected, hero alive), mirroring the actions accepted at that moment.
    """

    TEXT_COLOR = '#eaeaea'               # regular button text
    HIGHLIGHTED_TEXT_COLOR = '#e6e600'   # the hotkey character
    BACKGROUND_COLOR = '#26004d'
    # BACKGROUND_COLOR = '#000000'
    INVENTORY_TEXT = ' Inventory '
    SKIP_TEXT = ' Skip turn '
    NEXT_TEXT = ' Next (↓) '
    PREV_TEXT = ' Prev (↑) '
    USE_TEXT = ' Use '
    DROP_TEXT = ' Drop '
    QUIT_TEXT = ' Quit '
    # Left-to-right layout order; _get_shift derives x offsets from it.
    ORDER = [INVENTORY_TEXT, SKIP_TEXT, NEXT_TEXT, PREV_TEXT, USE_TEXT, DROP_TEXT]

    @staticmethod
    def _get_shift(text):
        """Return the x offset of ``text`` in the legend row.

        Buttons are laid out in ORDER with a fixed 4-cell gap between them.
        """
        shift = 0
        for btn_text in LegendPad.ORDER:
            if btn_text == text:
                break
            shift += len(btn_text) + 4
        return shift

    def _render_label(self, text, key_char, x=None):
        """Draw one button label with its hotkey character highlighted.

        ``x`` defaults to the ORDER-based offset from the pad's left edge;
        _refresh_quit overrides it to right-align its button.
        """
        if x is None:
            x = self.x0 + self._get_shift(text)
        y = self.y1 - 1
        key_index = text.index(key_char)
        self.view._put_colored_text(x, y, text, self.TEXT_COLOR, self.BACKGROUND_COLOR)
        self.view._put_colored_symbol(x + key_index, y, key_char,
                                      self.HIGHLIGHTED_TEXT_COLOR, self.BACKGROUND_COLOR)

    def _refresh_background(self):
        """Fill the whole pad with the map's 'void' color."""
        void_color = self.view.entities_desc['map']['void']['background_color']
        for x in range(self.x0, self.x1):
            for y in range(self.y0, self.y1):
                self.view._put_colored_symbol(x=x, y=y, c=' ', color=void_color, bkcolor=void_color)

    def _refresh_inventory(self):
        self._render_label(self.INVENTORY_TEXT, 'I')

    def _refresh_skip(self):
        self._render_label(self.SKIP_TEXT, 'S')

    def _refresh_next(self):
        # Only meaningful while the inventory is open.
        if not self.view.model.inventory.is_opened():
            return
        self._render_label(self.NEXT_TEXT, '↓')

    def _refresh_prev(self):
        if not self.view.model.inventory.is_opened():
            return
        self._render_label(self.PREV_TEXT, '↑')

    def _refresh_use(self):
        # Requires an open inventory AND a selected item.
        if not self.view.model.inventory.is_opened():
            return
        if self.view.model.inventory.get_selected_item() is None:
            return
        self._render_label(self.USE_TEXT, 'U')

    def _refresh_drop(self):
        if not self.view.model.inventory.is_opened():
            return
        if self.view.model.inventory.get_selected_item() is None:
            return
        self._render_label(self.DROP_TEXT, 'D')

    def _refresh_quit(self):
        # Hidden while the inventory is open; right-aligned otherwise.
        if self.view.model.inventory.is_opened():
            return
        self._render_label(self.QUIT_TEXT, 'Q', x=self.x1 - len(self.QUIT_TEXT))

    def refresh(self):
        """Redraw the legend according to the current game state."""
        self._refresh_background()
        if self.view.model.hero.stats.health > 0:
            self._refresh_inventory()
            self._refresh_skip()
            self._refresh_next()
            self._refresh_prev()
            self._refresh_use()
            self._refresh_drop()
        self._refresh_quit()
|
<filename>src/web/assets/js/myUnderscore.js<gh_stars>1-10
/*
 Hand-rolled utility library.
 Written with a pluggable architecture in mind.
*/
(function () {
    // Resolve the global object: web worker (self), Node (global),
    // plain-script `this`, or an empty fallback object.
    var root =
        (typeof self == "object" && self.self === self && self) ||
        (typeof global == "object" && global.global === global && global) ||
        this ||
        {};
    // Calling _(obj) always yields an instance, with or without `new`.
    var _ = function (obj) {
        // If not invoked as a constructor, construct an instance ourselves.
        if (!(this instanceof _)) return new _(obj);
        // Remember the wrapped data passed as _(data), e.g. [11, 22, 33].
        this._wrapped = obj;
    };
    // True when the argument is callable.
    _.isFunction = function (obj) {
        return typeof obj === "function" || false;
    };
    // Minimal each: invoke fn(value, index) per element; returns the array.
    _.each = function (array, fn) {
        for (let i = 0; i < array.length; i++) {
            fn(array[i], i);
        }
        return array;
    };
    /*
     Throttle — limit `callback` to once per `wait` ms:
     1. at most one execution per interval,
     2. the very first call fires immediately,
     3. a call landing inside the interval is deferred and fired once at
        the end of the interval (trailing edge).
    */
    _.throttle = function (callback, wait) {
        let isFirst = true;
        let execDate = +new Date();
        let throttleId = null;
        return function () {
            if (isFirst) {
                callback();
                execDate = +new Date();
                isFirst = false;
            } else {
                let currentDate = +new Date();
                if (currentDate - execDate >= wait) {
                    callback();
                    execDate = +new Date();
                } else {
                    // Cancel any previously scheduled trailing call so only
                    // one fires per interval.
                    if (throttleId) {
                        clearTimeout(throttleId);
                    }
                    const timeWait = execDate + wait - +new Date();
                    // BUG FIX: the timer id was never stored, so
                    // clearTimeout(throttleId) above cleared nothing and one
                    // trailing call fired per invocation instead of per
                    // interval.
                    throttleId = setTimeout(() => {
                        callback();
                        execDate = +new Date();
                        throttleId = null;
                    }, timeWait);
                }
            }
        };
    };
    // Publish on the detected global.
    root._ = _;
})();
|
<filename>src/_global.js<gh_stars>0
// Resolve the host environment's global object — Node's `global` or the
// browser's `window` — and export it as the shared namespace.
const hasNodeGlobal = typeof global !== 'undefined'
const hasBrowserWindow = typeof window !== 'undefined'
let namespace
if (hasNodeGlobal) {
  namespace = global
} else if (hasBrowserWindow) {
  namespace = window
}
module.exports = namespace
|
<reponame>huanglii/Awesome-GIS-RS-Data<gh_stars>1-10
// Application entry point: mount the root component on #root.
import React from 'react'
import ReactDOM from 'react-dom'
import { ConfigProvider } from 'antd'
import zhCN from 'antd/es/locale/zh_CN'
import App from './app'
import './index.less'
// ConfigProvider makes antd's built-in components use the Simplified
// Chinese locale throughout the app.
ReactDOM.render(
  <ConfigProvider locale={zhCN}>
    <App />
  </ConfigProvider>,
  document.getElementById('root')
)
|
#!/bin/bash
# Rebuild the mongo database image, push it to Google Container Registry
# and redeploy the microservice into the ingestion-tool namespace.
# Requires: docker, docker-compose, gcloud, kubectl and $PROJECT_ID set.

echo --------------------------------
echo ------------Limpiando imagenes
echo --------------------------------
# BUG FIX: `docker images | grep mongo` emits whole table rows
# (repository, tag, id, created, size), which `docker rmi` cannot parse.
# Extract only the image IDs (column 3) and skip rmi when none match.
mongo_ids=$(docker images | awk '/mongo/ {print $3}')
if [ -n "$mongo_ids" ]; then
    docker rmi --force $mongo_ids
fi
echo --------------------------------
echo ------------Creado docker
echo --------------------------------
docker-compose -f ms-db-docker-compose.yml build
echo --------------------------------
echo ------------Push a Container Registry
echo --------------------------------
docker tag mongo:4.2 gcr.io/$PROJECT_ID/mongo:4.2
gcloud docker -- push gcr.io/$PROJECT_ID/mongo:4.2
echo --------------------------------
echo ------------Desplegando Microservicio
echo --------------------------------
# The delete may fail if the deployment does not exist yet; that is fine.
kubectl delete deployment ms-db -n ingestion-tool || true
sleep 10
kubectl apply -f k8s/ -n ingestion-tool
|
<gh_stars>1-10
from sys import argv
from flask import Flask, render_template, request, Response, send_from_directory
import settings
app = Flask( __name__ )
# db_session
@app.teardown_appcontext
def close_db( error ):
    """Release the scoped SQLAlchemy session at the end of each request."""
    # Imported lazily so the module can load without a DB connection.
    from database import db_session
    db_session.close()
@app.route( '/static/<path:path>/<filename>' )
def send_static( path, filename ):
    """Serve a static asset from settings.static_path."""
    # probably not very secure but it'll do for now
    # NOTE(review): `path` comes straight from the URL; send_from_directory
    # guards `filename`, but the joined `path` segment should be validated
    # against traversal too — confirm.
    return send_from_directory( settings.static_path, path + "/" + filename )
# Not required here - moved to Apache config
# @app.after_request
# def after_request( response ):
#     response.headers.add( 'Access-Control-Allow-Origin', '*' )
#     return response
# The index should show the genome browser and also a search box.
# Maybe also some introductory text.
@app.route( "/" )
def index():
    """Home page: genome browser plus introductory content."""
    from controllers import GenomeBrowser
    return render_template( "index.html",
                            settings=settings,
                            genome_browser=GenomeBrowser(),
                            page="home" )
@app.route( "/search" )
def search():
    """Search page (same template as home, different active tab)."""
    from controllers import GenomeBrowser
    return render_template( "index.html",
                            settings=settings,
                            genome_browser=GenomeBrowser(),
                            page="search" )
@app.route( "/help" )
def help():
    """Help page.  Shadows the built-in help(); acceptable for a view."""
    from controllers import GenomeBrowser
    return render_template( "index.html",
                            settings=settings,
                            genome_browser=GenomeBrowser(),
                            page="help" )
# Transcript - initialise the genome browser with custom parameters to center on the gene of interest.
# Also show the transcript's details
@app.route( "/transcript/<transcript_id>" )
def view_transcript( transcript_id ):
    """Transcript page: browser centered on the transcript, plus details."""
    from controllers import GenomeBrowser, TranscriptView
    return render_template( "index.html",
                            settings=settings,
                            genome_browser=GenomeBrowser(),
                            transcript_view=TranscriptView( transcript_id ),
                            page="transcript" )
@app.route( "/ajax/genome-browser/genes" )
def get_genes_ajax():
    """AJAX: gene annotations for the browser's current viewport."""
    from controllers import GenomeBrowser
    return GenomeBrowser().get_genes( request )
@app.route( "/ajax/help" )
def get_help_ajax():
    """AJAX: rendered help panel."""
    return render_template( "help-view.html",
                            settings=settings )
@app.route( "/ajax/genome-browser/transcripts" )
def get_transcripts_ajax():
    """AJAX: transcript annotations for the browser's current viewport."""
    from controllers import GenomeBrowser
    return GenomeBrowser().get_transcripts( request )
@app.route( "/ajax/search-transcript/<search_string>" )
def search_transcripts_ajax( search_string ):
    """AJAX: transcripts matching a free-text search string."""
    from controllers import TranscriptSearcher
    return TranscriptSearcher().search( search_string )
@app.route( "/ajax/search-coverage/<page_num>" )
def search_coverage_ajax( page_num ):
    """AJAX: one page of the coverage search results table."""
    from controllers import CoverageSearcher
    return render_template(
        "coverage-search.html",
        transcript_data=CoverageSearcher().fetch_transcript_data( page_num )
    )
@app.route( "/ajax/get-coverage-page-count" )
def get_coverage_page_count():
    """AJAX: total page count for the coverage search, as plain text."""
    from controllers import CoverageSearcher
    return str( CoverageSearcher().fetch_page_count() )
@app.route( "/ajax/transcript/<transcript_id>" )
def view_transcript_ajax( transcript_id ):
    """AJAX: rendered detail panel for one transcript."""
    from controllers import TranscriptView
    return render_template( "transcript-view.html",
                            transcript_view=TranscriptView( transcript_id ) )
@app.route( "/ajax/structure-diagram/<structure_id>" )
def structure_diagram_ajax( structure_id ):
    """AJAX: JSON payload for the structure diagram."""
    from controllers import StructureDiagramView
    return StructureDiagramView( structure_id ).data_json
@app.route( "/ajax/structure-circle-plot/<structure_id>" )
def structure_circle_plot_ajax( structure_id ):
    """AJAX: JSON payload for the structure circle plot."""
    from controllers import StructureCirclePlotView
    return StructureCirclePlotView( structure_id ).data_json
# strain ID .. should really be experiment ID
# and strain ID should only be associated with experiment ID.
# can then just use experiment IDs for everything.
@app.route( "/download/structure/<transcript_id>" )
def download_structure( transcript_id ):
    """Download predicted structures for one transcript as plain text."""
    from controllers import StructureDownloader
    buf = StructureDownloader( structure_prediction_run_ids=settings.v1_ids.structure_prediction_run_ids,
                               transcript_id=transcript_id ).generate()
    return Response( buf, mimetype='text/plain' )
@app.route( "/download/measurements/<experiment_id>/<transcript_id>" )
def download_measurements( experiment_id, transcript_id ):
    """Download normalised nucleotide measurements as plain text."""
    from controllers import NucleotideMeasurementDownloader
    buf = NucleotideMeasurementDownloader( experiment_id, transcript_id ).get_normalised()
    return Response( buf, mimetype='text/plain' )
@app.route( "/download/raw_measurements/<experiment_id>/<transcript_id>" )
def download_raw_measurements( experiment_id, transcript_id ):
    """Download raw (un-normalised) nucleotide measurements as plain text."""
    from controllers import NucleotideMeasurementDownloader
    buf = NucleotideMeasurementDownloader( experiment_id, transcript_id ).get_raw()
    return Response( buf, mimetype='text/plain' )
@app.route( "/download/all" )
def download_all():
    """Placeholder bulk-download endpoint."""
    return "All data here"
if __name__ == "__main__":
    # if we're in here, we're using `python3 app.py [blah...]`
    if len( argv ) > 1:
        # FIX: `cmd` was assigned but every comparison re-read argv[1];
        # use the local consistently.
        cmd = argv[ 1 ]
        # custom commands
        if cmd == "grabstructures":
            import utils
            utils.grab_structures()
        elif cmd == "hydratedb":
            # reset the database
            import importers
            importers.import_db( 2 )
        elif cmd == "exportfasta":
            # export sequences into a big fasta file
            from utils import FastaExporter
            FastaExporter().export()
        elif cmd == "splitfasta":
            # split fasta sequences into individual ones
            from utils import FastaSplitter
            FastaSplitter().split()
        elif cmd == "export_structure_tids":
            from importers import StructureTidsExporter
            StructureTidsExporter().export()
        else: # some other command
            from models import manager
            manager.run()
    else:
        # dev server: get the party started
        app.run( host='0.0.0.0', debug=True )
|
package lemongrenade.core.templates;
import com.rabbitmq.client.ConnectionFactory;
import io.latent.storm.rabbitmq.RabbitMQBolt;
import io.latent.storm.rabbitmq.config.*;
import lemongrenade.core.coordinator.AdapterManager;
import lemongrenade.core.coordinator.JobManager;
import lemongrenade.core.models.LGJob;
import lemongrenade.core.models.LGJobError;
import lemongrenade.core.models.LGPayload;
import lemongrenade.core.storm.CoordinatorSinkScheme;
import lemongrenade.core.storm.LGRabbitMQSpout;
import lemongrenade.core.util.ExceptionWriter;
import lemongrenade.core.util.LGConstants;
import lemongrenade.core.util.LGProperties;
import lemongrenade.core.util.StringScheme;
import org.apache.storm.Config;
import org.apache.storm.Constants;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Iterator;
import java.util.Map;
/**
 * Base class for Java LemonGrenade adapters.  Wires a RabbitMQ spout, a
 * heartbeat bolt, the adapter-specific processing bolt and a RabbitMQ sink
 * into a cached Storm topology.  Subclasses implement {@link #process}.
 */
public abstract class LGJavaAdapter extends LGAdapter {
    private static final Logger log = LoggerFactory.getLogger(LGJavaAdapter.class);
    // Lazily built and cached; reset via close().
    private StormTopology TOPOLOGY = null;
    private BaseRichBolt ADAPTER_BOLT = null;
    private BaseBasicBolt HEARTBEAT_BOLT = null;
    private RabbitMQBolt RABBIT_BOLT = null;
    public LGJavaAdapter(String id) {super(id);}
    /**
     * Build (once) and return the Storm topology for this adapter:
     * spout("input") -> adapter bolt -> sink("output"), plus a standalone
     * heartbeat bolt.  Also registers this adapter's metadata with the
     * AdapterManager.
     */
    public StormTopology getTopology() {
        if(TOPOLOGY != null) {
            return TOPOLOGY;
        }
        ConnectionConfig connectionConfig = new ConnectionConfig(LGProperties.get("rabbit.hostname"),
                LGProperties.getInteger("rabbit.port", 5672),
                LGProperties.get("rabbit.user"),
                LGProperties.get("rabbit.password"),
                ConnectionFactory.DEFAULT_VHOST,
                10); // host, port, username, password, virtualHost, heartBeat
        ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig)
                .queue(getQueueName())
                .prefetch(LGProperties.getInteger("rabbit.prefetch.messages", 250))
                .requeueOnFail()
                .build();
        ProducerConfig sinkConfig = new ProducerConfigBuilder().connection(connectionConfig).build();
        TopologyBuilder builder = new TopologyBuilder();
        // Populate Adapter Data Structures
        // max_nodes_per_task defaults to 0 (unlimited) on missing/bad config.
        int maxNodesPerTask = 0;
        if (getConfig().containsKey("max_nodes_per_task")) {
            String str = getConfig().get("max_nodes_per_task").toString();
            try {
                if ((str != null) && (!str.equals(""))) {
                    maxNodesPerTask = Integer.parseInt(str);
                }
            }
            catch (NumberFormatException e) {
                maxNodesPerTask = 0;
            }
        }
        AdapterManager.buildAdapterDataStructure(getAdapterId(), getAdapterName(),
                getAdapterQuery(), getAdapterDepth(), getRequiredAttributes(), maxNodesPerTask, this.getAuxInfo());
        builder.setSpout("input", new LGRabbitMQSpout(new StringScheme(), getAdapterId()
                        , getAdapterName(), getQueueName())
                        , LGProperties.getInteger("rabbit.spout.threads", 1))
                .addConfigurations(spoutConfig.asMap())
                .setMaxSpoutPending(LGProperties.getInteger("rabbit.prefetch.messages", 250));
        /**
         * Heartbeat bolt
         */
        if(HEARTBEAT_BOLT == null) {
            HEARTBEAT_BOLT = createHeartbeatBolt();
        }
        builder.setBolt("heartbeat", HEARTBEAT_BOLT);
        if(ADAPTER_BOLT == null) {
            ADAPTER_BOLT = createAdapterBolt();
        }
        int executors = getParallelismHint();
        int tasks = getTaskCount();
        int workers = (int) getConfig().getOrDefault("topology.workers", 1);
        log.info("Setting bolt for "+getAdapterName()+". Workers:"+workers+" Executors:"+executors+" Tasks:"+tasks);
        builder.setBolt(getAdapterName(), ADAPTER_BOLT, executors)//set the amount of threads for this adapter
                .shuffleGrouping("input")
                .setNumTasks(tasks)//sets the total number of tasks
        ;
        if(RABBIT_BOLT == null) {
            RABBIT_BOLT = createRabbitBolt();
        }
        builder.setBolt("output", RABBIT_BOLT, LGProperties.getInteger("rabbit.sink.threads", 1))
                .addConfigurations(sinkConfig.asMap())
                .shuffleGrouping(getAdapterName());
        TOPOLOGY = builder.createTopology();
        return TOPOLOGY;
    } //end getTopology
    /** Sink bolt publishing results back to the coordinator via RabbitMQ. */
    public RabbitMQBolt createRabbitBolt() {
        RabbitMQBolt bolt = new RabbitMQBolt(new CoordinatorSinkScheme());
        return bolt;
    }
    /**
     * Bolt that consumes Storm tick tuples (every `adapter_heartbeat`
     * seconds) and records a heartbeat timestamp for this adapter.
     */
    public BaseBasicBolt createHeartbeatBolt() {
        BaseBasicBolt bolt = new BaseBasicBolt() {
            public void cleanup() {
                super.cleanup();
                AdapterManager.close();
            }
            @Override public void prepare(final Map config, final TopologyContext context) {}
            @Override public Map<String, Object> getComponentConfiguration() {
                Config conf = new Config();
                int heartBeatTime = LGProperties.getInteger("adapter_heartbeat", 20);
                log.info("Heartbeat tick time = " + heartBeatTime);
                conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, heartBeatTime);
                return conf;
            }
            // True only for Storm's system-generated tick tuples.
            protected boolean isTickTuple(Tuple tuple) {
                return tuple.getSourceComponent().equals(Constants.SYSTEM_COMPONENT_ID)
                        && tuple.getSourceStreamId().equals(Constants.SYSTEM_TICK_STREAM_ID);
            }
            @Override public void execute(Tuple tuple, BasicOutputCollector collector) {
                try {
                    if (isTickTuple(tuple)) {
                        log.info("Received Heartbeat " + getAdapterName() + " " + getAdapterId());
                        AdapterManager.setHeartBeat(getAdapterId(), System.currentTimeMillis());
                    }
                } catch (Exception e) {
                    log.error("Bolt execute error: " + e.getMessage());
                }
            }
            // This is a dummy bolt - so no outputfields to declare
            public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
            }
        };
        return bolt;
    }
    /**
     * Main worker bolt: validates the job (exists / not stopped / not
     * expired), then delegates to the subclass's process() and acks or
     * fails the tuple via the LGCallback.
     */
    public BaseRichBolt createAdapterBolt() {
        BaseRichBolt bolt = new BaseRichBolt() {
            private OutputCollector oc;
            private transient JobManager JOB_MANAGER;
            private transient AdapterManager AM;
            public void cleanup() {
                super.cleanup();
                if(JOB_MANAGER != null) {
                    JOB_MANAGER.close();
                }
                if(AM != null) {
                    AM.close();
                }
            }
            public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
                oc = outputCollector;
                JOB_MANAGER = new JobManager();
                AM = new AdapterManager();
            }
            public void execute(Tuple tuple) {
                LGPayload payload = (LGPayload) tuple.getValueByField(LGConstants.LG_PAYLOAD);
                LGJob job;
                String jobId = payload.getJobId();
                try {
                    job = JOB_MANAGER.getJob(jobId);
                } catch (org.mongodb.morphia.mapping.MappingException e) {
                    log.error(" Unable to map jobId " + payload.getJobId() + " error:" + e.getMessage());
                    oc.reportError(e);
                    oc.fail(tuple);
                    return;
                }
                if (job == null) { //We probably received a request for a deleted job. We will ack this request, but do nothing.
                    log.error("Adapter received task:"+payload.getTaskId()+" for job:"+payload.getJobId()+", but job is invalid.");
                    // NOTE(review): `job` is null here, so this passes null
                    // into updateTaskToDropped — confirm it tolerates null.
                    JOB_MANAGER.updateTaskToDropped(job, payload.getTaskId());
                    oc.ack(tuple);
                    return;
                }
                if (job.getStatus() == LGJob.STATUS_STOPPED) {
                    log.info("Adapter received task:"+payload.getTaskId()+" for job:"+payload.getJobId()+", but job is STOPPED. Dropping task.");
                    JOB_MANAGER.updateTaskToDropped(job, payload.getTaskId());
                    oc.ack(tuple);
                    return;
                }
                // Check TTL (If Job has expired, stop processing
                if (JOB_MANAGER.hasJobExpired(job)) {
                    log.info("Adapter received task:"+payload.getTaskId()+" for job:"+payload.getJobId()+", but job has expired.");
                    JOB_MANAGER.updateTaskToDropped(job, payload.getTaskId());
                    oc.ack(tuple);
                    return;
                }
                AM.setCurrentTaskId(getAdapterId(), payload.getTaskId());
                try {
                    process(payload, new LGCallback() {
                        @Override
                        public void emit(LGPayload resp) {
                            //String job_id = resp.getJobId();
                            //String task_id = resp.getTaskId();
                            //log.info("***EMITTING [Adapter" + getAdapterName() + "] job_id:" + job_id + " task_id:" + task_id);
                            Values values = new Values(resp.getJobId(), resp);
                            oc.emit(tuple, values);
                            oc.ack(tuple);
                        }
                        @Override
                        public void fail(Exception ex) {
                            String job_id = payload.getJobId();
                            String task_id = payload.getTaskId();
                            String errorMsg = getAdapterName() + ": " + ex.getMessage();
                            log.info("***FAILING [Adapter" + getAdapterName() + "]. job_id:" + job_id + " task_id:" + task_id
                                    +" for error:"+ex.getMessage());
                            ex.printStackTrace();//error handling here...
                            // Best-effort: publish the exception for later diagnosis.
                            try {
                                ExceptionWriter writer = new ExceptionWriter();
                                String requests = payload.getRequestNodes().toString();
                                writer.publishException(task_id, requests, ex);
                                writer.channel.close();
                                writer.connection.close();
                            } catch (Exception e) {
                                log.error("Failure while writing adapter failure exception. job_id:"+job_id +"task_id:"+task_id);
                                e.printStackTrace();
                            }
                            LGJobError error = new LGJobError(job_id, task_id, getAdapterName(), getAdapterId(), errorMsg);
                            log.info("Adding error for job_id:"+job_id+" task_id:"+task_id+" error:"+errorMsg);
                            try {
                                JOB_MANAGER.updateErrorsForTask(job, task_id, error);
                            }
                            catch(Exception e) {//This can happen if the job was deleted
                                log.error("Unable to add error for job_id:"+job_id+" task_id:"+task_id+".");
                            }
                            oc.reportError(ex);
                            oc.fail(tuple);
                        }
                    });
                }
                catch (Exception e) {
                    // process() itself threw (not via callback): record and fail.
                    log.error("Adapter threw an exception: Failing Tuple: "+e.getMessage());
                    e.printStackTrace();
                    try {
                        ExceptionWriter writer = new ExceptionWriter();
                        String taskId = payload.getTaskId();
                        String requests = payload.getRequestNodes().toString();
                        writer.publishException(taskId, requests, e);
                    } catch (Exception e2) {
                        e2.printStackTrace();
                    }
                    oc.reportError(e);
                    oc.fail(tuple);
                }
            }
            public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
                outputFieldsDeclarer.declare(new Fields(LGConstants.LG_JOB_ID, LGConstants.LG_PAYLOAD));
            }
        };//end of bolt declaration
        return bolt;
    }
    /** Result channel handed to process(): emit exactly one of emit/fail. */
    public interface LGCallback {
        void emit(LGPayload resp);
        void fail(Exception ex);
    }
    /**
     * Return the job config with this adapter's "adapters.&lt;name&gt;"
     * overrides merged in (flattened onto a deep copy; the "adapters" key
     * itself is removed).
     */
    public JSONObject getJobConfig(LGPayload payload) {
        JSONObject generic_job_config = payload.getJobConfig();
        JSONObject new_job_config = new JSONObject(payload.getJobConfig().toString());//create a deep copy of job_config
        if(generic_job_config.has("adapters")) {
            JSONObject adapters = new JSONObject(generic_job_config.get("adapters").toString());
            new_job_config.remove("adapters");
            if(adapters.has(this.getAdapterName())) {//adapters section has entries for this adapter
                JSONObject unique = adapters.getJSONObject(this.getAdapterName());
                Iterator<?> keys = unique.keys();
                while(keys.hasNext()) {
                    String key = keys.next().toString();
                    new_job_config.put(key, unique.get(key));
                }
            }
        }
        return new_job_config;
    }
    //Cleans up and closes bolts and topology
    public void close() {
        if(ADAPTER_BOLT != null) {
            ADAPTER_BOLT.cleanup();
            ADAPTER_BOLT = null;
        }
        if(HEARTBEAT_BOLT != null) {
            HEARTBEAT_BOLT.cleanup();
            HEARTBEAT_BOLT = null;
        }
        if(RABBIT_BOLT != null) {
            RABBIT_BOLT.cleanup();
            RABBIT_BOLT = null;
        }
        TOPOLOGY = null;
    }
    /** Adapter-specific work; report the outcome through the callback. */
    public abstract void process(LGPayload input, LGCallback callback);
}
|
#!/bin/bash
#
# Crontab entry
# */2 * * * * /root/block_bad_auth_cron.sh > /var/log/block_bad_auth_cron.log
#
# Runs block_bad_auth.py four times ~10s apart so bad-auth sources are
# blocked with sub-minute latency even though cron only fires every 2 min.
# An exclusive flock on fd 200 prevents overlapping runs.
date "+timestamp: %Y-%m-%d %H:%M:%S"
(
echo "Checking for lock. If lock, will wait 10s"
# Wait for lock on /var/lock/.block_bad_auth_cron_lock
flock -x -w 10 200 || exit 1
# Load the environment (PATH, credentials) that cron does not provide.
. /root/.cronenv
/root/block_bad_auth.py
echo "Done inside"
/bin/sleep 10
/root/block_bad_auth.py
echo "Done inside 2 (after 10 seconds)"
/bin/sleep 10
/root/block_bad_auth.py
echo "Done inside 3 (after 20 seconds)"
/bin/sleep 10
/root/block_bad_auth.py
echo "Done inside 4 (after 30 seconds)"
) 200>/var/lock/.block_bad_auth_cron_lock
echo "Done outside"
|
/**
* Copyright(c) 2004-2018 bianfeng
*/
package com.shareyi.molicode.builder.impl;
import com.alibaba.fastjson.JSON;
import com.shareyi.molicode.builder.AbstractBuilder;
import com.shareyi.molicode.common.enums.DataTypeEnum;
import com.shareyi.molicode.common.enums.OwnerTypeEnum;
import com.shareyi.molicode.common.enums.StatusEnum;
import com.shareyi.molicode.domain.conf.CommonExtInfo;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Builder for common extension info records.
 *
 * @author david
 * @date 2018-08-25
 */
@Service("commonExtInfoBuilder")
public class CommonExtInfoBuilder extends AbstractBuilder<CommonExtInfo> {

    @Override
    public String getBizName() {
        return "通用扩展信息";
    }

    @Override
    public Class getBizClass() {
        return CommonExtInfo.class;
    }

    /**
     * Fills in defaults before insert: the data type defaults to TEXT when
     * the caller did not set one.
     */
    @Override
    public CommonExtInfo supplyAddInfo(CommonExtInfo dto) {
        super.supplyAddInfo(dto);
        // Default the data type to TEXT.
        if (dto.getType() == null) {
            dto.setType(DataTypeEnum.TEXT.getCode());
        }
        return dto;
    }

    /**
     * Indexes a list of extension records by extKey, parsing each extValue
     * as a JSON object. Returns an empty map for a null/empty list.
     * NOTE(review): assumes every extValue is valid JSON — JSON.parseObject
     * throws otherwise; confirm upstream validation.
     *
     * @param list extension records to index
     * @return map of extKey -> parsed extValue
     */
    public Map<String, Map<String, String>> buildConfigMap(List<CommonExtInfo> list) {
        Map<String, Map<String, String>> dataMap = new HashMap<>();
        if (CollectionUtils.isEmpty(list)) {
            return dataMap;
        }
        for (CommonExtInfo commonExtInfo : list) {
            dataMap.put(commonExtInfo.getExtKey(), JSON.parseObject(commonExtInfo.getExtValue(), Map.class));
        }
        return dataMap;
    }

    /**
     * Builds a user-owned, JSON-typed extension record from bind info.
     * The record is only populated here, not persisted.
     *
     * @param bindId        used as the extension key
     * @param configMapJson JSON payload stored as the extension value
     * @param ownerCode     owner and creator of the record
     * @return the populated CommonExtInfo
     */
    public CommonExtInfo buildByBindInfo(String bindId, String configMapJson, String ownerCode) {
        CommonExtInfo commonExtInfo = new CommonExtInfo();
        commonExtInfo.setOwnerType(OwnerTypeEnum.USER.getCode());
        commonExtInfo.setOwnerCode(ownerCode);
        commonExtInfo.setExtKey(bindId);
        commonExtInfo.setExtValue(configMapJson);
        commonExtInfo.setStatus(StatusEnum.YES.getCode());
        commonExtInfo.setType(DataTypeEnum.JSON.getCode());
        commonExtInfo.setCreator(ownerCode);
        return commonExtInfo;
    }
}
|
package org.fluentlenium.adapter;
import org.testng.ITestContext;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeTest;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
/**
 * All TestNG tests should extend this class. It wires TestNG lifecycle
 * callbacks into the FluentLenium runner adapter (starting / failed /
 * finished / releaseSharedDriver).
 */
public abstract class FluentTestNg extends FluentTestRunnerAdapter {
    public FluentTestNg() {
        super();
    }

    // Maps reflective Methods to their TestNG descriptors; populated per
    // <test> tag in beforeTest and cleared in afterTest.
    // NOTE(review): static and unsynchronized — shared by all subclasses;
    // confirm this is safe under TestNG parallel execution.
    private static Map<Method, ITestNGMethod> methods = new HashMap<>();

    /** Index every test method of this context for lookup in beforeMethod. */
    @BeforeTest
    public void beforeTest(ITestContext context) {
        for (ITestNGMethod method : context.getAllTestMethods()) {
            methods.put(method.getConstructorOrMethod().getMethod(), method);
        }
    }

    /** Drop the method index once the <test> tag finishes. */
    @AfterTest
    public void afterTest() {
        methods.clear();
    }

    /** Notify the adapter that a test method is starting. */
    @BeforeMethod
    public void beforeMethod(Method m, ITestContext context) {
        ITestNGMethod testNGMethod = methods.get(m);
        starting(testNGMethod.getRealClass(), testNGMethod.getMethodName());
    }

    /** Report failure (if any) and completion to the adapter. */
    @AfterMethod
    public void afterMethod(ITestResult result) {
        if (!result.isSuccess()) {
            failed(result.getThrowable(), result.getTestClass().getRealClass(), result.getName());
        }
        finished(result.getTestClass().getRealClass(), result.getName());
    }

    /** Release the shared WebDriver once the test class completes. */
    @AfterClass
    public void afterClass() {
        releaseSharedDriver();
    }
}
|
import {
EventEmitter,
OnInit,
Output,
ViewChild,
ElementRef,
Input
} from '@angular/core';
import { fromEvent } from 'rxjs';
import 'rxjs/Rx';
/**
 * The 'BaseSearchComponent' provides the common API to search models:
 * it debounces keyup events on the #searchRef element and emits a
 * keySearch event with the current search text.
 */
export abstract class BaseSearchComponent implements OnInit {
  /**
   * Debounce time in milliseconds; default 1 second.
   */
  @Input() debounceTime: number = 1000;

  /**
   * Emits when a (debounced, changed) key press should start a search.
   * Payload shape: { filter: any[], search: string }.
   *
   * @type {}
   */
  @Output() keySearch = new EventEmitter<any>();

  /**
   * Current value of the search text.
   * NOTE(review): presumably kept in sync with the input by the subclass
   * template (e.g. ngModel) — confirm; onKeySearch reads this field, not
   * the DOM event value.
   */
  public currentSearch: string;

  /**
   * Element ref of the search input (#searchRef in the template).
   */
  @ViewChild('searchRef') searchRef: ElementRef;

  /**
   * On init of the component: wire the debounced keyup stream.
   */
  ngOnInit() {
    this.setupDebounce();
  }

  /**
   * Executes after the search text change: emits the current search text
   * with an empty filter list.
   */
  onKeySearch(): void {
    this.keySearch.emit({ filter: [], search: this.currentSearch });
  }

  /**
   * Subscribes to keyup events on #searchRef, debounced and deduplicated.
   * NOTE(review): the subscription is never unsubscribed — fine if the
   * component lives for the app's lifetime, otherwise a leak; also the
   * emitted text parameter is ignored in favour of currentSearch.
   */
  setupDebounce(): void {
    fromEvent(this.searchRef.nativeElement, 'keyup')
      .map((evt: any) => evt.target.value)
      .debounceTime(this.debounceTime)
      .distinctUntilChanged()
      .subscribe((text: string) => this.onKeySearch());
  }
}
|
#!/usr/bin/env bash
set -euxo pipefail
# Validate the kaniko-built image twice. The two commands are intentionally
# identical: presumably the first pull triggers a cold build and the second
# times the cached path — TODO confirm against kontain.me's caching behavior.
time crane validate --remote=kaniko.kontain.me/dockersamples/node-bulletin-board/bulletin-board-app:3f08afd
time crane validate --remote=kaniko.kontain.me/dockersamples/node-bulletin-board/bulletin-board-app:3f08afd
|
<gh_stars>1-10
import React from 'react'
import { graphql } from 'gatsby'
import Layout from '../components/layout'
import PostList from '../components/post-list'
import PageHeading from '../components/UI/PageHeading'
const Articles = ({ data }) => {
const blogPosts = data.allMarkdownRemark.edges.filter(post => {
return post.node.frontmatter.type === 'blog-post'
})
const journalPosts = data.allMarkdownRemark.edges.filter(post => {
return post.node.frontmatter.type === 'journal'
})
return (
<Layout>
<PageHeading className="ml-4 lg:ml-0">Articles</PageHeading>
<ArticleSection title="Blog Posts" posts={blogPosts} />
<ArticleSection title="Work Journals" posts={journalPosts} />
</Layout>
)
}
const ArticleSection = ({ title, posts }) => {
return (
<div className="mb-8">
<h3 className="mb-4 ml-4 lg:ml-0 font-semibold text-lg">{title}</h3>
<PostList posts={posts} />
</div>
)
}
// Page query: every markdown post, newest first. Both article sections
// above filter this single result set client-side by frontmatter.type.
export const query = graphql`
  query BlogQuery {
    allMarkdownRemark(sort: { fields: [frontmatter___date], order: DESC }) {
      totalCount
      edges {
        node {
          id
          frontmatter {
            title
            date(formatString: "MMMM DD, YYYY")
            icon
            type
          }
          fields {
            slug
          }
        }
      }
    }
  }
`

export default Articles
|
#!/usr/bin/env sh
# Launch CIFAR-100 training with main.lua (Torch).
DATA_SET=image
# define data source for train and test
DATA_FILE=cifar100_fine_R10
DEBUG=false
TRAIN_LIST=/scratch/dutta/cifarR20_trainval_train.txt
VAL_LIST=/scratch/dutta/test_fine_labels.txt
TRAIN_BASE_DIR=/scratch/dutta/cifar\-100\-train
VAL_BASE_DIR=/scratch/dutta/cifar\-100\-test
WIDTH=96
WIDTH_MULT=2
# BUG FIX: must be exported to reach the child process; a plain shell
# assignment on its own line is invisible to `th`.
export CUDA_VISIBLE_DEVICES=0
th main.lua \
    -dataset $DATA_SET \
    -dataFile $DATA_FILE \
    -trainList $TRAIN_LIST \
    -trainBaseDir $TRAIN_BASE_DIR \
    -valList $VAL_LIST \
    -valBaseDir $VAL_BASE_DIR \
    -width $WIDTH \
    -widthMult $WIDTH_MULT \
    -debug $DEBUG
|
#!/usr/bin/expect -f
# Run an arbitrary command and auto-answer "y" to its license prompts.
# Usage: script "<command>" [licenses]
set timeout 1800
set cmd [lindex $argv 0]
# NOTE(review): `licenses` is read from argv but never used below — confirm
# whether per-license filtering was intended.
set licenses [lindex $argv 1]
spawn {*}$cmd
expect {
    "Do you accept the license '*'*" {
        exp_send "y\r"
        exp_continue
    }
    "Accept?*" {
        exp_send "y\r"
        exp_continue
    }
    eof
}
|
#ifndef _CAFFE_UTIL_IM2CHUK_HPP_
#define _CAFFE_UTIL_IM2CHUK_HPP_

namespace caffe {

// Declarations of chunk-wise im2col/col2im-style rearrangement routines,
// with CPU and GPU variants. Implementations live elsewhere.

// Rearranges image data (data_im) into chunked column form (data_col).
template <typename Dtype>
void im2chuk_cpu(const Dtype* data_im, const int channels,
    const int height, const int width, const int kernel_h, const int kernel_w,
    const int local_h, const int local_w, const int stride_h,
    const int stride_w, Dtype* data_col);

// Inverse of im2chuk_cpu: writes column data back into image layout.
template <typename Dtype>
void chuk2im_cpu(const Dtype* data_col, const int channels,
    const int height, const int width, const int patch_h, const int patch_w,
    const int local_h, const int local_w, const int stride_h,
    const int stride_w, Dtype* data_im);

// GPU counterpart of im2chuk_cpu.
template <typename Dtype>
void im2chuk_gpu(const Dtype* data_im, const int channels,
    const int height, const int width, const int kernel_h, const int kernel_w,
    const int local_h, const int local_w, const int stride_h,
    const int stride_w, Dtype* data_col);

// GPU counterpart of chuk2im_cpu.
template <typename Dtype>
void chuk2im_gpu(const Dtype* data_col, const int channels,
    const int height, const int width, const int patch_h, const int patch_w,
    const int local_h, const int local_w, const int stride_h,
    const int stride_w, Dtype* data_im);

}  // namespace caffe

#endif  // _CAFFE_UTIL_IM2CHUK_HPP_
|
<gh_stars>10-100
package idmap
import (
"os/user"
"strconv"
"github.com/lxc/lxd/shared/idmap"
"github.com/pkg/errors"
)
// ResolveCurrentIdmapSet builds the idmap set for the user running the
// current process.
func ResolveCurrentIdmapSet() (*idmap.IdmapSet, error) {
	me, err := user.Current()
	if err != nil {
		return nil, errors.Wrapf(err, "couldn't resolve current user")
	}
	return resolveIdmapSet(me)
}
// resolveIdmapSet builds the default idmap set for the given user from
// /etc/subuid and /etc/subgid and, when such a set exists, additionally
// maps the user's own uid and gid to root (nsid 0) inside the namespace.
// Returns (nil, nil) when DefaultIdmapSet yields no set.
func resolveIdmapSet(user *user.User) (*idmap.IdmapSet, error) {
	idmapSet, err := idmap.DefaultIdmapSet("", user.Username)
	if err != nil {
		return nil, errors.Wrapf(err, "failed parsing /etc/sub{u,g}idmap")
	}

	if idmapSet != nil {
		/* Let's make our current user the root user in the ns, so that when
		 * stacker emits files, it does them as the right user.
		 */
		uid, err := strconv.Atoi(user.Uid)
		if err != nil {
			return nil, errors.Wrapf(err, "couldn't decode uid")
		}

		gid, err := strconv.Atoi(user.Gid)
		if err != nil {
			return nil, errors.Wrapf(err, "couldn't decode gid")
		}

		// One single-id mapping each for uid and gid: host id -> ns id 0.
		hostMap := []idmap.IdmapEntry{
			idmap.IdmapEntry{
				Isuid:    true,
				Hostid:   int64(uid),
				Nsid:     0,
				Maprange: 1,
			},
			idmap.IdmapEntry{
				Isgid:    true,
				Hostid:   int64(gid),
				Nsid:     0,
				Maprange: 1,
			},
		}

		// AddSafe splits any overlapping existing ranges around the new entry.
		for _, hm := range hostMap {
			err := idmapSet.AddSafe(hm)
			if err != nil {
				return nil, errors.Wrapf(err, "failed adding idmap entry: %v", hm)
			}
		}
	}

	return idmapSet, nil
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Constant strings and helper lookups used by the statistics implementation.
 */
final class StatsSetupConst {

  /** Value of the user variable "hive.stats.dbclass" selecting the HBase implementation. */
  static final String HBASE_IMPL_CLASS_VAL = "hbase";

  /** Value of the user variable "hive.stats.dbclass" selecting the JDBC implementation. */
  static final String JDBC_IMPL_CLASS_VAL = "jdbc";

  /** Statistic name: number of files. */
  static final String NUM_FILES = "numFiles";

  /** Statistic name: number of partitions. */
  static final String NUM_PARTITIONS = "numPartitions";

  /** Statistic name: total size (HDFS logical size). */
  static final String TOTAL_SIZE = "totalSize";

  /** Statistic name: row count. */
  static final String ROW_COUNT = "numRows";

  /** Statistic name: raw data size (before encoding and compression). */
  static final String RAW_DATA_SIZE = "rawDataSize";

  /**
   * @return all supported statistics: the scan-requiring ones followed by
   *         the fast (no-scan) ones, in a fresh mutable list
   */
  static List<String> getSupportedStats() {
    List<String> all = new ArrayList<String>(getStatsRequireScan());
    all.addAll(getStatsNoScan());
    return all;
  }

  /**
   * @return statistics that inherently require scanning the data during
   *         query execution
   */
  static List<String> getStatsRequireScan() {
    List<String> scanStats = new ArrayList<String>();
    scanStats.add(ROW_COUNT);
    scanStats.add(RAW_DATA_SIZE);
    return scanStats;
  }

  /**
   * @return statistics collectable quickly without a scan of the data
   */
  static List<String> getStatsNoScan() {
    List<String> quickStats = new ArrayList<String>();
    quickStats.add(NUM_FILES);
    quickStats.add(TOTAL_SIZE);
    return quickStats;
  }

  /**
   * Display-name mapping for all statistics, e.g. for building toString
   * output.
   *
   * @return fresh map of internal statistic name to display name
   */
  static Map<String, String> getNameMapping() {
    Map<String, String> names = new HashMap<String, String>();
    names.put(NUM_FILES, "num_files");
    names.put(ROW_COUNT, "num_rows");
    names.put(TOTAL_SIZE, "total_size");
    names.put(RAW_DATA_SIZE, "raw_data_size");
    return names;
  }
}
|
#!/bin/sh
# Snapshot the Nix store and its database into /backup.
# (These paths used to be mounted read-only into containers:)
#docker-volumes /nix/store:/nix/store:ro \
#docker-volumes /nix/var/nix/db:/nix/var/nix/db:ro \
#docker-volumes /nix/var/nix/daemon-socket:/nix/var/nix/daemon-socket:ro \
mkdir -p /backup/nix/var/nix
for src in /nix/store /nix/var/nix/db; do
    cp -r "$src" "/backup$src"
done
|
def zip_two_lists(list1, list2):
    """Pair up corresponding elements of two lists.

    Returns a lazy ``zip`` iterator of ``(list1[i], list2[i])`` tuples,
    truncated to the length of the shorter input.
    """
    paired = zip(list1, list2)
    return paired
|
<filename>public/src/App.tsx<gh_stars>1-10
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import { connect, Provider } from 'react-redux';
import {store} from './store';
// Presentational root: shows the current ping state and a button that
// dispatches the PING action.
let App = ({ isPinging, ping }) => (
  <div>
    <h1>is pinging: {isPinging.toString()}</h1>
    <button onClick={ping}>Start PING</button>
  </div>
);

// PING action type and creator. NOTE(review): presumably handled by
// middleware in ./store that toggles isPinging — confirm there.
const PING = 'PING';
const ping = () => ({ type: PING });

// Reassign App to its connected version: maps state.isPinging to props
// and binds the ping action creator to dispatch.
App = connect(
  ({ isPinging }) => ({ isPinging }),
  { ping }
)(App);

ReactDOM.render(
  <Provider store={store}>
    <App />
  </Provider>,
  document.getElementById('root')
);
|
const schedule = require('node-schedule');
const express = require('express');
const app = express();

// BUG FIX: parse JSON request bodies; without this middleware req.body is
// undefined in Express 4.16+ and every handler below would crash.
app.use(express.json());

// In-memory registry of scheduled jobs, keyed by taskId.
let tasks = {};

// Create a task. Body: { taskId, task } where `task` is any schedule spec
// accepted by node-schedule's scheduleJob (cron string, Date, ...).
app.post('/tasks', function (req, res) {
  const taskId = req.body.taskId;
  const task = req.body.task;

  // Robustness: reject missing ids and duplicates instead of silently
  // overwriting (which would leak the previously scheduled job).
  if (!taskId || tasks[taskId]) {
    return res.status(400).send({
      message: 'taskId is required and must not already exist'
    });
  }

  // Schedule the task
  const job = schedule.scheduleJob(taskId, task);
  tasks[taskId] = job;

  res.send({
    message: 'Task created successfully'
  });
});

// Edit an existing task's schedule.
app.put('/tasks/:taskId', function (req, res) {
  const taskId = req.params.taskId;
  const task = req.body.task;

  const job = tasks[taskId];
  // BUG FIX: unknown taskId previously crashed with a TypeError.
  if (!job) {
    return res.status(404).send({ message: 'Task not found' });
  }
  job.reschedule(task);

  res.send({
    message: 'Task updated successfully'
  });
});

// Delete an existing task and cancel its pending runs.
app.delete('/tasks/:taskId', function (req, res) {
  const taskId = req.params.taskId;

  const job = tasks[taskId];
  // BUG FIX: unknown taskId previously crashed with a TypeError.
  if (!job) {
    return res.status(404).send({ message: 'Task not found' });
  }
  job.cancel();
  delete tasks[taskId];

  res.send({
    message: 'Task deleted successfully'
  });
});

// Start the app
app.listen(3000);
|
#include "factory.h"
namespace odfaeg {
    namespace core {
        // Out-of-line definition of the static key -> prototype registry.
        // NOTE(review): template member definitions in a .cpp must be
        // visible to all users (presumably this file is included from the
        // header) — confirm the build setup.
        template <class O, class K>
        std::map<K, O*> Factory<O, K>::m_map = std::map<K, O*> ();

        /**
         * Registers a prototype object under the given key.
         * First registration wins: an existing key is left untouched.
         */
        template <class O, class K>
        void Factory<O, K>::Register (K key, O* object) {
            if(m_map.find(key)==m_map.end())
            {
                m_map[key]=object;
            }
        }

        /**
         * Creates a clone of the prototype registered under key.
         * Returns a null pointer when the key is unknown; ownership of the
         * clone passes to the caller.
         */
        template <class O, class K>
        O* Factory<O, K>::Create (const K& key) {
            O* tmp=0;
            typename std::map<K, O*>::iterator it=m_map.find(key);
            if(it!=m_map.end())
            {
                tmp=((*it).second)->Clone();
            }
            return tmp;
        }
    }
}
#!/bin/bash
# Smoke-test Hadoop MapReduce: copy the bundled config files into HDFS,
# run the example `grep` job against them, and print the matches.
$HADOOP_PREFIX/bin/hdfs dfs -mkdir -p /user/root
$HADOOP_PREFIX/bin/hdfs dfs -put $HADOOP_PREFIX/etc/hadoop input
$HADOOP_PREFIX/bin/hadoop jar $HADOOP_PREFIX/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.2.jar grep input output 'dfs[a-z.]+'
$HADOOP_PREFIX/bin/hdfs dfs -cat output/*
|
<filename>src/main/java/app/habitzl/elasticsearch/status/monitor/tool/client/params/ClusterAllocationParams.java
package app.habitzl.elasticsearch.status.monitor.tool.client.params;
/**
 * Parameter list of all values offered by the Elasticsearch
 * {@code /_cluster/allocation/explain} API.
 */
public final class ClusterAllocationParams {

    /** REST endpoint of the cluster allocation explain API. */
    public static final String API_ENDPOINT = "/_cluster/allocation/explain";

    private ClusterAllocationParams() {
        // instantiation protection
    }
}
|
<filename>src/math/Boj1676.java
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
 * BOJ 1676: number of trailing zeros of N!.
 *
 * A trailing zero needs a factor 10 = 2 * 5, and N! always contains at
 * least as many factors of 2 as of 5, so the answer is simply the number
 * of factors of 5 in N! (Legendre's formula: sum over k of N / 5^k).
 * This replaces the original per-number 10/5/2 factor counting, which
 * computed the same result far more laboriously.
 *
 * @author minchoba
 * @see https://www.acmicpc.net/problem/1676/
 */
public class Boj1676 {

    /**
     * Number of trailing zeros of n! via Legendre's formula.
     *
     * @param n non-negative factorial argument
     * @return count of factors of 5 in n!
     */
    public static int trailingZeros(int n) {
        int zeros = 0;
        // long divisor avoids int overflow of the power of 5 for large n.
        for (long p = 5; p <= n; p *= 5) {
            zeros += (int) (n / p);
        }
        return zeros;
    }

    public static void main(String[] args) throws Exception {
        // Read N from standard input; trim() tolerates trailing whitespace/CR.
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int n = Integer.parseInt(br.readLine().trim());
        System.out.println(trailingZeros(n));
    }
}
|
<table>
  <thead>
    <tr>
      <th>Name</th>
      <th>Age</th>
    </tr>
  </thead>
  <!-- BUG FIX: Mary's row was missing its closing </tr> and the body was
       split across two <tbody> elements; merged into one well-formed body. -->
  <tbody>
    <tr>
      <td>John</td>
      <td>20</td>
    </tr>
    <tr>
      <td>Mary</td>
      <td>15</td>
    </tr>
    <tr>
      <td>Bob</td>
      <td>30</td>
    </tr>
  </tbody>
</table>
<script>
  // Sort the data rows by age, descending.
  // BUG FIX: select only tbody rows — the old 'tr' selector included the
  // header row, whose "Age" text parses to NaN and corrupts the sort, and
  // rows were re-appended to <table> instead of back into the tbody.
  let tbody = document.querySelector('table tbody');
  let rows = Array.from(tbody.querySelectorAll('tr'));
  let sorted = rows.sort((rowA, rowB) => {
    let ageA = parseInt(rowA.querySelector('td:last-child').textContent, 10);
    let ageB = parseInt(rowB.querySelector('td:last-child').textContent, 10);
    return ageB - ageA;
  });
  sorted.forEach(row => tbody.appendChild(row));
</script>
|
<reponame>NightKosh/wolfarmor
package com.attributestudios.wolfarmor.entity.passive;
import com.attributestudios.wolfarmor.WolfArmorMod;
import com.attributestudios.wolfarmor.api.IWolfArmorCapability;
import com.attributestudios.wolfarmor.common.capabilities.CapabilityWolfArmor;
import net.minecraft.entity.passive.EntityWolf;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.ContainerHorseChest;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.IInventoryChangedListener;
import net.minecraft.inventory.InventoryBasic;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.network.datasync.DataParameter;
import net.minecraft.network.datasync.DataSerializers;
import net.minecraft.network.datasync.EntityDataManager;
import net.minecraft.util.EnumHand;
import net.minecraft.world.World;
import javax.annotation.Nonnull;
/**
 * Replacement entity for EntityWolf that supports armor and an optional
 * chest inventory, persisted via NBT and synced via the data watcher.
 * @deprecated Since 2.1.0
 */
@Deprecated
public class EntityWolfArmored extends EntityWolf implements IInventoryChangedListener, IWolfArmorCapability {
    //region Fields

    private ContainerHorseChest inventory;

    private static final String NBT_TAG_HAS_CHEST  = "hasChest";
    private static final String NBT_TAG_SLOT       = "slot";
    private static final String NBT_TAG_INVENTORY  = "inventory";
    private static final String NBT_TAG_ARMOR_ITEM = "armorItem";

    private static final int MAX_SIZE_INVENTORY = 7;

    private static final DataParameter<Boolean> HAS_CHEST = EntityDataManager.createKey(EntityWolfArmored.class, DataSerializers.BOOLEAN);
    private static final DataParameter<ItemStack> ARMOR_ITEM = EntityDataManager.createKey(EntityWolfArmored.class, DataSerializers.ITEM_STACK);

    //endregion Fields

    //region Constructors

    /**
     * Creates a new entity in the specified world.
     *
     * @param world The world in which to create the entity.
     */
    public EntityWolfArmored(@Nonnull World world) {
        super(world);
        this.inventoryInit();
    }

    //endregion Constructors

    //region Public / Protected Methods

    /**
     * Sets up the entity's inventory, carrying over the contents of any
     * previous inventory. Slot 0 is the armor slot (see onInventoryChanged).
     * NOTE(review): the size 7 here matches MAX_SIZE_INVENTORY — presumably
     * the constant was meant to be used; confirm before changing.
     */
    private void inventoryInit() {
        ContainerHorseChest inventoryExisting = this.inventory;
        this.inventory = new ContainerHorseChest("container.wolfarmor.wolf", 7);

        String customName = this.getCustomNameTag();
        if (!customName.isEmpty()) {
            this.inventory.setCustomName(customName);
        }

        if (inventoryExisting != null) {
            // Detach the old listener and copy over as many stacks as fit.
            inventoryExisting.removeInventoryChangeListener(this);
            int numberOfItemsExisting = Math.min(this.inventory.getSizeInventory(), inventoryExisting.getSizeInventory());
            for (int slotIndex = 0; slotIndex < numberOfItemsExisting; slotIndex++) {
                ItemStack stackInSlot = inventoryExisting.getStackInSlot(slotIndex);
                if (!stackInSlot.isEmpty()) {
                    this.inventory.setInventorySlotContents(slotIndex, stackInSlot.copy());
                }
            }
        }

        this.inventory.addInventoryChangeListener(this);
        this.inventory.markDirty();
    }

    /**
     * Initializes the entity's data watcher values
     */
    @Override
    protected void entityInit() {
        super.entityInit();
        this.dataManager.register(HAS_CHEST, false);
        this.dataManager.register(ARMOR_ITEM, ItemStack.EMPTY);
    }

    /**
     * Writes the entity to the provided NBT Tag Compound
     *
     * @param tags The NBT data to write the entity to
     */
    @Override
    public void writeEntityToNBT(@Nonnull NBTTagCompound tags) {
        super.writeEntityToNBT(tags);
        boolean entityHasChest = this.getHasChest();
        boolean entityHasArmor = this.getHasArmor();

        tags.setBoolean(NBT_TAG_HAS_CHEST, entityHasChest);

        if (entityHasChest) {
            NBTTagList inventoryItemsTagList = new NBTTagList();
            // Only non-empty slots are persisted, each tagged with its index.
            for (byte slotIndex = 0;
                 slotIndex < this.getInventory().getSizeInventory();
                 slotIndex++) {
                ItemStack stackInSlot = this.getInventory().getStackInSlot(slotIndex);
                if (!stackInSlot.isEmpty()) {
                    NBTTagCompound slotTag = new NBTTagCompound();

                    slotTag.setByte(NBT_TAG_SLOT, slotIndex);
                    stackInSlot.writeToNBT(slotTag);
                    inventoryItemsTagList.appendTag(slotTag);
                }
            }
            tags.setTag(NBT_TAG_INVENTORY, inventoryItemsTagList);
        }

        if(entityHasArmor)
        {
            ItemStack armorItem = getArmorItemStack();
            if(!armorItem.isEmpty()) {
                tags.setTag(NBT_TAG_ARMOR_ITEM, armorItem.writeToNBT(new NBTTagCompound()));
            }
            else {
                tags.removeTag(NBT_TAG_ARMOR_ITEM);
            }
        }
    }

    /**
     * Reads the entity from the provided NBT Tag Compound
     *
     * @param tags The NBT data containing the entity's values
     */
    @Override
    public void readEntityFromNBT(@Nonnull NBTTagCompound tags) {
        super.readEntityFromNBT(tags);
        // Only load EntityWolfArmored-specific data if the NBT we are reading is actually an EntityWolfArmored NBT.
        // (NBT type id 1 = byte, used by hasKey below.)
        boolean entityHasChest = tags.hasKey(NBT_TAG_HAS_CHEST, 1) && tags.getBoolean(NBT_TAG_HAS_CHEST);

        this.setHasChest(entityHasChest);

        if(entityHasChest)
        {
            this.inventoryInit();
            // Tags of type NBTTagCompound (NBT type id 10)
            NBTTagList inventoryItemsTagList = tags.getTagList(NBT_TAG_INVENTORY, 10);
            for(int tagIndex = 0; tagIndex < inventoryItemsTagList.tagCount(); tagIndex++)
            {
                NBTTagCompound itemTag = inventoryItemsTagList.getCompoundTagAt(tagIndex);
                byte slotIndex = itemTag.getByte(NBT_TAG_SLOT);
                // Silently drop slots beyond the current inventory size.
                if(slotIndex < this.inventory.getSizeInventory())
                {
                    this.inventory.setInventorySlotContents(slotIndex, new ItemStack(itemTag));
                }
            }
        }

        NBTTagCompound armorTags = tags.getCompoundTag(NBT_TAG_ARMOR_ITEM);
        if(!armorTags.hasNoTags())
        {
            ItemStack armorItemStack = new ItemStack(armorTags);
            this.equipArmor(armorItemStack);
        }
    }

    /**
     * Event called when the entity's inventory is changed.
     * Slot 0 is treated as the armor slot and mirrored into the data watcher.
     *
     * @param inventory The inventory
     */
    @Override
    public void onInventoryChanged(@Nonnull IInventory inventory) {
        ItemStack armor = inventory.getStackInSlot(0);
        this.setArmorItemStack(armor);
    }

    /**
     * Equips a wolf armor item (placed into inventory slot 0) if allowed
     * by canEquipItem; otherwise does nothing.
     * @param armorItemStack The armor to equip
     */
    @Override
    public void equipArmor(@Nonnull ItemStack armorItemStack) {
        if(this.canEquipItem(armorItemStack)) {
            this.inventory.setInventorySlotContents(0, armorItemStack);
        }
    }

    /**
     * A stack is equippable when it is valid wolf armor and either no armor
     * is currently worn or the stack is empty (i.e. removing armor).
     */
    @Override
    public boolean canEquipItem(@Nonnull ItemStack armorItemStack) {
        return CapabilityWolfArmor.isValidWolfArmor(armorItemStack) && (!this.getHasArmor() || armorItemStack.isEmpty());
    }

    @Override
    public boolean processInteract(@Nonnull EntityPlayer player, @Nonnull EnumHand hand) {
        return super.processInteract(player, hand);
    }

    @Override
    public void dropEquipment(boolean killedByPlayer, int lootingModifier) {
        super.dropEquipment(killedByPlayer, lootingModifier);
    }

    @Override
    public void damageArmor(float damage) {
        super.damageArmor(damage);
    }

    // Intentionally a no-op: inventory contents are not dropped on death.
    @Override
    public void dropInventoryContents() { }

    //endregion Public / Protected Methods

    //region Accessors / Mutators

    /**
     * Gets a boolean value from the data watcher indicating whether or not the entity currently has a chest.
     * Always false while the wolf-chest feature is disabled in config.
     *
     * @return A boolean value indicating whether or not the entity currently has a chest
     */
    @Override
    public boolean getHasChest() {
        return WolfArmorMod.getConfiguration().getIsWolfChestEnabled() && this.dataManager.get(HAS_CHEST);
    }

    /**
     * Sets a boolean value on the data watcher representing whether or not the entity currently has an inventory.
     *
     * @param value The new value of the field.
     */
    @Override
    public void setHasChest(boolean value) {
        this.dataManager.set(HAS_CHEST, value);
    }

    /**
     * Gets a boolean value from the data watcher indicating whether or not the entity is currently armored.
     * @return A boolean value indicating whether or not the entity is currently armored.
     */
    @Override
    public boolean getHasArmor() {
        return !getArmorItemStack().isEmpty();
    }

    /**
     * Gets the entity's inventory
     *
     * @return The entity's inventory
     */
    @Override
    @Nonnull
    public InventoryBasic getInventory() {
        return this.inventory;
    }

    // Intentionally a no-op in this deprecated implementation.
    @Override
    public void setInventoryItem(int index, @Nonnull ItemStack itemStack) {
    }

    /**
     * Gets the entity's armor item from the data watcher.
     * Self-heals: an invalid stored stack is replaced with EMPTY.
     *
     * @return The entity's armor item, or ItemStack.EMPTY when unarmored/invalid.
     */
    @Override
    @Nonnull
    public ItemStack getArmorItemStack() {
        ItemStack itemStack = this.dataManager.get(ARMOR_ITEM);
        if(!CapabilityWolfArmor.isValidWolfArmor(itemStack)) {
            this.dataManager.set(ARMOR_ITEM, ItemStack.EMPTY);
            return ItemStack.EMPTY;
        }
        return itemStack;
    }

    /**
     * Updates the entity data watcher with the value of the armor item stack.
     * NOTE(review): silently ignores empty/invalid stacks and refuses to
     * replace already-equipped armor — callers get no feedback; confirm
     * this is the intended contract.
     *
     * @param itemStack The item stack to use
     */
    @Override
    public void setArmorItemStack(@Nonnull ItemStack itemStack) {
        if(itemStack.isEmpty() || !CapabilityWolfArmor.isValidWolfArmor(itemStack)) {
            return;
        }
        ItemStack currentArmor = getArmorItemStack();
        if(!currentArmor.isEmpty()) {
            return;
        }
        this.dataManager.set(ARMOR_ITEM, itemStack);
    }

    @Override
    public int getMaxSizeInventory() {
        return MAX_SIZE_INVENTORY;
    }

    //endregion Accessors / Mutators
}
|
# Terminal capability database, for the $terminfo key lookups below.
zmodload zsh/terminfo
# Allow editing the current command line in $EDITOR.
autoload -Uz edit-command-line
zle -N edit-command-line
setopt BEEP
# vi command mode: external edit, incremental history search, undo/redo.
bindkey -M vicmd "E" edit-command-line
bindkey -M vicmd '/' history-incremental-pattern-search-backward
bindkey -M vicmd '?' history-incremental-pattern-search-forward
bindkey -M vicmd "\C-R" redo
bindkey -M vicmd "u" undo
# vi insert mode: history search plus terminfo-driven special keys
# (Home, End, Insert) and explicit Delete/Backspace sequences.
bindkey -M viins '^R' history-incremental-pattern-search-backward
bindkey -M viins '^F' history-incremental-pattern-search-forward
bindkey -M viins "$terminfo[khome]" beginning-of-line
bindkey -M viins "$terminfo[kend]" end-of-line
bindkey -M viins "$terminfo[kich1]" overwrite-mode
bindkey -M viins "^[[3~" delete-char
bindkey -M viins "^?" backward-delete-char
|
/**
 * Returns the given string reversed.
 *
 * Uses StringBuilder.reverse(), which (unlike the previous char-by-char
 * loop) keeps surrogate pairs intact, so supplementary Unicode characters
 * are not corrupted.
 *
 * @param str the string to reverse; may be null
 * @return the reversed string, or the input unchanged when null/empty
 */
public static String reverse(String str) {
    if (str == null || str.isEmpty()) {
        return str;
    }
    return new StringBuilder(str).reverse().toString();
}
|
def standard_deviation(list_data):
    """Population standard deviation of a numeric sequence.

    Args:
        list_data: non-empty sequence of numbers.

    Returns:
        The population standard deviation (divides by len(list_data)).

    Raises:
        ValueError: if the sequence is empty (the original code raised an
            obscure ZeroDivisionError instead).
    """
    if not list_data:
        raise ValueError("standard_deviation() requires at least one value")
    mean = sum(list_data) / len(list_data)
    variance = sum((x - mean) ** 2 for x in list_data) / len(list_data)
    return variance ** 0.5

list_data = [5, 8, 10, 15, 20]
print(standard_deviation(list_data))
|
#!/bin/env sh
# Temporary script to test syncing, this should be done by the regular tests instead.
set -e

# Target host (as the cluster knows it) and its remote cluster directory.
HOST=my-laptop
HOSTHOME=/home/bashlund/cluster-host
export CLUSTERPATH=/home/bashlund/tmp/simplenetes-testing/dev-cluster

# Get an understanding what the remote host has right now.
releaseData="$(space / -e RUN=_SYNC_REMOTE_PACK_RELEASE_DATA -- "${HOSTHOME}")"
printf "Release data:\\n%s\\n" "${releaseData}" >&2

# Create an archive with the diff of remote host and local host dir
dir=$(space / -e RUN=_SYNC_BUILD_UPDATE_ARCHIVE -- "${HOST}" "${releaseData}")
printf "Archive dir: %s\\n" "${dir}" >&2

# Apply the diff onto the remote host directory
space / -e RUN=_SYNC_REMOTE_UNPACK_ARCHIVE2 -- "${HOSTHOME}" "${dir}"
|
<reponame>S0c5/ledger
package ledger
import (
"errors"
"fmt"
"github.com/numary/ledger/core"
machine "github.com/numary/machine/core"
"github.com/numary/machine/script/compiler"
"github.com/numary/machine/vm"
)
// Execute compiles and runs a Numscript program against the ledger: it
// answers the account-metadata and balance requests the VM emits, runs the
// machine, and commits the resulting postings as a single transaction.
func (l *Ledger) Execute(script core.Script) error {
	if script.Plain == "" {
		return errors.New("no script to execute")
	}
	p, err := compiler.Compile(script.Plain)
	if err != nil {
		return fmt.Errorf("compile error: %v", err)
	}
	m := vm.NewMachine(p)
	err = m.SetVarsFromJSON(script.Vars)
	if err != nil {
		return fmt.Errorf("could not set variables: %v", err)
	}
	{
		ch, err := m.ResolveResources()
		if err != nil {
			return fmt.Errorf("could not resolve program resources: %v", err)
		}
		for req := range ch {
			if req.Error != nil {
				return fmt.Errorf("could not resolve program resources: %v", req.Error)
			}
			account, err := l.GetAccount(req.Account)
			if err != nil {
				return fmt.Errorf("could not get account %q: %v", req.Account, err)
			}
			meta := account.Metadata
			entry, ok := meta[req.Key]
			if !ok {
				return fmt.Errorf("missing key %v in metadata for account %v", req.Key, req.Account)
			}
			value, err := machine.NewValueFromTypedJSON(entry)
			if err != nil {
				return fmt.Errorf("invalid format for metadata at key %v for account %v: %v", req.Key, req.Account, err)
			}
			req.Response <- *value
		}
	}
	{
		ch, err := m.ResolveBalances()
		if err != nil {
			return fmt.Errorf("could not resolve balances: %v", err)
		}
		for req := range ch {
			if req.Error != nil {
				// BUG FIX: previously formatted the outer `err`, which is
				// guaranteed nil here, instead of req.Error.
				return fmt.Errorf("could not resolve balances: %v", req.Error)
			}
			account, err := l.GetAccount(req.Account)
			if err != nil {
				return fmt.Errorf("could not get account %q: %v", req.Account, err)
			}
			// Negative balances are clamped to zero before being handed to
			// the VM, which expects an unsigned amount.
			amt := account.Balances[req.Asset]
			if amt < 0 {
				amt = 0
			}
			req.Response <- uint64(amt)
		}
	}
	c, err := m.Execute()
	if err != nil {
		return fmt.Errorf("script failed: %v", err)
	}
	if c == vm.EXIT_FAIL {
		return errors.New("script exited with error code EXIT_FAIL")
	}
	t := core.Transaction{
		Postings: m.Postings,
	}
	_, err = l.Commit([]core.Transaction{t})
	return err
}
|
<?php
/**
 * Generate a random alphanumeric string.
 *
 * Uses random_int(), PHP's CSPRNG, instead of the non-cryptographic rand()
 * the original used — random tokens built with rand() are predictable.
 *
 * @param int $length Number of characters to produce.
 * @return string Random string of [0-9a-zA-Z], empty for $length <= 0.
 */
function generateRandomString($length) {
    $characters = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
    $charactersLength = strlen($characters);
    $randomString = '';
    for ($i = 0; $i < $length; $i++) {
        $randomString .= $characters[random_int(0, $charactersLength - 1)];
    }
    return $randomString;
}

// Usage
echo generateRandomString(10); // Outputs a 10 characters random string
?>
#!/bin/bash
set -e # exit on error
# Bootstrap a Miniconda environment and install the test dependencies.
wget "https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh" -O miniconda.sh
bash miniconda.sh -b
echo "Configuring conda."
conda config --set auto_update_conda off
echo "Installing test dependencies."
conda install --yes appdirs dask==2 nodejs jinja2 numpy pyyaml requests msgpack-numpy pytest-cov coveralls \
    pytest fsspec intake-parquet zarr notebook panel==0.8.1 hvplot==0.5.2 bokeh==1.4.0 -c conda-forge -c defaults
# Development dask straight from git, the dat CLI, then this package itself
# in editable mode (deps already satisfied above).
pip install git+https://github.com/dask/dask --upgrade --no-deps
npm install -g dat
pip install -e . --no-deps
|
<filename>swarm/document-service/src/data/manager-client.js
import service from '../common/service-helper';
// Port on which every document-manager service instance exposes its API.
// Previously this was repeated as a magic number in each request below.
const MANAGER_PORT = 4111;

export default {
  getService,
  startService,
  checkService,
};

/**
 * Fetches the current description of a managed service.
 * @param  {string} name service name
 * @return {Promise<Object>} resolves with the service description
 */
function getService(name) {
  return service.get({ host: service.host(name), port: MANAGER_PORT, path: `/api/services/${name}`});
}

/**
 * Asks the manager to start (scale up) a service.
 * @param  {string} name service name
 * @return {Promise<Object>} resolves when the start request has been issued
 */
function startService(name) {
  return service.get({ host: service.host(name), port: MANAGER_PORT, path: `/api/services/${name}/start` });
}

/**
 * Checks a service's state; starts it when it is not running, otherwise
 * returns its description.
 * @param  {string} name service name
 * @return {Promise<Object>} start response or service description
 */
function checkService(name) {
  return service.get({ host: service.host(name), port: MANAGER_PORT, path: `/api/services/${name}/state`})
    .then((state) => {
      return state !== 'running'
        ? startService(name)
        : getService(name);
    });
}
|
#!/bin/bash
# This script builds all dependencies of sdsl
# and installs the library on a LINUX or Mac OS X system
CUR_DIR=`pwd`

# Install prefix: defaults to $HOME, may be overridden by the first argument.
SDSL_INSTALL_PREFIX=${HOME}
if [ $# -ge 1 ]; then
    SDSL_INSTALL_PREFIX=${1}
fi

# Get absolute path name of install directory
mkdir -p "${SDSL_INSTALL_PREFIX}" 2> /dev/null
cd "${SDSL_INSTALL_PREFIX}" > /dev/null 2>&1
if [ $? != 0 ] ; then
    echo "ERROR: directory '${SDSL_INSTALL_PREFIX}' does not exist nor could be created."
    echo "Please choose another directory."
    exit 1
else
    SDSL_INSTALL_PREFIX=`pwd -P`
fi
echo "Library will be installed in '${SDSL_INSTALL_PREFIX}'"
cd "${CUR_DIR}"

OLD_DIR="$( cd "$( dirname "$0" )" && pwd )" # gets the directory where the script is located in
cd "${OLD_DIR}"
OLD_DIR=`pwd`

# (1) Copy pre-commit hook
if [ -d ".git/hooks" ]; then
    echo "Copy pre-commit into .git/hooks"
    cp extras/pre-commit .git/hooks/
    if [ $? != 0 ]; then
        echo "WARNING: could not copy pre-commit script into .git/hooks"
    fi
    chmod u+x .git/hooks/pre-commit
    if [ $? != 0 ]; then
        echo "WARNING: could not make pre-commit script executable"
    fi
else
    echo "WARNING: .git/hooks directory does not exists."
    echo "         The pre-commit hook is not installed."
fi

# (2) Install divsufsort, gtest, and sdsl
HEADER=test/CompileTest.hpp # Make a header-file that contains all other header-files
echo "#ifndef INCLUDED_SDSL_COMPILE_TEST" > ${HEADER}
echo "#define INCLUDED_SDSL_COMPILE_TEST" >> ${HEADER}
for HEADERFILE in include/sdsl/*.hpp
do
    FILENAME=`basename ${HEADERFILE}`
    echo "#include \"sdsl/${FILENAME}\"" >> ${HEADER}
done
echo "#endif" >> ${HEADER}

cd build # change into the build directory
if [ $? != 0 ]; then
    exit 1
fi
./clean.sh # clean-up build directory
if [ $? != 0 ]; then
    exit 1
fi
cmake -DCMAKE_INSTALL_PREFIX="${SDSL_INSTALL_PREFIX}" .. # run cmake
if [ $? != 0 ]; then
    echo "ERROR: CMake build failed."
    exit 1
fi
make # run make
if [ $? != 0 ]; then
    echo "ERROR: Build failed."
    exit 1
fi

echo "Removing old files"
# BUGFIX: keep the glob OUTSIDE the double quotes. Previously the '*' was
# quoted, so the shell passed a literal '*' to rm and stale headers /
# libraries from an earlier installation were never actually removed.
echo "rm -rf '${SDSL_INSTALL_PREFIX}/include/sdsl/'*"
rm -rf "${SDSL_INSTALL_PREFIX}/include/sdsl/"*
if [ $? != 0 ]; then
    echo "WARNING: Could not remove old header files."
fi
echo "rm -f '${SDSL_INSTALL_PREFIX}/lib/'libsdsl*"
rm -f "${SDSL_INSTALL_PREFIX}/lib/"libsdsl*
if [ $? != 0 ]; then
    echo "WARNING: Could not remove old library file."
fi

make install # install library
if [ $? != 0 ]; then
    echo "ERROR: Installation failed."
    exit 1
fi

cd ..
if [ "`pwd`" != "${OLD_DIR}" ]; then
    echo "ERROR: we are not in the original dir ${OLD_DIR} now."
    exit 1
fi

echo "SUCCESS: sdsl was installed successfully!"
echo "The sdsl include files are located in '${SDSL_INSTALL_PREFIX}/include'."
echo "The library files are located in '${SDSL_INSTALL_PREFIX}/lib'."
echo " "
echo "Sample programs can be found in the examples-directory."
echo "A program 'example.cpp' can be compiled with the command: "
echo "g++ -std=c++11 -DNDEBUG -O3 [-msse4.2] \\"
echo "    -I${SDSL_INSTALL_PREFIX}/include -L${SDSL_INSTALL_PREFIX}/lib \\"
echo "    example.cpp -lsdsl -ldivsufsort -ldivsufsort64"
echo " "
echo "Tests in the test-directory"
echo "A cheat sheet in the extras/cheatsheet-directory."
echo "Have fun!"
|
#!/bin/bash
set -e
# Reference:
# - https://docs.docker.com/engine/userguide/containers/dockerimages/
# - https://github.com/dockerfile/java/blob/master/oracle-java8/Dockerfile

# Print usage when called with no arguments.
# NOTE(review): the script prints usage but does NOT exit here — it then
# continues with the defaults (./Dockerfile, auto-derived image tag).
# Confirm whether that fall-through is intentional.
if [ $# -lt 1 ]; then
    echo "-------------------------------------------------------------------------------------------"
    echo "Usage: "
    echo "  ${0} [<Dockerfile> <imageTag> [<some more optional arguments...>] ] "
    echo "e.g."
    echo "   ./build.sh ./centos/Dockerfile.centos.xfce.vnc openkbs/centos-xfce-vnc --no-cache --build-arg OS_TYPE=centos'"
    echo "   ./build.sh ./Dockerfile.ubuntu.xfce.vnc openkbs/ubuntu-xfce-vnc --no-cache --build-arg OS_TYPE=centos'"
    echo "-------------------------------------------------------------------------------------------"
fi
MY_DIR=$(dirname "$(readlink -f "$0")")
# Dockerfile path ($1, defaults to ./Dockerfile); the build context is the
# directory containing that Dockerfile.
DOCKERFILE=${1:-./Dockerfile}
DOCKERFILE=$(realpath $DOCKERFILE)
BUILD_CONTEXT=$(dirname ${DOCKERFILE})
imageTag=${2}
# Everything after the first two arguments is forwarded verbatim to
# `docker build` (e.g. --no-cache, --build-arg KEY=VALUE).
if [ $# -gt 2 ]; then
    shift 2
    options="$*"
else
    options=""
fi
##########################################################
#### ---- Whether to remove previous build cache ---- ####
#### ---- Valid value: 0 (No remove); 1 (yes, remove)
##########################################################
REMOVE_CACHE=0
###############################################################################
###############################################################################
###############################################################################
#### ---- DO NOT Change the code below UNLESS you really want to !!!!) --- ####
#### ---- DO NOT Change the code below UNLESS you really want to !!!!) --- ####
#### ---- DO NOT Change the code below UNLESS you really want to !!!!) --- ####
###############################################################################
###############################################################################
###############################################################################
##########################################################
#### ---- Generate remove cache option if needed ---- ####
##########################################################
REMOVE_CACHE_OPTION=""
if [ ${REMOVE_CACHE} -gt 0 ]; then
    REMOVE_CACHE_OPTION="--no-cache --rm"
fi
###################################################
#### ---- Change this only if want to use your own
###################################################
ORGANIZATION=openkbs
###################################################
#### ---- Detect Docker Run Env files ----
###################################################
# Looks for a build-time environment file in this order:
#   ${DOCKER_ENV_FILE} -> ./docker-build.env -> ./.env -> ./docker.env
# and points DOCKER_ENV_FILE at the first non-empty file found.
function detectDockerBuildEnvFile() {
    curr_dir=`pwd`
    if [ -s "${DOCKER_ENV_FILE}" ]; then
        echo "--- INFO: Docker Build Environment file '${DOCKER_ENV_FILE}' FOUND!"
    else
        echo "*** WARNING: Docker Build Environment file '${DOCKER_ENV_FILE}' NOT found!"
        echo "*** WARNING: Searching for .env or docker.env as alternative!"
        echo "*** --->"
        if [ -s "./docker-build.env" ]; then
            echo "--- INFO: ./docker-build.env FOUND to use as Docker Run Environment file!"
            DOCKER_ENV_FILE="./docker-build.env"
        else
            if [ -s "./.env" ]; then
                echo "--- INFO: ./.env FOUND to use as Docker Run Environment file!"
                DOCKER_ENV_FILE="./.env"
            else
                echo "--- INFO: ./.env Docker Environment file (.env) NOT found!"
                if [ -s "./docker.env" ]; then
                    echo "--- INFO: ./docker.env FOUND to use as Docker Run Environment file!"
                    DOCKER_ENV_FILE="./docker.env"
                else
                    echo "*** WARNING: Docker Environment file (.env) or (docker.env) NOT found!"
                fi
            fi
        fi
    fi
}
detectDockerBuildEnvFile
###################################################
#### ---- Container package information ----
###################################################
# Repo name derives from the current directory name, lower-cased and with
# "/: " characters replaced by underscores.
DOCKER_IMAGE_REPO=`echo $(basename $PWD)|tr '[:upper:]' '[:lower:]'|tr "/: " "_" `
imageTag=${imageTag:-"${ORGANIZATION}/${DOCKER_IMAGE_REPO}"}
###################################################
#### ---- Generate build-arg arguments ----
###################################################
BUILD_ARGS=""
BUILD_DATE="`date -u +"%Y-%m-%dT%H:%M:%SZ"`"
VCS_REF="`git rev-parse --short HEAD`"
VCS_URL="https://github.com/`echo $(basename $PWD)`"
BUILD_ARGS="--build-arg BUILD_DATE=${BUILD_DATE} --build-arg VCS_REF=${VCS_REF}"
## -- ignore entries start with "#" symbol --
# Converts every KEY=VALUE line of the env file into a --build-arg option.
# NOTE(review): `cut -d'=' -f2` truncates values that themselves contain an
# '=' character, and `tr -d ' '` strips all spaces — confirm env values
# never need either.
function generateBuildArgs() {
    if [ "${DOCKER_ENV_FILE}" != "" ] && [ -s "${DOCKER_ENV_FILE}" ]; then
        for r in `cat ${DOCKER_ENV_FILE} | grep -v '^#'`; do
            echo "entry=> $r"
            key=`echo $r | tr -d ' ' | cut -d'=' -f1`
            value=`echo $r | tr -d ' ' | cut -d'=' -f2`
            BUILD_ARGS="${BUILD_ARGS} --build-arg $key=$value"
        done
    fi
}
generateBuildArgs
echo "BUILD_ARGS=${BUILD_ARGS}"
###################################################
#### ---- Setup Docker Build Proxy ----
###################################################
# export NO_PROXY="localhost,127.0.0.1,.openkbs.org"
# export HTTP_PROXY="http://gatekeeper-w.openkbs.org:80"
# when using "wget", add "--no-check-certificate" to avoid https certificate checking failures
#
echo "... Setup Docker Build Proxy: ..."
PROXY_PARAM=
# Forwards any proxy variables present in the caller's environment (both
# upper- and lower-case variants) to the build as --build-arg options.
function generateProxyArgs() {
    if [ "${HTTP_PROXY}" != "" ]; then
        PROXY_PARAM="${PROXY_PARAM} --build-arg HTTP_PROXY=${HTTP_PROXY}"
    fi
    if [ "${HTTPS_PROXY}" != "" ]; then
        PROXY_PARAM="${PROXY_PARAM} --build-arg HTTPS_PROXY=${HTTPS_PROXY}"
    fi
    if [ "${NO_PROXY}" != "" ]; then
        PROXY_PARAM="${PROXY_PARAM} --build-arg NO_PROXY=\"${NO_PROXY}\""
    fi
    if [ "${http_proxy}" != "" ]; then
        PROXY_PARAM="${PROXY_PARAM} --build-arg http_proxy=${http_proxy}"
    fi
    if [ "${https_proxy}" != "" ]; then
        PROXY_PARAM="${PROXY_PARAM} --build-arg https_proxy=${https_proxy}"
    fi
    if [ "${no_proxy}" != "" ]; then
        PROXY_PARAM="${PROXY_PARAM} --build-arg no_proxy=\"${no_proxy}\""
    fi
    BUILD_ARGS="${BUILD_ARGS} ${PROXY_PARAM}"
}
generateProxyArgs
echo -e "BUILD_ARGS=> \n ${BUILD_ARGS}"
echo
###################################################
#### ---- Build Container ----
###################################################
# Build from inside the Dockerfile's directory; set -x echoes the full
# docker command for debugging.
cd ${BUILD_CONTEXT}
set -x
sudo docker build ${REMOVE_CACHE_OPTION} -t ${imageTag} \
    ${BUILD_ARGS} \
    ${options} \
    -f $(basename ${DOCKERFILE}) .
set +x
cd -
# NOTE(review): the example commands echoed below are illustrative only;
# some flags are misplaced (e.g. `docker exec` does not take --name).
echo "----> Shell into the Container in interactive mode: "
echo "  docker exec -it --name <some-name> /bin/bash"
echo "e.g."
echo "  docker run --name "my-$(basename $imageTag)" /bin/bash "
echo "----> Run: "
echo "  docker run --name <some-name> -it ${imageTag} /bin/bash"
echo "e.g."
echo "  docker run --name "my-$(basename $imageTag)" ${imageTag} "
echo "----> Run in interactive mode: "
echo "  docker run -it --name <some-name> ${imageTag} /bin/bash"
echo "e.g."
echo "  docker run -it --name "my-$(basename $imageTag)" -it ${imageTag} "
echo "----> Build Docker Images again: "
echo "To build again: (there is a dot at the end of the command!)"
echo "  docker build -t ${imageTag} . "
echo
docker images |grep "$imageTag"
|
package com.platform.service;
import com.platform.entity.RepairEntity;
import java.util.List;
import java.util.Map;
/**
 * Service interface for {@code RepairEntity} persistence operations.
 *
 * @author lipengjun
 * @email <EMAIL>
 * @date 2018-11-21 09:25:22
 */
public interface RepairService {
    /**
     * Queries a single entity by its primary key.
     *
     * @param id primary key
     * @return the matching entity
     */
    RepairEntity queryObject(Integer id);
    /**
     * Paged list query.
     *
     * @param map query parameters (filters and paging options)
     * @return list of matching entities
     */
    List<RepairEntity> queryList(Map<String, Object> map);
    /**
     * Total row count for a paged query.
     *
     * @param map query parameters
     * @return total number of matching rows
     */
    int queryTotal(Map<String, Object> map);
    /**
     * Saves an entity.
     *
     * @param repair entity to insert
     * @return number of rows inserted
     */
    int save(RepairEntity repair);
    /**
     * Updates an entity identified by its primary key.
     *
     * @param repair entity carrying the new values
     * @return number of rows updated
     */
    int update(RepairEntity repair);
    /**
     * Deletes by primary key.
     *
     * @param id primary key
     * @return number of rows deleted
     */
    int delete(Integer id);
    /**
     * Batch delete by primary keys.
     *
     * @param ids primary keys
     * @return number of rows deleted
     */
    int deleteBatch(Integer[] ids);
}
|
<gh_stars>0
// angular
import { Title, DOCUMENT } from '@angular/platform-browser';
import { Inject, Injectable } from '@angular/core';
import { Router, NavigationEnd, ActivatedRoute } from '@angular/router';
// libs
import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/map';
// module
import { PageTitlePositioning } from './models/page-title-positioning';
import { MetadataSettings } from './models/metadata-settings';
/**
 * Strategy object that supplies the {@link MetadataSettings} used by
 * MetadataService; swap implementations to load settings statically or
 * from another source.
 */
export abstract class MetadataLoader {
  abstract getSettings(): MetadataSettings;
}
/**
 * MetadataLoader that returns a fixed settings object supplied at
 * construction time.
 */
export class MetadataStaticLoader implements MetadataLoader {
  /**
   * @param metadataSettings settings returned verbatim by `getSettings`;
   *        defaults to prepend-style page titles with no default tag values.
   */
  constructor(private metadataSettings: MetadataSettings = {
    pageTitlePositioning: PageTitlePositioning.PrependPageTitle,
    defaults: {}
  }) {
  }

  /** Returns the settings captured at construction time. */
  getSettings(): MetadataSettings {
    const settings = this.metadataSettings;
    return settings;
  }
}
@Injectable()
export class MetadataService {
  // Settings provided by the MetadataLoader (application name, title
  // separator, default tag values, ...).
  private metadataSettings: any;
  // Tracks which tags were explicitly set for the current route so the
  // defaults pass does not overwrite them.
  private isMetadataSet: any;
  constructor(private router: Router,
        @Inject(DOCUMENT) private document: any,
        private titleService: Title,
        private activatedRoute: ActivatedRoute,
        public loader: MetadataLoader) {
    this.metadataSettings = loader.getSettings();
    this.isMetadataSet = {};
    // On every completed navigation, walk down to the deepest activated
    // child route and apply any `metadata` object found in its route config.
    this.router.events
      .filter(event => (event instanceof NavigationEnd))
      .subscribe((routeData: any) => {
        let route = this.activatedRoute;
        while (route.children.length > 0) {
          route = route.firstChild;
          // NOTE(review): assumes routeConfig is non-null for every child
          // route — a componentless/primary-outlet edge case could make
          // this throw; confirm against the app's route tree.
          if (!!route.snapshot.routeConfig.data) {
            const metadata = route.snapshot.routeConfig.data['metadata'];
            if (!!metadata)
              this.updateMetadata(metadata, routeData.url);
          }
        }
      });
  }
  /**
   * Sets the document title and mirrors it to the `og:title` meta tag.
   * Depending on `pageTitlePositioning`, the application name is appended
   * or prepended with the configured separator unless `override` is true.
   */
  setTitle(title: string, override = false): void {
    const ogTitleElement = this.getOrCreateMetaTag('og:title');
    switch (this.metadataSettings.pageTitlePositioning) {
      case PageTitlePositioning.AppendPageTitle:
        title = (!override
          && !!this.metadataSettings.pageTitleSeparator
          && !!this.metadataSettings.applicationName
          ? (this.metadataSettings.applicationName + this.metadataSettings.pageTitleSeparator)
          : '')
          + (!!title ? title : (this.metadataSettings.defaults['title'] || ''));
        break;
      case PageTitlePositioning.PrependPageTitle:
        title = (!!title ? title : (this.metadataSettings.defaults['title'] || ''))
          + (!override
          && !!this.metadataSettings.pageTitleSeparator
          && !!this.metadataSettings.applicationName
          ? (this.metadataSettings.pageTitleSeparator + this.metadataSettings.applicationName)
          : '');
        break;
      default:
        throw new Error(`Error: Invalid pageTitlePositioning specified [${this.metadataSettings.pageTitlePositioning}]!`);
    }
    if (!title)
      console.warn('WARNING: No "page title" specified.');
    ogTitleElement.setAttribute('content', title);
    this.titleService.setTitle(title);
  }
  /**
   * Sets a single meta tag, falling back to the configured default value
   * when `value` is empty. 'title' is reserved — use `setTitle` instead.
   * description/author/publisher are mirrored to their og: counterparts;
   * og:locale values additionally refresh the og:locale:alternate tags.
   */
  setTag(tag: string, value: string): void {
    if (tag === 'title')
      throw new Error(`Error: Attempt to set ${tag} through 'setTag': 'title' is a reserved tag name. `
        + `Please use 'MetadataService.setTitle' instead.`);
    value = !!value
      ? value
      : !!this.metadataSettings.defaults ? this.metadataSettings.defaults[tag] : '';
    const tagElement = this.getOrCreateMetaTag(tag);
    // Locales use underscore form (en_US) in OpenGraph, hence the replace.
    tagElement.setAttribute('content', tag === 'og:locale' ? value.replace(/-/g, '_') : value);
    this.isMetadataSet[tag] = true;
    if (tag === 'description') {
      const ogDescriptionElement = this.getOrCreateMetaTag('og:description');
      ogDescriptionElement.setAttribute('content', value);
    } else if (tag === 'author') {
      const ogAuthorElement = this.getOrCreateMetaTag('og:author');
      ogAuthorElement.setAttribute('content', value);
    } else if (tag === 'publisher') {
      const ogPublisherElement = this.getOrCreateMetaTag('og:publisher');
      ogPublisherElement.setAttribute('content', value);
    } else if (tag === 'og:locale') {
      const availableLocales = !!this.metadataSettings.defaults
        ? this.metadataSettings.defaults['og:locale:alternate']
        : '';
      this.updateLocales(value, availableLocales);
      this.isMetadataSet['og:locale:alternate'] = true;
    } else if (tag === 'og:locale:alternate') {
      const ogLocaleElement = this.getOrCreateMetaTag('og:locale');
      const currentLocale = ogLocaleElement.getAttribute('content');
      this.updateLocales(currentLocale, value);
      this.isMetadataSet['og:locale'] = true;
    }
  }
  /**
   * Creates and appends a <meta> element; og:* tags use the `property`
   * attribute, everything else uses `name`.
   */
  private createMetaTag(name: string): any {
    const el = this.document.createElement('meta');
    el.setAttribute(name.lastIndexOf('og:', 0) === 0 ? 'property' : 'name', name);
    this.document.head.appendChild(el);
    return el;
  }
  /** Returns the existing <meta> element for `name`, creating it if absent. */
  private getOrCreateMetaTag(name: string): any {
    let selector = `meta[name="${name}"]`;
    if (name.lastIndexOf('og:', 0) === 0)
      selector = `meta[property="${name}"]`;
    let el = this.document.querySelector(selector);
    if (!el)
      el = this.createMetaTag(name);
    return el;
  }
  /**
   * Updates the html[lang] attribute and rebuilds the set of
   * og:locale:alternate tags from a comma-separated locale list,
   * omitting the current locale.
   */
  private updateLocales(currentLocale: string, availableLocales: any): void {
    if (!currentLocale)
      currentLocale = !!this.metadataSettings.defaults
        ? this.metadataSettings.defaults['og:locale']
        : '';
    const html = this.document.querySelector('html');
    html.setAttribute('lang', currentLocale);
    const selector = `meta[property="og:locale:alternate"]`;
    let elements = this.document.querySelectorAll(selector);
    // fixes "TypeError: Object doesn't support property or method 'forEach'" issue on IE11
    elements = Array.prototype.slice.call(elements);
    elements.forEach((el: any) => {
      this.document.head.removeChild(el);
    });
    if (!!currentLocale && !!availableLocales) {
      availableLocales.split(',')
        .forEach((locale: string) => {
          if (currentLocale !== locale) {
            const el = this.createMetaTag('og:locale:alternate');
            el.setAttribute('content', locale.replace(/-/g, '_'));
          }
        });
    }
  }
  /**
   * Applies a route's metadata object: sets the title first, then every
   * tag in `metadata`, then any configured defaults that were not already
   * set, and finally og:url derived from the current route URL.
   */
  private updateMetadata(metadata: any, currentUrl: string): void {
    if (metadata.disabled)
      return;
    this.setTitle(metadata.title, metadata.override);
    Object.keys(metadata)
      .forEach(key => {
        let value = metadata[key];
        if (key === 'title' || key === 'override')
          return;
        else if (key === 'og:locale')
          value = value.replace(/-/g, '_');
        else if (key === 'og:locale:alternate') {
          const currentLocale = metadata['og:locale'];
          this.updateLocales(currentLocale, metadata[key]);
          return;
        }
        this.setTag(key, value);
      });
    if (!!this.metadataSettings.defaults)
      Object.keys(this.metadataSettings.defaults)
        .forEach(key => {
          let value = this.metadataSettings.defaults[key];
          if (key in this.isMetadataSet || key in metadata || key === 'title' || key === 'override')
            return;
          else if (key === 'og:locale')
            value = value.replace(/-/g, '_');
          else if (key === 'og:locale:alternate') {
            const currentLocale = metadata['og:locale'];
            this.updateLocales(currentLocale, this.metadataSettings.defaults[key]);
            return;
          }
          this.setTag(key, value);
        });
    this.setTag('og:url', (this.metadataSettings.applicationUrl || '/') + currentUrl.replace(/\/$/g, ''));
  }
}
|
<reponame>song28/reservoir
import Map from "ol/Map.js";
import View from "ol/View.js";
import {defaults} from "ol/control"
import { Tile as TileLayer, Vector as VectorLayer } from 'ol/layer.js';
import { XYZ, TileWMS, Vector as VectorSource, Cluster } from 'ol/source.js';
import { Circle as CircleStyle, Fill, Stroke, Style, Text, Icon } from 'ol/style.js';
import {Point} from "ol/geom"
import Feature from "ol/Feature"
import olMapServer from "./ol-mapServer"
import Overlay from 'ol/Overlay.js';
/**
 * Map wrapper class: creates and manages an OpenLayers map instance.
 */
class ZMap extends olMapServer {
    constructor(option){
        super()
        this.fillStyle = new Fill({
            color: 'rgba(255, 255, 255, 0.8)'
        })
        // stroke (boundary) style
        this.strokeStyle = new Stroke({
            color: '#ffcc33',
            width: 2
        })
        // circle shape style
        this.imageCircle = new CircleStyle({
            radius: 17,
            fill: new Fill({
                color: '#ffcc33'
            })
        })
        this.icon_style = {
            anchor: [0.5, 1],
            offset:[0,1],
            // icon scale factor
            scale:0.8,
            // opacity
            opacity: 1,
            // icon URL
            src:"../skins/default/images/StationImages/yc_pp.png" // note: pass an absolute path here
        }
        this.style = new Style({
            // fill color
            fill: this.fillStyle,
            // stroke color
            stroke:this.strokeStyle ,
            // shape
            image: this.imageCircle
        })
        if(!option)
            return
        var target = option.target?option.target:'map'
        var zoom = option.zoom?option.zoom:this.zoom
        var center = option.center?option.center:this.center
        var baseMap = option.baseMap?option.baseMap:true
        var olMap = new Map({
            target: target,
            view: new View({
                projection: 'EPSG:4326',
                center:center ,
                zoom: zoom
            }),
            controls: defaults({
                zoom: false,
                rotate: false,
                attribution: false
            })
        });
        this.map = olMap
        /**
         * Switch the cursor to a pointer while hovering over a feature.
         */
        olMap.on('pointermove', function (e) {
            var pixel =olMap.getEventPixel(e.originalEvent);
            var hit =olMap.hasFeatureAtPixel(pixel);
            olMap.getTargetElement().style.cursor = hit ? 'pointer' : '';
        });
        // if(baseMap)
        // var vetorLayers = this.tiandituVetorMap(olMap)
        // return olMap
    }
    /**
     * Adds point markers to the map.
     * @param {*} option parameters:
     *   points: array of point descriptors,
     *     {
     *       center: coordinates,
     *       imgUrl: icon URL,
     *       title: header text of the label popup,
     *       element: custom HTML for the popup
     *     }
     *   layerId: layer name
     *   label: boolean — whether to show the label popup
     *   name: display name for the shown value
     *   key: property key of each point whose value is displayed
     *   divPop: fully custom popup (skip the built-in template)
     * Example:
     *   {
     *     points: [{
     *       center: [114.6727, 35.5028],
     *       imgUrl:"images/StationImages/maker/压力计.png",
     *       name:"水质",
     *       title:"123",
     *       element:"<div>3333333</div>"
     *     }],
     *     layerId:"222",
     *     label:true,
     *     name:"性别",
     *     key:"name",
     *   }
     */
    addPoint(option){
        var that = this
        if(option.points&&option.points.length<1)
            return false
        let overlayes= []
        // create one feature per point
        let points = []
        option.points.map(function (item,index) {
            let point_ = new Feature({
                geometry: new Point(item.center),
            });
            point_.data = item;
            point_.layerId = option.layerId? option.layerId:""
            let style_ = new Style({
                // shape
                image:new Icon({
                    offset:[0,1],
                    // icon scale factor
                    scale:item.scale? item.scale:0.8,
                    // opacity
                    opacity: 1,
                    // icon URL
                    src:item.imgUrl? item.imgUrl:"images/StationImages/marker.png" // note: pass an absolute path here
                })
            })
            // apply the style to this point
            point_.setStyle( style_);
            points.push(point_)
            // show the label popup
            if(option.label){
                var point_div = document.createElement('div');
                if(option.divPop){
                    if(item.element){
                        point_div.innerHTML = item.element
                    }else{
                        if(option.name){
                            point_div.innerHTML = `<div class="point-info-header">${option.name}:${option.key?item[option.key]:""}</div>`
                        }else{
                            point_div.innerHTML = `<div class="point-info-header">${option.key?item[option.key]:""}</div>`
                        }
                    }
                }else{
                    let html = `<div class="point-info-box">`
                    // default label template
                    if(item.element){
                        html +=`<div class="point-info-header">${item.title?item.title:"信息"}</div>
                        <div class="point-info-content">${item.element?item.element:""}</div>`
                    }else{
                        html += ` <div class="point-info-header">${item.title?item.title:"信息"}</div>`
                        if(option.name){
                            html += `<div class="point-info-content">${option.name}:${option.key?item[option.key]:""}</div>`
                        }else{
                            html += `<div class="point-info-content">${option.key?item[option.key]:""}</div>`
                        }
                    }
                    html += `</div>`
                    point_div.innerHTML = html
                }
                let pointOver = new Overlay({
                    position: item.center,// defaults to empty
                    positioning: 'center-bottom',
                    element: point_div,// bind the element created above
                    offset: [0, 10]// image offset
                });
                that.map.addOverlay(pointOver);
                overlayes.push(pointOver)
            }
        })
        // instantiate a vector source as the drawing layer
        var source = new VectorSource({
            features: points
        });
        // create a layer
        var vector = new VectorLayer({
            source: source
        });
        vector.layerId = option.layerId
        // add the drawing layer to the map container
        this.map.addLayer(vector);
        vector.setZIndex(3);
        return {
            layer:vector,
            overLayer:overlayes
        } ;
    }
    /**
     * Removes a point layer previously returned by addPoint (the vector
     * layer plus all of its overlays).
     * @param {*} layer
     */
    removeLayer(layer){
        if(layer&&layer.layer){
            this.map.removeLayer(layer.layer)
        }
        if(layer&&layer.overLayer){
            layer.overLayer.map((item,index)=>{
                this.map.removeOverlay(item)
            })
        }
    }
    /**
     * Adds blinking (animated) markers.
     * @param {*} option
     */
    addAnimatePoint(option){
        let makers =[]
        if(option.points&&option.points.length>0){
            option.points.map((item,index)=>{
                // default icon class
                let icon = "iconjingbaoxinxi-"
                var point_div = document.createElement('div');
                if(item.icon)
                    icon = item.icon
                let html = `<div class="commodity-sign-wrap" style="cursor: pointer;">
                <div class="circle"><i class="iconfont ${icon} "></i></div>
                <div class="circle_bottom animation "></div>
                <div class="circle_bottom2 animation2 "></div>
                </div> `
                point_div.innerHTML = html
                // create the overlay
                let point = new Overlay({
                    position: item.center?item.center:[0,0],// defaults to empty
                    positioning: 'center-bottom',
                    element: point_div,// bind the element created above
                    offset: [0, 0]// image offset
                });
                // attach it to the map
                this.map.addOverlay(point);
                makers.push(point)
                // bind the click handler
                point_div.onclick=function(){
                    if(option.click) option.click(item)
                }
            })
        }
        return {
            overLayer:makers
        }
    }
    /**
     * Resets the map view.
     * @param {*} params optional {center, zoom}
     */
    restMap(params){
        let view = this.map.getView()
        if(params&&params.center)
            view.setCenter(params.center)
        if(params&&params.zoom)
            view.setZoom(params.zoom)
    }
}
export default ZMap
|
<filename>mvvmfx-validation/src/test/java/de/saxsys/mvvmfx/utils/validation/cssvisualizer/CssVisualizerViewModel.java
package de.saxsys.mvvmfx.utils.validation.cssvisualizer;
import de.saxsys.mvvmfx.ViewModel;
import de.saxsys.mvvmfx.utils.validation.FunctionBasedValidator;
import de.saxsys.mvvmfx.utils.validation.ValidationMessage;
import de.saxsys.mvvmfx.utils.validation.ValidationStatus;
import de.saxsys.mvvmfx.utils.validation.Validator;
import java.util.function.Function;
import java.util.regex.Pattern;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
/**
 * View model of the CSS visualizer example: exposes an e-mail address
 * property together with a validator that reports an error for invalid
 * addresses.
 */
public class CssVisualizerViewModel implements ViewModel {
    // Accepts the empty string or a basic local@domain.tld address.
    private static final Pattern EMAIL_REGEX = Pattern
            .compile("^$|[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,4}");
    // The user-edited e-mail address; starts out empty.
    private StringProperty emailAddress = new SimpleStringProperty("");
    // Re-validates whenever emailAddress changes. Note that blank input is
    // reported as an error here even though the pattern itself would accept
    // an empty string.
    private Validator validator = new FunctionBasedValidator<String>(emailAddress, input -> {
        if (input == null || input.trim().isEmpty() || !EMAIL_REGEX.matcher(input).matches()) {
            return ValidationMessage.error("Invalid EMail address");
        } else {
            return null;
        }
    });
    /** Current validation result of the e-mail validator. */
    public ValidationStatus getValidationStatus() {
        return validator.getValidationStatus();
    }
    /** The e-mail address property to bind against the UI. */
    public StringProperty emailAddressProperty() {
        return emailAddress;
    }
}
|
# Download and set up Julia, then install this package into a local depot.
# Abort on the first failing command so a broken download is not unpacked.
set -e

# Single source of truth for the Julia version: every path and URL below is
# derived from it (previously the version was hard-coded in each command).
JULIA_VERSION=1.5.3
JULIA_DIR="julia-${JULIA_VERSION}"
JULIA_TARBALL="${JULIA_DIR}-linux-x86_64.tar.gz"
# "major.minor" component (e.g. 1.5) used in the download URL path.
JULIA_MINOR="${JULIA_VERSION%.*}"

echo "## downloading Julia $JULIA_VERSION..."
curl -OJ "https://julialang-s3.julialang.org/bin/linux/x64/${JULIA_MINOR}/${JULIA_TARBALL}"
echo "## unpacking Julia..."
tar -xzf "${JULIA_TARBALL}"
echo "## setting up scripts..."
# Keep the package depot inside the working tree so the install is
# self-contained and does not touch ~/.julia.
export JULIA_DEPOT_PATH="$PWD/${JULIA_DIR}/depot"
"./${JULIA_DIR}/bin/julia" -e '@show DEPOT_PATH; import Pkg; Pkg.add(path="."); using sarscov2primers;'
echo "## cleaning up..."
rm "${JULIA_TARBALL}"
|
#!/bin/bash
# Start/stop/reload/status wrapper around jars-refreshd.sh.
# The daemon's PID is tracked in /tmp/jars-refresh.pid.
dir="$(dirname "$0")"
PIDFILE=/tmp/jars-refresh.pid

function d_start
{
    echo "Jars Refresh: starting service"
    # BUGFIX: launch the daemon in the background — $! only holds the PID of
    # a backgrounded job. The original ran the daemon in the foreground, so
    # the recorded PID was empty and stop/reload could not work.
    "$dir/jars-refreshd.sh" &
    echo $! > "$PIDFILE"
    echo "PID: $(cat "$PIDFILE")"
}

function d_stop
{
    echo "Jars Refresh: stopping service (PID: $(cat "$PIDFILE"))"
    kill $(cat "$PIDFILE")
    rm "$PIDFILE"
}

function d_status
{
    ps -ef | grep jars-refreshd.sh | grep -v grep
    echo "PID indication file $(cat "$PIDFILE" 2>/dev/null)"
}

# BUGFIX: patterns were capitalized (Stop/Reload/Status) while the usage text
# advertises lowercase commands, so "stop" etc. always fell through to the
# usage branch. Both spellings are accepted now for backward compatibility.
case "$1" in
    start )
        d_start
        ;;
    stop | Stop )
        d_stop
        ;;
    reload | Reload )
        d_stop
        sleep 1
        d_start
        ;;
    status | Status )
        d_status
        ;;
    * )
        echo "Usage: $0 {start | stop | reload | status}"
        exit 1
        ;;
esac
exit 0
|
<filename>clicktests/test.stash.js<gh_stars>0
// Click-test suite for the stash feature: boots a test server, creates a
// repository, stashes a file, opens the stash diff and finally pops the
// stash back onto the working tree.
var helpers = require('./helpers');
var testsuite = require('./testsuite');
var Environment = require('./environment');
var webpage = require('webpage');
var page = webpage.create();
var suite = testsuite.newSuite('discard', page);
// Shared across the sequential tests below.
var environment;
var testRepoPath;
// Boot the test server and create one non-bare repo with an initial commit.
suite.test('Init', function(done) {
  environment = new Environment(page, { port: 8461 });
  environment.init(function(err) {
    if (err) return done(err);
    testRepoPath = environment.path + '/testrepo';
    environment.createRepos([
      { bare: false, path: testRepoPath, initCommits: 1 }
      ], done);
  });
});
// Navigate to the repository page and wait until the commit graph renders.
suite.test('Open repo screen', function(done) {
  page.open(environment.url + '/#/repository?path=' + encodeURIComponent(testRepoPath), function () {
    helpers.waitForElementVisible(page, '.graph', function() {
      setTimeout(done, 1000); // Let it finnish loading
    });
  });
});
// Create an uncommitted file, stash everything and verify the stash entry
// appears in the (possibly collapsed) stash list.
suite.test('Should be possible to stash a file', function(done) {
  environment.createTestFile(testRepoPath + '/testfile2.txt', function(err) {
    if (err) return done(err);
    helpers.waitForElementVisible(page, '[data-ta-container="staging-file"]', function() {
      helpers.click(page, '[data-ta-clickable="stash-all"]');
      // if stash is currently collapsed show it. (localStorage['showStash'] might already be 'true')
      if (helpers.elementVisible(page, '[data-ta-clickable="stash-toggle"]')) {
        helpers.click(page, '[data-ta-clickable="stash-toggle"]');
      }
      helpers.waitForElementVisible(page, '[data-ta-container="stash-stash"]', function() {
        done();
      });
    });
  });
});
// The stash entry's diff view should open on click.
suite.test('Should be possible to open stash diff', function(done) {
  helpers.click(page, '[data-ta-clickable="stash-diff"]');
  helpers.waitForElementVisible(page, '[data-ta-container="stash-diff"]', function() {
    done();
  });
});
// Popping the stash should bring the file back into the staging area.
suite.test('Should be possible to pop a stash', function(done) {
  helpers.click(page, '[data-ta-clickable="stash-pop"]');
  helpers.waitForElementVisible(page, '[data-ta-container="staging-file"]', function() {
    done();
  });
});
// Tear down the test server.
suite.test('Shutdown', function(done) {
  environment.shutdown(done);
});
testsuite.runAllSuits();
|
module.exports = {
'encryption': '3e#h*R2)39#'
}
|
#!/bin/bash
# Copy (or move) a list of files from an archive tree into a scratch tree,
# preserving the directory layout relative to ARCHIVE_DIR.
# Files are supplied as arguments 2..N.
set -e

# Define input parameters
SCRATCH_DIR="/path/to/scratch"
ARCHIVE_DIR="/path/to/archive"
INPUT_PARENT_DIR="/path/to/input"
KEEP_OR_DELETE="delete" # or "keep"

# Construct the output parent directory path: strip the archive prefix from
# the input path and re-root it under the scratch directory.
OUTPUT_PARENT_DIR="$SCRATCH_DIR/$(sed "s|$ARCHIVE_DIR||g"<<<"$INPUT_PARENT_DIR")"

# Create parent folders of the output directory if they do not exist.
# BUGFIX: quote all path expansions below — unquoted variables split on
# whitespace and undergo glob expansion, breaking paths containing spaces.
mkdir -p "$OUTPUT_PARENT_DIR"

# Iterate through the list of files and perform the transfer.
for i in "${@:2}"; do
    echo "$i"
    if [[ "$KEEP_OR_DELETE" = delete ]]; then
        # Move: remove the source copy once it has transferred successfully.
        rsync --remove-source-files -ah --info=progress2 "$INPUT_PARENT_DIR/$i" "$OUTPUT_PARENT_DIR"
    else
        # Copy: leave the source in place.
        rsync -ah --info=progress2 "$INPUT_PARENT_DIR/$i" "$OUTPUT_PARENT_DIR"
    fi
done
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.