text stringlengths 1 1.05M |
|---|
#!/bin/bash
#
# The laradockctl command to view application logs.
#
# NOTE(review): `local` is used at file top level below, which is only valid
# when this file is sourced inside a function (the laradockctl plugin loader
# presumably does this) — running the file standalone would fail. Confirm
# against the loader before changing.
set -Ceuo pipefail
local NAME='logs'
local DESCRIPTION='View application logs'
# handle - entry point invoked by laradockctl.
# Extra CLI arguments are forwarded to `tail` (e.g. `-f` to follow the log).
handle() {
# Laravel application logs
# Only tail the log when it exists inside the workspace container.
if file_exists_in_workspace storage/logs/laravel.log; then
docker-compose exec -u laradock workspace tail "$@" storage/logs/laravel.log
fi
}
|
// Build the chatbox HTML for a list of messages.
//
// Each message becomes an <li>: admin_id === 0 marks a visitor ("user")
// message, any other value is treated as an admin reply. The two branches
// of the original differed only in the <li> class, so the class is now
// computed once and a single template is used.
//
// NOTE(review): message.content is interpolated without escaping — if
// content can originate from untrusted users this is an XSS vector;
// escape it before rendering.
function generateChatboxMarkup(messages) {
  let markup = `<div class="message__chatbox-section padding-bottom">
<div class="container">
<div class="message__chatbox">
<div class="message__chatbox__body">
<ul class="reply-message-area">`;
  messages.forEach(message => {
    // admin_id 0 => visitor message; otherwise admin reply.
    const cssClass = message.admin_id === 0 ? 'user-message' : 'admin-message';
    markup += `<li class="${cssClass}">
<div class="message-content">${message.content}</div>
</li>`;
  });
  markup += `</ul>
</div>
</div>
</div>
</div>`;
  return markup;
}
// Demo fixture: admin_id 0 marks a visitor message, non-zero an admin reply.
const messages = [
  { admin_id: 0, content: "Hello there!" },
  { admin_id: 1, content: "Welcome to our website." },
  { admin_id: 0, content: "How can we assist you today?" }
];
// Quick manual smoke test: print the generated markup to stdout.
console.log(generateChatboxMarkup(messages));
require 'spec_helper'

# rspec-puppet unit tests for the puppet_metrics_collector::pe_metric
# defined type. The title becomes the service name used in the generated
# config file path.
describe 'puppet_metrics_collector::pe_metric' do
  let(:title) { 'test-service' }
  let(:params) {
    {metrics_port: 9000}
  }
  # This define has an undeclared dependency on the main
  # puppet_metrics_collector class.
  let(:pre_condition) { 'include puppet_metrics_collector' }
  it 'compiles with minimal parameters set' do
    expect(subject).to compile
  end
  describe 'remote metric collection' do
    # Remote collection defaults off as a mitigation for CVE-2020-7943
    # unless the PE version (below) is known to contain the fix.
    it 'is disabled by default due to CVE-2020-7943' do
      expect(subject).to contain_file('/opt/puppetlabs/puppet-metrics-collector/config/test-service.yaml').with_content(/remote_metrics_enabled: false/)
    end
    context 'when the PE version is 2019.8.5 or newer' do
      let(:facts) {
        {pe_server_version: '2019.8.5'}
      }
      it 'is enabled by default' do
        expect(subject).to contain_file('/opt/puppetlabs/puppet-metrics-collector/config/test-service.yaml').with_content(/remote_metrics_enabled: true/)
      end
    end
    context 'when the PE version is 2019.8.4 or older' do
      let(:facts) {
        {pe_server_version: '2019.8.4'}
      }
      it 'is disabled by default' do
        expect(subject).to contain_file('/opt/puppetlabs/puppet-metrics-collector/config/test-service.yaml').with_content(/remote_metrics_enabled: false/)
      end
    end
  end
end
|
#!/bin/sh
# Replace the API and APP keys below
# with the ones for your account
# NOTE: the <...> values are placeholders — the script will not run until
# they are substituted with real credentials and an SLO id.
api_key=<YOUR_API_KEY>
app_key=<YOUR_APP_KEY>
slo_id=<YOUR_SLO_ID>
# Get a SLO
# Fetches a single Service Level Objective from the Datadog v1 API,
# authenticating with the API key + application key headers.
curl -X GET \
-H "DD-API-KEY: ${api_key}" \
-H "DD-APPLICATION-KEY: ${app_key}" \
"https://api.datadoghq.com/api/v1/slo/${slo_id}"
|
#!/bin/sh -ex
# Provision a minimal Xorg environment (dwm/st/dmenu) for the vagrant user.
setup-xorg-base \
xorg-server xf86-video-vesa xf86-input-evdev xf86-input-mouse \
xf86-input-keyboard udev xinit
# Package names may be overridden from the environment; default when empty.
if [ -z "$DWM" ]; then DWM=dwm; fi
if [ -z "$ST" ]; then ST=st; fi
if [ -z "$DMENU" ]; then DMENU=dmenu; fi
# Fix: the original tested `$"XRANDR"`/`$"XRDP"` — a (non-empty) literal
# string, not the variable — so these defaults were never applied and
# `apk add` received empty arguments when the vars were unset.
if [ -z "$XRANDR" ]; then XRANDR=xrandr; fi
if [ -z "$XRDP" ]; then XRDP=xrdp; fi
apk add $DWM $ST $DMENU $XRANDR $XRDP
# Start st and dwm from the user's .xinitrc.
cat << EOF > /home/$VAGRANT_USERNAME/.xinitrc
#!/bin/sh
st &
exec dwm
EOF
chmod +x /home/$VAGRANT_USERNAME/.xinitrc
chown $VAGRANT_USERNAME /home/$VAGRANT_USERNAME/.xinitrc
|
<reponame>josebright/tulip_foundation_website
import { Modal } from 'react-bootstrap';
import React, { useState } from 'react';
import styled from 'styled-components';
//import { Project } from '../../Redux/constant';
import { Link } from 'react-router-dom';
import CountDown from '../CountDown/EventTimer';
// Outer wrapper for one project row: vertical spacing only.
const ProjectStyled = styled.div`
/* border: 2px solid blue; */
margin: 30px 0;
padding: 10px 0;
`;
// Fixed-height image frame; the img fills and crops via object-fit.
const ProjectImg = styled.div`
width: 100%;
height: 400px;
overflow: hidden;
img {
width: 100%;
height: 100%;
object-fit: cover;
}
@media (max-width: 360px) {
margin: 10px 0;
}
`;
// Text column: muted date, orange link-style buttons with a hover zoom,
// and a smaller heading on very narrow screens.
const ProjectContent = styled.div`
.date {
opacity: 0.5;
}
button {
background: transparent;
color: orange;
border-bottom: 1px solid;
transition: 0.3s;
}
button:hover {
transform: scale(1.2);
}
@media (max-width: 360px) {
h3 {
font-size: 20px;
}
}
`;
// Project card with an expandable modal.
//
// Props (all optional unless noted):
//   image       - image URL shown in the card and modal header
//   order       - extra CSS class string; when set, flips image/text order
//   title       - heading text (shown in card and modal)
//   date        - human-readable program date; ordinal suffixes are stripped
//                 before being handed to CountDown
//   desc        - short description shown on the card
//   modal       - object with an `article` field rendered in the modal body
//   link        - external company URL; when present the modal shows only a
//                 Close button instead of Volunteer/Donate links
//   companyname - label for the external link
export default function Projects({
  image,
  order,
  title,
  date,
  desc,
  modal,
  link,
  companyname,
}) {
  // Controls modal visibility.
  const [show, setShow] = useState(false);
  const handleClose = () => setShow(false);
  const handleShow = () => setShow(true);
  // Renders the modal only while `show` is true (the Modal's own `show`
  // prop would also handle this; the guard avoids mounting it at all).
  const handleModal = () => {
    if (show) {
      return (
        <Modal show={show} onHide={handleClose} size='lg'>
          <Modal.Header image={image} closeButton>
            {/* <img src={image} alt='' /> */}
            <img src={image} alt='' className='img-fluid' />
            <Modal.Title>
              <div className='row'>
                <div className='col-md-12'>
                  <h2>{title}</h2>
                </div>
              </div>
              {date ? (
                <CountDown
                  events={date
                    .replace('rd', '')
                    .replace('th', '')
                    .replace('st', '')
                    .replace('nd', '')}
                />
              ) : null}
              {/* str.replace(/#|_/g,''); */}
            </Modal.Title>
          </Modal.Header>
          <Modal.Body>
            <div className='row'>
              <div className='col-12'>
                <p>{modal.article}</p>
              </div>
            </div>
          </Modal.Body>
          <Modal.Footer>
            <div className='row'>
              {!link ? (
                <>
                  {' '}
                  <div className='col-md-6'>
                    <Link
                      to='/volunteer'
                      className='btn1 btn-d'
                      onClick={handleClose}
                    >
                      Volunteer
                    </Link>
                  </div>
                  <div className='col-md-6'>
                    <Link
                      to='/donation'
                      className='btn1 btn-d'
                      onClick={handleClose}
                    >
                      Donate
                    </Link>
                  </div>
                </>
              ) : (
                <div className='col-md-12'>
                  <button className='btn1 btn-d' onClick={handleClose}>
                    Close
                  </button>
                </div>
              )}
            </div>
          </Modal.Footer>
        </Modal>
      );
    }
  };
  // NOTE(review): `.replace('th'...)` above replaces the FIRST occurrence
  // anywhere in the string, not only ordinal suffixes — a month name
  // containing "st"/"nd" (e.g. "August") would be mangled; verify inputs.
  // NOTE(review): style={{ color: '#000 !' }} below is not a valid CSS
  // value ('!' without 'important'); the browser ignores it — confirm
  // intent and either drop the '!' or use a styled-component rule.
  return (
    <div className='container'>
      <ProjectStyled>
        <div className='row d-flex justify-content-start'>
          {/* {console.log(date.replace('th', ''))} */}
          <div
            className={
              order ? ` col-md-6 order-md-0 order-lg-3 ${order}` : 'col-md-6 '
            }
          >
            <ProjectImg>
              <img src={image} alt='' />
            </ProjectImg>
          </div>
          <div className='col-md-6 '>
            <ProjectContent>
              <h3>{title}</h3>
              {date ? (
                <p className='date'>
                  <span>Program Date: </span>
                  {date ? date : 'Check back for the date'}
                </p>
              ) : null}
              {companyname ? (
                <p className='companyname'>
                  {/* <span>By: </span> */}
                  <a
                    href={link}
                    rel='noreferrer'
                    target='_blank'
                    onClick={handleClose}
                    style={{ color: '#000 !' }}
                  >
                    {companyname}
                  </a>
                </p>
              ) : null}
              <p>{desc}</p>
              <button className='read-more' onClick={handleShow}>
                Read More
              </button>
            </ProjectContent>
          </div>
        </div>
      </ProjectStyled>
      {handleModal()}
    </div>
  );
}
|
import requests
from PIL import Image
from io import BytesIO
import os
# Function to apply box shadow effect to an image
def apply_box_shadow(image_path, output_path):
    """Composite a soft grey box-shadow behind an image and save the result.

    Parameters
    ----------
    image_path : str | file-like | PIL.Image.Image
        Source image. Generalized (backward-compatibly) to also accept an
        already-opened PIL image or a file-like object, since the caller in
        this file passes non-path inputs.
    output_path : str
        Destination file. Must be a format supporting alpha (e.g. PNG),
        because the composite is RGBA.
    """
    # Accept an open Image directly; otherwise let PIL open the source.
    if isinstance(image_path, Image.Image):
        img = image_path
    else:
        img = Image.open(image_path)
    # The image is used as its own paste mask below, which requires an
    # alpha channel; plain RGB inputs (typical JPEGs) would raise.
    img = img.convert("RGBA")
    # Stamp offset grey silhouettes of the image to approximate a blur.
    shadow = Image.new("RGBA", img.size, (0, 0, 0, 0))
    for x in range(-10, 11, 5):
        for y in range(-10, 11, 5):
            shadow.paste((204, 204, 204, 255), (x, y, x + img.size[0], y + img.size[1]), img)
    # Fix: composite the image OVER the shadow. The original pasted the
    # image first and then alpha-composited the shadow on top, which
    # covered the picture with its own grey silhouette.
    img_with_shadow = Image.alpha_composite(shadow, Image.alpha_composite(Image.new("RGBA", img.size), img))
    img_with_shadow.save(output_path)
# Function to process image URLs and apply box shadow effect
def process_image_urls(input_file, output_directory):
    """Download every image URL in *input_file* (one per line) and write a
    box-shadowed copy into *output_directory*.

    Blank lines are skipped; per-URL failures are reported and do not abort
    the remaining downloads.
    """
    # Make sure the destination exists before the first save.
    os.makedirs(output_directory, exist_ok=True)
    with open(input_file, 'r') as file:
        image_urls = file.readlines()
    for url in image_urls:
        url = url.strip()
        if not url:
            continue
        try:
            response = requests.get(url, timeout=30)
            if response.status_code == 200:
                output_path = os.path.join(output_directory, os.path.basename(url))
                # Fix: pass the downloaded bytes as a file object.  The
                # original opened the image and passed the Image instance to
                # a function whose parameter is a path for Image.open().
                apply_box_shadow(BytesIO(response.content), output_path)
            else:
                print(f"Failed to download image from {url}")
        except Exception as e:
            print(f"Error processing {url}: {e}")
# Example usage
# Reads one URL per line from image_urls.txt and writes shadowed copies
# (named after each URL's basename) into modified_images/.
input_file_path = 'image_urls.txt'
output_directory_path = 'modified_images'
process_image_urls(input_file_path, output_directory_path)
<reponame>decosterkevin/foodtrack-back<filename>core/migrations/0002_remove_product_exploitation.py
# Generated by Django 2.2.4 on 2019-08-16 16:02
from django.db import migrations


class Migration(migrations.Migration):
    # Auto-generated schema migration: drops the `exploitation` field from
    # the `product` model. Generated files should not be edited by hand.

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='product',
            name='exploitation',
        ),
    ]
|
# Partition name used by the generic sysupgrade helpers.
PART_NAME=firmware
# platform_machine - prints the board's machine identifier, taken from
# /proc/cpuinfo: everything after the "machine" field's colon, truncated at
# the first '-'.
platform_machine() {
grep "machine" /proc/cpuinfo | sed "s/.*:[ \t]*//" | sed "s/-.*//g"
}
# platform_expected_image - maps the detected machine string to the image
# identifier expected for this device; NAND boards (those exposing
# /dev/ubi0) get the "-nand" variant.
# NOTE(review): the "GL" case label looks unusual next to the sf16a18 /
# sf19a28 SoC names — confirm it matches a real dts machine string.
platform_expected_image() {
local machine=$(platform_machine)
case "$machine" in
# this is from dts
"GL") [ -e /dev/ubi0 ] && echo "siwifi-1688a-nand" || echo "siwifi-1688a"; return;;
"sf16a18") echo "siwifi-1688a"; return;;
"sf19a28") echo "siwifi-1688a"; return;;
esac
}
# siwifixx_identify FILE - classifies an upgrade image by its leading magic
# bytes (via get_magic_long from the sysupgrade helpers):
#   27051956 (U-Boot legacy image header) -> "a18"
# NOTE(review): "xxxxxxxx" is clearly a placeholder magic that can never
# match a real file, so the "w18" branch is dead code — confirm the
# intended magic value.
siwifixx_identify() {
local magic
magic=$(get_magic_long "$1")
case "$magic" in
"27051956")
echo "a18"
return
;;
"xxxxxxxx")
echo "w18"
return
;;
esac
echo "Sysupgrade is unknown"
}
# platform_check_image FILE - sysupgrade hook: returns 0 when FILE's
# detected type matches what platform_expected_image() says this board
# needs, 1 otherwise. Exactly one argument is accepted.
platform_check_image() {
[ "$#" -gt 1 ] && return 1
local file_type=$(siwifixx_identify "$1")
local magic
local error=0
case "$file_type" in
"a18")
local dev_siwifi_id=$(platform_expected_image)
#echo "Found A18 image with device siwifi_id $dev_siwifi_id"
if [ "$dev_siwifi_id" != "siwifi-1688a" ]; then
echo "Invalid image type."
error=1
fi
;;
"w18")
# NOTE(review): this branch is unreachable while siwifixx_identify's
# "w18" magic is a placeholder; "siwifi-xxxxx" also looks like a stub.
local dev_siwifi_id=$(platform_expected_image)
if [ "$dev_siwifi_id" != "siwifi-xxxxx" ]; then
echo "Invalid image type."
error=1
fi
;;
*)
# Unrecognized magic: only accepted on NAND boards (.trx images).
local dev_siwifi_id=$(platform_expected_image)
if [ "$dev_siwifi_id" != "siwifi-1688a-nand" ]; then
echo "Invalid image type. Please use only .trx files"
error=1
fi
;;
esac
return $error
}
# platform_do_upgrade - sysupgrade hook: dispatches to the NAND-aware
# upgrade path for ubi boards and the default writer otherwise.
# $ARGV is provided by the sysupgrade environment (the image path).
platform_do_upgrade() {
local siwifi_id=$(platform_expected_image)
local cmd=""
case "$siwifi_id" in
"siwifi-1688a-nand")
cmd=$ARGV
nand_do_upgrade "$cmd"
;;
"siwifi-1688a" |\
"siwifi-1688b")
cmd=$ARGV
default_do_upgrade "$cmd"
;;
*)
# NOTE(review): in this fallback `cmd` is still empty, so
# default_do_upgrade receives "" — confirm this is intentional.
default_do_upgrade "$cmd"
;;
esac
}
# disable_watchdog - stop the userspace watchdog daemon before flashing.
# Returns 0 when no process holds /dev/watchdog any more, 1 (with a
# message) when one is still running.
# Fix: the original ended with `(ps|grep...) && { ...; return 1; }`, so on
# the SUCCESS path (grep finds nothing) the function returned grep's
# non-zero status — i.e. it reported failure precisely when the watchdog
# was successfully disabled.
disable_watchdog() {
	killall watchdog
	if ps | grep -v 'grep' | grep '/dev/watchdog'; then
		echo 'Could not disable watchdog'
		return 1
	fi
	return 0
}
|
set -u
set -e
# Bootstrap an Ubuntu "saucy" chroot at /distros/saucy with a large set of
# build/runtime packages baked in via --include.
debootstrap --components=main,universe,multiverse --include=vim,build-essential,git,redis-server,lua5.1,postgresql,libpq-dev,python-dev,python3-dev,memcached,mongodb,libperl-dev,ruby2.0,ruby2.0-dev,wget,language-pack-en,libcurl4-openssl-dev,mysql-server,libyajl-dev,beanstalkd,ssh,rsync,libluajit-5.1-dev,curl,ipython,liblocal-lib-perl,python-virtualenv,python-pip,libpcre3-dev,libjansson-dev,quota,gawk,libreadline6-dev,libyaml-dev,libsqlite3-dev,sqlite3,autoconf,libgdbm-dev,libncurses5-dev,automake,libtool,bison,libffi-dev,libphp5-embed,php5-memcached,php5-memcache,php5-json,php5-mysql,php5-gd,php5-pgsql,php5-dev,libxml2-dev,libdb-dev,libbz2-dev,libjpeg-dev,libpng12-dev,ruby-rack,postgresql-contrib,postgis,libxslt-dev,libsphinxclient-dev,sphinxsearch,libmysqlclient-dev,imagemagick,libgraphicsmagick1-dev,libgraphicsmagick++1-dev,libmagick++-dev,libmagickcore-dev,libmagickwand-dev,libreoffice,tesseract-ocr,tesseract-ocr-ita,pdftk,wkhtmltopdf,graphicsmagick,poppler-utils,ghostscript,language-pack-it,language-pack-de,language-pack-es,language-pack-pt,language-pack-pl,nullmailer,nodejs,nano,htop,emacs,mercurial,screen,apache2-utils,unzip,erlang-nox,libdatetime-perl,libmemcached-dev,libapache2-svn,libapache2-mod-gnutls,apache2-mpm-prefork,libapache2-mod-xsendfile,libapache2-mod-php5,php-pear,db5.1-util,libcap2-bin,libcap2-dev,libode-dev,gettext saucy /distros/saucy
# Configure the chroot: block service autostart (policy-rc.d exit 101),
# create mount points, wire hostname/hosts to resolvconf, and build
# nss-unbit from source. The heredoc delimiter is quoted, so the inner
# script is passed to the chroot's bash verbatim (no outer expansion).
chroot /distros/saucy /bin/bash -x <<'EOF'
set -u
set -e
echo "exit 101" > /usr/sbin/policy-rc.d
chmod 755 /usr/sbin/policy-rc.d
mkdir /.old_root
mkdir /containers
mkdir -p /opt/unbit/uwsgi/plugins
rm /etc/hosts /etc/hostname
ln -s /run/resolvconf/hostname /etc/hostname
ln -s /run/resolvconf/hosts /etc/hosts
cd /root
git clone https://github.com/unbit/nss-unbit
cd nss-unbit
make
EOF
# Copy host-side config into the finished chroot (paths relative to CWD).
cp nsswitch.conf /distros/saucy/etc/nsswitch.conf
cp shortcuts.ini /distros/saucy/opt/unbit/uwsgi/shortcuts.ini
|
#!/bin/sh -xe
# Container entrypoint: make sure each SSH host key exists (generating any
# that are missing with an empty passphrase), start sshd in the background,
# then block forever so the container stays up.
[ -f /etc/ssh/ssh_host_rsa_key ] || {
	echo "No RSA Host Key found - generating."
	ssh-keygen -f /etc/ssh/ssh_host_rsa_key -N '' -t rsa
}
[ -f /etc/ssh/ssh_host_ecdsa_key ] || {
	echo "No ECDSA Host Key found - generating."
	ssh-keygen -f /etc/ssh/ssh_host_ecdsa_key -N '' -t ecdsa
}
[ -f /etc/ssh/ssh_host_ed25519_key ] || {
	echo "No ED25519 Host Key found - generating."
	ssh-keygen -f /etc/ssh/ssh_host_ed25519_key -N '' -t ed25519
}
/usr/sbin/sshd
# Keep PID 1 alive; sshd itself daemonizes.
tail -f /dev/null
|
// Navigation index for _layer_test_result_8hpp — presumably emitted by
// Doxygen's HTML/JS generator (do not edit by hand; regenerate instead).
// Entries: [display name, target page, anchor-or-subindex].
var _layer_test_result_8hpp =
[
[ "LayerTestResult", "struct_layer_test_result.xhtml", "struct_layer_test_result" ],
[ "GetTensorShapeAsArray", "_layer_test_result_8hpp.xhtml#ae0f8e7277a6e4c91446af326de42b811", null ]
];
# A genotype (allele pair) of a gene, with presentation helpers for its
# SNPedia reputation colour and wiki-linked page text.
class Genotype < ApplicationRecord
  belongs_to :gene
  has_many :cards
  has_many :users, :through => :cards

  # Alleles are single characters (e.g. "A", "T") and are required.
  validates :allele1, length: {is: 1}
  validates :allele2, length: {is: 1}
  validates :allele1, :allele2, presence: true
  validates :title, presence: true, uniqueness: true

  # Five-step colour ramp keyed by repute: 1 => greens (favourable),
  # 2 => reds (unfavourable). Returns nil for any other repute, so callers
  # must only index the result when repute is 1 or 2.
  def repute_gradient
    case repute
    when 1 then ["rgb(235,255,235)", "rgb(180,255,180)", "rgb(140,255,140)", "rgb(80,225,80)", "rgb(0,200,0)"]
    when 2 then ["rgb(255,235,235)", "rgb(255,180,180)", "rgb(255,140,140)", "rgb(225,80,80)", "rgb(200,0,0)"]
    end
  end

  # Pick the gradient step for this genotype's effect magnitude.
  # Fix: the original ranges (0.1..1.5, 1.6..3.5, 3.6..5.9) left gaps —
  # e.g. magnitude 1.55 or 3.55 fell through to the strongest colour.
  # Contiguous half-open ranges remove the gaps without moving the ends.
  def color
    case magnitude
    when 0 then repute_gradient[0]
    when 0...1.6 then repute_gradient[1]
    when 1.6...3.6 then repute_gradient[2]
    when 3.6..5.9 then repute_gradient[3]
    else repute_gradient[4]
    end
  end

  # One-character indicator for the genotype's reputation.
  def rep_text
    case repute
    when 1 then "+"
    when 2 then "-"
    when 0 then "~"
    end
  end

  # Render page_content as HTML-safe text: every [[target]] becomes a
  # SNPedia anchor, brackets are stripped, and a "read more" link is
  # appended.
  def page_text
    # Fix: work on a copy — the gsub! calls below previously mutated the
    # page_content attribute's own string in place.
    result = page_content.dup
    link_format = /(?<=\[\[)(.*?)(?=\]\])/
    # Fix: collect ALL bracketed targets; String#match only returned the
    # first, leaving later [[links]] unconverted.
    links = result.scan(link_format).flatten.uniq
    unless links.empty?
      links.each do |l|
        result.gsub!(l, make_link(l))
      end
      result.gsub! "[[", " "
      result.gsub! "]]", " "
    end
    result += " " + read_more
    result.strip.delete("\t\r\n").html_safe
  end

  # Build an external SNPedia anchor for +text+. When +page+ is omitted it
  # is derived from the text with spaces converted to underscores.
  def make_link(text, page=nil)
    page = text.gsub " ", "_" if page.nil?
    ' <a href="' + "http://snpedia.com/index.php/#{page}" + '" target="_blank"' + "> #{' ' + text + ' '} </a> "
  end

  # Trailing link: to this genotype's SNPedia page when it has content,
  # otherwise to the parent gene's page.
  def read_more
    if page_content.empty?
      make_link("Read about the Gene", gene.title)
    else
      make_link(" ...read more", title)
    end
  end
end
|
import assert from 'assert';
import { NowRequest, NowResponse } from '@now/node';
import Hashids from 'hashids/cjs';
import _ from 'lodash/fp';
import octokit from '../common/octokit';
import { getConfig, generatePost } from '../common/utils';

// Repo coordinates from site config; the hashids salt is the repo owner,
// so ids are stable per-owner. Minimum length 8, hex alphabet.
const { owner, repo, site } = getConfig();
const hashids = new Hashids(owner, 8, '0123456789abcedf');

// GraphQL query fetching one issue (the blog "post") by number, including
// up to 5 labels.
const query = `
query ($owner: String!, $repo: String!, $number: Int!) {
repository(owner: $owner, name: $repo) {
issue(number: $number) {
id
number
title
body
labels(first: 5) {
nodes {
name
}
}
createdAt
updatedAt
}
}
}
`;
// HTTP handler: decodes a hashid from the query string into a GitHub issue
// number, fetches that issue over GraphQL, and responds with the rendered
// post JSON. Throws (via assert) when no id is supplied.
export default async (req: NowRequest, res: NowResponse) => {
  const id = _.toString(req.query.id);
  assert(id, 'id is required');

  // Reverse the hashid obfuscation to recover the raw issue number.
  const number = _.toNumber(hashids.decodeHex(id));
  const data = await octokit(query, { owner, repo, number });

  res.status(200).json(generatePost(data.repository.issue));
};
|
<gh_stars>0
/** @module event/MouseInputManager */
import { initFromConfig } from "/scripts/util/config.js"
import Vec2 from "/scripts/geom/Vec2.js"
export default class MouseInputManager {
  // Per-instance defaults for whether context-menu / wheel default actions
  // are suppressed (read by the handlers below; initFromConfig copies them
  // onto the instance).
  static defaultPreventContextMenuDefault = true
  static defaultPreventWheelDefault = true
  // DOM event target the listeners are attached to.
  #target
  // Set of currently-held buttons, as "Mouse<button index>" strings.
  #pressed
  // Accumulated wheel delta since the last reset(), as a Vec2.
  #scroll
  // Last mousemove movementX/Y since the last reset(), as a Vec2.
  #move
  // Bound handlers kept so disable() can remove the exact same references.
  #onMouseDown
  #onMouseUp
  #onFocusOut
  #onContextMenu
  #onWheel
  #onMouseMove
  // target: any EventTarget (default window). config may override the
  // prevent*Default flags; initFromConfig applies them or the class
  // defaults above.
  constructor(target = window, config = {}) {
    initFromConfig(this, config, MouseInputManager, "preventContextMenuDefault", "preventWheelDefault")
    this.#target = target
    this.#pressed = new Set()
    this.#scroll = Vec2.zero()
    this.#move = Vec2.zero()
    this.#onMouseDown = e => this.#pressed.add(`Mouse${e.button}`)
    this.#onMouseUp = e => this.#pressed.delete(`Mouse${e.button}`)
    // Clear held buttons when focus leaves, so keys don't get "stuck".
    this.#onFocusOut = e => this.#pressed.clear()
    this.#onContextMenu = e => {
      if (this.preventContextMenuDefault)
        e.preventDefault()
    }
    this.#onWheel = e => {
      if (this.preventWheelDefault)
        e.preventDefault()
      // NOTE(review): wheelDeltaX/Y are non-standard legacy properties;
      // the standard equivalents are deltaX/deltaY (different sign and
      // scale) — confirm browser targets before changing.
      this.#scroll.assign(e.wheelDeltaX, e.wheelDeltaY)
    }
    this.#onMouseMove = e => {
      this.#move.assign(e.movementX, e.movementY)
    }
    // contextmenu/wheel are registered non-passive because their handlers
    // may call preventDefault().
    target.addEventListener("mousedown", this.#onMouseDown )
    target.addEventListener("mouseup", this.#onMouseUp )
    target.addEventListener("focusout", this.#onFocusOut )
    target.addEventListener("contextmenu", this.#onContextMenu, { passive: false })
    target.addEventListener("wheel", this.#onWheel, { passive: false })
    target.addEventListener("mousemove", this.#onMouseMove )
  }
  // Detach all listeners. Returns this for chaining.
  disable() {
    this.target.removeEventListener("mousedown", this.#onMouseDown )
    this.target.removeEventListener("mouseup", this.#onMouseUp )
    this.target.removeEventListener("focusout", this.#onFocusOut )
    this.target.removeEventListener("contextmenu", this.#onContextMenu)
    this.target.removeEventListener("wheel", this.#onWheel )
    this.target.removeEventListener("mousemove", this.#onMouseMove )
    return this
  }
  // True while the given button is held. Accepts raw codes ("Mouse0") or
  // the friendly synonyms below.
  isPressed(code) {
    const syns = {
      MouseLeft: "Mouse0",
      MouseMiddle: "Mouse1",
      MouseRight: "Mouse2",
      MousePrevious: "Mouse3",
      MouseNext: "Mouse4"
    }
    code = syns[code] ?? code
    return this.#pressed.has(code)
  }
  // Copies, so callers can't mutate internal state.
  move() {
    return this.#move.clone()
  }
  scroll() {
    return this.#scroll.clone()
  }
  // Zero the per-frame accumulators (call once per frame/tick).
  reset() {
    this.#scroll.assign(0)
    this.#move.assign(0)
    return this
  }
  get target() {
    return this.#target
  }
}
|
#!/bin/bash
##------------------------------------------------##
# Author: Mark Nguyen
##------------------------------------------------##
# This script takes a list of date-time values and
# convert each to epoc equivalent. It then
# prints out the longest length of time between
# any two date-time in the list.
##------------------------------------------------##
# File name: datetime_epoc.sh
#
# Usage: $0 [infile]
# infile should contain a list of date-time values
#
##------------------------------------------------##
#-----------------------function show_usage---------------------#
# This shows the program usage when invalid input args occured. #
#---------------------------------------------------------------#
# Prints usage to stdout. The heredoc delimiter is unquoted so $0
# expands to the actual script name.
function show_usage()
{
cat << EOF
This script takes an input text file containing date-time values
Usage: $0 [in_file]
Options:
-h Show this message.
e.g. $0 ./datetime.dat
EOF
}
export -f show_usage
#-----------------------function find_longest_time---------------------#
# Reads one date-time per line from $1 (ignoring '#' comment lines),
# converts each to epoch seconds, and prints the span between the newest
# and oldest values.
# Note:
# bash --date and -d option expects the date in US or ISO8601 format,
# i.e. mm/dd/yyyy or yyyy-mm-dd, not in UK, EU, or any other format.
#----------------------------------------------------------------------#
function find_longest_time()
{
    local infile=$1
    # Quote the path so filenames containing spaces don't word-split/glob.
    if test -f "$infile"; then
        # read all line in file into an array
        IFS=$'\n' read -d '' -r -a std_dt < "$infile"
        local epoc_dt
        local -i MAX=0
        # Seed MIN above any 32-bit epoch value (~year 2106) so the first
        # parsed date always replaces it. (The original `$((X=2**32))`
        # also assigned a stray global X as a side effect.)
        local -i MIN=$((2**32))
        if [[ "${#std_dt[@]}" -gt 0 ]]; then
            #-- process each line --#
            for line in "${std_dt[@]}" ; do
                # ignore comment line
                [[ "$line" =~ ^#.*$ ]] && continue
                # convert to epoc value; skip lines `date` cannot parse
                # instead of feeding garbage into the min/max tracking
                epoc_dt=$(date --date "${line}" +%s) || continue
                # find max value
                if [ ${epoc_dt} -gt $MAX ] ; then
                    MAX=${epoc_dt}
                fi
                # find min value
                if [ ${epoc_dt} -lt $MIN ] ; then
                    MIN=${epoc_dt}
                fi
            done
            # Only report when at least one date was parsed; otherwise the
            # MAX/MIN seeds would produce a nonsense negative span.
            if [ $MAX -ge $MIN ]; then
                # calculate long length of time between for two set of date-time
                DIFF=$(( $MAX - $MIN ))
                echo "The longest distance past (time length) is ${DIFF} seconds; starting from $(date -d @$MAX) and going back to $(date -d @$MIN)."
            fi
        fi
    else
        echo "Input not found"
    fi
}
export -f find_longest_time
#-- main program --#
# Checking input parameters
# Require exactly one argument: the input data file.
if [ $# -lt 1 ]; then
show_usage
exit 1;
fi
find_longest_time $1
exit 0;
'
Run-time output:
===============
markn@raspberrypi3:~/devel/py-src/DailyCodingChallenge/bash_scripts $ bash datetime_epoc.sh ./datetime.dat
The longest distance past (time length) is 751565047 seconds; starting from Sat 1 Jan 21:51:09 PST 2022 and going back to Tue 10 Mar 05:47:02 PST 1998.
'
|
<filename>DiscountApp.java
/*
* @author <NAME>
* 11/05/2017
* DiscountApp.java
*/
import javax.swing.JOptionPane; //Imports GUI input/output window
//Class Name
public class DiscountApp{
    /**
     * GUI entry point: collects membership status, time of day and shopping
     * cost via JOptionPane dialogs, delegates the discount calculation to
     * the Discount helper class, and reports the result.
     */
    public static void main (String args[]){
        // User inputs
        String member = "";
        String time = "";
        int cost = 0;
        double discount = 0.0;

        // The Discount object performs the actual computation.
        Discount myDiscount = new Discount();

        // Welcome prompt (typo fix: "iwhether" -> "whether").
        JOptionPane.showMessageDialog(null, "Welcome to your local Market. Please enter whether or not, you are a member; followed by the time of day; and finally the total cost of your shopping. We will then calculate your discount!");

        // Ask each question and feed the answer into the Discount instance.
        member = JOptionPane.showInputDialog(null, "Are you a Market member? Please answer 'Yes' or 'No'.");
        myDiscount.setMember(member);
        time = JOptionPane.showInputDialog(null, "What time of day is it? Please answer 'Morning' or 'Evening'.");
        myDiscount.setTime(time);
        // NOTE(review): Integer.parseInt throws on non-numeric input and
        // truncates decimals are rejected — consider validating here.
        cost = Integer.parseInt(JOptionPane.showInputDialog(null, "Please enter the total cost of your shopping."));
        myDiscount.setCost(cost);

        // Compute and report.
        myDiscount.compute();
        discount = myDiscount.getDiscount();
        JOptionPane.showMessageDialog(null, " Your total discount for the cost of your shopping is " + discount);
    }
}
|
// Barrel file: re-export IndexCardBox as this module's default export.
export { default } from './IndexCardBox'
from RFEM.initModel import *
from tkinter import *
from tkinter import ttk, Entry
def window(mainFunc):
    """Show a tkinter parameter dialog and run *mainFunc* on demand.

    mainFunc is called (by the Run button) as
    mainFunc(L, h_o, h_m, n, f_s) — note the argument order below mixes the
    row order of the entries.
    """
    color = '#F2F3F4'
    # Creating tkinter window
    win = Tk()
    win.resizable(False, False)
    win.title('Power of automation with RFEM6')
    win.geometry("520x160")
    win.configure(bg=color)
    # Five fixed-width columns: label, type, symbol, units, magnitude entry.
    win.grid_columnconfigure(index=0, minsize=140)
    win.grid_columnconfigure(index=1, minsize=80)
    win.grid_columnconfigure(index=2, minsize=80)
    win.grid_columnconfigure(index=3, minsize=70)
    win.grid_columnconfigure(index=4, minsize=139)
    # Separators — thin vertical rules between the columns.
    y = 3
    rh = 0.76
    ttk.Separator(win, orient=VERTICAL).place(x=140, y=y, relheight=rh)
    ttk.Separator(win, orient=VERTICAL).place(x=220, y=y, relheight=rh)
    ttk.Separator(win, orient=VERTICAL).place(x=295, y=y, relheight=rh)
    ttk.Separator(win, orient=VERTICAL).place(x=400, y=y, relheight=rh)
    # Header row.
    Label(text="Params Type", justify=CENTER, font="Segoe 9 bold", bg=color).grid(row=0, column=0)
    Label(text="Data Type", justify=CENTER, font="Segoe 9 bold", bg=color).grid(row=0, column=1)
    Label(text="Symbol", justify=CENTER, font="Segoe 9 bold", bg=color).grid(row=0, column=2)
    Label(text="Units", justify=CENTER, font="Segoe 9 bold", bg=color).grid(row=0, column=3)
    Label(text="Magnitude", justify=CENTER, font="Segoe 9 bold", bg=color).grid(row=0, column=4)
    # Parameter names.
    # NOTE(review): rows 2 and 3 are both labelled "hall height" but carry
    # different symbols (h_o vs h_m) — one label is presumably wrong
    # (eave vs ridge height?); confirm with the RFEM model.
    Label(text="hall width", bg=color).grid(row=1, column=0)
    Label(text="hall height", bg=color).grid(row=2, column=0)
    Label(text="hall height", bg=color).grid(row=3, column=0)
    Label(text="frame spacing", bg=color).grid(row=4, column=0)
    Label(text="number of frames", bg=color).grid(row=5, column=0)
    Label(text="float", bg=color).grid(row=1, column=1)
    Label(text="float", bg=color).grid(row=2, column=1)
    Label(text="float", bg=color).grid(row=3, column=1)
    Label(text="float", bg=color).grid(row=4, column=1)
    Label(text="integer", bg=color).grid(row=5, column=1)
    Label(text="L", bg=color).grid(row=1, column=2)
    Label(text="h_o", bg=color).grid(row=2, column=2)
    Label(text="h_m", bg=color).grid(row=3, column=2)
    Label(text="f_s", bg=color).grid(row=4, column=2)
    Label(text="n", bg=color).grid(row=5, column=2)
    Label(text="meters", bg=color).grid(row=1, column=3)
    Label(text="meters", bg=color).grid(row=2, column=3)
    Label(text="meters", bg=color).grid(row=3, column=3)
    Label(text="meters", bg=color).grid(row=4, column=3)
    Label(text="-", bg=color).grid(row=5, column=3)
    def validateAll(val): # 1 mandatory argument, not used
        # Enable Run only while every entry parses as its expected type.
        try:
            float(e1.get())
            float(e2.get())
            float(e3.get())
            float(e4.get())
            int(e5.get())
            button1['state']="normal"
            return 1
        except:
            button1['state']="disabled"
            print("disabled")
            return 0
    # Setting entry points (pre-filled with example magnitudes).
    e1 = Entry(relief=FLAT, justify=CENTER, bg=color)
    e1.grid(row=1, column=4)
    e1.insert(INSERT, 20.0)
    e2 = Entry(relief=FLAT, justify=CENTER, bg=color)
    e2.grid(row=2, column=4)
    e2.insert(INSERT, 5.2)
    e3 = Entry(relief=FLAT, justify=CENTER, bg=color)
    e3.grid(row=3, column=4)
    e3.insert(INSERT, 7.3)
    e4 = Entry(relief=FLAT, justify=CENTER, bg=color)
    e4.grid(row=4, column=4)
    e4.insert(INSERT, 6.0)
    e5 = Entry(relief=FLAT, justify=CENTER, bg=color)
    e5.grid(row=5, column=4)
    e5.insert(INSERT, 6)
    def start(val):
        # NOTE(review): argument order is e1, e2, e3, e5(int), e4 — i.e.
        # frame count before frame spacing; confirm mainFunc's signature.
        mainFunc(float(e1.get()),float(e2.get()),float(e3.get()),int(e5.get()),float(e4.get()))
    def close_window(val):
        win.destroy()
    # substitute for validatecommand and validation options of Entry (e1-e5)
    e1.bind('<FocusOut>', validateAll)
    e1.bind('<Key>', validateAll)
    e2.bind('<FocusOut>', validateAll)
    e2.bind('<Key>', validateAll)
    e3.bind('<FocusOut>', validateAll)
    e3.bind('<Key>', validateAll)
    e4.bind('<FocusOut>', validateAll)
    e4.bind('<Key>', validateAll)
    e5.bind('<FocusOut>', validateAll)
    e5.bind('<Key>', validateAll)
    # Run triggers start(); Close destroys the window.
    button1=Button(text='Run', anchor=CENTER, width=12, height=1, bg=color, state="normal") # width=16
    button1.grid(row=6, column=4)
    button1.bind('<ButtonRelease-1>', start)
    button2=Button(text='Close', anchor=CENTER, width=12, height=1, bg=color, state="normal")
    button2.grid(row=6, column=3)
    button2.bind('<ButtonRelease-1>', close_window)
    win.mainloop()
|
#!/usr/bin/env bash
# Copyright 2019 Kohl's Department Stores, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -euxo pipefail
# Label keys used to mark resources owned by this GitOpsConfig and the
# timestamp of the last apply (consumed by deleteByOldLabels).
TAG_OWNER="gitopsconfig.eunomia.kohls.io/owner"
TAG_APPLIED="gitopsconfig.eunomia.kohls.io/applied"
# this is needed because we want the current namespace to be set as default if a namespace is not specified.
# Reads the pod's mounted serviceaccount namespace.
function setContext {
$kubectl config set-context current --namespace="$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace)"
$kubectl config use-context current
}
# kube ARGS... - wrapper around $kubectl that authenticates against the
# in-cluster API server using the pod's serviceaccount token and CA.
function kube {
$kubectl \
-s https://kubernetes.default.svc:443 \
--token "$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)" \
--certificate-authority=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt \
"$@"
}
# addLabels OWNER TIMESTAMP - patches the YAML&JSON files in $MANIFEST_DIR,
# adding labels tracking the OWNER and TIMESTAMP. The labels are intended to be
# used later in function deleteByOldLabels.
function addLabels {
local owner="$1"
local timestamp="$2"
# Split declaration from assignment so a mktemp failure isn't masked by
# `local`'s own exit status (shellcheck SC2155).
local tmpdir
tmpdir="$(mktemp -d)"
# Fix: iterate with find -print0 / read -d '' so paths containing spaces
# or other special characters survive; the previous `for f in $(find ...)`
# word-split them. Process substitution keeps the loop in this shell.
while IFS= read -r -d '' file; do
yq -y -s "map(select(.!=null)|setpath([\"metadata\",\"labels\",\"$TAG_OWNER\"]; \"$owner\"))|.[]" "$file" |
yq -y -s "map(select(.!=null)|setpath([\"metadata\",\"labels\",\"$TAG_APPLIED\"]; \"$timestamp\"))|.[]" \
> "$tmpdir/labeled"
# We must use a helper file (can't do this in single step), as the file would be truncated if we read & write from it in one pipeline
cat "$tmpdir/labeled" > "$file"
done < <(find "$MANIFEST_DIR" -iregex '.*\.\(ya?ml\|json\)' -print0)
# Clean up the scratch directory instead of leaking it.
rm -rf "$tmpdir"
}
# deleteByOldLabels OWNER [TIMESTAMP] - deletes all kubernetes resources which have
# the OWNER label as provided [optional: but TIMESTAMP label different than provided].
function deleteByOldLabels {
local owner="$1"
local timestamp="${2:-}"
# NOTE: removing componentstatus because it shows up unintended in ownedKinds: https://github.com/kubernetes/kubectl/issues/151#issuecomment-562578617
local allKinds="$(kube api-resources --verbs=list -o name | egrep -iv '^componentstatus(es)?$' | paste -sd, -)"
# Narrow the (expensive) delete to only the kinds that actually have
# resources carrying our owner label.
local ownedKinds="$(kube get "$allKinds" --ignore-not-found \
-l "$TAG_OWNER==$owner" \
-o custom-columns=kind:.kind \
--no-headers=true |
sort -u |
paste -sd, -)"
# Nothing owned -> nothing to delete.
if [ -z "$ownedKinds" ]; then
return
fi
# With a timestamp, delete only resources NOT applied in this run
# (i.e. stale ones); without it, delete everything we own.
local filter="${TAG_OWNER}==${owner}"
if [[ "${timestamp}" ]]; then
filter="${filter},${TAG_APPLIED}!=${timestamp}"
fi
kube delete --wait=false "${ownedKinds}" -l "${filter}"
}
# createUpdateResources OWNER - applies the manifests in $MANIFEST_DIR
# according to $CREATE_MODE. Only Apply mode does the label-based
# three-way sync (label, apply, prune stale resources).
function createUpdateResources {
local owner="$1"
# Timestamp identifying this apply run, used to prune stale resources.
local timestamp="$(date +%s)"
case "$CREATE_MODE" in
Apply)
addLabels "$owner" "$timestamp"
kube apply -R -f "$MANIFEST_DIR"
deleteByOldLabels "$owner" "$timestamp"
;;
Create)
kube create -R -f "$MANIFEST_DIR"
;;
Delete)
kube delete --wait=false -R -f "$MANIFEST_DIR"
;;
Patch)
kube patch -R -f "$MANIFEST_DIR"
;;
Replace)
kube replace -R -f "$MANIFEST_DIR"
;;
esac
}
# Entry point. Expects CREATE_MODE, DELETE_MODE, ACTION, NAMESPACE,
# GITOPSCONFIG_NAME, MANIFEST_DIR and $kubectl in the environment.
if [ "$CREATE_MODE" == "None" ] || [ "$DELETE_MODE" == "None" ]; then
echo "CREATE_MODE and/or DELETE_MODE is set to None; This means that the template processor already applied the resources. Skipping the Manage Resources step."
exit 0
fi
echo "Managing Resources"
setContext
# NOTE: Kubernetes currently requires that first *and last* character of
# label values are alphanumerical - we're adding the "own" prefix & suffix to
# ensure that. Also, Kubernetes requires it to be <=63 chars long, so we're
# taking a MD5 hash of actual name (MD5 hash is 33 chars long).
# See: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#syntax-and-character-set
owner="own.$( echo "$NAMESPACE $GITOPSCONFIG_NAME" | md5sum | awk '{print$1}' ).own"
case "$ACTION" in
create) createUpdateResources "$owner";;
delete) deleteByOldLabels "$owner";;
esac
|
# Evaluate a saved convnet checkpoint ($1) on test batch 10, logging to the
# "logprob" logistic-regression layer: first single-view, then multiview.
python convnet.py --load-file "$1" \
--multiview-test 0 --test-only 1 --logreg-name logprob --test-range 10
python convnet.py --load-file "$1" \
--multiview-test 1 --test-only 1 --logreg-name logprob --test-range 10
|
<filename>camunda-user-task-demo/src/test/java/de/frvabe/bpm/camunda/UserTaskTest.java
package de.frvabe.bpm.camunda;
import static org.camunda.bpm.engine.test.assertions.ProcessEngineAssertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.Date;
import java.util.Map;
import org.camunda.bpm.engine.RuntimeService;
import org.camunda.bpm.engine.TaskService;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.task.Task;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {Main.class})
public class UserTaskTest {
@Autowired
RuntimeService runtimeService;
@Autowired
TaskService taskService;
    /**
     * Checks if an unblocking outbound event of a User Task gets fired on the follow up date.
     * <p>
     * NOTE(review): this test is timing-based (Thread.sleep around a ~3s
     * follow-up date) and therefore inherently flaky on slow CI machines.
     *
     * @throws InterruptedException
     */
    @Test
    public void timerWithTimerDate() throws InterruptedException {
        // start a process; the follow up date of the Demo Task will be set to 3 seconds from now
        ProcessInstance processInstance =
                runtimeService.startProcessInstanceByKey("demoProcessWithTimerDate");
        assertThat(processInstance).isNotNull();
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId())
                .singleResult();
        Map<String, Object> variables = runtimeService.getVariables(processInstance.getId());
        assertTrue(variables.containsKey("demoTask.followUpDate"));
        Map<String, Object> variablesLocal =
                runtimeService.getVariablesLocal(task.getExecutionId());
        // The boundary event must not have fired yet.
        assertFalse(variablesLocal.containsKey("demoTaskBoundaryEvent.fired"));
        Thread.sleep(2500); // nothing should have changed
        variables = runtimeService.getVariables(processInstance.getId());
        assertTrue(variables.containsKey("demoTask.followUpDate"));
        variablesLocal = runtimeService.getVariablesLocal(task.getExecutionId());
        assertFalse(variablesLocal.containsKey("demoTaskBoundaryEvent.fired"));
        Thread.sleep(7500); // timer is expected to have been fired after this sleep
        variables = runtimeService.getVariables(processInstance.getId());
        assertTrue(variables.containsKey("demoTask.followUpDate"));
        variablesLocal = runtimeService.getVariablesLocal(task.getExecutionId());
        // After the follow-up date has passed, the boundary event fired.
        assertTrue(variablesLocal.containsKey("demoTaskBoundaryEvent.fired"));
        // cleanup after test
        runtimeService.deleteProcessInstance(processInstance.getId(), "JUnit test");
    }
    /**
     * Checks if a Timer with cycle detects the maturity of a User Task.
     * <p>
     * NOTE(review): like {@code timerWithTimerDate}, this relies on
     * Thread.sleep timing and may be flaky under CI load.
     *
     * @throws InterruptedException
     */
    @Test
    public void timerWithTimeCycle() throws InterruptedException {
        // start a process; the follow up date of the Demo Task will be set to 3 seconds from now
        ProcessInstance processInstance =
                runtimeService.startProcessInstanceByKey("demoProcessWithTimerCycle");
        assertThat(processInstance).isNotNull();
        Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId())
                .singleResult();
        // Follow-up date is set in the BPMN model and lies in the future.
        assertNotNull(task.getFollowUpDate());
        assertTrue(task.getFollowUpDate().after(new Date()));
        Map<String, Object> variablesLocal =
                runtimeService.getVariablesLocal(task.getExecutionId());
        assertFalse(variablesLocal.containsKey("demoTaskBoundaryEvent.followUpDateReached"));
        Thread.sleep(2000); // nothing should have changed
        variablesLocal = runtimeService.getVariablesLocal(task.getExecutionId());
        assertFalse(variablesLocal.containsKey("demoTaskBoundaryEvent.followUpDateReached"));
        Thread.sleep(7500); // timer is expected to have been fired after this sleep
        variablesLocal = runtimeService.getVariablesLocal(task.getExecutionId());
        // The cycle timer has now detected the matured follow-up date.
        assertTrue(variablesLocal.containsKey("demoTaskBoundaryEvent.followUpDateReached"));
        // cleanup after test
        runtimeService.deleteProcessInstance(processInstance.getId(), "JUnit test");
    }
/**
* Test to update the followUp date of a given user task instance. Also check of task scope
* variables are not changed.
*/
@Test
public void updateFollowUpDate() {
ProcessInstance processInstance =
runtimeService.startProcessInstanceByKey("demoProcessWithTimerCycle");
Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId())
.singleResult();
runtimeService.setVariableLocal(task.getExecutionId(), "foo", "bar");
String taskInstanceId = task.getId();
String taskExecutionId = task.getExecutionId();
// followUpDate is set in BPM diagram
assertNotNull(task.getFollowUpDate());
// now unset followUpDate
task.setFollowUpDate(null);
taskService.saveTask(task);
// request task
Task task1 = taskService.createTaskQuery().processInstanceId(processInstance.getId())
.singleResult();
// check task properties
assertEquals(taskInstanceId, task1.getId());
assertEquals(taskExecutionId, task1.getExecutionId());
assertNull(task.getFollowUpDate());
// check task scope variable
assertEquals("bar", runtimeService.getVariableLocal(task.getExecutionId(), "foo"));
// cleanup after test
runtimeService.deleteProcessInstance(processInstance.getId(), "JUnit test");
}
}
|
// Iterate over the food list and show an alert for each entry.
// FIX: the original had stray tutorial step numbers ("1".."5") pasted in as
// no-op expression statements at line ends; they are removed here. Also
// modernized: `const`/for-of instead of `var` + index loop, strict equality.
const foods = ['Apples', 'Bananas', 'Oranges'];
for (const food of foods) {
  if (food === 'Apples') {
    alert(food + ' are my favorite!');
  } else {
    alert(food + ' are okay.');
  }
}
<gh_stars>100-1000
import { Span } from '@opentelemetry/api';
import { callHookOnlyOnRecordingSpan } from '../payload-collection/recording-span';
import {
AwsSdkInstrumentationConfig,
NormalizedRequest,
NormalizedResponse,
} from '@opentelemetry/instrumentation-aws-sdk';
import { DbExtendedAttribute, MessagingExtendedAttribute } from '../enums';
import { AutoInstrumentationOptions } from '../types';
// Span attribute under which the whitelisted AWS request params are recorded
// (JSON-encoded object).
const enum AttributeNames {
  AWS_REQUEST_PARAMS = 'aws.request.params',
}

// Per-service whitelist of request parameters worth capturing on spans.
// Keys are lower-case AWS service names as reported by the aws-sdk
// instrumentation's NormalizedRequest.serviceName; values are the
// commandInput field names to copy.
const whiteListParams: Record<string, string[]> = {
  sqs: ['QueueUrl', 'DelaySeconds', 'MaxNumberOfMessages', 'WaitTimeSeconds'],
  s3: ['Bucket', 'Key', 'ACL', 'ContentType', 'ResponseContentType'],
  sns: ['TopicArn'],
  kinesis: ['StreamName', 'PartitionKey'],
  firehose: ['DeliveryStreamName'],
  ebs: ['SnapshotId'],
  ssm: ['Name'],
  lambda: ['FunctionName'],
  athena: ['WorkGroup', 'QueryString'],
  sts: ['RoleArn'],
};
/**
 * Picks only the whitelisted parameters for the given AWS service out of the
 * raw request input.
 *
 * FIXES: the bare `return;` contradicted the declared non-optional return
 * type (an error under strict TS), and the reducer accumulator shadowed the
 * module-level `whiteListParams` map.
 *
 * @param serviceName lower-case AWS service name (e.g. 'sqs', 's3')
 * @param requestParams the raw commandInput of the AWS SDK request
 * @returns a new object with only the whitelisted params that are present,
 *          or undefined when the service is unknown or params are missing
 */
const getRequestWhitelistedParams = (
  serviceName: string,
  requestParams: Record<string, any>
): Record<string, any> | undefined => {
  const paramsToCapture: string[] = whiteListParams[serviceName];
  if (!paramsToCapture || !requestParams) return undefined;
  return paramsToCapture.reduce((captured: Record<string, any>, paramName: string) => {
    const value = requestParams[paramName];
    if (value !== undefined) {
      captured[paramName] = value;
    }
    return captured;
  }, {});
};
/**
 * Records the SQS message payload on the span for sendMessage /
 * sendMessageBatch commands. Does nothing for other commands, for a
 * non-string single-message body, or when batch serialization fails.
 */
const addSqsPayload = (span: Span, request: NormalizedRequest) => {
  let payload;
  const command = request.commandName;
  if (command === 'sendMessage') {
    const body = request.commandInput?.MessageBody;
    if (typeof body !== 'string') return;
    payload = body;
  } else if (command === 'sendMessageBatch') {
    const batch = request.commandInput?.Entries?.map((entry) => ({
      msgId: entry.Id,
      payload: entry.MessageBody,
    }));
    try {
      payload = JSON.stringify(batch);
    } catch {}
  }
  if (payload === undefined) return;
  span.setAttribute(MessagingExtendedAttribute.MESSAGING_PAYLOAD, payload);
};
/**
 * Builds the preRequestHook: attaches the JSON-encoded whitelisted request
 * params to the span and, for SQS with payload collection enabled, the
 * message payload as well.
 */
const awsSdkRequestHook = (options: AutoInstrumentationOptions) => (span: Span, request: NormalizedRequest) => {
  const whitelisted = getRequestWhitelistedParams(request.serviceName, request.commandInput);
  if (whitelisted) {
    try {
      span.setAttribute(AttributeNames.AWS_REQUEST_PARAMS, JSON.stringify(whitelisted));
    } catch {}
  }
  if (request.serviceName === 'sqs' && options.collectPayloads) {
    addSqsPayload(span, request);
  }
};
/**
 * responseHook: for DynamoDB only, records the (object) response data on the
 * span as a JSON string.
 */
const awsSdkResponseHook = (span: Span, response: NormalizedResponse) => {
  if (response.request.serviceName !== 'dynamodb') return;
  if (typeof response.data !== 'object') return;
  span.setAttribute(DbExtendedAttribute.DB_RESPONSE, JSON.stringify(response.data));
};
// Minimal shape of a received SQS message — only the body is inspected here.
interface SqsMessage {
  Body?: string;
}

/**
 * sqsProcessHook: records the received SQS message body on the processing
 * span; a message without a Body is ignored.
 */
const sqsProcessCapturePayload = (span: Span, message: SqsMessage) => {
  const body = message.Body;
  if (body === undefined) return;
  span.setAttribute(MessagingExtendedAttribute.MESSAGING_PAYLOAD, body);
};
/**
 * Builds the aws-sdk instrumentation config from the auto-instrumentation
 * options. Hooks run only on recording spans.
 *
 * FIX: `options.collectPayloads && hook` evaluated to the literal `false`
 * when payload collection is off, assigning a boolean to a hook-typed field;
 * use a ternary so the field is a hook or `undefined`.
 */
export const awsSdkInstrumentationConfig = (options: AutoInstrumentationOptions): AwsSdkInstrumentationConfig => ({
  preRequestHook: callHookOnlyOnRecordingSpan(awsSdkRequestHook(options)),
  responseHook: options.collectPayloads ? callHookOnlyOnRecordingSpan(awsSdkResponseHook) : undefined,
  sqsProcessHook: options.collectPayloads
    ? callHookOnlyOnRecordingSpan(sqsProcessCapturePayload)
    : undefined,
  suppressInternalInstrumentation: options.suppressInternalInstrumentation,
});
|
#!/bin/sh
# Detect on which platform you are to do multiplatform script
# FIX: use POSIX $(...) instead of legacy backticks and quote the result.
# `uname` prints the kernel name: "Darwin" on macOS, "Linux" on Linux.
case "$(uname)" in
    Darwin)
        echo "You are on a mac -> Darwin"
        ;;
    Linux)
        echo "You are on linux"
        ;;
    *)
        # Other platforms fall through silently, matching original behavior.
        ;;
esac
|
package org.hiro.input.keyboard.equipment;
import org.hiro.Pack;
import org.hiro.RingMethod;
import org.hiro.character.Player;
import org.hiro.input.keyboard.KeyboardCommand;
import org.hiro.things.ObjectType;
import org.hiro.things.Thing;
public class PutOnRingCommand implements KeyboardCommand {

    /**
     * Asks the pack for a ring to "put on" and equips it on the player.
     */
    @Override
    public void execute(Player player) {
        Thing ring = Pack.get_item("put on", ObjectType.RING);
        RingMethod.ring_on(player, ring);
    }
}
|
<reponame>andrewjohnston99/SouthfaceEquityEvaluator<filename>resources/js/app.js<gh_stars>1-10
/**
 * Load Javascript dependencies
 */
// Project-local bootstrap module — presumably sets up app-wide JS
// (axios/jQuery/etc.); confirm against ./bootstrap.js.
require('./bootstrap');
// flatpickr date/time picker bundle (side-effect import).
require('flatpickr/dist/flatpickr');
#!/bin/bash
# Copyright 2019 The VKB Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -e # fail on error
set -x # display commands

# Resolve the directory containing this script so it can be invoked from
# anywhere. FIXES: legacy backticks -> $(...), and the script path is now
# quoted so directories with spaces don't break the `source`.
SCRIPT_DIR=$(dirname "${BASH_SOURCE[0]}")
source "$SCRIPT_DIR/../scripts/linux/build.sh" RELEASE gcc
|
/*
problem:
	Seven symbols: I=1, V=5, X=10, L=50, C=100, D=500, M=1000.
	Subtractive pairs: IV=4, IX=9, XL=40, XC=90, CD=400, CM=900.
	Numerals are written largest-to-smallest, left to right.
questions (answered):
	only positive numbers; input is always a valid, non-empty numeral;
	result always fits in int; no ambiguous forms like IVI.
test cases:
	I=1, IV=4, XIV=14, XXIV=24, XXIII=23
solution:
	Single left-to-right pass using the general subtractive rule: a symbol
	that is strictly smaller than its right neighbor is subtracted,
	otherwise added. For valid numerals this is equivalent to matching the
	six two-letter pairs explicitly, but removes the duplicated
	"j++; continue" arms and the per-character fmt.Sprintf allocation of
	the original (which the original's own notes flagged as improvements).
analysis:
	O(len(s)) time, O(1) extra space (fixed 7-entry map per call).
*/
func romanToInt(s string) int {
	// Value of each single Roman symbol.
	values := map[byte]int{
		'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000,
	}
	total := 0
	for i := 0; i < len(s); i++ {
		v := values[s[i]]
		// Subtractive notation: smaller symbol before a larger one.
		if i+1 < len(s) && v < values[s[i+1]] {
			total -= v
		} else {
			total += v
		}
	}
	return total
}
|
# Re-source the zsh config in the current shell.
alias reload='source ~/.zshrc'
# tmux
alias tmuxa='tmux attach -t'
alias tmuxn='tmux new-session -s'
alias tmuxl='tmux ls'
# tmuxinator
alias mux='tmuxinator'
alias editmux='code ~/.tmuxinator'
# code
# ssh
# NOTE(review): empty placeholder — fill in the dev host, e.g. 'ssh user@host'.
alias sshdev=''
# misc
# from: https://remysharp.com/2018/08/23/cli-improved
# These shadow core commands with modern replacements; they require
# bat, fzf and tldr to be installed.
alias cat='bat'
alias preview="fzf --preview 'bat --color \"always\" {}'"
alias help='tldr'
# Heads and tails :D
# From paulmillr/dotfiles
# `alias -g` is a zsh GLOBAL alias: it expands anywhere on the command line,
# so `somecmd f10` becomes `somecmd | head -n 10`.
alias -g f2='| head -n 2'
alias -g f10='| head -n 10'
alias -g l10='| tail -n 10'
#!/bin/bash
#
# Author: vencol,neucrack
#
# Build driver for the GPRS_C_SDK: selects a project/demo directory, patches
# line 15 of the top-level Makefile to point at it, runs make, collects hex
# output, and prints RAM/ROM usage parsed from the generated .map file.
####################################
# help info
function help()
{
    echo "Usage:"
    echo "use './build.sh PROJECTNAME' to build the project in ./PROJECTNAME "
    echo " eg: ./build.sh app "
    echo "use './build.sh demo PROJECTNAME' to build demo in ./demo/PROJECTNAME "
    echo "use './build.sh clean PROJECTNAME' to clean the project PROJECTNAME build files"
    echo "use './build.sh clean all' to clean all the project build files "
    echo "use './build.sh ... release' to build release software "
    echo " eg: './build.sh demo gpio release' "
}
# The trailing "x" in comparisons guards against empty arguments.
if [[ "$1x" == "helpx" || "$1x" == "-hx" || "$1x" == "-helpx" || "$1x" == "--helpx" ]]; then
    help
    exit 0
fi
######################################
# "release" may be appended as the LAST argument; detect it via ${!#}
# (indirect expansion of the last positional parameter) and drop it from
# the effective argument count.
compileMode='debug'
paramNum=$#
if [[ "${!#}x" == "releasex" ]]; then
    compileMode='release'
    paramNum=$(($paramNum-1))
fi
####################################
# check lib files
# The precompiled CSDK blobs (memd.def, .elf, .lod) must exist before building.
CSDK_MEM_DEF_PATH=./platform/csdk
CSDK_LIB_PATH=./platform/csdk/$compileMode
memdef_file=$(ls $CSDK_MEM_DEF_PATH|grep -w 'memd.def')
elf_file=$(ls $CSDK_LIB_PATH|grep '.elf')
lib_file=$(ls $CSDK_LIB_PATH|grep '.lod')
# "${var}aa" = "aa" is an empty-string test that works even for unset vars.
if [[ "${memdef_file}aa" = "aa" || "${elf_file}aa" = "aa" || "${lib_file}aa" = "aa" ]]; then
    echo "!!!!!!!!!!!!!!"
    echo "NO LIB FILES"
    echo "!!!!!!!!!!!!!!"
    echo "please check platform/csdk folder, it can not be empty"
    echo ""
    echo "Plese download again from "
    echo ""
    echo " https://github.com/Ai-Thinker-Open/GPRS_C_SDK/releases"
    echo ""
    exit 1
fi
#####################################
start_time=`date +%s`
#where the cygwin install in unix path,example if windows path is G:\CSDTK\cygwin,cygwin path may be /cygdrive/g/CSDTK/cygwin
# CYGWIN_HOME=
# if [[ ! -d $CYGWIN_HOME ]]; then
# echo PATH $CYGWIN_HOME is not exist
# exit
# fi
#set the path
# export PATH=$CYGWIN_HOME/bin:$CYGWIN_HOME/crosscompiler/bin:$CYGWIN_HOME/cooltools:/bin:/usr/bin;
export PATH=/bin:/crosscompiler/bin:/cooltools:/bin:/usr/bin:$PATH;
# echo path:$PATH
export SOFT_WORKDIR=`pwd`
echo "param number:$paramNum"
echo "compileMode:$compileMode"
# One argument: build ./PROJECTNAME directly. Line 15 of the Makefile is
# rewritten in place to declare the module dependency for this build.
if [[ $paramNum -eq 1 ]]; then
    export IS_PROJECT_DIR=$SOFT_WORKDIR/$1
    if [[ ! -d $IS_PROJECT_DIR ]]; then
        echo "project $1 error path:$IS_PROJECT_DIR";
        exit
    fi
    if [[ "$1x" == "initx" ]]; then
        sed -i '15d' Makefile
        sed -i "15i\#" Makefile
    else
        sed -i '15d' Makefile
        sed -i "15i\LOCAL_MODULE_DEPENDS += $1" Makefile
    fi
    export PROJ_NAME=$1
# Two arguments: "clean NAME|all", "demo NAME" or "project NAME".
elif [[ $paramNum -eq 2 ]]; then
    if [[ "$1x" == "cleanx" ]]; then
        if [[ "$2x" == "allx" ]]; then
            rm -rf $SOFT_WORKDIR/build
            rm -rf $SOFT_WORKDIR/hex
        else
            rm -rf $SOFT_WORKDIR/build/$2
            rm -rf $SOFT_WORKDIR/hex/$2
            rm -f $SOFT_WORKDIR/build/$2_build.log
        fi
        echo "clear project $2 end";
        exit
    elif [[ "$1x" == "demox" ]]; then
        export IS_PROJECT_DIR=$SOFT_WORKDIR/demo/$2
        if [[ ! -d $IS_PROJECT_DIR ]]; then
            echo "demo $2 error path $IS_PROJECT_DIR";
            exit
        fi
        export PROJ_NAME=$2
        # sed -i '5d' Makefile
        # sed -i "5i\LOCAL_LIBS += platform/lib/libinit.a" Makefile
        sed -i '15d' Makefile
        sed -i "15i\LOCAL_MODULE_DEPENDS += demo/$2" Makefile
    elif [[ "$1x" == "projectx" ]]; then
        export IS_PROJECT_DIR=$SOFT_WORKDIR/project/$2
        if [[ ! -d $IS_PROJECT_DIR ]]; then
            echo "demo $2 error path $IS_PROJECT_DIR";
            exit
        fi
        export PROJ_NAME=$2
        # sed -i '5d' Makefile
        # sed -i "5i\LOCAL_LIBS += platform/lib/libinit.a" Makefile
        sed -i '15d' Makefile
        sed -i "15i\LOCAL_MODULE_DEPENDS += project/$2" Makefile
    fi
else
    help
    exit 0
fi
# if [[ ! -d target/$PROJ_NAME ]]; then
# cp -rf target/init target/$PROJ_NAME
# echo "user default for init-target";
# fi
#build path and log
LOG_FILE_PATH=$SOFT_WORKDIR/build
if [ ! -d ${LOG_FILE_PATH} ]; then
    mkdir ${LOG_FILE_PATH}
fi
LOG_FILE=${LOG_FILE_PATH}/${PROJ_NAME}_build.log
echo "compile project $PROJ_NAME";
echo "compile path $IS_PROJECT_DIR";
# Parallelism = number of CPU entries in /proc/cpuinfo (Linux/Cygwin only).
MAKE_J_NUMBER=`cat /proc/cpuinfo | grep vendor_id | wc -l`
echo "core number:$MAKE_J_NUMBER"
rm -rf $SOFT_WORKDIR/hex/$PROJ_NAME
cd $SOFT_WORKDIR
if [ ${MAKE_J_NUMBER} -gt 1 ]; then
    make -j${MAKE_J_NUMBER} CT_RELEASE=$compileMode 2>&1 | tee ${LOG_FILE}
else
    make CT_RELEASE=$compileMode 2>&1 | tee ${LOG_FILE}
fi
# Rename hex/PROJ_NAME to hex/PROJ_NAME_MODE, replacing any previous output.
rm -f $SOFT_WORKDIR/hex/${PROJ_NAME}_${compileMode}/*
rm -rf $SOFT_WORKDIR/hex/${PROJ_NAME}_${compileMode}
mkdir $SOFT_WORKDIR/hex/${PROJ_NAME}_${compileMode}
cp -f $SOFT_WORKDIR/hex/$PROJ_NAME/* $SOFT_WORKDIR/hex/${PROJ_NAME}_${compileMode}
rm -rf $SOFT_WORKDIR/hex/$PROJ_NAME
# if [[ "$1x" == "initx" ]]; then
# cp build/init/init/lib/libinit_*.a platform/lib/libinit.a
# fi
end_time=`date +%s`
time_distance=`expr ${end_time} - ${start_time}`
date_time_now=$(date +%F\ \ %H:%M:%S)
echo === Build Time: ${time_distance}s at ${date_time_now} === | tee -a ${LOG_FILE}
# print RAM and ROM info
if [[ $paramNum -eq 1 ]]; then
    if [[ "$1aa" != "cleanaa" ]]; then
        mapPathName=$1
    else
        exit 0
    fi
else
    mapPathName=$2
fi
MAP_FILE_PATH=./build/$mapPathName/$mapPathName.map
MEMD_DEF_PATH=./platform/csdk/memd.def
map_file=$(ls ./build/$mapPathName|grep '.map')
# No .map file means the make step above failed.
if [[ "${map_file}aa" = "aa" ]]; then
    echo "!!!!!!!!!!!!!!!!!!!!"
    echo " BUILD FAILED"
    echo "!!!!!!!!!!!!!!!!!!!!"
    exit 1
fi
# Totals come from the CSDK memory definition; usage comes from linker
# symbols in the .map file (__rom_start, __user_rw_*, __user_bss_end).
ram_total=$(grep -n "USER_RAM_SIZE" $MEMD_DEF_PATH | awk '{print $3}')
rom_total=$(grep -n "USER_ROM_SIZE" $MEMD_DEF_PATH | awk '{print $3}')
rom_start=$(grep -n "__rom_start = ." $MAP_FILE_PATH | awk '{print $2}')
rom_rw_start=$(grep -n "__user_rw_lma = ." $MAP_FILE_PATH | awk '{print $2}')
ram_start=$(grep -n "__user_rw_start = ." $MAP_FILE_PATH | awk '{print $2}')
ram_rw_data_end=$(grep -n "__user_rw_end = ." $MAP_FILE_PATH | awk '{print $2}')
ram_end=$(grep -n "__user_bss_end = ." $MAP_FILE_PATH | awk '{print $2}')
# echo $ram_start $ram_end
ram_used=$(($ram_end-$ram_start))
ram_used_percent=$(awk 'BEGIN{printf "%.2f%\n",('$ram_used'/'$ram_total')*100}')
# ROM usage = code (.text up to the rw LMA) plus the initialized rw data image.
rw_data_size=$(($ram_rw_data_end-$ram_start))
rom_used=$(($rom_rw_start-$rom_start+$rw_data_size))
rom_used_percent=$(awk 'BEGIN{printf "%.2f%\n",('$rom_used'/'$rom_total')*100}')
echo ROM total: ${rom_total}\($((${rom_total}))\) Bytes, used: $rom_used Bytes \($rom_used_percent\)
echo RAM total: ${ram_total}\($((${ram_total}))\) Bytes, used: $ram_used Bytes \($ram_used_percent\)
exit
import { CanActivate, ExecutionContext, Injectable, Logger, UnauthorizedException } from '@nestjs/common';
import { Observable } from 'rxjs';
import { LoggerMiddleware } from './middelware/logger.middleware';
import { Reflector } from '@nestjs/core';
@Injectable()
export class AuthGuard implements CanActivate {
  constructor(private reflector: Reflector) {}

  /**
   * Allows the request when the handler declares no 'roles' metadata, or when
   * the role in the request body is one of the declared roles.
   *
   * NOTE(review): trusting `req.body.role` means the client asserts its own
   * role — presumably a demo; real auth should read the authenticated user.
   *
   * @throws UnauthorizedException when a role is required but does not match
   */
  canActivate(
    context: ExecutionContext,
  ): boolean | Promise<boolean> | Observable<boolean> {
    const roles = this.reflector.get<string[]>('roles', context.getHandler());
    const req = context.switchToHttp().getRequest();
    const user = req.body['role'];
    console.log('AuthGurd user name :', req.body['username']);
    if (!roles) {
      console.log('There is no Role field');
      return true;
    }
    // BUG FIX: `roles == user` compared a string[] to a string, which only
    // "worked" via array-to-string coercion when exactly one role was
    // declared. includes() matches any of the declared roles.
    if (roles.includes(user)) {
      console.log('You are admin');
      return true;
    }
    console.log('is Not Auth Gard');
    throw new UnauthorizedException();
  }
}
|
/*
* Copyright 2016-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.instrumentation.jaxrs;
import java.io.File;
import java.io.Serializable;
import com.ning.http.client.AsyncHttpClient;
import org.apache.catalina.Context;
import org.apache.catalina.loader.WebappLoader;
import org.apache.catalina.startup.Tomcat;
import org.glowroot.instrumentation.test.harness.AppUnderTest;
import org.glowroot.instrumentation.test.harness.util.Ports;
public class InvokeJaxrsResourceInTomcat implements AppUnderTest {

    /**
     * Harness entry point: args are (webapp dir name, context path, resource path).
     */
    @Override
    public void executeApp(Serializable... args) throws Exception {
        String webapp = (String) args[0];
        String contextPath = (String) args[1];
        String path = (String) args[2];
        executeApp(webapp, contextPath, path);
    }

    /**
     * Boots an embedded Tomcat serving the given test webapp, issues one GET
     * against it, and fails unless the response is 200.
     *
     * FIX: the HTTP client and Tomcat are now released in finally blocks —
     * previously a failed request or non-200 status leaked the client and
     * left Tomcat (and its port/threads) running.
     */
    private void executeApp(String webapp, String contextPath, String url) throws Exception {
        int port = Ports.getAvailable();
        Tomcat tomcat = new Tomcat();
        tomcat.setBaseDir("target/tomcat");
        tomcat.setPort(port);
        Context context = tomcat.addWebapp(contextPath,
                new File("src/test/resources/" + webapp).getAbsolutePath());
        // Use the test classloader so the webapp sees the test classpath.
        WebappLoader webappLoader =
                new WebappLoader(InvokeJaxrsResourceInTomcat.class.getClassLoader());
        context.setLoader(webappLoader);
        tomcat.start();
        try {
            int statusCode;
            AsyncHttpClient asyncHttpClient = new AsyncHttpClient();
            try {
                statusCode = asyncHttpClient
                        .prepareGet("http://localhost:" + port + contextPath + url)
                        .execute().get().getStatusCode();
            } finally {
                asyncHttpClient.close();
            }
            if (statusCode != 200) {
                throw new IllegalStateException("Unexpected status code: " + statusCode);
            }
        } finally {
            tomcat.stop();
            tomcat.destroy();
        }
    }
}
|
#!/bin/sh
# Rewrite {{VAR}} placeholders in the environment config to ${VAR} syntax,
# expand them with envsubst, then launch the application jar.
# FIXES: quoted all variable expansions and factored the repeated
# config-file paths into variables.
CONFIG="config/$ENVIRONMENT-config.json"
TMP="config/$ENVIRONMENT-config-tmp.json"
# '}}' -> '}' and '{{' -> '${' so envsubst can see the placeholders.
sed -e 's/}}/}/g' -e 's/{{/\${/g' "$CONFIG" > "$TMP" && mv "$TMP" "$CONFIG"
envsubst < "$CONFIG" > "$TMP" && mv "$TMP" "$CONFIG"
java -Xms32m \
  -Xmx"$JVM_XMX" \
  -Dfile.encoding=UTF-8 \
  -Duser.timezone=Asia/Ho_Chi_Minh \
  -Duser.language=en \
  -Duser.country=US \
  -Djava.io.tmpdir=/tmp \
  -Denv="$ENVIRONMENT" \
  -Dmode=DEPLOY \
  -jar app.jar
|
# Pipeline started at 06-15 07:17:11
ln -sf /project/shefflab/data//guertin/fastq/K562_70pctRNA.fastq.gz /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/raw/K562_RNA-seq_70.fastq.gz
pigz -f -p 12 -d -c /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/raw/K562_RNA-seq_70.fastq.gz > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1.fastq
cutadapt --version
(cutadapt -j 12 -m 2 -O 1 -a TGGAATTCTCGGGTGCCAAGG /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1.fastq -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1_noadap.fastq --too-short-output /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1_short.fastq ) > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt
seqtk trimfq -b 0 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1_noadap.fastq | seqtk seq -L 2 -r - > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1_processed.fastq
grep 'Reads with adapters:' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt | awk '{print $(NF-1)}'
grep 'Total basepairs processed:' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt | awk '{print $(NF-1)}'
awk '{sum+=$1*$2} END {printf "%.0f", sum}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt
wc -l /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1_short.fastq | awk '{print $1}'
echo '### Calculate the number of trimmed reads'
fastqc --noextract --outdir /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastqc /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1_processed.fastq
touch /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/processed_R1.flag
Rscript /scratch/jps3dp/tools/databio//peppro/tools/PEPPRO.R cutadapt -i /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt
awk '/count/,0' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt | awk 'NR>2 {print prev} {prev=$0}' | awk '{if ($3/$2 < 0.01) print $1, $2}' | awk 'BEGIN{max= 0; max_len=0; len=0}{if ($2>0+max) {max=$2; len=$1}; max_len=$1} END{print max_len-len}'
awk 'NR>2 {print prev} {prev=$0}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt | awk '{ if ($1 == 10) {status = 1}} END {if (status) {print status} else {print 0}}'
awk 'NR>2 {print prev} {prev=$0}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt | awk '{ if ($1 == 20) {status = 1}} END {if (status) {print status} else {print 0}}'
awk 'NR>2 {print prev} {prev=$0}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt | awk '{ if ($1 == 30) {status = 1}} END {if (status) {print status} else {print 0}}'
awk 'NR>2 {print prev} {prev=$0}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt | awk '{ if ($1 == 40) {status = 1}} END {if (status) {print status} else {print 0}}'
awk '/count/,0' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/cutadapt/K562_RNA-seq_70_R1_cutadapt.txt | awk 'NR>2 {print prev} {prev=$0}' | awk '{if ($3/$2 < 0.01) print $1, $2}' | awk '{a[NR]=$1; b[NR]=$2; max_len=$1}{if ($1 > max_len) {max_len=$1}} END{ for (i in a) print 1+max_len-a[i], b[i]}' | sort -nk1 | awk '($1 <= 20 && $1 >= 10){degradedSum += $2}; ($1 >= 30 && $1 <= 40){intactSum += $2} END {if (intactSum < 1) {intactSum = 1} print degradedSum/intactSum}'
(bowtie2 -p 12 -k 1 -D 20 -R 3 -N 1 -L 20 -i S,1,0.50 -x /project/shefflab/genomes/human_rDNA/bowtie2_index/default/human_rDNA --rg-id K562_RNA-seq_70 -U /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/fastq/K562_RNA-seq_70_R1_processed.fastq --un /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/prealignments/K562_RNA-seq_70_human_rDNA_unmap.fq 2>&1 > /dev/null)
bowtie2 -p 12 --very-sensitive --rg-id K562_RNA-seq_70 -x /project/shefflab/genomes/hg38/bowtie2_index/default/hg38 -U /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/prealignments/K562_RNA-seq_70_human_rDNA_unmap.fq | samtools view -bS - -@ 1 | samtools sort - -@ 1 -T /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/tmpzz7s0tum -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_temp.bam
samtools view -q 10 -b -@ 12 -U /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_fail_qc.bam /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_temp.bam > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam
samtools depth -b /project/shefflab/genomes/hg38/refgene_anno/default/hg38_pre-mRNA.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam | awk '{counter++;sum+=$3}END{print sum/counter}'
pigz -f -p 12 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/prealignments/K562_RNA-seq_70_human_rDNA_unmap.fq
samtools index /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_temp.bam
samtools idxstats /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_temp.bam | grep -we 'chrM' -we 'chrMT' -we 'M' -we 'MT' -we 'rCRSd' -we 'rCRSd_3k'| cut -f 3
samtools index /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam
samtools idxstats /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam | cut -f 1-2 | awk '{print $1, 0, $2}' | grep -vwe 'chrM' -vwe 'chrMT' -vwe 'M' -vwe 'MT' -vwe 'rCRSd' -vwe 'rCRSd_3k' > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/chr_sizes.bed
samtools view -L /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/chr_sizes.bed -b -@ 12 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_noMT.bam
mv /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_noMT.bam /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam
samtools index /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam
samtools stats /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam | grep '^SN' | cut -f 2- | grep 'maximum length:' | cut -f 2-
awk '{sum+=$2} END {printf "%.0f", sum}' /project/shefflab/genomes/hg38/fasta/default/hg38.chrom.sizes
/scratch/jps3dp/tools/databio//peppro/tools/bamQC.py --silent -i /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam -c 12 -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_bamQC.tsv
awk '{ for (i=1; i<=NF; ++i) { if ($i ~ "NRF") c=i } getline; print $c }' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_bamQC.tsv
awk '{ for (i=1; i<=NF; ++i) { if ($i ~ "PBC1") c=i } getline; print $c }' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_bamQC.tsv
awk '{ for (i=1; i<=NF; ++i) { if ($i ~ "PBC2") c=i } getline; print $c }' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_bamQC.tsv
samtools view -b -@ 12 -f 4 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_temp.bam > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_unmap.bam
samtools view -c -f 4 -@ 12 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_temp.bam
samtools view -bh -F 20 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam
samtools view -bh -f 16 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam
sed -n -e '/[[:space:]]+/w /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/plus_TSS.tsv' -e '/[[:space:]]-/w /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/minus_TSS.tsv' /project/shefflab/genomes/hg38/refgene_anno/default/hg38_TSS.bed
/scratch/jps3dp/tools/databio//peppro/tools/pyTssEnrichment.py -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/plus_TSS.tsv -p ends -c 12 -z -v -s 6 -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_plus_TssEnrichment.txt
/scratch/jps3dp/tools/databio//peppro/tools/pyTssEnrichment.py -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/minus_TSS.tsv -p ends -c 12 -z -v -s 6 -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_minus_TssEnrichment.txt
Rscript /scratch/jps3dp/tools/databio//peppro/tools/PEPPRO.R tss -i /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_plus_TssEnrichment.txt /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_minus_TssEnrichment.txt
samtools view -H /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam | grep 'SN:' | awk -F':' '{print $2,$3}' | awk -F' ' -v OFS=' ' '{print $1,$3}' > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt
cut -f 1 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_keep.txt
grep -wf /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_keep.txt /project/shefflab/genomes/hg38/ensembl_gtf/default/hg38_ensembl_TSS.bed | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_ensembl_tss.bed
grep -wf /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_keep.txt /project/shefflab/genomes/hg38/ensembl_gtf/default/hg38_ensembl_gene_body.bed | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_ensembl_gene_body.bed
bedtools coverage -sorted -counts -s -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_ensembl_tss.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | awk '$7>0' | sort -k4,4 -k7,7nr | sort -k4,4 -u > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_TSS_density.bed
bedtools coverage -sorted -counts -s -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_ensembl_gene_body.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | awk '$7>0' | sort -k4 > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_gene_body_density.bed
join --nocheck-order -j4 -o 1.1 1.2 1.3 1.4 1.6 1.7 2.2 2.3 2.7 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_TSS_density.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_gene_body_density.bed | awk -v OFS=' ' '{ if ($5 == "+"){print $1, $2, $8, $4, sqrt((($6+$9)/sqrt(($8-$2)^2))^2), ($6/sqrt(($3-$2)^2))/($9/sqrt(($8-$7)^2)), $5} else {print $1, $2, $8, $4, sqrt((($6+$9)/sqrt(($3-$7)^2))^2),($6/sqrt(($3-$2)^2))/($9/sqrt(($8-$7)^2)), $5}}' | env LC_COLLATE=C sort -k1,1 -k2,2n > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/tmp43ogt6mm
awk '{print $5}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/tmp43ogt6mm | sort -n | awk 'BEGIN{i=0} {s[i]=$1; i++;} END{print s[int(NR*0.5-0.5)]}'
awk -v OFS=' ' '{ if ($5 > 0.0384066) {print $1, $2, $3, $4, $6, $7}}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/tmp43ogt6mm > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_pause_index.bed
sort -k5,5n /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_pause_index.bed | awk ' { a[i++]=$5; } END { x=int((i+1)/2); if (x < (i+1)/2) print (a[x-1]+a[x])/2; else print a[x-1]; }'
Rscript /scratch/jps3dp/tools/databio//peppro/tools/PEPPRO.R pi --annotate -i /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_pause_index.bed
pigz -f -p 12 -f /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_pause_index.bed
samtools view -@ 4 -c -L /project/shefflab/genomes/hg38/refgene_anno/default/hg38_pre-mRNA.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam
samtools view -@ 4 -c -L /project/shefflab/genomes/hg38/refgene_anno/default/hg38_pre-mRNA.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam
grep -wf /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_keep.txt /project/shefflab/genomes/hg38/refgene_anno/default/hg38_pre-mRNA.bed | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_gene_sort.bed
bedtools coverage -sorted -counts -s -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_gene_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/signal_hg38/K562_RNA-seq_70_gene_coverage.bed
ln -sf /project/shefflab/genomes/hg38/feat_annotation/default/hg38_annotations.bed.gz /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/raw/hg38_annotations.bed.gz
pigz -f -p 12 -d -c /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/raw/hg38_annotations.bed.gz > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/raw/hg38_annotations.bed
cut -f 4 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/raw/hg38_annotations.bed | sort -u
awk -F' ' '{print>"/project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/"$4}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/raw/hg38_annotations.bed
cut -f 1 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | grep -wf - /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Enhancer | cut -f 1-3 | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Enhancer_sort.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Enhancer_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Enhancer_plus_coverage.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Enhancer_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Enhancer_minus_coverage.bed
cut -f 1 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | grep -wf - /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter | cut -f 1-3 | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter_sort.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Promoter_plus_coverage.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Promoter_minus_coverage.bed
mv "/project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter Flanking Region" "/project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter_Flanking_Region"
cut -f 1 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | grep -wf - /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter_Flanking_Region | cut -f 1-3 | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter_Flanking_Region_sort.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter_Flanking_Region_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Promoter_Flanking_Region_plus_coverage.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Promoter_Flanking_Region_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Promoter_Flanking_Region_minus_coverage.bed
mv "/project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/5' UTR" "/project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/5_UTR"
cut -f 1 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | grep -wf - /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/5_UTR | cut -f 1-3 | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/5_UTR_sort.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/5_UTR_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_5_UTR_plus_coverage.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/5_UTR_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_5_UTR_minus_coverage.bed
mv "/project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/3' UTR" "/project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/3_UTR"
cut -f 1 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | grep -wf - /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/3_UTR | cut -f 1-3 | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/3_UTR_sort.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/3_UTR_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_3_UTR_plus_coverage.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/3_UTR_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_3_UTR_minus_coverage.bed
cut -f 1 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | grep -wf - /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Exon | cut -f 1-3 | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Exon_sort.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Exon_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Exon_plus_coverage.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Exon_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Exon_minus_coverage.bed
cut -f 1 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | grep -wf - /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Intron | cut -f 1-3 | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Intron_sort.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Intron_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Intron_plus_coverage.bed
bedtools coverage -sorted -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/Intron_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Intron_minus_coverage.bed
samtools view -@ 12 -q 10 -c -F4 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam
Rscript /scratch/jps3dp/tools/databio//peppro/tools/PEPPRO.R frif -s K562_RNA-seq_70 -z 3099922541 -n 22361014 -y cfrif --reads -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_cFRiF.pdf --bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Enhancer_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Promoter_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Promoter_Flanking_Region_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_5_UTR_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_3_UTR_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Exon_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Intron_plus_coverage.bed
Rscript /scratch/jps3dp/tools/databio//peppro/tools/PEPPRO.R frif -s K562_RNA-seq_70 -z 3099922541 -n 22361014 -y frif --reads -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_FRiF.pdf --bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Enhancer_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Promoter_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Promoter_Flanking_Region_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_5_UTR_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_3_UTR_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Exon_plus_coverage.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_Intron_plus_coverage.bed
grep -wf /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_keep.txt /project/shefflab/genomes/hg38/refgene_anno/default/hg38_exons.bed | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_exons_sort.bed
grep -wf /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_keep.txt /project/shefflab/genomes/hg38/refgene_anno/default/hg38_introns.bed | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt | bedtools sort -i stdin -faidx /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_introns_sort.bed
bedtools coverage -sorted -counts -s -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_exons_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_exons_coverage.bed
bedtools coverage -sorted -counts -s -a /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/hg38_introns_sort.bed -b /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_sort.bam -g /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/chr_order.txt > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_introns_coverage.bed
awk -v OFS=' ' '{chrom[$4] = $1; if($4!=prev4) {chromStart[$4] = $2} strand[$4] = $6; readCount[$4] += $7; exonCount[$4] += 1; geneSizeKB[$4] += (sqrt(($3-$2+0.00000001)^2)/1000); gene[$4] = $4; chromEnd[$4]=$3; prev4=$4} END { for (a in readCount) { print chrom[a], chromStart[a], chromEnd[a], gene[a], (readCount[a]/47.298385)/geneSizeKB[a], strand[a]}}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_exons_coverage.bed | awk '$5>0' | sort -k4 > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_exons_rpkm.bed
awk -v OFS=' ' '{chrom[$4] = $1; if($4!=prev4) {chromStart[$4] = $2} strand[$4] = $6; readCount[$4] += $7; exonCount[$4] += 1; geneSizeKB[$4] += (sqrt(($3-$2+0.00000001)^2)/1000); gene[$4] = $4; chromEnd[$4]=$3; prev4=$4} END { for (a in readCount) { print chrom[a], chromStart[a], chromEnd[a], gene[a], (readCount[a]/47.298385)/geneSizeKB[a], strand[a]}}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_introns_coverage.bed | awk '$5>0' | sort -k4 > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_introns_rpkm.bed
join --nocheck-order -a1 -a2 -j4 /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_introns_rpkm.bed /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_exons_rpkm.bed | awk -v OFS=' ' 'NF==11 {print $7, $8, $9, $1, ($10/$5), $11}' | sort -k1,1 -k2,2n > /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_exon_intron_ratios.bed
awk '{print $5}' /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_exon_intron_ratios.bed | sort -n | awk ' { a[i++]=$1; } END { x=int((i+1)/2); if (x < (i+1)/2) print (a[x-1]+a[x])/2; else print a[x-1]; }'
Rscript /scratch/jps3dp/tools/databio//peppro/tools/PEPPRO.R mrna -i /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_exon_intron_ratios.bed --annotate
pigz -f -p 12 -f /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/QC_hg38/K562_RNA-seq_70_exon_intron_ratios.bed
samtools index /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam
/scratch/jps3dp/tools/databio//peppro/tools/bamSitesToWig.py -i /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_plus.bam -c /project/shefflab/genomes/hg38/fasta/default/hg38.chrom.sizes -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/signal_hg38/K562_RNA-seq_70_plus_exact_body_0-mer.bw -w /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/signal_hg38/K562_RNA-seq_70_plus_smooth_body_0-mer.bw -p 8 --variable-step --tail-edge --scale 47298385.0
samtools index /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam
/scratch/jps3dp/tools/databio//peppro/tools/bamSitesToWig.py -i /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/aligned_hg38/K562_RNA-seq_70_minus.bam -c /project/shefflab/genomes/hg38/fasta/default/hg38.chrom.sizes -o /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/signal_hg38/K562_RNA-seq_70_minus_exact_body_0-mer.bw -w /project/shefflab/processed/peppro/paper/6.11.2020/results_pipeline/K562_RNA-seq_70/signal_hg38/K562_RNA-seq_70_minus_smooth_body_0-mer.bw -p 8 --variable-step --tail-edge --scale 47298385.0
|
<gh_stars>1-10
import React, {PureComponent, MouseEvent} from "react";
import {Workspace} from "../../types";
import {SelectionContext} from "./SelectionContext";
import CollectionMenuItem from "./CollectionMenuItem";
import EditWorkspace from "./Workspace/EditWorkspace";
import CreateCollection from "./Collection/CreateCollection";
import {IconButton, ListItem, ListItemSecondaryAction} from "@material-ui/core";
import CreateNewFolder from "@material-ui/icons/CreateNewFolder";
import EditIcon from "@material-ui/icons/Edit";
import {ExpandLess, ExpandMore} from "@material-ui/icons";
import ListSubheader from "@material-ui/core/ListSubheader";
import {ReactComponent as WorkspaceImg} from "../../images/icons/workspace.svg";
import Icon from "../ui/Icon";
export type WorkspaceMenuItemProps = {} & Workspace;
type State = {
expanded: boolean,
editing: boolean,
addCollection: boolean,
}
/**
 * Sidebar menu entry for a single workspace.
 *
 * Renders the workspace name with action buttons (add collection, edit) and,
 * when expanded, the list of its collections as CollectionMenuItem children.
 * Selection state is shared through SelectionContext.
 *
 * Fixes versus the previous revision:
 *  - The expand/collapse chevron was inverted (it showed ExpandLess while
 *    collapsed); it now follows the conventional MUI pattern:
 *    expanded -> ExpandLess ("click to collapse"), collapsed -> ExpandMore.
 *  - Removed the duplicate onClick handler on the ExpandLess icon; the
 *    enclosing IconButton already handles the click, so the nested handler
 *    only caused the same toggle to be wired twice.
 */
export default class WorkspaceMenuItem extends PureComponent<WorkspaceMenuItemProps, State> {
    static contextType = SelectionContext;
    context: React.ContextType<typeof SelectionContext>;

    state: State = {
        expanded: true,
        editing: false,
        addCollection: false,
    };

    // Toggle the collection list; pass force = true to always end up expanded
    // (used when the workspace row itself is clicked).
    expandWorkspace = async (force = false): Promise<void> => {
        this.setState((prevState: State) => ({
            expanded: !prevState.expanded || force,
        }));
    }

    // Select (or multi-select when already selected) this workspace and make
    // sure its collections are visible.
    onClick = (e: MouseEvent): void => {
        this.context.selectWorkspace(this.props.id, this.context.selectedWorkspace === this.props.id);
        this.expandWorkspace(true);
    }

    // Chevron click only toggles expansion; stopPropagation keeps it from
    // also selecting the workspace via the row's onClick.
    onExpandClick = (e: MouseEvent) => {
        e.stopPropagation();
        this.expandWorkspace();
    }

    edit = (e: MouseEvent): void => {
        e.stopPropagation();
        this.setState({editing: true});
    }

    closeEdit = () => {
        this.setState({editing: false});
    }

    addCollection = (e: MouseEvent): void => {
        e.stopPropagation();
        this.setState({addCollection: true});
    }

    closeCollection = () => {
        this.setState({addCollection: false});
    }

    render() {
        const {
            id,
            name,
            capabilities,
            collections,
        } = this.props;
        const {editing, expanded, addCollection} = this.state;

        const selected = this.context.selectedWorkspace === id;

        return <>
            <ListSubheader
                disableGutters={true}
                className={'workspace-item'}
            >
                <ul>
                    <ListItem
                        onClick={this.onClick}
                        selected={selected}
                        button
                    >
                        <Icon
                            component={WorkspaceImg}
                        />
                        {name}
                        <ListItemSecondaryAction>
                            {capabilities.canEdit && <IconButton
                                title={'Add collection in this workspace'}
                                onClick={this.addCollection}
                                className={'c-action'}
                                aria-label="add-child">
                                <CreateNewFolder/>
                            </IconButton>}
                            {capabilities.canEdit && <IconButton
                                title={'Edit this workspace'}
                                onClick={this.edit}
                                className={'c-action'}
                                aria-label="edit">
                                <EditIcon/>
                            </IconButton>}
                            {collections.length > 0 ? <IconButton
                                onClick={this.onExpandClick}
                                aria-label="expand-toggle">
                                {expanded ? <ExpandLess/> : <ExpandMore/>}
                            </IconButton> : ''}
                        </ListItemSecondaryAction>
                    </ListItem>
                </ul>
            </ListSubheader>
            {editing && <EditWorkspace
                id={this.props.id}
                onClose={this.closeEdit}
            />}
            {addCollection && <CreateCollection
                workspaceId={this.props['@id']}
                onClose={this.closeCollection}
            />}
            {expanded && collections.map(c => <CollectionMenuItem
                {...c}
                key={c.id}
                absolutePath={c.id}
                level={0}
            />)}
        </>
    }
}
|
/*=============================================================================
Copyright (c) 2003 <NAME>
Copyright (c) 2004 <NAME>
Use, modification and distribution is subject to the Boost Software
License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(FUSION_ALGORITHM_FOLD_HPP)
#define FUSION_ALGORITHM_FOLD_HPP
#include <boost/spirit/fusion/algorithm/detail/fold.ipp>
namespace boost { namespace fusion
{
    namespace meta
    {
        // Metafunction: the result type of left-folding the binary
        // metafunction/functor F over the elements of Sequence, starting
        // from State. Delegates to detail::static_fold, which walks the
        // sequence's [begin, end) iterator range at compile time.
        template <typename Sequence, typename State, typename F>
        struct fold
        {
            typedef typename
                detail::static_fold<
                    typename meta::begin<Sequence>::type
                  , typename meta::end<Sequence>::type
                  , State
                  , F
                >::type
            type;
        };
    }
    namespace function
    {
        // Runtime fold function object. The const/non-const overloads below
        // mirror each other; the is_same<begin, end> tag dispatch lets
        // detail::fold terminate the recursion for empty ranges.
        struct fold
        {
            // Result-of protocol: apply<Seq, State, F>::type is the fold's
            // return type (reuses meta::fold above).
            template <typename Sequence, typename State, typename F>
            struct apply : meta::fold<Sequence, State, F> {};

            // Overload for const sequences.
            template <typename Sequence, typename State, typename F>
            inline typename apply<Sequence const, State, F>::type
            operator()(Sequence const& seq, State const& state, F const& f) const
            {
                return detail::fold(
                    fusion::begin(seq)
                  , fusion::end(seq)
                  , state
                  , f
                    // true_/false_ tag: is the sequence empty?
                  , is_same<
                        BOOST_DEDUCED_TYPENAME meta::begin<Sequence const>::type
                      , BOOST_DEDUCED_TYPENAME meta::end<Sequence const>::type>()
                );
            }

            // Overload for mutable sequences.
            template <typename Sequence, typename State, typename F>
            inline typename apply<Sequence, State, F>::type
            operator()(Sequence& seq, State const& state, F const& f) const
            {
                return detail::fold(
                    fusion::begin(seq)
                  , fusion::end(seq)
                  , state
                  , f
                  , is_same<
                        BOOST_DEDUCED_TYPENAME meta::begin<Sequence>::type
                      , BOOST_DEDUCED_TYPENAME meta::end<Sequence>::type>()
                );
            }
        };
    }
    // Global instance so callers can write fusion::fold(seq, state, f).
    function::fold const fold = function::fold();
}}
#endif
|
<gh_stars>0
///////////////////////////////////////////////////////////////////////////////
// Name: src/generic/activityindicator.cpp
// Purpose: Generic wxActivityIndicator implementation.
// Author: <NAME>
// Created: 2015-03-06
// Copyright: (c) 2015 <NAME> <<EMAIL>>
// Licence: wxWindows licence
///////////////////////////////////////////////////////////////////////////////
// ============================================================================
// declarations
// ============================================================================
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
// for compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#if wxUSE_ACTIVITYINDICATOR && !defined(__WXGTK3__)
#include "wx/activityindicator.h"
#ifndef WX_PRECOMP
#include "wx/dcclient.h"
#include "wx/timer.h"
#endif // WX_PRECOMP
#include "wx/graphics.h"
#include "wx/scopedptr.h"
// ----------------------------------------------------------------------------
// constants
// ----------------------------------------------------------------------------
// For now the appearance is fixed, we could make these constants customizable
// later if really needed.
namespace
{
// Total number of "running" dots.
static const int NUM_DOTS = 8;
// Delay between the consecutive updates in milliseconds.
static const int FRAME_DELAY = 150;
} // anonymous namespace
// ----------------------------------------------------------------------------
// wxActivityIndicatorImpl: class containing the real implementation.
// ----------------------------------------------------------------------------
class wxActivityIndicatorImpl
{
public:
    // Attach the indicator logic to the given window: bind the paint
    // handler and start at frame 0. Animation itself only begins once
    // Start() is called. Note the member-initializer order matches the
    // declaration order (m_timer before m_win).
    explicit wxActivityIndicatorImpl(wxWindow* win)
        : m_timer(this),
          m_win(win)
    {
        m_frame = 0;
        win->Bind(wxEVT_PAINT, &wxActivityIndicatorImpl::OnPaint, this);
    }
void Start()
{
// Avoid restarting the timer if it's already running, this could
// result in jumps in appearance of the next frame while calling
// Start() is not supposed to have any effect at all in this case.
if ( m_timer.IsRunning() )
return;
m_timer.Start(FRAME_DELAY);
}
    // Stop the animation timer.
    void Stop()
    {
        // Unlike above, it's not a problem to call Stop() even if we're not
        // running.
        m_timer.Stop();
    }
bool IsRunning() const
{
return m_timer.IsRunning();
}
// This one is only called by AdvanceTimer.
void Advance()
{
if ( ++m_frame == NUM_DOTS )
m_frame = 0;
m_win->Refresh();
}
private:
class AdvanceTimer : public wxTimer
{
public:
explicit AdvanceTimer(wxActivityIndicatorImpl* owner)
: m_owner(owner)
{
}
virtual void Notify() wxOVERRIDE
{
m_owner->Advance();
}
private:
wxActivityIndicatorImpl* const m_owner;
wxDECLARE_NO_COPY_CLASS(AdvanceTimer);
};
void OnPaint(wxPaintEvent& WXUNUSED(event))
{
wxPaintDC pdc(m_win);
wxScopedPtr<wxGraphicsContext> const
gc(wxGraphicsRenderer::GetDefaultRenderer()->CreateContext(pdc));
const wxSize size = m_win->GetClientSize();
// Centre everything.
gc->Translate(size.x/2., size.y/2.);
// Radius of 1/10th allows to have reasonably sized dots with a bit of
// separation between them and so subjectively looks a bit nicer than
// perhaps more natural 1/8th.
static const double RADIUS_FACTOR = 10;
const double r = wxMin(size.x, size.y) / RADIUS_FACTOR;
// The initial dot touches the top border.
wxGraphicsPath path = gc->CreatePath();
path.AddCircle(0, -(RADIUS_FACTOR / 2. - 1.)*r, r);
// Subsequent dots are rotated by this angle with respect to the
// previous one.
const double angle = wxDegToRad(360. / NUM_DOTS);
// And the animation effect is achieved just by starting to draw from
// the next position every time.
gc->Rotate(m_frame*angle);
const bool isEnabled = m_win->IsThisEnabled();
for ( int n = 0; n < NUM_DOTS; n++ )
{
// Draw all dots uniformly grey when the window is disabled,
// otherwise each subsequent dot is slightly more opaque.
const int opacityIndex = isEnabled ? n + 1 : 2;
// wxALPHA_OPAQUE+1 is used just because it is divisible by the
// default NUM_DOTS value, and we need -1 because of this to keep
// it in 0..wxALPHA_OPAQUE range.
const int opacity = opacityIndex*(wxALPHA_OPAQUE + 1)/NUM_DOTS - 1;
gc->SetBrush(wxBrush(wxColour(0, 0, 0, opacity)));
gc->FillPath(path);
gc->Rotate(angle);
}
}
AdvanceTimer m_timer;
wxWindow* const m_win;
int m_frame;
wxDECLARE_NO_COPY_CLASS(wxActivityIndicatorImpl);
};
// ============================================================================
// implementation
// ============================================================================
#ifndef wxHAS_NATIVE_ACTIVITYINDICATOR
// When there is no native implementation, the generic class *is*
// wxActivityIndicator, so its RTTI information is provided here.
wxIMPLEMENT_DYNAMIC_CLASS(wxActivityIndicator, wxControl);
#endif
// Creates the indicator window and its implementation object.
// Returns false if the underlying window could not be created, in which
// case m_impl stays null and Start()/Stop() must not be called.
bool
wxActivityIndicatorGeneric::Create(wxWindow* parent,
                                   wxWindowID winid,
                                   const wxPoint& pos,
                                   const wxSize& size,
                                   long style,
                                   const wxString& name)
{
    // Notice that we skip wxControl version, we don't need the validator
    // support that it adds.
    if ( !wxWindow::Create(parent, winid, pos, size, style, name) )
        return false;

    m_impl = new wxActivityIndicatorImpl(this);

    return true;
}
// Frees the implementation object; the window itself is destroyed by the
// base class machinery.
wxActivityIndicatorGeneric::~wxActivityIndicatorGeneric()
{
    delete m_impl;
}
// Starts the spinner animation. Asserts (and returns) when called before
// a successful Create().
void wxActivityIndicatorGeneric::Start()
{
    wxCHECK_RET( m_impl, wxS("Must be created first") );

    m_impl->Start();
}
// Stops the spinner animation. Asserts (and returns) when called before
// a successful Create(). Stopping an already-stopped indicator is fine.
void wxActivityIndicatorGeneric::Stop()
{
    wxCHECK_RET( m_impl, wxS("Must be created first") );

    m_impl->Stop();
}
// Returns whether the spinner animation is currently active. Unlike
// Start()/Stop() this is safe to call before Create(): an indicator
// without an implementation is simply reported as not running.
bool wxActivityIndicatorGeneric::IsRunning() const
{
    if ( !m_impl )
        return false;

    return m_impl->IsRunning();
}
// Computes the preferred client size: a square whose side (in DIPs)
// depends on the current window variant, converted to physical pixels.
wxSize wxActivityIndicatorGeneric::DoGetBestClientSize() const
{
    int side = 0;
    switch ( GetWindowVariant() )
    {
        case wxWINDOW_VARIANT_MINI:
            side = 12;
            break;

        case wxWINDOW_VARIANT_SMALL:
            side = 16;
            break;

        case wxWINDOW_VARIANT_LARGE:
            side = 32;
            break;

        case wxWINDOW_VARIANT_MAX:
            // Not a real variant: complain, then fall back to the normal size.
            wxFAIL_MSG(wxS("Invalid window variant"));
            wxFALLTHROUGH;

        case wxWINDOW_VARIANT_NORMAL:
            side = 24;
            break;
    }

    wxASSERT_MSG( side, wxS("Unknown window variant") );

    return FromDIP(wxSize(side, side));
}
#endif // wxUSE_ACTIVITYINDICATOR && !__WXGTK3__
|
<gh_stars>1-10
require 'spec_extensions'
require '_utils'
require 'pod/command/lib/docstats'
|
#!/bin/bash
set -e

##############
# VARIABLES #
#############
# Names of the Kubernetes secrets/configmaps that this script creates.
MON_SECRET_NAME=rook-ceph-mon
OPERATOR_CREDS=rook-ceph-operator-creds
CSI_RBD_NODE_SECRET_NAME=rook-csi-rbd-node
CSI_RBD_PROVISIONER_SECRET_NAME=rook-csi-rbd-provisioner
CSI_CEPHFS_NODE_SECRET_NAME=rook-csi-cephfs-node
CSI_CEPHFS_PROVISIONER_SECRET_NAME=rook-csi-cephfs-provisioner
# Key names used inside the mon secret.
MON_SECRET_CLUSTER_NAME_KEYNAME=cluster-name
MON_SECRET_FSID_KEYNAME=fsid
MON_SECRET_ADMIN_KEYRING_KEYNAME=admin-secret
MON_SECRET_MON_KEYRING_KEYNAME=mon-secret
MON_ENDPOINT_CONFIGMAP_NAME=rook-ceph-mon-endpoints
# External cluster identity; NAMESPACE must be supplied by the caller
# (validated in checkEnvVars below).
ROOK_EXTERNAL_CLUSTER_NAME=$NAMESPACE
ROOK_EXTERNAL_MAX_MON_ID=2
ROOK_EXTERNAL_MAPPING={}
ROOK_EXTERNAL_MONITOR_SECRET=mon-secret
# Placeholder default; when it keeps this value, per-user CSI credentials
# become mandatory (see checkEnvVars).
: "${ROOK_EXTERNAL_ADMIN_SECRET:=admin-secret}"
# requireEnvVar VAR_NAME
# Helper: print an error and exit 1 if the environment variable whose
# *name* is given as $1 is unset or empty. Uses bash indirect expansion
# (${!1}) to read the variable's value from its name.
function requireEnvVar() {
    if [ -z "${!1}" ]; then
        echo "Please populate the environment variable $1"
        exit 1
    fi
}

# Validate every environment variable this script needs. Exits with
# status 1 on the first missing variable. The CSI/user credentials are
# only required when ROOK_EXTERNAL_ADMIN_SECRET still holds its
# "admin-secret" placeholder (i.e. no external admin keyring was given).
function checkEnvVars() {
    requireEnvVar NAMESPACE
    requireEnvVar ROOK_EXTERNAL_FSID
    requireEnvVar ROOK_EXTERNAL_CEPH_MON_DATA

    if [[ "$ROOK_EXTERNAL_ADMIN_SECRET" == "admin-secret" ]]; then
        requireEnvVar ROOK_EXTERNAL_USER_SECRET
        requireEnvVar ROOK_EXTERNAL_USERNAME
        requireEnvVar CSI_RBD_NODE_SECRET_SECRET
        requireEnvVar CSI_RBD_PROVISIONER_SECRET
        requireEnvVar CSI_CEPHFS_NODE_SECRET
        requireEnvVar CSI_CEPHFS_PROVISIONER_SECRET
    fi
}
# Create the rook-ceph-mon secret holding the external cluster's name,
# fsid and the admin/mon keyrings. Fails (via kubectl) if it already exists.
function importSecret() {
    kubectl -n "$NAMESPACE" \
        create \
        secret \
        generic \
        "$MON_SECRET_NAME" \
        --from-literal="$MON_SECRET_CLUSTER_NAME_KEYNAME"="$ROOK_EXTERNAL_CLUSTER_NAME" \
        --from-literal="$MON_SECRET_FSID_KEYNAME"="$ROOK_EXTERNAL_FSID" \
        --from-literal="$MON_SECRET_ADMIN_KEYRING_KEYNAME"="$ROOK_EXTERNAL_ADMIN_SECRET" \
        --from-literal="$MON_SECRET_MON_KEYRING_KEYNAME"="$ROOK_EXTERNAL_MONITOR_SECRET"
}
# Create the mon-endpoints configmap describing the external monitors
# (endpoint data, mon mapping and the highest mon id).
function importConfigMap() {
    kubectl -n "$NAMESPACE" \
        create \
        configmap \
        "$MON_ENDPOINT_CONFIGMAP_NAME" \
        --from-literal=data="$ROOK_EXTERNAL_CEPH_MON_DATA" \
        --from-literal=mapping="$ROOK_EXTERNAL_MAPPING" \
        --from-literal=maxMonId="$ROOK_EXTERNAL_MAX_MON_ID"
}
# Create the operator credentials secret used by the Rook operator's
# cluster health checker.
function importCheckerSecret() {
    kubectl -n "$NAMESPACE" \
        create \
        secret \
        generic \
        "$OPERATOR_CREDS" \
        --from-literal=userID="$ROOK_EXTERNAL_USERNAME" \
        --from-literal=userKey="$ROOK_EXTERNAL_USER_SECRET"
}
# Create the CSI RBD node-plugin credentials secret.
function importCsiRBDNodeSecret() {
    kubectl -n "$NAMESPACE" \
        create \
        secret \
        generic \
        "$CSI_RBD_NODE_SECRET_NAME" \
        --from-literal=userID=csi-rbd-node \
        --from-literal=userKey="$CSI_RBD_NODE_SECRET_SECRET"
}
# Create the CSI RBD provisioner credentials secret.
function importCsiRBDProvisionerSecret() {
    kubectl -n "$NAMESPACE" \
        create \
        secret \
        generic \
        "$CSI_RBD_PROVISIONER_SECRET_NAME" \
        --from-literal=userID=csi-rbd-provisioner \
        --from-literal=userKey="$CSI_RBD_PROVISIONER_SECRET"
}
# Create the CSI CephFS node-plugin credentials secret.
# Note: CephFS secrets use adminID/adminKey keys (RBD ones use userID/userKey).
function importCsiCephFSNodeSecret() {
    kubectl -n "$NAMESPACE" \
        create \
        secret \
        generic \
        "$CSI_CEPHFS_NODE_SECRET_NAME" \
        --from-literal=adminID=csi-cephfs-node \
        --from-literal=adminKey="$CSI_CEPHFS_NODE_SECRET"
}
# Create the CSI CephFS provisioner credentials secret.
function importCsiCephFSProvisionerSecret() {
    kubectl -n "$NAMESPACE" \
        create \
        secret \
        generic \
        "$CSI_CEPHFS_PROVISIONER_SECRET_NAME" \
        --from-literal=adminID=csi-cephfs-provisioner \
        --from-literal=adminKey="$CSI_CEPHFS_PROVISIONER_SECRET"
}
########
# MAIN #
########
# Validate the environment first (exits on failure thanks to `exit 1` in
# checkEnvVars and `set -e`), then create every secret/configmap that Rook
# needs to consume the external Ceph cluster.
checkEnvVars
importSecret
importConfigMap
importCheckerSecret
importCsiRBDNodeSecret
importCsiRBDProvisionerSecret
importCsiCephFSNodeSecret
importCsiCephFSProvisionerSecret
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bincrafters import build_template_default
import platform
import copy
if __name__ == "__main__":
    builder = build_template_default.get_builder()
    items = []
    for item in builder.items:
        # Skip mingw cross-builds: Windows + gcc + x86 items are dropped
        # entirely (they are never appended to the filtered list below).
        if not (platform.system() == "Windows" and item.settings["compiler"] == "gcc" and
                item.settings["arch"] == "x86"):
            # Copy so the original item's build_requires stays untouched.
            new_build_requires = copy.copy(item.build_requires)
            if platform.system() == "Windows" and item.settings["compiler"] == "gcc":
                # add msys2 and mingw as a build requirement for mingw builds
                new_build_requires["*"] = new_build_requires.get("*", []) + \
                    ["mingw_installer/1.0@conan/stable",
                     "msys2_installer/latest@bincrafters/stable"]
                items.append([item.settings, item.options, item.env_vars,
                              new_build_requires, item.reference])
            elif platform.system() == "Windows" and item.settings["compiler"] != "gcc":
                # add cygwin build requirement
                new_build_requires["*"] = new_build_requires.get("*", []) + \
                    ["cygwin_installer/2.9.0@bincrafters/stable"]
                items.append([item.settings, item.options, item.env_vars,
                              new_build_requires, item.reference])
            else:
                # Non-Windows platforms: keep the build item unchanged.
                items.append(item)
    builder.items = items
    builder.run()
|
// Copyright 2017 <NAME>. All rights reserved.
// Use of this source code is governed by a MIT license.
package main
import (
"flag"
"log"
"os"
"path"
"regexp"
)
// cmdGet describes the "get" subcommand: it downloads the packages listed
// in the given gover.yaml configuration file.
var cmdGet = &Command{
	UsageLine: "get [--exclude REGEXP] <gover.yaml>",
	Short:     "download packages",
	Long:      `Get downloads packages specified in configuration file.`,
	Run:       runGet,
	Flag:      cmdGetFlags(),
}

// exclude holds the value of the --exclude flag (a regexp source string).
var exclude string
// cmdGetFlags builds the flag set for the "get" command.
// The FlagSet is returned by value (dereferenced); the --exclude flag
// writes into the package-level `exclude` variable, not into the FlagSet.
func cmdGetFlags() flag.FlagSet {
	f := flag.NewFlagSet("get", flag.ContinueOnError)
	f.StringVar(&exclude, "exclude", "", "regexp for excluding some deps")
	return *f
}
// runGet implements the "get" command: it loads the configuration file
// given as args[0], determines which of its dependencies are not already
// provided by the configuration hierarchy, optionally filters them with
// the --exclude regexp, downloads the remaining packages and records them
// in the top-level configuration. Any failure aborts via log.Fatal*.
func runGet(cmd *Command, configs *configsHierarchy, args []string) {
	if len(args) == 0 {
		log.Fatal("missing config file")
	}
	localCfg := new(config)
	if err := localCfg.Load(args[0]); err != nil {
		log.Fatalf("cannot load configuration file '%s': %v", args[0], err)
	}
	// Collect the dependencies the hierarchy does not already contain.
	var missing []*dependency
	for _, dep := range localCfg.Deps {
		if !configs.Contains(dep) {
			missing = append(missing, dep)
		}
	}
	// Drop dependencies whose package path matches the --exclude regexp.
	if exclude != "" {
		reg, err := regexp.Compile(exclude)
		if err != nil {
			log.Fatal("incorrect 'exclude' parameter")
		}
		// Filter in place: reuse the backing array of `missing`.
		kept := missing[:0]
		for _, dep := range missing {
			if !reg.MatchString(dep.Package) {
				kept = append(kept, dep)
			}
		}
		missing = kept
	}
	if len(missing) == 0 {
		// Fixed typo in the user-facing message ("everithing").
		log.Println("everything is up to date")
		return
	}
	top := configs.Top()
	if err := top.Update(localCfg.Name, missing); err != nil {
		log.Fatal(err)
	}
	log.Println(top.file)
	if err := download(path.Dir(top.file), missing); err != nil {
		log.Fatalf("cannot download packages: %v", err)
	}
	if err := top.Save(); err != nil {
		log.Fatalf("cannot save config: %v", err)
	}
}
// download fetches every dependency in deps into root/src/<package>.
// If a repository already exists at the destination it is reset to the
// requested version (with a warning); otherwise a fresh checkout is
// created. The first error aborts the loop and is returned.
func download(root string, deps []*dependency) error {
	var err error
	for _, dep := range deps {
		dst := path.Join(root, "src", dep.Package)
		vcs := getVcsByUrl(dep.Url)
		if vcs.exists(dst) {
			log.Printf("warning: unmanaged repository '%s'. reset version\n", dst)
			err = vcs.checkout(dst, dep.Version)
		} else {
			log.Printf("create new repository '%s'\n", dst)
			// Ensure the parent directory exists before creating the checkout.
			if err = os.MkdirAll(path.Dir(dst), 0755); err == nil {
				err = vcs.create(dst, dep.Url, dep.Version)
			}
		}
		if err != nil {
			return err
		}
	}
	return nil
}
|
<gh_stars>0
#pragma once
#include <functional>
#include <string>
#include <vector>
#include <typed-geometry/tg-lean.hh>
#include <glow/common/log.hh>
#include <glow/common/property.hh>
#include <glow/common/shared.hh>
#include <glow/fwd.hh>
#include <glow-extras/camera/SmoothedCamera.hh>
#include <glow-extras/camera/controllers/LookAroundController.hh>
#include <glow-extras/camera/controllers/TargetOrbitController.hh>
#include <glow-extras/camera/controllers/WASDController.hh>
#include <glow-extras/input/InputState.hh>
#include <glow-extras/pipeline/RenderCallback.hh>
#include <glow-extras/pipeline/fwd.hh>
#include <glow-extras/timing/CpuTimer.hh>
#include <glow-extras/timing/GpuTimer.hh>
struct GLFWwindow;
struct CTwBar;
typedef struct CTwBar TwBar; // structure CTwBar is not exposed.
namespace glow
{
namespace debugging
{
GLOW_SHARED(class, DebugRenderer);
GLOW_SHARED(class, ProfilingOverlay);
}
namespace glfw
{
class GlfwContext;
/// Controls how the hardware cursor is presented while the app runs.
enum class CursorMode
{
    /// normal behavior
    Normal,
    /// normal behavior but hardware cursor is hidden
    Hidden,
    /// virtual unrestricted cursor, real cursor is hidden and locked to center
    Disabled,
};
/**
* @brief The GlfwApp can be used to efficiently build small sample applications based on glfw
*
* Derive your own class from GlfwApp and override the functions you need:
* - init(...): initialize and allocate all your resources and objects
* - update(...): called with a constant rate (default 60 Hz, configurable) before rendering
* - render(...): called as fast as possible (affected by vsync)
* - onResize(...): called when window is resized
* - onClose(...): called when app is closed
* - input: see onKey/onChar/onMouseXYZ/onFileDrop (return true if you handled the event, if base returned true, you
* should probably return as well)
 * Be sure to call the base implementation unless you know what you are doing!
*
* Additional important functions:
* - setUpdateRate(...): set the update rate
* - window(): get the GLFW window
* - tweakbar(): get the AntTweakBar instance
* - setWindowWidth/Height(...): set initial window size before run(...)
*
* Render Pipeline:
* - setUsePipeline(true): Enable the default pipeline
* - Override all of the RenderCallback methods you need
* For a reference implementation of a GlfwApp using the Pipeline, see glow-samples/wip/rendering-pipeline
*
* Notes:
* - if you use primitive/occlusion queries, use setQueryStats(false);
* - overwrite onResetView if you want a different default view
*
* Defaults:
* - A GenericCamera with input handling on LMB/RMB/WASD/... (setUseDefaultXYZ to configure)
*
* Usage:
* int main(int argc, char *argv[])
* {
* MyGlfwApp app;
* return app.run(argc, argv); // automatically sets up GLOW and GLFW and everything
* }
*/
/// Base class for GLFW/GLOW sample applications; see the usage notes in
/// the comment block above for the intended override points.
class GlfwApp : virtual public pipeline::RenderCallback
{
public:
    /// Which (if any) GUI backend this app instance drives.
    enum class Gui
    {
        None,
        AntTweakBar,
        ImGui
    };

    GlfwApp(Gui gui = Gui::None) : mGui(gui) {}

private:
    std::string mTitle = "GLFW/GLOW Application"; ///< window title
    double mUpdateRate = 60;                      ///< rate at which update(...) is called
    int mMaxFrameSkip = 4;                        ///< maximum number of update(...) steps that are performed without rendering
    double mScheduledUpdateInterval = 0;          ///< number of seconds between calls to scheduledUpdate(...). 0 means never

    GLFWwindow* mWindow = nullptr;                ///< current GLFW window
    input::InputState mInputState;                ///< Input state

    int mWindowWidth = 1280;                      ///< window width, only set before run!
    int mWindowHeight = 720;                      ///< window height, only set before run!

    bool mDumpTimingsOnShutdown = true;           ///< if true, dumps AION timings on shutdown
    CursorMode mCursorMode = CursorMode::Normal;  ///< cursor mode
    bool mVSync = true;                           ///< if true, enables vsync
    int mSwapInterval = 1;                        ///< divisor of monitor frequency
    double mOutputStatsInterval = 5.0;            ///< number of seconds between stats output (0.0 for never)
    bool mPrimitiveQueryStats = false;            ///< if true, queries stats (vertices, fragments, ...)
    bool mWarnOnFrameskip = true;                 ///< if true, outputs a warning on frameskips

    bool mUseDefaultCamera = true;                ///< if true, uses default camera
    bool mUseDefaultCameraHandling = true;        ///< if true, implements default cam handling
    bool mUseDefaultCameraHandlingLeft = true;    ///< if true, activates left mouse button handling
    bool mUseDefaultCameraHandlingRight = true;   ///< if true, activates right mouse button handling
    bool mUsePipeline = false;                    ///< if true, uses rendering pipeline (requires default cam)
    bool mUsePipelineConfigGui = false;           ///< if true, enables the pipeline scene configuration gui (only if pipeline is used, requires ImGui)
    bool mCacheWindowSize = false;                ///< if true, saves window size and position on close, and restores on next launch

    double mCurrentTime = 0.0;                    ///< current frame time (starts with 0)
    double mCurrentRenderDeltaTime = 0.0;         ///< current delta time for the render(dt) call

    float mLastGpuTimeMs = 0.f;                   ///< The last measured GPU time, in ms
    float mLastCpuTimeMs = 0.f;                   ///< The last measured CPU (render dispatch) time, in ms

    double mDoubleClickTime = 0.35;               ///< max number of seconds for multi clicks
    int mClickCount = 0;                          ///< current click count
    int mClickButton = -1;                        ///< last clicked button
    tg::pos2 mClickPos;                           ///< last clicked position
    timing::CpuTimer mClickTimer;                 ///< click timing

    bool mMinimized = false;                      ///< is true while the application is minimized
    bool mStartInvisible = false;                 ///< if true, the window is created hidden

    GLFWwindow* mSecondaryContext = nullptr;      ///< shared OpenGL context for multithreaded loading / OpenGL object creation
    bool mCreateSecondaryContext = false;         ///< if true, creates a secondary, shared OpenGL context

    bool mIsCleaned = false;                      ///< guards against double clean-up

    // extra features
private:
    Gui mGui = Gui::None;
    bool mDrawGui = true;
    bool mGuiAfterRender = false; ///< if true, onGui() is performed after render() instead of before

    // Default graphics
private:
    /// Default Camera
    camera::SharedSmoothedCamera mCamera;
    /// Default Camera handling controllers
    camera::WASDController mWASDController;
    camera::LookAroundController mLookAroundController;
    camera::TargetOrbitController mTargetOrbitController;
    /// Default pipeline
    pipeline::SharedRenderPipeline mPipeline;
    pipeline::SharedRenderScene mPipelineScene;
    pipeline::SharedStageCamera mPipelineCamera;

    SharedPrimitiveQuery mPrimitiveQuery; ///< nr of primitives per frame
    SharedOcclusionQuery mOcclusionQuery; ///< nr of pixels per frame
    timing::SharedGpuTimer mGpuTimer;

    debugging::SharedProfilingOverlay mProfilingOverlay;
    bool mProfilingOverlayVisible = false;

public:
    GLOW_PROPERTY(StartInvisible);
    GLOW_PROPERTY(UpdateRate);
    GLOW_PROPERTY(MaxFrameSkip);
    GLOW_PROPERTY(ScheduledUpdateInterval);
    GLOW_GETTER(Title);
    GLOW_PROPERTY(WindowWidth);
    GLOW_PROPERTY(WindowHeight);
    tg::isize2 getWindowSize() const { return {mWindowWidth, mWindowHeight}; }
    GLOW_PROPERTY(DumpTimingsOnShutdown);
    GLOW_PROPERTY(VSync);
    GLOW_PROPERTY(SwapInterval);
    GLOW_PROPERTY(OutputStatsInterval);
    GLOW_PROPERTY(PrimitiveQueryStats);
    GLOW_PROPERTY(WarnOnFrameskip);
    GLOW_PROPERTY(CreateSecondaryContext);
    GLOW_PROPERTY(DoubleClickTime);

    void setCursorMode(CursorMode newMode);
    CursorMode getCursorMode() const { return mCursorMode; }

    float getCurrentTime() const { return float(mCurrentTime); }
    double getCurrentTimeD() const { return mCurrentTime; }
    float getCurrentDeltaTime() const { return float(mCurrentRenderDeltaTime); }
    double getCurrentDeltaTimeD() const { return mCurrentRenderDeltaTime; }

    float getLastGpuTimeMs() const { return mLastGpuTimeMs; }
    float getLastCpuTimeMs() const { return mLastCpuTimeMs; }

    GLOW_GETTER(Camera);
    GLOW_PROPERTY(UsePipeline);
    GLOW_PROPERTY(UsePipelineConfigGui);
    GLOW_GETTER(Pipeline);
    GLOW_GETTER(PipelineScene);
    GLOW_GETTER(PipelineCamera);
    GLOW_PROPERTY(Gui);
    GLOW_PROPERTY(DrawGui);
    GLOW_PROPERTY(UseDefaultCamera);
    GLOW_PROPERTY(UseDefaultCameraHandling);
    GLOW_PROPERTY(UseDefaultCameraHandlingLeft);
    GLOW_PROPERTY(UseDefaultCameraHandlingRight);
    GLOW_PROPERTY(CacheWindowSize);

    void setTitle(std::string const& title);

    GLFWwindow* window() const { return mWindow; }
    GLFWwindow* secondaryContext() const { return mSecondaryContext; }
    input::InputState const& input() const { return mInputState; }
    camera::WASDController& getWASDController() { return mWASDController; }
    camera::LookAroundController& getLookAroundController() { return mLookAroundController; }
    camera::TargetOrbitController& getTargetOrbitController() { return mTargetOrbitController; }

public:
    /// sets the current clipboard content
    void setClipboardString(std::string const& s) const;
    /// gets the current clipboard content
    std::string getClipboardString() const;

    // NOTE: the naming has changed, pressed means "down this frame, but not down last frame"
    // the deprecated isXPressed forwardings here just remain for backwards compatibility
    [[deprecated("use isMouseButtonDown instead")]] bool isMouseButtonPressed(int button) const { return mInputState.isMouseButtonDown(button); }
    bool isMouseButtonDown(int button) const { return mInputState.isMouseButtonDown(button); }
    [[deprecated("use isKeyDown instead")]] bool isKeyPressed(int key) const { return mInputState.isKeyDown(key); }
    bool isKeyDown(int key) const { return mInputState.isKeyDown(key); }
    tg::pos2 getMousePosition() const { return tg::pos2(mInputState.getMousePosition()); }

    /// Returns true iff the app should be closed
    /// Defaults to glfw window closed
    bool shouldClose() const;
    /// Returns true iff the app is in fullscreen mode
    bool isFullscreen() const;
    /// Returns true iff the app is minimized
    bool isMinimized() const;

    /// Requests glfw to close the window
    void requestClose();

    // Tweakbar helper
#ifdef GLOW_EXTRAS_HAS_ANTTWEAKBAR
private:
    TwBar* mTweakbar = nullptr; ///< main tweakbar window

public:
    /// Returns the main tweakbar; errors (and returns nullptr) when the app
    /// was not constructed with Gui::AntTweakBar.
    TwBar* tweakbar() const
    {
        if (mGui != Gui::AntTweakBar)
        {
            glow::error() << "AntTweakBar is not active. Did you forget to call GlfwApp(Gui::AntTweakBar) in the ctor?";
            TG_ASSERT(0 && "AntTweakBar not active");
            return nullptr;
        }
        return mTweakbar;
    }

    /// create read-write tweakbar entries
    void tweak(int& value, std::string const& name, std::string const& options = "");
    void tweak(bool& value, std::string const& name, std::string const& options = "");
    void tweak(float& value, std::string const& name, std::string const& options = "");
    void tweak(double& value, std::string const& name, std::string const& options = "");
    void tweak(glm::quat& value, std::string const& name, std::string const& options = "");
    void tweak(std::string& value, std::string const& name, std::string const& options = "");
    void tweak_dir(glm::vec3& value, std::string const& name, std::string const& options = "");
    void tweak_color(glm::vec3& value, std::string const& name, std::string const& options = "");
    void tweak_color(glm::vec4& value, std::string const& name, std::string const& options = "");
    void tweak_color(uint32_t& value, std::string const& name, std::string const& options = "");
    // NOTE: function will currently be leaked!
    void tweak_button(std::string const& name, std::function<void()> const& fun, std::string const& options = "");
#endif

#ifdef GLOW_EXTRAS_HAS_IMGUI // imgui support
private:
    bool mEnableDebugOverlay = true; ///< if true, enables debugging::DebugOverlay (requires ImGui)

public:
    GLOW_PROPERTY(EnableDebugOverlay);
#endif

    /// if called, performs onGui after render()
    void performGuiAfterRender() { mGuiAfterRender = true; }
    /// if called, performs onGui before render()
    void performGuiBeforeRender() { mGuiAfterRender = false; }

protected:
    /// Called once GLOW is initialized. Allocate your resources and init your logic here.
    virtual void init();
    /// Called at the start of a frame
    virtual void onFrameStart() {}
    /// Called with at 1 / getUpdateRate() Hz (timestep)
    virtual void update(float elapsedSeconds);
    /// Called as fast as possible for rendering (elapsedSeconds is not fixed here)
    virtual void render(float elapsedSeconds);
    /// Called once every getScheduledUpdateInterval() seconds (0 means never), for tasks that only have to happen rarely
    virtual void scheduledUpdate();
    /// Called once in the beginning after (init) and whenever the window size changed
    virtual void onResize(int w, int h);
    /// Called at the end, when application is closed
    virtual void onClose();
    /// Called when the gui is handled (currently only for imgui)
    virtual void onGui();

    /// Called whenever a key is pressed
    virtual bool onKey(int key, int scancode, int action, int mods);
    /// Called whenever a character is entered (unicode)
    virtual bool onChar(unsigned int codepoint, int mods);
    /// Called whenever the mouse position changes
    virtual bool onMousePosition(double x, double y);
    /// Called whenever a mouse button is pressed (clickCount is 1 for single clicks, 2 for double, 3+ for multi)
    virtual bool onMouseButton(double x, double y, int button, int action, int mods, int clickCount);
    /// Called whenever the mouse is scrolled
    virtual bool onMouseScroll(double sx, double sy);
    /// Called whenever the mouse enters the window
    virtual bool onMouseEnter();
    /// Called whenever the mouse leaves the window
    virtual bool onMouseExit();
    /// Called whenever the window gets focus
    virtual bool onFocusGain();
    /// Called whenever the window loses focus
    virtual bool onFocusLost();
    /// Called whenever files are dropped (drag'n'drop), parameters is file paths
    virtual bool onFileDrop(std::vector<std::string> const& files);

    /// Called when view should be reset
    virtual void onResetView();

    /// Blocking call that executes the complete main loop
    virtual void mainLoop();

private:
    glfw::GlfwContext* mInternalContext = nullptr;
    bool mInternalContextOwner = false;

private:
    void internalInit();
    void internalCleanUp();
    void internalOnMouseButton(double x, double y, int button, int action, int mods);
    void internalOnGui();
    void internalPerformGui();

protected:
    /// performs glfw polling
    void updateInput();
    /// should be called before rendering
    void beginRender();
    /// should be called after rendering
    /// calls swapBuffers
    void endRender();

    /// Blocks the thread for a given number of seconds
    void sleepSeconds(double seconds) const;

public:
    /// Initializes GLFW and GLOW, and runs until window is closed
    void run();
    /// Initializes GLFW and GLOW but does not create a window
    void startHeadless();

    /// Toggle fullscreen mode
    void toggleFullscreen();
    /// Toggle profiling overlay
    void toggleProfilingOverlay();
#ifdef GLOW_EXTRAS_HAS_IMGUI // imgui support
    /// Toggle OpenGL debug overlay
    void toggleDebugOverlay();
#endif

public:
    virtual ~GlfwApp(); // virtual dtor
};
}
}
|
/**
 * Checks whether $item is present in $arr.
 *
 * Uses in_array()'s default loose (==) comparison, matching the original
 * behavior exactly.
 *
 * @param array $arr  Haystack to search.
 * @param mixed $item Needle to look for.
 * @return bool True when the item is found, false otherwise.
 */
function searchItem($arr, $item) {
    // in_array() already returns a boolean, so the if/return-true/return-false
    // wrapper was redundant.
    return in_array($item, $arr);
}
// -- API REQ --
// -- API REQ --
const url = "https://labs.inforcedata.com.br/desafio-frontend/banners.json"

/**
 * Fetches the banner list from the API and appends the first two images
 * to the carousel container (#carousel-content).
 * The second image additionally gets the "slide-2" modifier class, as in
 * the original markup.
 * @param {string} url - Endpoint returning a JSON array of {imagem} objects.
 */
async function BannerReq(url) {
    const response = await fetch(url);
    const photos = await response.json();
    console.log(photos)

    const container = document.getElementById('carousel-content');
    // The duplicated per-image creation code (and a leftover "entrou"
    // debug log) was folded into this loop.
    const slideClasses = ["slide", "slide slide-2"];
    slideClasses.forEach((className, i) => {
        const imgTag = document.createElement('img');
        container.appendChild(imgTag);
        imgTag.setAttribute("src", `${photos[i].imagem}`);
        imgTag.setAttribute("class", className);
    });
}
BannerReq(url)
// -- CARROUSEL --
var carousel = document.querySelector('.carousel');
var carouselContent = document.querySelector('.carousel-content');
var slides = document.querySelectorAll('.slide');
var arrayOfSlides = Array.prototype.slice.call(slides);
var carouselDisplaying;
var screenSize;
setScreenSize();
var lengthOfSlide;
function addClone() {
var lastSlide = carouselContent.lastElementChild.cloneNode(true);
lastSlide.style.left = (-lengthOfSlide) + "px";
carouselContent.insertBefore(lastSlide, carouselContent.firstChild);
}
// addClone();
// Removes the first slide (the off-screen wrap-around clone) from the carousel.
function removeClone() {
    var firstSlide = carouselContent.firstElementChild;
    firstSlide.parentNode.removeChild(firstSlide);
}
// Lays out all slides left-to-right starting at x = 0, then appends a
// fresh off-screen clone at the left edge via addClone().
function moveSlidesRight() {
    var slides = document.querySelectorAll('.slide');
    var slidesArray = Array.prototype.slice.call(slides);
    var width = 0;
    slidesArray.forEach(function(el, i){
        el.style.left = width + "px";
        width += lengthOfSlide;
    });
    addClone();
}
moveSlidesRight();
// Lays out all slides shifted one position to the left: iterating from the
// last DOM slide backwards, each gets a progressively smaller left offset;
// the first DOM slide ends up at -lengthOfSlide (off-screen left).
function moveSlidesLeft() {
    var slides = document.querySelectorAll('.slide');
    var slidesArray = Array.prototype.slice.call(slides);
    slidesArray = slidesArray.reverse();
    var maxWidth = (slidesArray.length - 1) * lengthOfSlide;
    slidesArray.forEach(function(el, i){
        // Decrement first, then position: the last slide gets the largest offset.
        maxWidth -= lengthOfSlide;
        el.style.left = maxWidth + "px";
    });
}
window.addEventListener('resize', setScreenSize);

// Chooses how many slides are shown based on the window width
// (>= 500px: 3, >= 300px: 2, otherwise 1), then re-computes the layout.
function setScreenSize() {
    if ( window.innerWidth >= 500 ) {
        carouselDisplaying = 3;
    } else if ( window.innerWidth >= 300 ) {
        carouselDisplaying = 2;
    } else {
        carouselDisplaying = 1;
    }
    getScreenSize();
}
// Despite the name, this re-computes each slide's width and left offset so
// that `carouselDisplaying` slides fit the carousel width, with the first
// slide positioned off-screen to the left (the wrap-around clone slot).
function getScreenSize() {
    var slides = document.querySelectorAll('.slide');
    var slidesArray = Array.prototype.slice.call(slides);
    lengthOfSlide = ( carousel.offsetWidth / carouselDisplaying );
    var initialWidth = -lengthOfSlide;
    slidesArray.forEach(function(el) {
        el.style.width = lengthOfSlide + "px";
        el.style.left = initialWidth + "px";
        initialWidth += lengthOfSlide;
    });
}
var rightNav = document.querySelector('.nav-right');
// NOTE(review): the right arrow triggers moveLeft() and the left arrow
// (further below) triggers moveRight(); the function names describe the
// direction the slides travel, not the arrow clicked — confirm intended.
rightNav.addEventListener('click', moveLeft);

var moving = true; // false while a slide transition is in flight

// Shifts the carousel one slide to the right: the last slide is moved to
// the front, the old clone is dropped and the layout is rebuilt; further
// navigation is blocked until the CSS transition ends (activateAgain).
function moveRight() {
    if ( moving ) {
        moving = false;
        var lastSlide = carouselContent.lastElementChild;
        lastSlide.parentNode.removeChild(lastSlide);
        carouselContent.insertBefore(lastSlide, carouselContent.firstChild);
        removeClone();
        var firstSlide = carouselContent.firstElementChild;
        firstSlide.addEventListener('transitionend', activateAgain);
        moveSlidesRight();
    }
}
// Re-enables navigation once the first slide's CSS transition finishes.
function activateAgain() {
    var firstSlide = carouselContent.firstElementChild;
    moving = true;
    firstSlide.removeEventListener('transitionend', activateAgain);
}
var leftNav = document.querySelector('.nav-left');
leftNav.addEventListener('click', moveRight);
// var moveLeftAgain = true;

// Shifts the carousel one slide to the left; the wrap-around bookkeeping
// is completed in replaceToEnd() once the CSS transition ends.
function moveLeft() {
    if ( moving ) {
        moving = false;
        removeClone();
        var firstSlide = carouselContent.firstElementChild;
        firstSlide.addEventListener('transitionend', replaceToEnd);
        moveSlidesLeft();
    }
}
// Finishes a left shift after the transition: the first slide is
// re-appended at the end and positioned at the right edge, a fresh clone
// is added on the left, and navigation is re-enabled.
function replaceToEnd() {
    var firstSlide = carouselContent.firstElementChild;
    firstSlide.parentNode.removeChild(firstSlide);
    carouselContent.appendChild(firstSlide);
    firstSlide.style.left = ( (arrayOfSlides.length -1) * lengthOfSlide) + "px";
    addClone();
    moving = true;
    firstSlide.removeEventListener('transitionend', replaceToEnd);
}
carouselContent.addEventListener('mousedown', seeMovement);

var initialX;   // mouse x position at drag start
var initialPos; // each slide's left offset (px) at drag start

// Starts a drag gesture: remembers the start position and slide offsets,
// then wires up the temporary mousemove/mouseup handlers.
function seeMovement(e) {
    initialX = e.clientX;
    getInitialPos();
    carouselContent.addEventListener('mousemove', slightMove);
    document.addEventListener('mouseup', moveBasedOnMouse);
}
// While dragging: offsets the slides by (initialX - currentX) pixels, but
// only while the drag distance stays below a quarter of a slide width.
function slightMove(e) {
    if ( moving ) {
        var movingX = e.clientX;
        var difference = initialX - movingX;
        if ( Math.abs(difference) < (lengthOfSlide/4) ) {
            slightMoveSlides(difference);
        }
    }
}
// Records the current left offset (in px) of every slide so a drag can be
// rendered relative to these starting positions.
function getInitialPos() {
    var slides = document.querySelectorAll('.slide');
    var slidesArray = Array.prototype.slice.call(slides);
    initialPos = [];
    slidesArray.forEach(function(el){
        // slice(0, -2) strips the trailing "px"; parseInt already yields an
        // integer (or NaN), so the previous Math.floor(...) wrapper was
        // redundant and has been removed.
        initialPos.push( parseInt( el.style.left.slice(0, -2) ) );
    });
}
// Offsets every slide by newX pixels relative to the positions recorded
// in initialPos at drag start.
function slightMoveSlides(newX) {
    var slides = document.querySelectorAll('.slide');
    var slidesArray = Array.prototype.slice.call(slides);
    slidesArray.forEach(function(el, i){
        var oldLeft = initialPos[i];
        el.style.left = (oldLeft + newX) + "px";
    });
}
// Ends a drag gesture: a net leftward mouse movement (initialX > finalX)
// triggers moveRight(), a rightward one triggers moveLeft(); the
// temporary drag handlers are then removed.
function moveBasedOnMouse(e) {
    var finalX = e.clientX;
    if ( initialX - finalX > 0) {
        moveRight();
    } else if ( initialX - finalX < 0 ) {
        moveLeft();
    }
    document.removeEventListener('mouseup', moveBasedOnMouse);
    carouselContent.removeEventListener('mousemove', slightMove);
}
|
#!/usr/bin/env bash
# ==========================================================================
# Setup script for installing project dependencies.
# NOTE: Run this script while in the project root directory.
#       It will not run correctly when run from another directory.
# ==========================================================================

# Abort on the first failing command.
set -e

# Install the Ruby gems this project depends on, provided RubyGems is
# available on the PATH; otherwise point the user at the installer page.
build() {
    if command -v gem >/dev/null 2>&1; then
        echo 'Installing project dependencies...'
        gem install jekyll
        gem install pygments.rb
    else
        echo 'Ruby Gems is not installed...'
        echo 'Go here to install: https://rubygems.org/pages/download'
    fi
}

build
|
// Dialog box that asks for confirmation before closing when unsaved
// changes exist.
class myDialogBox {
public:
    // Other class members and methods

    // Override the OnClose method to handle the close event.
    // Returns true when the dialog box may close, false to veto the close.
    bool OnClose(void) {
        if (unsavedChangesExist()) {
            // Prompt the user with a confirmation dialog
            if (showConfirmationDialog()) {
                // User confirmed the close action
                return true;
            } else {
                // User canceled the close action
                return false;
            }
        } else {
            // No unsaved changes, allow the dialog box to close
            return true;
        }
    }

private:
    bool unsavedChangesExist() {
        // Check if there are unsaved changes in the dialog box
        // Return true if unsaved changes exist, false otherwise
        //
        // TODO: implement the real check. The original stub had no return
        // statement, which is undefined behavior for a non-void function;
        // default to "no unsaved changes" until implemented.
        return false;
    }

    bool showConfirmationDialog() {
        // Display a confirmation dialog to the user
        // Return true if the user confirms, false if the user cancels
        //
        // TODO: implement the real dialog. Returning true keeps the
        // "allow close" flow well-defined until then (same caveat as above:
        // the original stub fell off the end of a bool function).
        return true;
    }
};
// String constants used as action/mutation type identifiers; the names
// mirror their values so dispatch sites stay greppable.
export const SET_USER = 'SET_USER'
export const SET_ERROR = 'SET_ERROR'
|
<filename>Application/src/br/com/matheuslino/pacman/LabyrinthMap.java<gh_stars>1-10
package br.com.matheuslino.pacman;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import br.com.matheuslino.pacman.game.LabyrinthObjectVisitor;
/**
 * Model of a single PacMan level. Holds every maze element (PacMan, walls,
 * checkpoints, ghosts), mirrors the walkable cells in an adjacency matrix
 * ("grafo") so ghost paths can be computed with Dijkstra, and drives the
 * per-tick game update: movement, collision handling and scoring.
 */
public class LabyrinthMap {
    // Attributes
    private PacMan pacman;
    private Level level; // Difficulty level
    private List<Checkpoint> checkpoints = new ArrayList<>();
    private List<Wall> walls = new ArrayList<>();
    private List<Ghost> ghosts = new ArrayList<>();
    private int width; // Map width
    private int height; // Map height
    private int[][] grafo; // Adjacency matrix containing graph's elements
    private int timerCheckSpecial; // "Timer" - Special checkpoint duration
    private int pos[] = new int[3]; // Stores the graph's vertices corresponding to evasive,
                                    // pursuer and PacMan, respectively
    /**
     * Constructor (protected visibility). Randomly promotes roughly 10% of
     * the checkpoints to "special" ones, then wires up all map state. The
     * graph is sized for every checkpoint plus the PacMan and ghost start
     * positions.
     */
    protected LabyrinthMap(PacMan pacman, List<Checkpoint> checkpoints, List<Wall> walls, List<Ghost> ghosts, int width,
            int height, Level level) {
        // Local variable, just to generate a random number
        Random r = new Random();
        // Project decision: 10% of special checkpoints
        for(int i=0; i<Math.ceil(checkpoints.size()*0.1); i++) {
            int n = r.nextInt(checkpoints.size());
            // Ensures positive random numbers
            if(n<0)
                n = n*(-1);
            // Set special checkpoints
            if(!checkpoints.get(n).isSpecial()) {
                checkpoints.get(n).setSpecial(true);
            }else {
                // Already special: retry this slot (i-- undoes the loop step)
                if(i>0) {
                    i--;
                }
            }
        }
        this.pacman = pacman;
        this.checkpoints = checkpoints;
        this.walls = walls;
        this.ghosts = ghosts;
        this.width = width;
        this.height = height;
        this.level = level;
        this.grafo = new int[checkpoints.size()+2][checkpoints.size()+2]; // free positions for movement: checkpoints +
                                                                         // PacMan and Ghost start positions
    }
    // Return Difficulty level
    public Level getLevel() {
        return level;
    }
    // Call the accept method for each map element, passing a visitor as argument
    // (visitor pattern: walls, then checkpoints, then ghosts, then PacMan).
    public void accept(LabyrinthObjectVisitor visitor) {
        for(Wall wall : walls) {
            wall.accept(visitor);
        }
        for(Checkpoint checkpoint : checkpoints) {
            checkpoint.accept(visitor);
        }
        for(Ghost ghost : ghosts) {
            ghost.accept(visitor);
        }
        pacman.accept(visitor);
    }
    public int getWidth() {
        return width;
    }
    public int getHeight() {
        return height;
    }
    /**
     * Manage the ghosts movement. Relies on the ghost list order used by
     * the casts below: 0=random, 1=evasive, 2=jumper, 3=pursuer. The
     * evasive ghost heads (via Dijkstra over {@code grafo}) towards the
     * ghosts' midpoint, the pursuer towards PacMan; random and jumper
     * ghosts move by their own rules.
     */
    private void moveGhosts(List<Wall> walls) {
        // Definitions of local use variables (only)
        Random1 random = (Random1) ghosts.get(0); // Returns the random ghost
        Evasive evasive = (Evasive) ghosts.get(1); // Returns the evasive ghost
        Jumper jumper = (Jumper) ghosts.get(2); // Returns the jumper ghost
        Pursuer pursuer = (Pursuer) ghosts.get(3); // Returns the pursuer ghost
        Direction dir_evasive = null; // Evasive ghost direction
        Direction dir_pursuer = null; // Pursuer ghost direction
        int[] coord_evasive = null; // coord[x][y] - evasive ghost coordinates
        int[] next_evasive = null; // next [x][y] - coordinates of the next vertex
        int[] coord_pursuer = null; // coord[x][y] - pursuer ghost coordinates
        int[] next_pursuer = null; // next [x][y] - coordinates of the next vertex
        int nextElement_e = -1; // Vertex of the next element to be visited (evasive)
        int nextElement_p = -1; // Vertex of the next element to be visited (pursuer)
        // evasive ghost direction - Dijkstra Algorithm
        nextElement_e = Dijkstra.shortWay(grafo,pos[0],pointToVertex(midpoint().getX(), midpoint().getY()));
        next_evasive = (nextElement_e >= 0) ? vertexToPoint(nextElement_e) : null;
        coord_evasive = vertexToPoint(pos[0]);
        if(next_evasive != null && coord_evasive != null) {
            if(coord_evasive[1] == next_evasive[1] && next_evasive[0] > coord_evasive[0]) { // Checks if it should move right
                dir_evasive = Direction.RIGHT;
            }
            else if(coord_evasive[1] == next_evasive[1] && next_evasive[0] < coord_evasive[0]) { // Checks if it should move left
                dir_evasive = Direction.LEFT;
            }
            else if(coord_evasive[0] == next_evasive[0] && next_evasive[1] < coord_evasive[1]) { // Checks if it should move up
                dir_evasive = Direction.UP;
            }
            else if(coord_evasive[0] == next_evasive[0] && next_evasive[1] > coord_evasive[1]) { // Checks if it should move down
                dir_evasive = Direction.DOWN;
            }
        }
        // Pursuer ghost direction - Dijkstra Algorithm
        nextElement_p = Dijkstra.shortWay(grafo,pos[1],pos[2]);
        next_pursuer = (nextElement_p >=0) ? vertexToPoint(nextElement_p) : null;
        coord_pursuer = vertexToPoint(pos[1]);
        if(next_pursuer != null && coord_pursuer != null) {
            if(coord_pursuer[1] == next_pursuer[1] && next_pursuer[0] > coord_pursuer[0]) { // Checks if it should move right
                dir_pursuer = Direction.RIGHT;
            }
            else if(coord_pursuer[1] == next_pursuer[1] && next_pursuer[0] < coord_pursuer[0]) { // Checks if it should move left
                dir_pursuer = Direction.LEFT;
            }
            else if(coord_pursuer[0] == next_pursuer[0] && next_pursuer[1] < coord_pursuer[1]) { // Checks if it should move up
                dir_pursuer = Direction.UP;
            }
            else if(coord_pursuer[0] == next_pursuer[0] && next_pursuer[1] > coord_pursuer[1]) { // Checks if it should move down
                dir_pursuer = Direction.DOWN;
            }
        }
        // Move ghosts
        random.moveRandom(walls);
        jumper.moveJumper(walls, checkpoints);
        if(dir_evasive != null) evasive.move(dir_evasive, walls);
        if(dir_pursuer != null) pursuer.move(dir_pursuer, walls);
    }
    /**
     * Checks collision between ghosts and PacMan. While a special
     * checkpoint is active ({@code timerCheckSpecial > 0}) PacMan kills
     * the ghost (+100 points); otherwise PacMan loses a life and everyone
     * is reset to their initial coordinates.
     */
    private void ghostCollision(){
        for(Ghost ghost : ghosts) {
            if(pacman.isSameCoordinates(ghost)) { // Checks collision between a ghost and PacMan
                if(timerCheckSpecial>0) { // Ensures PacMan ate a special checkpoint
                    pacman.setScore(pacman.getScore()+100); // Increases 100 points by killing a ghost
                    ghost.changeCoordinates(ghost.getInitialCoordinate().getX(), // Moves dead ghost to origin
                            ghost.getInitialCoordinate().getY());
                }else {
                    pacman.setLife(pacman.getLife()-1); // Decreases the PacMan life
                    pacman.changeCoordinates(pacman.getInitialCoordinate().getX(), // Moves PacMan to origin
                            pacman.getInitialCoordinate().getY());
                    for(Ghost g : ghosts) {
                        g.changeCoordinates(g.getInitialCoordinate().getX(), // Moves ghosts to origin
                                g.getInitialCoordinate().getY());
                    }
                }
                break;
            }
        }
    }
    /**
     * Awards any checkpoint PacMan is standing on: +10 and a 15-tick
     * special timer for special checkpoints, +1 for common ones, and an
     * extra life every 10000 points.
     */
    private void checkpointUpdate() {
        for(Checkpoint checkpoint : checkpoints) { // visits each list element
            if(pacman.isSameCoordinates(checkpoint)) { // Checks if PacMan has earned a checkpoint
                if(!checkpoint.isConquered()) {
                    checkpoint.conquer();
                    if(checkpoint.isSpecial()) {
                        timerCheckSpecial = 15;
                        pacman.setScore(pacman.getScore()+10); // Increases 10 points per special checkpoint
                    }else {
                        pacman.setScore(pacman.getScore()+1); // Increases 1 point per common checkpoint
                    }
                    if((pacman.getScore()%10000)==0) { // Increases 1 life by earning 10000 points
                        pacman.setLife(pacman.getLife()+1);
                    }
                }
            }
        }
    }
    // Checks if the map is done (all checkpoints earned)
    public boolean isDone() {
        for(Checkpoint checkpoint : checkpoints) {
            if(!checkpoint.isConquered()) {
                return false;
            }
        }
        return true;
    }
    /**
     * Adjacency matrix used to create the graph with the route map.
     * Scans the interior of the maze row by row (walls are assumed at the
     * borders), assigns one vertex per free cell, records in {@code pos[]}
     * the vertices currently occupied by the evasive ghost, the pursuer
     * and PacMan, and links each free cell to its free neighbours.
     */
    private void adjacenceMatrice(){
        char[][] aux = new char[getHeight()][getWidth()]; // temporary map
        int cont = -1; // local counter
        for (int i = 0; i < aux.length; i++) {
            for (int j = 0; j < aux[0].length; j++) {
                aux[i][j] = ' '; // Fills matrix with 'empty char'
            }
        }
        for(Wall wall : walls) { // Fills positions with wall
            aux[wall.getY()][wall.getX()] = 'x';
        }
        for (int i = 1; i < aux.length-1; i++) { // Creation algorithm. Walls must be placed at the edges (project decision)
            for (int j = 1; j < aux[0].length-1; j++) {
                if(aux[i][j] != 'x') { // Ensures that reading will be performed in positions without a wall
                    cont++; // Increases adjacency table row (vertex)
                    if(ghosts.get(1).isSameCoordinates(j, i)) { // Checks if it's reading the evasive ghost position
                        pos[0] = cont; // Stores the vertex in pos vector
                    }
                    if(ghosts.get(3).isSameCoordinates(j, i)) { // Checks if it's reading the pursuer ghost position
                        pos[1] = cont; // Stores the vertex in pos vector
                    }
                    if(pacman.isSameCoordinates(j, i)) { // Checks if it's reading the PacMan position
                        pos[2] = cont; // Stores the vertex in pos vector
                    }
                    if(aux[i-1][j]!='x') { // Checks if there is a wall above of read position
                        int x = 0; // Calculates the number of walls between the elements I[i-1][j] and I[i][j]
                        int elements = 0; // Calculates the number of positions between the elements I[i-1][j] and I[i][j]
                        for(int k = i-1; k < i; k++) {
                            for(int l = j; l < aux[0].length-1; l++) {
                                elements += 1;
                                if(aux[k][l] == 'x')
                                    x += 1;
                            }
                        }
                        if(j > 1) {
                            for(int k = i; k <= i; k++) {
                                for(int l = 1; l < j; l++) {
                                    elements += 1;
                                    if(aux[k][l] == 'x')
                                        x += 1;
                                }
                            }
                        }
                        // (elements - x) free cells separate the two vertices in scan order
                        grafo[cont][cont-(elements-x)] = 1;
                    }
                    if(aux[i+1][j]!='x') { // Checks if there is a wall below of read position
                        int x = 0; // Calculates the number of walls between the elements I[i][j] and I[i+1][j]
                        int elements = 0; // Calculates the number of positions between the elements I[i][j] and I[i+1][j]
                        for(int k = i; k <= i; k++) {
                            for(int l = j; l < aux[0].length-1; l++) {
                                elements += 1;
                                if(aux[k][l] == 'x')
                                    x += 1;
                            }
                        }
                        if(j > 1) {
                            for(int k = i+1; k <= i+1; k++) {
                                for(int l = 1; l < j; l++) {
                                    elements += 1;
                                    if(aux[k][l] == 'x')
                                        x += 1;
                                }
                            }
                        }
                        grafo[cont][cont+(elements-x)] = 1;
                    }
                    if(aux[i][j-1]!='x') { // Checks if there is a wall to the left of read position
                        grafo[cont][cont-1] = 1;
                    }
                    if(aux[i][j+1]!='x') { // Checks if there is a wall to the right of read position
                        grafo[cont][cont+1] = 1;
                    }
                }
            }
        }
    }
    /**
     * Returns the point corresponding to a given adjacency matrix vertex.
     * @param v vertex index (scan order over free interior cells)
     * @return {x, y} coordinates, or null when v is out of range
     */
    private int[] vertexToPoint(int v){
        int cont = -1; // vertices counter
        char[][] aux = new char[getHeight()][getWidth()]; // auxiliary map (temporary)
        int[] point = new int[2]; // Stores x and y coordinates to be returned (p[x][y])
        // Aux fill
        for (int i = 0; i < aux.length; i++) {
            for (int j = 0; j < aux[0].length; j++) {
                if(pacman.getX()==j && pacman.getY()==i) {
                    aux[i][j] = 'P';
                }else {
                    aux[i][j] = ' ';
                }
            }
        }
        // Aux fill
        for(Wall wall : walls) {
            aux[wall.getY()][wall.getX()] = 'X';
        }
        // Verification Logic
        for (int i = 1; i < aux.length-1; i++) {
            for (int j = 1; j < aux[0].length-1; j++) {
                if(aux[i][j]!='X') {
                    cont++;
                }
                if(cont == v) {
                    point[0] = j;
                    point[1] = i;
                    return point;
                }
            }
        }
        return null;
    }
    /**
     * Returns the vertex corresponding to a given map point (except
     * borders, according to the defined business rule), or -1 when the
     * point is outside the scanned interior.
     * NOTE(review): when (x, y) is a wall cell this returns the vertex of
     * the last free cell visited before it — confirm callers only pass
     * walkable coordinates.
     */
    private int pointToVertex(int x, int y) {
        int cont =- 1; // vertices counter
        char[][] aux = new char[getHeight()][getWidth()]; // Auxiliary map (temporary)
        // Aux fill
        for (int i = 0; i < aux.length; i++) {
            for (int j = 0; j < aux[0].length; j++) {
                aux[i][j] = ' ';
            }
        }
        // Aux fill
        for(Wall wall : walls) {
            aux[wall.getY()][wall.getX()] = 'X';
        }
        // Verification Logic
        for (int i = 1; i < aux.length-1; i++) {
            for (int j = 1; j < aux[0].length-1; j++) {
                if(aux[i][j]!='X') {
                    cont++;
                }
                if(x==j && y==i) {
                    return cont;
                }
            }
        }
        return -1;
    }
    /**
     * Barycenter of the triangle formed by the random, jumper and pursuer
     * ghosts (the evasive ghost flees towards it). If the exact barycenter
     * sits on a wall, nearby cells are probed (1 then 2 cells away in each
     * axis direction) until a free cell is found.
     */
    private Coordinate midpoint(){
        Coordinate aux = new Coordinate(0, 0); // Barycenter of the triangle (biggest distance between the ghosts)
        // Formula: G[(xa+xb+xc)/3][(ya+yb+yc)/3]
        Wall aux2 = new Wall(0, 0);
        aux.changeCoordinates(Math.round((ghosts.get(0).getX()+ghosts.get(2).getX()+ghosts.get(3).getX())/3),
                Math.round((ghosts.get(0).getY()+ghosts.get(2).getY()+ghosts.get(3).getY())/3));
        aux2.changeCoordinates(aux.getX(), aux.getY());; // Checks if there is a wall at that coordinate
        if(!walls.contains(aux2)) {
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        aux2.changeCoordinates(aux.getX()+1,aux.getY()); // Checks if there is a wall to the right
        if(!walls.contains(aux2)){
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        aux2.changeCoordinates(aux.getX()-1,aux.getY()); // Checks if there is a wall to the left
        if(!walls.contains(aux2)){
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        aux2.changeCoordinates(aux.getX(),aux.getY()+1); // Checks if there is a wall below
        if(!walls.contains(aux2)){
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        aux2.changeCoordinates(aux.getX(),aux.getY()-1); // Checks if there is a wall above
        if(!walls.contains(aux2)){
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        aux2.changeCoordinates(aux.getX()+2,aux.getY()); // Checks if there is a wall to the right (2 units forward)
        if(!walls.contains(aux2)){
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        aux2.changeCoordinates(aux.getX()-2,aux.getY()); // Checks if there is a wall to the left (2 units backward)
        if(!walls.contains(aux2)){
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        aux2.changeCoordinates(aux.getX(),aux.getY()+2); // Checks if there is a wall below (2 units forward)
        if(!walls.contains(aux2)){
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        aux2.changeCoordinates(aux.getX(),aux.getY()-2); // Checks if there is a wall above (2 units backward)
        if(!walls.contains(aux2)){
            aux.changeCoordinates(aux2.getX(), aux2.getY());
            return aux;
        }
        return aux;
    }
    /**
     * Method responsible for updating and controlling the map: rebuilds the
     * graph, ticks down the special-checkpoint timer, then moves PacMan and
     * the ghosts and resolves collisions and checkpoints. A null direction
     * skips the whole tick.
     */
    public void updateMap(Direction direction) {
        if(direction != null) {
            // Assemble adjacency matrix (Graph)
            adjacenceMatrice();
            // Special CheckPoint duration control
            if(timerCheckSpecial>0) {
                timerCheckSpecial--;
            }
            pacman.move(direction, walls, ghosts); // Moves the PacMan
            moveGhosts(walls); // Moves the ghosts
            ghostCollision(); // Checks collision between ghosts and PacMan
            checkpointUpdate(); // Achieve checkpoints
        }
    }
}
function factorial(n) {
    // Iterative product 1 * 2 * ... * n; any input below 2 yields 1,
    // matching the recursive base case of the original implementation.
    let result = 1;
    for (let k = 2; k <= n; k++) {
        result *= k;
    }
    return result;
}
<reponame>v55448330/cattle<filename>code/iaas/model/src/main/java/io/cattle/platform/core/dao/impl/LoadBalancerDaoImpl.java
package io.cattle.platform.core.dao.impl;
import static io.cattle.platform.core.model.tables.CertificateTable.CERTIFICATE;
import static io.cattle.platform.core.model.tables.LoadBalancerCertificateMapTable.LOAD_BALANCER_CERTIFICATE_MAP;
import static io.cattle.platform.core.model.tables.LoadBalancerConfigListenerMapTable.LOAD_BALANCER_CONFIG_LISTENER_MAP;
import static io.cattle.platform.core.model.tables.LoadBalancerListenerTable.LOAD_BALANCER_LISTENER;
import static io.cattle.platform.core.model.tables.LoadBalancerTable.LOAD_BALANCER;
import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.core.dao.GenericMapDao;
import io.cattle.platform.core.dao.GenericResourceDao;
import io.cattle.platform.core.dao.LoadBalancerDao;
import io.cattle.platform.core.model.Certificate;
import io.cattle.platform.core.model.LoadBalancer;
import io.cattle.platform.core.model.LoadBalancerConfig;
import io.cattle.platform.core.model.LoadBalancerConfigListenerMap;
import io.cattle.platform.core.model.LoadBalancerListener;
import io.cattle.platform.core.model.tables.records.LoadBalancerListenerRecord;
import io.cattle.platform.core.model.tables.records.LoadBalancerRecord;
import io.cattle.platform.db.jooq.dao.impl.AbstractJooqDao;
import java.util.List;
import javax.inject.Inject;
/**
 * jOOQ-backed implementation of {@link LoadBalancerDao}: helpers for
 * querying and linking load balancers, their listeners and certificates.
 */
public class LoadBalancerDaoImpl extends AbstractJooqDao implements LoadBalancerDao {
    @Inject
    GenericMapDao mapDao;
    @Inject
    GenericResourceDao resourceDao;
    /**
     * Points the load balancer at a global load balancer and sets its
     * weight; returns true when exactly one row was updated.
     */
    @Override
    public boolean updateLoadBalancer(long lbId, Long glbId, Long weight) {
        int i = create()
                .update(LOAD_BALANCER)
                .set(LOAD_BALANCER.GLOBAL_LOAD_BALANCER_ID, glbId)
                .set(LOAD_BALANCER.WEIGHT, weight)
                .where(LOAD_BALANCER.ID.eq(lbId))
                .execute();
        return i == 1;
    }
    /** All non-removed load balancers referencing the given config. */
    @Override
    public List<? extends LoadBalancer> listByConfigId(long configId) {
        return create()
                .selectFrom(LOAD_BALANCER)
                .where(
                        LOAD_BALANCER.LOAD_BALANCER_CONFIG_ID.eq(configId)
                                .and(LOAD_BALANCER.REMOVED.isNull())).fetchInto(LoadBalancerRecord.class);
    }
    /**
     * Non-removed listeners mapped to the config through a map row in
     * ACTIVATING or ACTIVE state.
     */
    @Override
    public List<? extends LoadBalancerListener> listActiveListenersForConfig(long configId) {
        return create()
                .select(LOAD_BALANCER_LISTENER.fields())
                .from(LOAD_BALANCER_LISTENER)
                .join(LOAD_BALANCER_CONFIG_LISTENER_MAP)
                .on(LOAD_BALANCER_CONFIG_LISTENER_MAP.LOAD_BALANCER_LISTENER_ID.eq(LOAD_BALANCER_LISTENER.ID)
                        .and(LOAD_BALANCER_CONFIG_LISTENER_MAP.LOAD_BALANCER_CONFIG_ID.eq(configId))
                        .and(LOAD_BALANCER_CONFIG_LISTENER_MAP.STATE.in(CommonStatesConstants.ACTIVATING,
                                CommonStatesConstants.ACTIVE)))
                .where(LOAD_BALANCER_LISTENER.REMOVED.isNull())
                .fetchInto(LoadBalancerListenerRecord.class);
    }
    /**
     * The load balancer with the given id when it is non-removed and in
     * ACTIVATING/ACTIVE state; null otherwise.
     */
    @Override
    public LoadBalancer getActiveLoadBalancerById(long lbId) {
        List<? extends LoadBalancer> lbs = create()
                .select(LOAD_BALANCER.fields())
                .from(LOAD_BALANCER)
                .where(LOAD_BALANCER.REMOVED.isNull()
                        .and(LOAD_BALANCER.ID.eq(lbId))
                        .and(
                                LOAD_BALANCER.STATE.in(CommonStatesConstants.ACTIVATING,
                                        CommonStatesConstants.ACTIVE)))
                .fetchInto(LoadBalancerRecord.class);
        if (lbs.isEmpty()) {
            return null;
        }
        return lbs.get(0);
    }
    /**
     * Creates (and schedules) a config-listener map entry unless a
     * non-removed one already exists — i.e. the operation is idempotent at
     * the mapping level.
     */
    @Override
    public void addListenerToConfig(LoadBalancerConfig config, long listenerId) {
        LoadBalancerConfigListenerMap lbConfigListenerMap = mapDao.findNonRemoved(LoadBalancerConfigListenerMap.class,
                LoadBalancerConfig.class, config.getId(),
                LoadBalancerListener.class, listenerId);
        if (lbConfigListenerMap == null) {
            resourceDao.createAndSchedule(LoadBalancerConfigListenerMap.class,
                    LOAD_BALANCER_CONFIG_LISTENER_MAP.LOAD_BALANCER_CONFIG_ID,
                    config.getId(), LOAD_BALANCER_CONFIG_LISTENER_MAP.LOAD_BALANCER_LISTENER_ID, listenerId,
                    LOAD_BALANCER_CONFIG_LISTENER_MAP.ACCOUNT_ID, config.getAccountId());
        }
    }
    /**
     * Non-removed certificates attached to the load balancer through a map
     * row in ACTIVATING/ACTIVE/REQUESTED state.
     */
    @Override
    public List<Certificate> getLoadBalancerCertificates(LoadBalancer lb) {
        return create()
                .select(CERTIFICATE.fields())
                .from(CERTIFICATE)
                .join(LOAD_BALANCER_CERTIFICATE_MAP)
                .on(CERTIFICATE.ID.eq(LOAD_BALANCER_CERTIFICATE_MAP.CERTIFICATE_ID)
                        .and(LOAD_BALANCER_CERTIFICATE_MAP.LOAD_BALANCER_ID.eq(lb.getId()))
                        .and(LOAD_BALANCER_CERTIFICATE_MAP.STATE.in(CommonStatesConstants.ACTIVATING,
                                CommonStatesConstants.ACTIVE, CommonStatesConstants.REQUESTED)))
                .where(CERTIFICATE.REMOVED.isNull())
                .fetchInto(Certificate.class);
    }
    /**
     * Same as {@link #getLoadBalancerCertificates} but restricted to the
     * mapping flagged IS_DEFAULT; null when the LB has no default cert.
     */
    @Override
    public Certificate getLoadBalancerDefaultCertificate(LoadBalancer lb) {
        List<? extends Certificate> certs = create()
                .select(CERTIFICATE.fields())
                .from(CERTIFICATE)
                .join(LOAD_BALANCER_CERTIFICATE_MAP)
                .on(CERTIFICATE.ID.eq(LOAD_BALANCER_CERTIFICATE_MAP.CERTIFICATE_ID)
                        .and(LOAD_BALANCER_CERTIFICATE_MAP.LOAD_BALANCER_ID.eq(lb.getId()))
                        .and(LOAD_BALANCER_CERTIFICATE_MAP.IS_DEFAULT.eq(true))
                        .and(LOAD_BALANCER_CERTIFICATE_MAP.STATE.in(CommonStatesConstants.ACTIVATING,
                                CommonStatesConstants.ACTIVE, CommonStatesConstants.REQUESTED)))
                .where(CERTIFICATE.REMOVED.isNull())
                .fetchInto(Certificate.class);
        if (certs.isEmpty()) {
            return null;
        }
        return certs.get(0);
    }
}
|
<filename>test/programs/java/Statements/SubClass.java
public class SubClass extends SuperClass {
    // NOTE(review): this file lives under test/programs, so the unused
    // locals below are presumably intentional fixture content for the
    // analyzer under test — confirm before "cleaning them up".
    @Override
    public void method() {
        int startSubMethod;
        int endSubMethod;
    }
}
|
def generate_primes(n):
    """Return all primes strictly less than ``n`` (sieve of Eratosthenes).

    Fixes the original, which allocated ``is_prime`` with length ``n`` but
    iterated ``range(2, n + 1)`` and struck multiples up to ``n`` inclusive,
    raising IndexError for every n >= 2. The documented contract (e.g.
    ``[2, 3, 5, 7]`` for n=10) is primes below n, so the loops now stop at n.

    Args:
        n: Exclusive upper bound; values < 2 yield an empty list.

    Returns:
        Sorted list of primes p with 2 <= p < n.
    """
    if n < 2:
        return []  # no primes below 2; also keeps the sieve allocation sane
    primes = []
    is_prime = [True] * n
    for i in range(2, n):
        if is_prime[i]:
            primes.append(i)
            # Multiples below i*i were already crossed out by smaller primes.
            for j in range(i * i, n, i):
                is_prime[j] = False
    return primes
# Demo: list the primes strictly below n.
n = 10
prime_list = generate_primes(n)
print(prime_list) # Output: [2,3,5,7]
<gh_stars>10-100
/**
* Implementation in progress.
*/
export class Buffer{
    /**
     * Trajectory buffer for actor-critic training. Accumulates
     * observations, actions, rewards-to-go and advantage-like returns
     * across trajectories.
     * @param gamma discount factor for rewards-to-go.
     */
    constructor(gamma = 0.99){
        this.gamma = gamma;
        this.obs = [];
        this.act = [];
        this.ret = [];
        this.rtg = [];
    }
    /**
     * Append one finished trajectory.
     * @param temp_traj array of [observation, reward, action, state-value] steps.
     * @param last_sv bootstrap state-value for the step after the trajectory.
     */
    store(temp_traj, last_sv){
        if(temp_traj.length > 0){
            const observations = temp_traj.map((step) => step[0]);
            const rewards = temp_traj.map((step) => step[1]);
            const actions = temp_traj.map((step) => step[2]);
            const stateValues = temp_traj.map((step) => step[3]);
            this.obs = this.obs.concat(observations);
            // BUGFIX: was `self.gamma`; `self` is not defined in module scope.
            let rtg = this.discounted_rewards(rewards, last_sv, this.gamma);
            // BUGFIX: the original callback used an undefined index (`thirds[i]`)
            // and never returned a value; the intent is rtg minus state value.
            let advantages = Array.from(rtg).map((value, index) => value - stateValues[index]);
            this.ret = this.ret.concat(advantages);
            // BUGFIX: concat() returns a NEW array — the originals discarded it.
            this.rtg = this.rtg.concat(Array.from(rtg));
            this.act = this.act.concat(actions);
        }
    }
    /**
     * @returns [obs, act, ret, rtg] as tensors. (The original used a comma
     * expression, which silently returned only the last tensor.)
     */
    getBatch(){
        return [tf.tensor(this.obs), tf.tensor(this.act), tf.tensor(this.ret), tf.tensor(this.rtg)];
    }
    /**
     * Rewards-to-go: rtg[i] = rews[i] + gamma * rtg[i+1], bootstrapped with
     * last_sv at the final step.
     */
    discounted_rewards(rews, last_sv, gamma){
        let rtg = tf.zerosLike(rews).buffer();
        rtg.set(rews[rews.length-1] + gamma * last_sv, rews.length-1);
        // BUGFIX: start at length-2 (the last slot is already set above) and
        // read the neighbour via buffer.get() — `rtg[i+1]` on a TensorBuffer
        // is undefined and poisoned every earlier entry with NaN.
        for(let i = rews.length-2; i >= 0; i--){
            rtg.set(rews[i] + gamma * rtg.get(i+1), i);
        }
        return rtg.toTensor().dataSync();
    }
    get length(){
        return this.obs.length;
    }
}
export class AC{
    // Actor-critic trainer skeleton — the file header says
    // "Implementation in progress"; constructor and log_summary are stubs.
    constructor(hidden_sizes=[64], ac_lr=0.004, cr_lr=0.015, gamma=0.99, steps_per_epoch=100, ){
    }
    // Stub: intended to record p_loss / entropy / log-prob / return summaries.
    log_summary(writer, step, p_loss, entropy, p_log, ret_batch){
    }
    mlp(x, hidden_layers, output_size, activation= tf.layers.reLU, last_activation=null){
        //multilayer perceptron
        // NOTE(review): x[0] is used as the feature size, so `x` is
        // presumably a shape array here, not a tensor — confirm at call sites.
        let inputt = tf.input({shape: [null, x[0]]});
        x = inputt;
        // NOTE(review): `for...in` yields STRING indices ("0", "1", ...), so
        // `units: l` receives the index rather than the layer width;
        // `for...of hidden_layers` looks like the intent — verify.
        for (let l in hidden_layers){
            x = tf.layers.dense({units: l, activation: activation}).apply(x);
        }
        let output = tf.layers.dense({units: output_size, activation: last_activation}).apply(x);
        return tf.model({inputs: inputt, outputs: output})
    }
    softmax_entropy(values){
        // NOTE(review): `*` coerces tensors to NaN in JS (tensors need .mul()),
        // `axis=-1` is an assignment expression (creates a global `axis` and
        // passes -1), and entropy conventionally carries a minus sign —
        // verify this whole expression before relying on it.
        return tf.sum(values.softmax() * values.logSoftmax(), axis=-1);
    }
}
|
<filename>src/de/erichseifert/gral/graphics/DrawingContext.java
/*
* GRAL: GRAphing Library for Java(R)
*
* (C) Copyright 2009-2012 <NAME> <dev[at]erichseifert.de>,
* <NAME> <michael[at]erichseifert.de>
*
* This file is part of GRAL.
*
* GRAL is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GRAL is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with GRAL. If not, see <http://www.gnu.org/licenses/>.
*/
package de.erichseifert.gral.graphics;
import java.awt.Graphics2D;
/**
* Class that stores an object for drawing and additional context information
* that may be necessary to determine how to draw the object. This includes
* information on drawing quality and the target media (screen, paper, etc.).
*/
/**
 * Couples a {@code Graphics2D} object with the contextual information a
 * renderer needs to decide how to draw: the requested rendering quality
 * and the kind of target medium (screen, paper, etc.).
 */
public class DrawingContext {
	/** Quality modes available for drawing operations. */
	public static enum Quality {
		/** Fast drawing mode. */
		DRAFT,
		/** Standard drawing mode. */
		NORMAL,
		/** High quality drawing mode. */
		QUALITY
	}

	/** Kinds of media a drawing can be rendered to. */
	public static enum Target {
		/** Bitmap drawing target consisting of pixels. */
		BITMAP,
		/** Vector drawing target consisting of lines and curves. */
		VECTOR
	}

	/** Graphics object all drawing is performed with. */
	private final Graphics2D graphics;
	/** Requested rendering quality. */
	private final Quality quality;
	/** Kind of output medium. */
	private final Target target;

	/**
	 * Creates a context rendering to {@code graphics} with
	 * {@link Quality#NORMAL} quality onto a {@link Target#BITMAP} medium.
	 * @param graphics Object for drawing geometry.
	 */
	public DrawingContext(Graphics2D graphics) {
		this(graphics, Quality.NORMAL, Target.BITMAP);
	}

	/**
	 * Creates a fully specified context.
	 * @param graphics Object for drawing geometry.
	 * @param quality Drawing quality.
	 * @param target Target media.
	 */
	public DrawingContext(Graphics2D graphics, Quality quality, Target target) {
		this.graphics = graphics;
		this.quality = quality;
		this.target = target;
	}

	/**
	 * Returns the object for drawing geometry.
	 * @return Graphics object.
	 */
	public Graphics2D getGraphics() {
		return graphics;
	}

	/**
	 * Returns the desired display quality.
	 * @return Display quality mode.
	 */
	public Quality getQuality() {
		return quality;
	}

	/**
	 * Returns the drawing target.
	 * @return Drawing target.
	 */
	public Target getTarget() {
		return target;
	}
}
|
# frozen_string_literal: true
require "spec_helper"
RSpec.describe Dry::ElasticModel::Base do
  # Fixture model declaring one field of every supported scalar type plus a
  # list and a range, so the generated mapping can be verified as a whole.
  class Foo < described_class
    field :text_field, :text
    field :keyword_field, :keyword, index: false
    field :date_field, :date
    field :long_field, :long
    field :double_field, :double
    field :boolean_field, :boolean
    field :ip_field, :ip
    list :list_text_field, :text
    range :range_long_field, :long
  end
  describe "mapping" do
    # Elasticsearch mapping expected from the declarations on Foo above.
    let(:expected_mapping) do
      {
        foo: {
          properties: {
            text_field: {
              type: "text",
              index: "not_analyzed"
            },
            keyword_field: {
              type: "keyword",
              index: false
            },
            date_field: {
              type: "date",
              format: "strict_date_optional_time||epoch_millis"
            },
            long_field: {
              type: "long"
            },
            double_field: {
              type: "double"
            },
            boolean_field: {
              type: "boolean"
            },
            ip_field: {
              type: "ip"
            },
            list_text_field: {
              type: "text"
            },
            range_long_field: {
              type: "long_range"
            }
          }
        }
      }
    end
    it "is valid" do
      expect(Foo.mapping).to eq(expected_mapping)
    end
  end
  describe "as_json" do
    # Round-trip: attributes passed to the constructor come back from
    # as_json, with range fields expanded to all four bound keys.
    it "returns instance of model" do
      model = Foo.new(
        text_field: "foo",
        keyword_field: :test,
        date_field: Date.today,
        long_field: 10,
        double_field: 1.0,
        boolean_field: true,
        ip_field: "127.0.0.1",
        list_text_field: %w(a b c),
        range_long_field: {
          gt: 0,
          lt: 10
        }
      )
      expect(model.as_json).to(
        eq(
          text_field: "foo",
          keyword_field: :test,
          date_field: Date.today,
          long_field: 10,
          double_field: 1.0,
          boolean_field: true,
          ip_field: "127.0.0.1",
          list_text_field: %w(a b c),
          range_long_field: {
            gt: 0,
            gte: nil,
            lt: 10,
            lte: nil
          }
        )
      )
    end
  end
end
|
<reponame>isabella232/aurora
from base_rest_helper import *
class SecurityGroupHelper(BaseRESTHelper):
    """REST helper wrapping the security-group endpoints."""

    def create_security_group(self, parameters=None):
        """
        Arguments:
        - parameters: dict, parameters of sec. group (see Wiki for details).
        Return:
        - dict, parameters of just created group.
        """
        params = {
            'name': '',
            'description': 'REST auto-test', # optional
        }
        # Overlay every non-empty caller-supplied value.
        if parameters is not None:
            params.update({key: value for key, value in parameters.items() if value != ""})
        # No explicit name: invent one that does not collide with existing groups.
        if params['name'] == "":
            taken_names = [group['name'] for group in self.utils.get_list('securitygroups')]
            params['name'] = self.utils.generate_string(4, *taken_names)
        # Launch group creation and verify result (inside of send_request).
        response = self.utils.send_request("POST", 'create_security_group', data=params)
        return json.loads(response.content)['resp']

    def delete_security_group(self, gid):
        """
        Arguments:
        - gid: string - group id.
        Return:
        - bool: True if success.
        """
        self.utils.send_request('POST', 'delete_security_group', data={'id': gid})
        # Success means the group no longer shows up in the listing.
        return not any(group['id'] == gid for group in self.utils.get_list('securitygroups'))

    def show_security_group(self, id):
        """
        Arguments:
        - id: string
        Return:
        - JSON dict with group parameters
        """
        response = self.utils.send_request('GET', 'show_security_group', data={'id': id})
        return json.loads(response.content)['securityGroup']

    def add_rule(self, group_id):
        """Add a fixed TCP rule (ports 1-2, source = the group itself)."""
        rule_spec = {
            'id': group_id,
            'ipProtocol': 'TCP',
            'fromPort': 1, # int
            'toPort': 2, # int
            'sourceGroup': group_id, # one of sec. groups
            # 'cidr': '' # optional. format: 'ip/mask' where ip - from 0.0.0.0 to 255.255.255.255, mask 0 to 32
        }
        response = self.utils.send_request('GET', 'add_rule', data=rule_spec)
        return json.loads(response.content)['resp']['security_group_rule']

    def delete_rule(self, selected_rules):
        """Delete rules; selected_rules can be str or list of str."""
        response = self.utils.send_request('POST', 'delete_rule', data={'selectedRules': selected_rules})
        return json.loads(response.content)
class KeypairHelper(BaseRESTHelper):
    """REST helper wrapping the keypair endpoints."""

    def create_keypair(self, parameters=None):
        """Create a keypair; invents a unique name when none is supplied."""
        params = {'name': ''}
        # Overlay every non-empty caller-supplied value.
        if parameters is not None:
            params.update({key: value for key, value in parameters.items() if value != ""})
        if params['name'] == "":
            taken_names = [pair['name'] for pair in self.utils.get_list('keypairs')]
            params['name'] = self.utils.generate_string(3, *taken_names)
        response = self.utils.send_request('POST', 'create_keypair', data=params)
        return json.loads(response.content)['keypair']

    def delete_keypair(self, pairname):
        """Delete the named keypair; True when it is gone from the listing."""
        # The raw response usually carries nothing useful, so re-list instead.
        self.utils.send_request('POST', 'delete_keypair', data={'keypairName': pairname})
        return not any(pair['name'] == pairname for pair in self.utils.get_list('keypairs'))

    def import_keypair(self, params):
        """
        Arguments:
        - params: dict {'name' '<value>', 'publicKey': '<value>'}
        """
        response = self.utils.send_request('POST', 'import_keypair', data=params)
        return json.loads(response.content)
|
def transform_string(input_string):
    """Capitalize every "django" and upgrade every "http" to "HTTPS".

    Replacements are applied in the original order (django first); the
    explicit count of -1 in the original is the default "replace all".
    """
    return input_string.replace("django", "Django").replace("http", "HTTPS")
#!/bin/bash
# End-to-end smoke test: point DNS at the deployed cluster, fetch `oc`,
# deploy the cakephp-mysql-example template and verify it serves traffic.
set -euox pipefail
source ci/openstack/vars.sh
if [ "${RUN_OPENSTACK_CI:-}" != "true" ]; then
  echo RUN_OPENSTACK_CI is set to false, skipping the openstack end to end test.
  exit
fi
echo SET UP DNS
# Swap in the cluster's own DNS server, keeping the original resolv.conf
# so the EXIT trap below can restore it.
cp /etc/resolv.conf resolv.conf.orig
DNS_IP=$(openstack server show dns-0.$ENV_ID.example.com --format value --column addresses | awk '{print $2}')
grep -v '^nameserver' resolv.conf.orig > resolv.conf.openshift
echo nameserver "$DNS_IP" >> resolv.conf.openshift
sudo cp resolv.conf.openshift /etc/resolv.conf
function restore_dns {
  echo RESTORING DNS
  sudo cp resolv.conf.orig /etc/resolv.conf
}
trap restore_dns EXIT
# Grab the oc client straight off the console node.
mkdir -p bin
scp -o "StrictHostKeyChecking no" -o "UserKnownHostsFile /dev/null" openshift@console.$ENV_ID.example.com:/usr/bin/oc bin/
ls -alh bin
export PATH="$PWD/bin:$PATH"
# NOTE(review): ENV_ID is reassigned here AFTER being used above —
# presumably vars.sh already sets the same value; confirm the two agree.
ENV_ID="openshift-$TRAVIS_BUILD_NUMBER"
oc login --insecure-skip-tls-verify=true https://console.$ENV_ID.example.com:8443 -u test -p password
oc new-project test
oc new-app --template=cakephp-mysql-example
set +x
echo Waiting for the pods to come up
# Poll for up to ~150 minutes of iterations (600 x 15s); success needs both
# deployments reported as deployed, any error/fail in status aborts early.
STATUS=timeout
for i in $(seq 600); do
  if [ "$(oc status -v | grep 'deployment.*deployed' | wc -l)" -eq 2 ]; then
    STATUS=success
    echo Both pods were deployed
    break
  elif [ "$(oc status -v | grep -i 'error\|fail' | wc -l)" -gt 0 ]; then
    STATUS=error
    echo ERROR: The deployment failed
    break
  else
    printf .
    sleep 15
  fi
done
if [ "$STATUS" = timeout ]; then
  echo ERROR: Timed out waiting for the pods
fi
echo 'Output of `oc status -v`:'
oc status -v
echo
echo 'Output of `oc logs bc/cakephp-mysql-example`:'
oc logs bc/cakephp-mysql-example
if [ "$STATUS" != success ]; then
  echo "ERROR: The deployment didn't succeed"
  exit 1
fi
# Fail if the app does not answer with its welcome page.
set -o pipefail
curl "http://cakephp-mysql-example-test.apps.$ENV_ID.example.com" | grep 'Welcome to your CakePHP application on OpenShift'
echo "SUCCESS \o/"
|
<filename>lang/py/pyadmin/13/hello_optparse.py
#! /usr/bin/env python3
# -*-coding:utf-8 -*-
# @Time : 2019/06/19 22:44:36
# @Author : che
# @Email : <EMAIL>
import optparse
def main():
    """Parse --sysadmin/-s (default "BOFH") and greet that admin on stdout."""
    parser = optparse.OptionParser()
    parser.add_option("--sysadmin", "-s", dest="sysadmin", default="BOFH")
    options, _arguments = parser.parse_args()
    print("Hello, %s" % options.sysadmin)
if __name__ == "__main__":
    main()
|
<gh_stars>0
const DrawCard = require('../../../drawcard.js');
class ObaraSand extends DrawCard {
    setupCardAbilities(ability) {
        // While her controller is defending a power challenge, Obara gains
        // a power icon (UI hint) and may be declared kneeling or without
        // the icon.
        const defendingPowerChallenge = () => {
            let challenge = this.game.currentChallenge;
            return challenge &&
                challenge.challengeType === 'power' &&
                challenge.defendingPlayer === this.controller;
        };
        this.persistentEffect({
            condition: defendingPowerChallenge,
            match: this,
            effect: [
                // Add the icon as a UI hint, but Obara can be declared even if
                // the opponent removes that icon somehow.
                ability.effects.addIcon('power'),
                ability.effects.canBeDeclaredWithoutIcon(),
                ability.effects.canBeDeclaredWhileKneeling()
            ]
        });
    }
}
ObaraSand.code = '01108';
module.exports = ObaraSand;
|
#!/usr/bin/env bash
# Runs one euler2 alignment case and diffs its MIR output against the
# expected result; cleans up the work folder only on success.
# FIX: the shebang was `sh`, but the script uses the bash-only `[[ ]]`
# conditional and `>&` redirection, which fail under POSIX sh (e.g. dash).
# Args: $1 = case name, $2 = -r value, $3 = -e value.
timestampfile=$USER-$HOSTNAME-lastrun.timestamp
euler2 align cases/$1.txt -e $3 -r $2 >& $1.out
# The timestamp file's first line names the output folder of the last run.
folder=$(head -n 1 $timestampfile)
if [[ -n "$(diff $folder/3-MIR/$1_mir.csv expected/$1_mir.expected)" ]]; then
    diff $folder/3-MIR/$1_mir.csv expected/$1_mir.expected > $1.dif;
    echo "There is new dif in $1.dif!"
    echo "$1 $2 Failed!"
else
    rm $1.out;
    echo "$1 $2 Passed!"
    rm -rf $USER-$HOSTNAME
    rm $timestampfile
    rm report.csv
fi
|
#!/bin/bash -x
# This script installes gitblit on a system running under systemd.
# The script assumes the server is running as user giblit
# First create a file with the default settings
cat > /tmp/gitblit.defaults << EOF
GITBLIT_PATH=/opt/gitblit
GITBLIT_BASE_FOLDER=/opt/gitblit/data
GITBLIT_HTTP_PORT=0
GITBLIT_HTTPS_PORT=8443
GITBLIT_LOG=/var/log/gitblit.log
EOF
# Create a systemd service file.
# The \$ escapes keep ARGS/GITBLIT_* literal in the unit file so systemd
# (not this script) expands them at service start.
cat > /tmp/gitblit.service << EOF
[Unit]
Description=Gitblit managing, viewing, and serving Git repositories.
After=network.target
[Service]
User=gitblit
Group=gitblit
Environment="ARGS=-server -Xmx1024M -Djava.awt.headless=true -jar"
EnvironmentFile=-/etc/sysconfig/gitblit
WorkingDirectory=/opt/gitblit
ExecStart=/usr/bin/java \$ARGS gitblit.jar --httpsPort \$GITBLIT_HTTPS_PORT --httpPort \$GITBLIT_HTTP_PORT --baseFolder \$GITBLIT_BASE_FOLDER --dailyLogFile
ExecStop=/usr/bin/java \$ARGS gitblit.jar --baseFolder \$GITBLIT_BASE_FOLDER --stop
[Install]
WantedBy=multi-user.target
EOF
# Finally copy the files to the destination and register the systemd unit.
sudo su -c "cp /tmp/gitblit.defaults /etc/sysconfig/gitblit && cp /tmp/gitblit.service /usr/lib/systemd/system/"
sudo su -c "systemctl daemon-reload && systemctl enable gitblit.service && systemctl start gitblit.service"
# Prepare the logfile
sudo su -c "touch /var/log/gitblit.log && chown gitblit:gitblit /var/log/gitblit.log"
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
import subprocess
import shlex, subprocess
def run():
    """Interactive menu for rendering 2D/3D/4D lattice visualizations.

    Repeatedly prompts for the ambient-space dimension and the desired
    action, then launches the matching manim scene from Laticed.py in a
    subprocess. Loops until the user chooses [s].

    Bug fixes vs. original: several ``else`` clauses were attached to the
    *last* ``if`` of a chain instead of the chain itself, so (a) choosing a
    valid option a/b/c in |R^2 still printed the "invalid option" message,
    (b) choosing option 'a' in the |R^3 2-D sub-menu ran the scene and then
    broke out of the main loop, and (c) every valid top-level command also
    printed the "select an option" message. All chains are now elif-based.
    """

    def _render(scene):
        # -pl: preview the rendered animation at low quality (manim flags).
        args = shlex.split('python3.7 -m manim Laticed.py {0} -pl'.format(scene))
        subprocess.call(args)

    while True:
        command = str(input('''
██▓    ▄▄▄     ▄▄▄█████▓▄▄▄█████▓ ██▓ ▄████▄  ▓█████ ▓█████▄
▓██▒   ▒████▄   ▓  ██▒ ▓▒▓  ██▒ ▓▒▓██▒▒██▀ ▀█  ▓█   ▀ ▒██▀ ██▌
▒██░   ▒██  ▀█▄ ▒ ▓██░ ▒░▒ ▓██░ ▒░▒██▒▒▓█    ▄ ▒███   ░██   █▌
▒██░   ░██▄▄▄▄██░ ▓██▓ ░ ░ ▓██▓ ░ ░██░▒▓▓▄ ▄██▒▒▓█  ▄ ░▓█▄   ▌
░██████▒▓█   ▓██▒ ▒██▒ ░   ▒██▒ ░ ░██░▒ ▓███▀ ░░▒████▒░▒████▓
░ ▒░▓  ░▒▒   ▓▒█░ ▒ ░░     ▒ ░░   ░▓  ░ ░▒ ▒  ░░░ ▒░ ░ ▒▒▓  ▒
░ ░ ▒  ░ ▒   ▒▒ ░   ░        ░     ▒ ░  ░  ▒    ░ ░  ░ ░ ▒  ▒
░ ░    ░   ▒    ░ ░        ░     ▒ ░░           ░    ░ ░  ░
░  ░     ░  ░                 ░  ░ ░           ░  ░   ░
░ ░
Herramienta para hacer visualizaciones y animaciones con retículas en 2D, 3D y 4D.
CIC - IPN - ESFM
- Made with manim, python and LaTex.
¿Cuál es la dimensión del Espacio donde vas a trabajar?
[D] Dimensión Dos |R^2
[T] Dimensión Tres |R^3
[C] Dimensión Cuatro |R^4
[s] salir
'''))
        if command == 'D':
            dim1 = int(input('¿Cuál es la dimensión de la retícula?: '))
            if dim1 == 1:
                _render('SpanLattice1D')
            elif dim1 == 2:
                do1 = str(input('''
¿Qué deseas hacer?
[a] Animar la retícula por el conjunto generador.
[b] Dibujar la retícula directamente.
[c] Diujar la reítcula y su Dominio Fundamental.
[d] Reducir la retícula por algoritmo de Gauss.
'''))
                # Map each menu key to its manim scene.
                scenes = {
                    'a': 'SpanLattice2D',
                    'b': 'DrawLattice2D',
                    'c': 'DrawDomFundLattice2D',
                    'd': 'GaussLatticeReduction',
                }
                if do1 in scenes:
                    _render(scenes[do1])
                else:
                    print("¡Selecione una opción!")
            elif dim1 >= 3:
                print('¡La dimensión de la retícula no debe ser mayor a la del espacio!')
        elif command == 'T':
            dim2 = int(input('¿Cuál es la dimensión de la retícula?: '))
            if dim2 == 1:
                _render('DrawLattice1Din3D')
            elif dim2 == 2:
                do2 = str(input('''
¿Qué deseas hacer?
[a] Dibujar la retícula directamente.
[b] Diujar la reítcula y su Dominio Fundamental.
'''))
                scenes = {
                    'a': 'DrawLattice2Din3D',
                    'b': 'DrawLatticeDomFund2Din3D',
                }
                if do2 in scenes:
                    _render(scenes[do2])
                else:
                    # Preserve the original fallback: exit on an invalid option.
                    break
            elif dim2 == 3:
                do3 = str(input('''
¿Qué deseas hacer?
[a] Dibujar la retícula directamente.
[b] Diujar la reítcula y su Dominio Fundamental.
[c] Reducir la retícula por algoritmo LLL.
'''))
                scenes = {
                    'a': 'DrawLattice3D',
                    'b': 'DrawDomFund3D',
                    'c': 'LLLReduceLattice',
                }
                if do3 in scenes:
                    _render(scenes[do3])
                else:
                    break
            elif dim2 >= 4:
                print('¡La dimensión de la retícula no debe ser mayor a la del espacio!')
        elif command == 'C':
            print("Se va a dibujar la represenación de una retícula de cuatro dimensiones.")
            _render('DrawLattice4D')
        elif command == 's':
            break
        else:
            print("¡Seleccione una de las opciones!")
if __name__ == '__main__':
    run()
|
const db = require("../models");
const Comment = db.comments;
//Create and save a new comment
exports.create = async (req, res) => {
if (!req.body.comment) {
res.status(400).send({
message: "Cannot post an empty comment"
})
}
try {
const comment = {
comment: req.body.comment
};
const response = await Comment.create(comment)
res.send(response);
} catch {err => {
res.status(500).send({
message:
err.message || "Something went wrong while creating your comment"
});
}};
};
//Retrieve all Comments from the DB
exports.findAll = async (req, res) => {
try {
const comments = await Comment.findAll()
res.send(comments);
} catch {
(err) => {
res.status(500).send({
message:
err.message || "Some error occurred while retrieving comments."
});
};
};
} |
<reponame>dqian/node-boilerplate<filename>src/index.ts
import * as http from 'http'
import config from '~/config'
// Imported for its side effects: registers the auth routes/strategies.
import './packages/api/auth'
import { getConnection } from './packages/database'
// NOTE(review): reflect-metadata is conventionally imported before anything
// that uses decorators — confirm the later position here is intentional.
import "reflect-metadata"
import server from './server'
// Port to bind; falls back to 3000 when SERVER_PORT is unset.
const PORT = config.SERVER_PORT || '3000'
// listen() callback: establish the database connection once the HTTP server
// is accepting requests.
// NOTE(review): requests arriving before getConnection() resolves will hit
// an unconnected database — verify this window is acceptable.
async function onStart(): Promise<void> {
  try {
    // initialize database connection
    await getConnection()
    console.log(`Database successfully connected.`)
  } catch (err) {
    // tslint:disable-next-line:no-console
    console.log(err)
    // Re-raise so the failure is not silently ignored.
    throw err
  }
  console.log(`Server up and running on port ${PORT}.`)
}
const currentServer = http.createServer({}, server)
currentServer.listen(PORT, onStart)
<reponame>SDurand7/AVLIT-Engine<gh_stars>0
#pragma once
#include <Core/Base/include/SceneObject.hpp>
#include <Core/Renderer/include/RenderTarget.hpp>
namespace AVLIT {
/// Abstract base class for all light sources. Owns the shadow-map render
/// target and the state common to every light (color, enabled flag).
class Light : public SceneObject {
public:
    Light(const Light &light) = delete; // lights are not copyable
    // transform should be a composition of a rotation and a translation, debug models should be scaled on load
    Light(const std::string &name, const Transform &transform, uint shadowMapWidth, uint shadowMapHeight,
          const Color3 &color);
    virtual ~Light() = default;
    /// Uploads this light's uniforms to `shader` under `name`, using
    /// `textureUnit` for the shadow map.
    virtual void setParameters(const std::string &name, Shader *shader, int textureUnit) const = 0;
    /// Projection matrix used when rendering this light's shadow map.
    virtual const Mat4 &projection() const = 0;
    inline Mat4 view() const;
    inline const RenderTarget *shadowMap() const;
    inline uint shadowMapWidth() const;
    inline uint shadowMapHeight() const;
    AVLIT_API inline Color3 color() const;
    AVLIT_API inline void setColor(const Color3 &color);
    /// Whether the light currently contributes to the scene.
    AVLIT_API inline bool isLit() const;
    /// Toggles the enabled flag (m_lit).
    AVLIT_API inline void switchState();
protected:
    RenderTargetUptr m_shadowMap;
    uint m_shadowMapWidth;
    uint m_shadowMapHeight;
    Color3 m_color;
    bool m_lit = true;
};
/// Omnidirectional light emitting from a single point.
class PointLight : public Light {
public:
    PointLight(const std::string &name, const Transform &transform, uint shadowMapWidth, uint shadowMapHeight,
               const Color3 &color);
    void setParameters(const std::string &name, Shader *shader, int textureUnit) const override;
    const Mat4 &projection() const override;
private:
    static const float maxRange;
    static const Mat4 m_projection;
};
/// Light with parallel rays (sun-like); direction comes from the transform.
class DirectionalLight : public Light {
public:
    DirectionalLight(const std::string &name, const Transform &transform, uint shadowMapWidth, uint shadowMapHeight,
                     const Color3 &color);
    void setParameters(const std::string &name, Shader *shader, int textureUnit) const override;
    const Mat4 &projection() const override;
private:
    static const Mat4 m_projection;
};
/// Cone-shaped light; intensity falls off between the inner and outer angles.
class SpotLight : public Light {
public:
    SpotLight(const std::string &name, const Transform &transform, uint shadowMapWidth, uint shadowMapHeight,
              const Color3 &color, float innerAngle, float outerAngle);
    void setParameters(const std::string &name, Shader *shader, int textureUnit) const override;
    const Mat4 &projection() const override;
    AVLIT_API float innerAngle() const;
    AVLIT_API void setInnerAngle(float angle);
    AVLIT_API float outerAngle() const;
    AVLIT_API void setOuterAngle(float angle);
private:
    static const float maxRange;
    static const Mat4 m_projection;
    // Cosines are cached (not the raw angles) — presumably for cheap
    // comparison against dot products in shaders; confirm in the .inl/.cpp.
    float m_cosInnerAngle;
    float m_cosOuterAngle;
};
/// Uniform base illumination applied everywhere; no position or direction.
class AmbientLight : public Light {
public:
    AmbientLight(const std::string &name, const Transform &transform, const Color3 &color);
    void setParameters(const std::string &name, Shader *shader, int textureUnit) const override;
    const Mat4 &projection() const override;
private:
    static const Mat4 m_projection;
};
} // namespace AVLIT
#include <Core/Base/inline/Light.inl>
#!/bin/bash
# Integration test: download the same URL once directly and once through the
# local SOCKS5 proxy, then compare the two downloads byte-for-byte.
PYTHON="coverage run -p"   # -p: write a separate coverage data file per process
URL=http://127.0.0.1/file
mkdir -p tmp
# Start the shadowsocks client and server with the AES test configuration.
$PYTHON shadowsocks/local.py -c tests/aes.json &
LOCAL=$!
$PYTHON shadowsocks/server.py -c tests/aes.json --forbidden-ip "" &
SERVER=$!
# Give both processes time to start and bind their ports.
sleep 3
time curl -o tmp/expected $URL
time curl -o tmp/result --socks5-hostname 127.0.0.1:1081 $URL
# SIGINT (not KILL) — presumably so coverage can flush its data; confirm.
kill -s SIGINT $LOCAL
kill -s SIGINT $SERVER
sleep 2
# Fail the test if the proxied download differs from the direct one.
diff tmp/expected tmp/result || exit 1
|
public static String extractPackageName(String packageDeclaration) {
int startIndex = packageDeclaration.indexOf("package") + "package".length();
int endIndex = packageDeclaration.indexOf(";");
return packageDeclaration.substring(startIndex, endIndex).trim();
} |
#!/bin/bash
set -e
# import common functions (provides replace_pol)
src_dir="$(dirname "$0")"
source "${src_dir}/_common.sh"
# define input arguments
fn="${1}"
# Remove the raw miriad dataset for every polarization product. The xx name
# is the input path itself; yy/xy/yx are derived via replace_pol.
# Fixes vs. original: expansions are quoted (paths with spaces no longer
# word-split) and the four copy-pasted echo+rm pairs are a single loop.
for target in \
    "${fn}" \
    "$(replace_pol "${fn}" "yy")" \
    "$(replace_pol "${fn}" "xy")" \
    "$(replace_pol "${fn}" "yx")"; do
    # Echo first so the log records exactly what is being deleted.
    echo rm -rf "${target}"
    rm -rf "${target}"
done
<filename>Modulo 6 - NodeJS/myapp/moduloDNS.js
// Resolve www.google.es and print its IP address.
// Fixes vs. original: dns.lookup reports only via its callback (its return
// value was pointlessly assigned to an unused variable), and the error
// argument was ignored — a failed lookup printed "undefined".
var dns = require("dns");
dns.lookup("www.google.es", function (err, address, family) {
  if (err) {
    console.error(err);
    return;
  }
  console.log(address);
});
|
<reponame>edjannoo/api-documentation-frontend<gh_stars>0
/*
* Copyright 2020 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.apidocumentation
import org.raml.v2.api.model.v10.api.{Api, DocumentationItem}
import uk.gov.hmrc.apidocumentation.models.{DocsVisibility, ExtendedAPIVersion}
import uk.gov.hmrc.apidocumentation.views.helpers.VersionDocsVisible
import scala.collection.JavaConverters._
package object services {
  // Alias: a parsed API model is the raml-parser Api type.
  type RAML = Api
  // Enriches the RAML Api model with version-aware documentation filtering.
  implicit class RicherRAML(val x: Api) {
    // Documentation a caller may see for the given version: everything when
    // fully visible, only the "Overview" item when overview-only, else none.
    def documentationForVersion(version: Option[ExtendedAPIVersion]): Seq[DocumentationItem] = versionVisibility(version) match {
      case DocsVisibility.VISIBLE => x.documentation.asScala.toSeq
      case DocsVisibility.OVERVIEW_ONLY => x.documentation.asScala.filter(_.title.value == "Overview")
      case _ => Seq.empty
    }
    // A missing version is treated as fully visible.
    private def versionVisibility(version: Option[ExtendedAPIVersion]): DocsVisibility.Value = version match {
      case Some(v) => VersionDocsVisible(v.visibility)
      case _ => DocsVisibility.VISIBLE
    }
  }
}
|
#include <boost/test/unit_test.hpp>
#include "eosio/chain/name.hpp"
BOOST_AUTO_TEST_SUITE(name_test)
// A default-constructed eosio::chain::name must stringify to the empty string.
BOOST_AUTO_TEST_CASE(default_to_string)
{
   eosio::chain::name name;
   std::string result = name.to_string();
   BOOST_CHECK_EQUAL("", result);
}
BOOST_AUTO_TEST_SUITE_END()
|
#!/bin/sh
# Container entrypoint: discover this host's private IP (first interface in
# the 172.* range) and exec Thanos with cluster.address bound to it.
# Fixes vs. original: "$@" is quoted so arguments containing spaces are
# forwarded intact, and the non-POSIX `echo -e` is replaced by plain echo
# (behavior of -e is shell-dependent under /bin/sh).
PRIVATE_IP=`ip a | grep 172 | awk '{print $2}' | sed 's#/.*$##'`
echo "Running a command"
echo " /bin/thanos $* --cluster.address=${PRIVATE_IP}:10900"
exec /bin/thanos "$@" --cluster.address="${PRIVATE_IP}:10900"
<reponame>segfaultxavi/nem.core
package org.nem.core.test;
import org.nem.core.crypto.KeyPair;
import org.nem.core.node.*;
import org.nem.core.time.NetworkTimeStamp;
import org.nem.core.time.synchronization.*;
import java.security.SecureRandom;
import java.util.*;
/**
 * Test helper for building {@link TimeSynchronizationSample} fixtures with
 * controlled time offsets, key pairs and communication durations.
 */
public class TimeSyncUtils {
    // One minute in milliseconds.
    private static final long MINUTE = 60L * 1000L;
    // Offsets at or above this value (120 minutes) are built by
    // createIntolerableSamples and are expected to be rejected by the
    // synchronization logic under test.
    private static final long TOLERATED_DEVIATION_START = 120 * MINUTE;
    // Shared key pair: all "same key pair" samples reference one identity.
    private static final KeyPair KEY_PAIR = new KeyPair();
    /**
     * Creates count sorted synchronization samples with a time offset that is tolerable.
     *
     * @param startValue The time offset to start with.
     * @param count The number of samples needed.
     * @return the sorted list of samples
     */
    public static List<TimeSynchronizationSample> createTolerableSortedSamples(final long startValue, final int count) {
        final List<TimeSynchronizationSample> samples = createTolerableSamples(startValue, count);
        Collections.sort(samples);
        return samples;
    }
    /**
     * Creates count unsorted synchronization samples with a time offset that is tolerable.
     *
     * @param startValue The time offset to start with.
     * @param count The number of samples needed.
     * @return the unsorted list of samples
     */
    public static List<TimeSynchronizationSample> createTolerableUnsortedSamples(final long startValue, final int count) {
        final List<TimeSynchronizationSample> samples = createTolerableSamples(startValue, count);
        Collections.shuffle(samples);
        return samples;
    }
    // Builds count samples with offsets startValue+1 .. startValue+count,
    // all tied to the shared KEY_PAIR identity.
    private static List<TimeSynchronizationSample> createTolerableSamples(final long startValue, final int count) {
        final List<TimeSynchronizationSample> samples = new ArrayList<>();
        for (int i = 1; i <= count; i++) {
            samples.add(createTimeSynchronizationSample(startValue + i));
        }
        return samples;
    }
    /**
     * Creates count synchronization samples with a time offset that is not tolerable.
     *
     * @param count The number of samples needed.
     * @return the list of samples
     */
    public static List<TimeSynchronizationSample> createIntolerableSamples(final int count) {
        final List<TimeSynchronizationSample> samples = new ArrayList<>();
        for (int i = 1; i <= count; i++) {
            samples.add(createTimeSynchronizationSample(TOLERATED_DEVIATION_START + i));
        }
        return samples;
    }
    /**
     * Creates a synchronization sample with a given time offset.
     *
     * @param timeOffset The time offset in ms.
     * @return The time synchronization sample
     */
    private static TimeSynchronizationSample createTimeSynchronizationSample(final long timeOffset) {
        return new TimeSynchronizationSample(
                new Node(new NodeIdentity(KEY_PAIR, "node"), new NodeEndpoint("http", "10.10.10.12", 13), null),
                new CommunicationTimeStamps(new NetworkTimeStamp(0), new NetworkTimeStamp(10)),
                new CommunicationTimeStamps(new NetworkTimeStamp(5 + timeOffset), new NetworkTimeStamp(5 + timeOffset)));
    }
    /**
     * Creates count synchronization samples with increasing time offsets, each
     * tied to a freshly generated key pair (distinct node identities).
     *
     * @param startValue The time offset to start with.
     * @param count The number of samples needed.
     * @return the list of samples
     */
    public static List<TimeSynchronizationSample> createTimeSynchronizationSamplesWithDifferentKeyPairs(final int startValue,
            final int count) {
        final List<TimeSynchronizationSample> samples = new ArrayList<>();
        for (int i = 0; i < count; i++) {
            samples.add(createTimeSynchronizationSampleWithKeyPair(new KeyPair(), startValue + i));
        }
        return samples;
    }
    /**
     * Creates count random samples with distinct key pairs whose time offsets
     * are symmetric around the given mean (pairs mean+v / mean-v, plus one
     * sample exactly at the mean when count is odd).
     *
     * @param count The number of samples needed.
     * @param mean The mean time offset the samples should have.
     * @return the list of samples
     */
    public static List<TimeSynchronizationSample> createRandomTolerableSamplesWithDifferentKeyPairsAroundMean(final int count,
            final long mean) {
        final SecureRandom random = new SecureRandom();
        final List<TimeSynchronizationSample> samples = new ArrayList<>();
        if (count % 2 == 1) {
            samples.add(createTimeSynchronizationSampleWithKeyPair(new KeyPair(), mean));
        }
        for (int i = 0; i < count / 2; i++) {
            final int value = random.nextInt(1000);
            samples.add(createTimeSynchronizationSampleWithKeyPair(new KeyPair(), mean + value));
            samples.add(createTimeSynchronizationSampleWithKeyPair(new KeyPair(), mean - value));
        }
        return samples;
    }
    /**
     * Creates a synchronization sample with a given time offset.
     *
     * @param keyPair The key pair to tie the node to.
     * @param timeOffset The time offset in ms.
     * @return The time synchronization sample
     */
    private static TimeSynchronizationSample createTimeSynchronizationSampleWithKeyPair(final KeyPair keyPair, final long timeOffset) {
        return new TimeSynchronizationSample(new Node(new NodeIdentity(keyPair, "node"), new NodeEndpoint("http", "10.10.10.12", 13), null),
                new CommunicationTimeStamps(new NetworkTimeStamp(0), new NetworkTimeStamp(10)),
                new CommunicationTimeStamps(new NetworkTimeStamp(5 + timeOffset), new NetworkTimeStamp(5 + timeOffset)));
    }
    /**
     * Creates a synchronization sample with a given duration.
     *
     * @param duration The duration in ms.
     * @return The time synchronization sample
     */
    public static TimeSynchronizationSample createTimeSynchronizationSampleWithDuration(final long duration) {
        return new TimeSynchronizationSample(
                new Node(new NodeIdentity(new KeyPair(), "node"), new NodeEndpoint("http", "10.10.10.12", 13), null),
                new CommunicationTimeStamps(new NetworkTimeStamp(0), new NetworkTimeStamp(duration)),
                new CommunicationTimeStamps(new NetworkTimeStamp(duration / 2), new NetworkTimeStamp(duration / 2)));
    }
    /**
     * Creates a synchronization sample.
     *
     * @param keyPair The remote node's key pair.
     * @param localSendTimeStamp The local send time stamp.
     * @param localReceiveTimeStamp The local receive time stamp.
     * @param remoteSendTimeStamp The remote send time stamp.
     * @param remoteReceiveTimeStamp The remote receive time stamp.
     * @return The time synchronization sample
     */
    public static TimeSynchronizationSample createTimeSynchronizationSample(final KeyPair keyPair, final long localSendTimeStamp,
            final long localReceiveTimeStamp, final long remoteSendTimeStamp, final long remoteReceiveTimeStamp) {
        return new TimeSynchronizationSample(new Node(new NodeIdentity(keyPair, "node"), new NodeEndpoint("http", "10.10.10.12", 13), null),
                new CommunicationTimeStamps(new NetworkTimeStamp(localSendTimeStamp), new NetworkTimeStamp(localReceiveTimeStamp)),
                new CommunicationTimeStamps(new NetworkTimeStamp(remoteSendTimeStamp), new NetworkTimeStamp(remoteReceiveTimeStamp)));
    }
}
|
<reponame>pulsar-chem/BPModule
#!/usr/bin/env python3
import os
import sys
import argparse
import traceback
# Add the pulsar path
thispath = os.path.dirname(os.path.realpath(__file__))
psrpath = os.path.join(os.path.dirname(thispath), "modules")
sys.path.insert(0, psrpath)
import pulsar as psr
from pulsar.system import System, CreateAtom
def Run():
    """Exercise the pulsar System class: universe construction, membership
    checks, and incremental atom insertion. Results are reported through the
    pulsar Tester; unexpected exceptions are caught and logged globally.

    NOTE(review): InsertAtom (used below) and PrintMol (commented out) are
    not visible in this portion of the file — presumably defined elsewhere
    in it; confirm before refactoring.
    """
    try:
        # Global output handle; currently only needed by the commented-out
        # PrintMol calls below.
        out = psr.output.GetGlobalOut()
        tester = psr.testing.Tester("Testing System class")
        tester.PrintHeader()
        # Two identical 7-atom fragments; the second is the first shifted by
        # (2, 2, 2). CreateAtom(index, [x, y, z], Z).
        atoms = [ CreateAtom(0, [ 0.000000000000, 0.000000000000, 0.000000000000], 6),
                  CreateAtom(1, [ 0.000000000000, 0.000000000000, 2.845112131228], 6),
                  CreateAtom(2, [ 1.899115961744, 0.000000000000, 4.139062527233], 8),
                  CreateAtom(3, [-1.894048308506, 0.000000000000, 3.747688672216], 1),
                  CreateAtom(4, [ 1.942500819960, 0.000000000000, -0.701145981971], 1),
                  CreateAtom(5, [-1.007295466862, -1.669971842687, -0.705916966833], 1),
                  CreateAtom(6, [-1.007295466862, 1.669971842687, -0.705916966833], 1),
                  # same as above, shifted 2,2,2
                  CreateAtom(7, [ 2.000000000000, 2.000000000000, 2.000000000000 ], 6),
                  CreateAtom(8, [ 2.000000000000, 2.000000000000, 4.845112131228 ], 6),
                  CreateAtom(9, [ 3.899115961744, 2.000000000000, 6.139062527233 ], 8),
                  CreateAtom(10, [ 0.105951691494, 2.000000000000, 5.747688672216 ], 1),
                  CreateAtom(11, [ 3.942500819960, 2.000000000000, 1.298854018029 ], 1),
                  CreateAtom(12, [ 0.992704533138, 0.330028157313, 1.294083033167 ], 1),
                  CreateAtom(13, [ 0.992704533138, 3.669971842687, 1.294083033167 ], 1)
                ]
        molu = psr.system.AtomSetUniverse()
        for a in atoms:
            molu.Insert(a)
        tester.TestValue("System universe size", molu.size(), 14)
        # System built over the full universe (fill=True): all 14 atoms.
        mol = psr.system.System(molu, True)
        tester.TestValue("System size", mol.size(), 14)
        tester.TestValue("System size", len(mol), 14)
        # Does the system contain all the atoms?
        idx = 0
        for a in atoms:
            tester.Test("System has atom {}".format(idx), True, mol.HasAtom, a)
            tester.Test("System has atom {} - via 'in'".format(idx), True, lambda m, el: el in m, mol, a)
            idx += 1
        # An atom outside the universe must not be reported as a member.
        noatom = CreateAtom(14, [ 0.0, 0.0, 0.0 ], 7)
        tester.Test("System doesn't have atom", False, mol.HasAtom, noatom)
        tester.Test("System doesn't have atom - via 'in'", False, lambda m, el: el in m, mol, noatom)
        # Starting with empty system
        mol = psr.system.System(molu, False)
        tester.TestValue("System size", mol.size(), 0)
        tester.TestValue("System size", len(mol), 0)
        idx = 0
        for a in atoms:
            # Each atom is absent, inserted, then present.
            tester.Test("System has atom {}".format(idx), False, mol.HasAtom, a)
            tester.Test("Inserting atom {}".format(idx), True, InsertAtom, mol, a)
            tester.Test("System has atom {}".format(idx), True, mol.HasAtom, a)
            idx += 1
        # Inserting an atom not in the universe is expected to fail.
        tester.Test("Inserting atom not in universe", False, InsertAtom, mol, noatom)
        #PrintMol(mol, out)
        #mol2 = mol.Translate( [ 2.0, 3.0, 4.0 ] )
        #PrintMol(mol2, out)
        #mol2 = mol.Rotate( [ 0.0, 1.0, 0.0,
        #                     0.0, 0.0, 1.0,
        #                     1.0, 0.0, 0.0 ] )
        #PrintMol(mol2, out)
        tester.PrintResults()
    except Exception as e:
        psr.output.GlobalOutput("Caught exception in main handler. Contact the developers\n")
        traceback.print_exc()
        psr.output.GlobalError("\n")
        psr.output.GlobalError(str(e))
        psr.output.GlobalError("\n")
# Initialize the pulsar runtime, run the test suite, then shut down cleanly.
psr.Init(sys.argv, out = "stdout", color = True, debug = True)
Run()
psr.Finalize()
|
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Activation

# Small feed-forward regressor: 2 inputs -> 30 ReLU units -> 1 linear output.
model = Sequential()
model.add(Dense(30, input_dim=2))
model.add(Activation('relu'))
model.add(Dense(1))
model.add(Activation('linear'))

# Mean-squared-error loss trained with plain SGD.
# NOTE(review): 'accuracy' is reported but is not a meaningful metric for
# a regression target — confirm whether it is wanted.
model.compile(loss='mse',
              optimizer='sgd',
              metrics=['accuracy'])

# Synthetic training data: each target is the sum of the two inputs,
# shaped (100, 1) to match the single output unit.
X_train = np.random.random((100, 2))
y_train = X_train.sum(axis=1, keepdims=True)

# Train for a handful of epochs on the toy data.
model.fit(X_train, y_train, epochs=10)
<reponame>yunsean/yoga<gh_stars>10-100
package com.yoga.admin.tenant.controller;
import com.github.pagehelper.PageInfo;
import com.yoga.admin.tenant.dto.*;
import com.yoga.admin.tenant.vo.TenantMenuVo;
import com.yoga.admin.tenant.vo.TenantVo;
import com.yoga.core.base.BaseController;
import com.yoga.core.base.BaseDto;
import com.yoga.core.base.BaseEnum;
import com.yoga.core.data.ApiResult;
import com.yoga.core.data.ApiResults;
import com.yoga.core.data.CommonPage;
import com.yoga.core.exception.BusinessException;
import com.yoga.core.exception.IllegalArgumentException;
import com.yoga.core.property.PropertiesService;
import com.yoga.core.utils.StringUtil;
import com.yoga.setting.annotation.Settable;
import com.yoga.setting.customize.CustomPage;
import com.yoga.setting.model.SettableItem;
import com.yoga.setting.model.Setting;
import com.yoga.setting.service.SettingService;
import com.yoga.tenant.menu.MenuItem;
import com.yoga.tenant.menu.MenuLoader;
import com.yoga.tenant.tenant.model.Tenant;
import com.yoga.tenant.tenant.model.TenantCustomize;
import com.yoga.tenant.tenant.model.TenantMenu;
import com.yoga.tenant.tenant.model.TenantSetting;
import com.yoga.tenant.tenant.service.TemplateService;
import com.yoga.tenant.tenant.service.TenantService;
import com.yoga.utility.alidns.service.AliDnsService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.shiro.authz.annotation.RequiresAuthentication;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import javax.validation.Valid;
import java.util.*;
@Controller
@Settable
@Api(tags = "租户管理")
@RequestMapping("/admin/tenant/tenant")
public class TenantController extends BaseController {
    // Public IP used when auto-managing DNS A records for tenant subdomains;
    // all DNS management is skipped when this property is blank.
    @Value("${app.system.public-ip:}")
    private String publicIp;
    @Autowired
    private TenantService tenantService;
    @Autowired
    private SettingService settingService;
    @Autowired
    private TemplateService templateService;
    @Autowired
    private PropertiesService propertiesService;
    @Autowired
    private AliDnsService dnsService;
    /**
     * Renders the paged tenant-list admin page, optionally filtered by
     * name, code and template.
     */
    @ApiIgnore
    @GetMapping("/list")
    @RequiresPermissions("gbl_tenant")
    public String allTenants(ModelMap model, CustomPage page, @Valid TenantListDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        PageInfo<Tenant> tenants = tenantService.list(dto.getName(), dto.getCode(), dto.getTemplateId(), null, page.getPageIndex(), page.getPageSize());
        model.put("param", dto.wrapAsMap());
        model.put("tenants", tenants.getList());
        model.put("page", new CommonPage(tenants));
        model.put("templates", templateService.list(null, 0, 1000).getList());
        model.put("tenantAlias", propertiesService.getTenantAlias());
        return "/admin/tenant/tenants";
    }
    /**
     * Renders the module-selection page for one tenant: every known menu
     * item, with the tenant's currently enabled modules pre-checked.
     */
    @ApiIgnore
    @GetMapping("/modules")
    @RequiresPermissions("gbl_tenant")
    public String allModules(ModelMap model, @Valid TenantModuleDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        List<MenuItem> menuItems = MenuLoader.getInstance().getAllMenus(false);
        List<String> modules = null;
        String[] moduleArray = tenantService.getModules(dto.getTenantId());
        // A tenant with no configured modules is treated as an empty set.
        if (moduleArray != null) modules = Arrays.asList(moduleArray);
        else modules = new ArrayList<>();
        // Mark each second-level menu entry as checked iff the tenant has it.
        for (MenuItem item : menuItems) {
            if (item.getChildren() == null) continue;
            for (MenuItem child : item.getChildren()) {
                if (modules.contains(child.getCode())) {
                    child.setChecked(true);
                } else {
                    child.setChecked(false);
                }
            }
        }
        model.put("tenantId", dto.getTenantId());
        model.put("modules", menuItems);
        model.put("tenantAlias", propertiesService.getTenantAlias());
        return "/admin/tenant/modules";
    }
    /**
     * Renders the menu-customization page for one tenant: the tenant's custom
     * menus (children sorted by their sort field) plus the union of group
     * names from both custom and system menus.
     */
    @ApiIgnore
    @GetMapping("/menus")
    @RequiresPermissions("gbl_tenant")
    public String allMenus(ModelMap model, @Valid TenantMenuDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        List<MenuItem> customMenus = tenantService.getMenus(dto.getTenantId());
        List<MenuItem> systemMenus = MenuLoader.getInstance().getAllMenus();
        Set<String> groups = new HashSet<>();
        if (customMenus != null) {
            for (MenuItem item : customMenus) {
                groups.add(item.getName());
                if (item.getChildren() != null) {
                    // Keep child entries in their configured display order.
                    Collections.sort(item.getChildren(), new Comparator<MenuItem>() {
                        @Override
                        public int compare(MenuItem o1, MenuItem o2) {
                            return o1.getSort() - o2.getSort();
                        }
                    });
                }
            }
        }
        if (systemMenus != null) {
            for (MenuItem item : systemMenus) {
                groups.add(item.getName());
            }
        }
        model.put("tenantId", dto.getTenantId());
        model.put("menus", customMenus);
        model.put("groups", groups);
        model.put("tenantAlias", propertiesService.getTenantAlias());
        return "/admin/tenant/menus";
    }
    /**
     * Renders the paged settings page for one tenant: every settable item of
     * the tenant's modules (plus the global "gbl_tenant" module), overlaid
     * with the tenant's stored values or the declared defaults.
     */
    @ApiIgnore
    @GetMapping("/settings")
    @RequiresPermissions("gbl_tenant")
    public String setting(ModelMap model, CustomPage page, @Valid TenantListSettingsDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        String[] modules = tenantService.getModules(dto.getTenantId());
        // Append the global "gbl_tenant" module so its settings are listed too.
        String[] modeles1 = new String[modules.length + 1];
        System.arraycopy(modules, 0, modeles1, 0, modules.length);
        modeles1[modules.length] = "gbl_tenant";
        // NOTE(review): static call on SettingService here while an injected
        // instance (settingService) is used below — confirm it is intentional.
        PageInfo<SettableItem> items = SettingService.allSettable(dto.getName(), modeles1, page.getPageIndex(), page.getPageSize(), true);
        List<Map<String, Object>> settings = new ArrayList<>();
        for (SettableItem item : items.getList()) {
            Setting setting = settingService.get(dto.getTenantId(), item.getModule(), item.getKey());
            if (setting != null) {
                item.setValue(setting.getValue());
                item.setShowValue(setting.getShowValue());
            } else {
                // Fall back to the declared default when the tenant has no override.
                item.setValue(StringUtil.isNotBlank(item.getDefValue()) ? item.getDefValue() : null);
            }
            String url = item.getUrl();
            if (url == null) url = "";
            if (StringUtil.isNotBlank(url)) {
                // Propagate the tenant id on the item's edit URL.
                if (url.indexOf('?') > 0) url += "&tenantId=" + dto.getTenantId();
                else url += "?tenantId=" + dto.getTenantId();
            }
            item.setUrl(url);
            Map<String, Object> map = new HashMap<>();
            if (item.getType() != null && BaseEnum.class.isAssignableFrom(item.getType())) {
                // Expose enum constants so the view can render a select box.
                map.put("enums", item.getType().getEnumConstants());
            }
            map.put("setting", item);
            settings.add(map);
        }
        model.put("param", dto.wrapAsMap());
        model.put("tenantId", dto.getTenantId());
        model.put("settings", settings);
        model.put("page", new CommonPage(items));
        model.put("tenantAlias", propertiesService.getTenantAlias());
        return "/admin/tenant/settings";
    }
    /**
     * Renders the global-setting page for one tenant.
     */
    @ApiIgnore
    @GetMapping("/setting")
    @RequiresPermissions("gbl_tenant")
    @Settable(module = TenantService.ModuleName, key = TenantService.Key_Setting, name = "租户设置-全局设置", systemOnly = true)
    public String tenantSetting(ModelMap model, @Valid TenantSettingDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        TenantSetting setting = tenantService.readSetting(dto.getTenantId());
        model.put("setting", setting);
        model.put("tenantId", dto.getTenantId());
        model.put("tenantAlias", propertiesService.getTenantAlias());
        return "/admin/tenant/setting";
    }
    /**
     * Renders the UI-customization page for one tenant.
     */
    @ApiIgnore
    @GetMapping("/customize")
    @RequiresPermissions("gbl_tenant")
    @Settable(module = TenantService.ModuleName, key = TenantService.Key_Customize, name = "租户设置-框架自定义", systemOnly = true)
    public String tenantCustomize(ModelMap model, @Valid TenantSettingDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        TenantCustomize customize = tenantService.readCustomize(dto.getTenantId());
        model.put("customize", customize);
        model.put("tenantId", dto.getTenantId());
        model.put("tenantAlias", propertiesService.getTenantAlias());
        return "/admin/tenant/customize";
    }
    /**
     * JSON API: lists tenants (first 1000, non-deleted) filtered by the
     * optional name/code/template parameters.
     */
    @ResponseBody
    @GetMapping("/list.json")
    @RequiresAuthentication
    @ApiOperation(value = "租户列表")
    public ApiResults<TenantVo> list(@ModelAttribute TenantListDto dto) {
        PageInfo<Tenant> tenants = tenantService.list(dto.getName(), dto.getCode(), dto.getTemplateId(), false, 0, 1000);
        return new ApiResults<>(tenants, TenantVo.class);
    }
    /**
     * JSON API: returns the tenant identified by the given id.
     */
    @ResponseBody
    @GetMapping("/get.json")
    @ApiOperation(value = "租户详情")
    public ApiResult<TenantVo> get(@ModelAttribute @Valid TenantGetDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        Tenant tenant = tenantService.get(dto.getId());
        return new ApiResult<>(tenant, TenantVo.class);
    }
    /**
     * JSON API: returns the details of the caller's current tenant (tid).
     */
    @ResponseBody
    @GetMapping("/info.json")
    @ApiOperation(value = "当前租户详情")
    public ApiResult<TenantVo> info(@ModelAttribute @Valid BaseDto dto, BindingResult bindingResult) {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        Tenant tenant = tenantService.get(dto.getTid());
        return new ApiResult<>(tenant, TenantVo.class);
    }
    /**
     * JSON API: creates a new tenant together with its admin account, then
     * (when a public IP is configured) registers a DNS A record for the
     * tenant's code. DNS failure does not roll back the tenant; it is
     * reported with a non-zero result code instead.
     */
    @ResponseBody
    @PostMapping("/add.json")
    @RequiresPermissions("gbl_tenant.add")
    @ApiOperation(value = "增加新租户")
    public ApiResult add(@Valid @ModelAttribute TenantAddDto dto, BindingResult bindingResult) throws Exception {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        // Only callable from the system tenant (tid 0).
        if (dto.getTid() != 0) throw new BusinessException("非法操作");
        if (dto.getPassword().length() < 6) throw new IllegalArgumentException("管理员密码至少6个字符!");
        tenantService.add(dto.getName(), dto.getCode(), dto.getRemark(), dto.getTemplateId(), dto.getUsername(), dto.getPassword(), dto.getNickname(), dto.getMobile());
        if (StringUtil.isNotBlank(publicIp)) {
            try {
                dnsService.addARecord(dto.getCode(), publicIp, null);
            } catch (Exception ex) {
                return new ApiResult(1, "创建租户成功,但添加域名解析失败,请手动管理域名解析!");
            }
        }
        return new ApiResult();
    }
    /**
     * JSON API: updates a tenant's name and remark. Only callable from the
     * system tenant (tid 0).
     */
    @ResponseBody
    @PostMapping("/update.json")
    @RequiresPermissions("gbl_tenant.update")
    @ApiOperation(value = "修改租户信息")
    public ApiResult update(@Valid @ModelAttribute TenantUpdateDto dto, BindingResult bindingResult) throws Exception {
        if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
        if (dto.getTid() != 0) throw new BusinessException("非法操作");
        tenantService.update(dto.getId(), dto.getName(), dto.getRemark());
        return new ApiResult();
    }
/**
 * Deletes a tenant and, when a public IP is configured, removes its DNS
 * A record. Platform context only (tid must be 0).
 */
@ResponseBody
@DeleteMapping("/delete.json")
@RequiresPermissions("gbl_tenant.del")
@ApiOperation(value = "删除现有租户")
public ApiResult delete(@Valid @ModelAttribute TenantDeleteDto dto, BindingResult bindingResult) {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
if (dto.getTid() != 0) throw new BusinessException("非法操作");
// Fetch before deletion so the tenant code is still available for DNS cleanup.
Tenant tenant = tenantService.get(dto.getId());
tenantService.delete(dto.getId());
// DNS cleanup is best-effort; the deletion itself is already committed.
if (StringUtil.isNotBlank(publicIp) && StringUtil.isNotBlank(tenant.getCode())) {
try {
dnsService.deleteARecord(tenant.getCode(), null);
} catch (Exception ex) {
return new ApiResult(1, "删除租户成功,但删除域名解析失败,请手动管理域名解析!");
}
}
return new ApiResult();
}
/**
 * Restores a previously deleted tenant under the given code and re-adds
 * its DNS A record (best-effort). Platform context only (tid must be 0).
 */
@ResponseBody
@PostMapping("/renew.json")
@RequiresPermissions("gbl_tenant.del")
@ApiOperation(value = "恢复删除的租户")
public ApiResult renew(@Valid @ModelAttribute TenantRenewDto dto, BindingResult bindingResult) throws Exception {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
if (dto.getTid() != 0) throw new BusinessException("非法操作");
tenantService.renew(dto.getId(), dto.getCode());
// DNS registration failure does not undo the restore (code 1 response instead).
if (StringUtil.isNotBlank(publicIp)) {
try {
dnsService.addARecord(dto.getCode(), publicIp, null);
} catch (Exception ex) {
return new ApiResult(1, "恢复租户成功,但添加域名解析失败,请手动管理域名解析!");
}
}
return new ApiResult();
}
/**
 * Repairs a tenant's permission data and returns the service's result.
 * Platform context only (tid must be 0).
 */
@ResponseBody
@PostMapping("/repair.json")
@RequiresPermissions("gbl_tenant.update")
@ApiOperation(value = "修复租户权限")
public ApiResult repair(@Valid @ModelAttribute TenantRepairDto dto, BindingResult bindingResult) {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
if (dto.getTid() != 0) throw new BusinessException("非法操作");
return new ApiResult<>(tenantService.repair(dto.getId()));
}
/**
 * Sets the modules available to a tenant.
 *
 * @throws IllegalArgumentException when bean validation fails
 * @throws BusinessException when invoked from within a tenant context (tid != 0)
 */
@ResponseBody
@PostMapping("/module/set.json")
@RequiresPermissions("gbl_tenant.update")
@ApiOperation(value = "设置租户可用的模块")
public ApiResult saveModules(@Valid @ModelAttribute TenantSetModuleDto dto, BindingResult bindingResult) throws Exception {
    if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
    // Consistency fix: every other tenant-management mutation in this controller
    // rejects calls made from within a tenant context; this endpoint omitted the guard.
    if (dto.getTid() != 0) throw new BusinessException("非法操作");
    tenantService.setModules(dto.getTenantId(), dto.getModules());
    return new ApiResult();
}
/**
 * Adds a new custom menu entry for a tenant. Platform context only
 * (tid must be 0).
 */
@ResponseBody
@PostMapping("/menu/add.json")
@RequiresPermissions("gbl_tenant.update")
@ApiOperation(value = "增加租户新自定义菜单")
public ApiResult addMenu(@Valid @ModelAttribute TenantAddMenuDto dto, BindingResult bindingResult) throws Exception {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
if (dto.getTid() != 0) throw new BusinessException("非法操作");
tenantService.addMenu(dto.getTenantId(), dto.getCode(), dto.getGroup(), dto.getName(), dto.getUrl(), dto.getRemark(), dto.getSort());
return new ApiResult();
}
/**
 * Deletes one of a tenant's custom menu entries. Platform context only
 * (tid must be 0).
 */
@ResponseBody
@DeleteMapping("/menu/delete.json")
@RequiresPermissions("gbl_tenant.update")
@ApiOperation(value = "删除租户现有自定义菜单")
public ApiResult deleteMenu(@Valid @ModelAttribute TenantDeleteMenuDto dto, BindingResult bindingResult) {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
if (dto.getTid() != 0) throw new BusinessException("非法操作");
tenantService.deleteMenu(dto.getTenantId(), dto.getMenuId());
return new ApiResult();
}
/**
 * Updates one of a tenant's custom menu entries. Platform context only
 * (tid must be 0).
 */
@ResponseBody
@PostMapping("/menu/update.json")
@RequiresPermissions("gbl_tenant.update")
@ApiOperation(value = "修改租户自定义菜单")
public ApiResult updateMenu(@Valid @ModelAttribute TenantUpdateMenuDto dto, BindingResult bindingResult) throws Exception {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
if (dto.getTid() != 0) throw new BusinessException("非法操作");
tenantService.updateMenu(dto.getTenantId(), dto.getMenuId(), dto.getCode(), dto.getGroup(), dto.getName(), dto.getUrl(), dto.getRemark(), dto.getSort());
return new ApiResult();
}
/**
 * Returns the details of one of a tenant's custom menu entries.
 * NOTE(review): unlike the mutating menu endpoints this read endpoint has
 * no tid != 0 guard — confirm that is intentional.
 */
@ApiOperation(value = "获取租户自定义菜单详情")
@ResponseBody
@GetMapping("/menu/get.json")
@RequiresPermissions("gbl_tenant.update")
public ApiResult<TenantMenuVo> getMenu(@Valid @ModelAttribute TenantGetMenuDto dto, BindingResult bindingResult) {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
TenantMenu menu = tenantService.getMenu(dto.getTenantId(), dto.getMenuId());
return new ApiResult<>(menu, TenantMenuVo.class);
}
/**
 * Persists a tenant's branding settings (platform name, footer, resource
 * prefix, login/background imagery, admin icon, menu color), assembled
 * from the flat form fields of the DTO.
 */
@ApiIgnore
@ResponseBody
@RequiresPermissions("gbl_tenant.update")
@PostMapping("/setting/save.json")
public ApiResult saveSetting(@Valid TenantSaveSettingDto dto, BindingResult bindingResult) {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
TenantSetting setting = new TenantSetting();
setting.setPlatformName(dto.getPlatform());
setting.setFooterRemark(dto.getFooter());
setting.setResourcePrefix(dto.getResource());
setting.setLoginBackUrl(dto.getLoginbg());
setting.setLoginLogoUrl(dto.getLoginlogo());
setting.setAdminIcon(dto.getAdminIcon());
setting.setTopImageUrl(dto.getTopimage());
setting.setMenuColor(dto.getMenuColor());
tenantService.saveSetting(dto.getTenantId(), setting);
return new ApiResult();
}
/**
 * Persists a tenant's page customizations (admin index/left/login/top/
 * welcome and front index/login templates), assembled from the DTO.
 */
@ApiIgnore
@ResponseBody
@RequiresPermissions("gbl_tenant.update")
@PostMapping("/customize/save.json")
public ApiResult saveCustomize(@Valid TenantSaveCustomizeDto dto, BindingResult bindingResult) {
if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
TenantCustomize customize = new TenantCustomize();
customize.setAdminIndex(dto.getAdminIndex());
customize.setAdminLeft(dto.getAdminLeft());
customize.setAdminLogin(dto.getAdminLogin());
customize.setAdminTop(dto.getAdminTop());
customize.setAdminWelcome(dto.getAdminWelcome());
customize.setFrontIndex(dto.getFrontIndex());
customize.setFrontLogin(dto.getFrontLogin());
tenantService.saveCustomize(dto.getTenantId(), customize);
return new ApiResult();
}
/**
 * Persists a batch of tenant settings from a JSON request body.
 */
@ApiIgnore
// Consistency fix: every other JSON endpoint in this controller declares
// @ResponseBody explicitly; without it the returned ApiResult would be
// treated as a view name instead of being serialized.
@ResponseBody
@RequiresPermissions("sys_config.update")
@PostMapping("/settings/save.json")
public ApiResult save(@RequestBody @Valid TenantSaveSettingsDto bean, BindingResult bindingResult){
    if (bindingResult.hasErrors()) throw new IllegalArgumentException(bindingResult);
    settingService.save(bean.getTenantId(), bean.getSettings());
    return new ApiResult();
}
}
|
#! /bin/sh
#
# Push the built image to the registry configured via --registry.

# Resolve the repository top-level directory relative to this script.
topdir=$( cd "$( dirname "$0" )/../" && pwd )
NAME=mars-censere

# Pull in the shared argument parser (sets REGISTRY, TAG, ...).
. "${topdir}/scripts/standard-args.sh"

# BUG FIX: "$@" preserves arguments that contain whitespace; $* re-splits them.
ParseArgs "$@"

if [ -z "$REGISTRY" ]
then
echo "push.sh: Ignoring request to push image, --registry not set"
exit 0
fi

set -x
docker push "${REGISTRY}/${TAG}"
st=$?
# List the image regardless of push outcome, but report the push's status.
docker image ls "${REGISTRY}/${TAG}"
set +x
exit $st
|
<!-- Invokes the page's global loadText() handler (defined elsewhere). -->
<button type="button" onclick="loadText()">Load Text</button>
"""
Write a code that can check if a given word is a palindrome
"""
# Function to check if a given word is a palindrome
def is_palindrome(word):
    """Return True if ``word`` reads the same forwards and backwards.

    Args:
        word: The string to test.

    Returns:
        bool: True when the string equals its reverse (empty strings and
        single characters are trivially palindromes), False otherwise.
    """
    # Idiomatic slice-reversal replaces the manual reversed()/join/if-else
    # chain; the comparison already yields the required bool.
    return word == word[::-1]
# Quick manual check: "radar" reads the same in both directions.
word = "radar"
result = is_palindrome(word)
print(result)
#!/bin/bash
#
# Oracle Linux DTrace.
# Copyright (c) 2006, Oracle and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown at
# http://oss.oracle.com/licenses/upl.
#
##
#
# ASSERTION:
# Testing -f option with an invalid function name.
#
# SECTION: dtrace Utility/-f Option
#
##
# Require exactly one argument: the path of the dtrace binary under test.
if [ $# != 1 ]; then
echo expected one argument: '<'dtrace-path'>'
exit 2
fi
dtrace=$1

# "read:" has an empty function part, which is an invalid -f argument;
# dtrace is expected to reject it with exit status 1.
# NOTE(review): $dt_flags and $tst are provided by the surrounding test
# harness, not by this script.
$dtrace $dt_flags -f read:
if [ $? -ne 1 ]; then
echo $tst: dtrace failed
exit 1
fi
exit 0
|
import { createIframe, isObject, objectToUrlQuery, isInQuickApp, createQuickAppScript } from './utils';
/**
 * Helper for launching pages of a QuickApp from an ordinary web page.
 *
 * Two launch mechanisms are supported and, when no explicit openType is
 * given, both are attempted:
 *  - the vendor "appRouter" URL-jump API (window.appRouter), and
 *  - a hap:// deeplink opened through a hidden iframe.
 */
export default class QuickApp {
  constructor(config = {
    packageName: 'com.linksure.tt.quickapp',
    sourceName: ''
  }) {
    this.config = config;
    // Injects the vendor script that eventually defines window.appRouter.
    createQuickAppScript();
  }

  // Non-fatal error reporting: launching a QuickApp must never break the host page.
  showError (message) {
    console.warn('QuickApp error: ', message);
  }

  // Debug logging with an optional label.
  showInfo (message, name) {
    name = name || '';
    console.log('QuickApp debug: ' + name, message);
  }

  /**
   * Open a QuickApp page.
   *
   * @param {Object} options { path, params?, packageName?, openType?, confirm? };
   *   path must start with '/'; openType may be 'url', 'deeplink' or absent
   *   (absent tries both mechanisms).
   * @returns {boolean|undefined} true when a launch was attempted,
   *   undefined when validation failed (an error was logged).
   */
  open (options) {
    if (!isObject(options)) {
      this.showError('options must be a object!');
      return;
    }
    if (!options.path) {
      this.showError('options.path is required fields!');
      return;
    }
    if (!/^\//.test(options.path) ) {
      this.showError('options.path must start with /!');
      return;
    }
    if (options.params && !isObject(options.params)) {
      this.showError('options.params must be a object!');
      return;
    }
    if (!isInQuickApp) {
      this.showError('请在快应用外的网页中使用!');
      return;
    }
    // Fall back to the package configured at construction time.
    if (!options.packageName) {
      options.packageName = this.config.packageName;
    }
    if (options.openType && options.openType === 'url') {
      this.openAppRouter(options);
    } else if (options.openType && options.openType === 'deeplink') {
      this.openDeepLink(options);
    } else {
      this.openAppRouter(options);
      this.openDeepLink(options);
    }
    return true;
  }

  /**
   * Launch via the appRouter URL-jump API, polling every 600 ms until the
   * vendor script has defined window.appRouter.
   * https://doc.quickapp.cn/tutorial/platform/url-jump-configuration.html
   */
  openAppRouter (options) {
    const isRun = this.openAppRouterOnce(options);
    let stopIntervalId;
    // BUG FIX: this was a plain `function`, so `this` was undefined inside
    // the setTimeout callback and `this.openAppRouterOnce` threw. An arrow
    // function captures the instance correctly.
    const loopInterval = (optionsLoop) => {
      stopIntervalId = setTimeout( () => {
        if ( typeof window.appRouter !== 'undefined' ) {
          clearTimeout(stopIntervalId);
          this.openAppRouterOnce(optionsLoop);
        } else {
          loopInterval(optionsLoop);
        }
      }, 600);
    };
    if (!isRun) {
      loopInterval(options);
    }
  }

  // Invoke window.appRouter once if it is available; returns whether it ran.
  openAppRouterOnce(options) {
    if ( typeof window.appRouter !== 'undefined' ) {
      this.showInfo(options, 'URL appRouter 方式跳转: ');
      if (options.confirm) {
        window.appRouter(options.packageName, options.path, options.params, options.confirm);
      } else {
        window.appRouter(options.packageName, options.path, options.params);
      }
      return true;
    }
    return false;
  }

  /**
   * Launch via a hap:// deeplink loaded in a hidden iframe.
   * https://doc.quickapp.cn/tutorial/platform/deeplink.html
   */
  openDeepLink (options) {
    let deepLinkUrl = 'hap://app/' + options.packageName + options.path + '?' + objectToUrlQuery(options.params);
    this.showInfo(deepLinkUrl, 'Deeplink hap 方式跳转: ');
    createIframe(deepLinkUrl);
  }
}
|
<filename>testdirectory/options.go
package testdirectory
import (
"github.com/hashicorp/go-hclog"
"github.com/jimlambrt/gldap"
)
// Option defines a common functional options type which can be used in a
// variadic parameter pattern.
type Option func(interface{})

// getOpts gets the defaults and applies the opt overrides passed in.
// When t implements HelperT it is marked as a test helper so failures are
// reported at the caller's line.
func getOpts(t TestingT, opt ...Option) options {
if v, ok := interface{}(t).(HelperT); ok {
v.Helper()
}
opts := defaults(t)
applyOpts(&opts, opt...)
return opts
}
// applyOpts takes a pointer to the options struct as a set of default options
// and applies the slice of opts as overrides. Nil entries are skipped so
// callers may pass optional options without pre-filtering.
func applyOpts(opts interface{}, opt ...Option) {
	for _, override := range opt {
		if override != nil {
			override(opts)
		}
	}
}
// options are the set of available options for test functions
type options struct {
withPort int // listener port; 0 picks a random free port (see WithPort)
withLogger hclog.Logger // logger used by the directory
withNoTLS bool // serve without TLS (see WithNoTLS)
withMTLS bool // require mutual TLS (see WithMTLS)
withDisablePanicRecovery bool // disable panic recovery (see WithDisablePanicRecovery)
withDefaults *Defaults // entry/attribute/DN defaults (see WithDefaults)
withMembersOf []string // memberOf attribute values for user entries
withTokenGroupSIDs [][]byte // tokenGroups SID attribute values for user entries
withFirst bool // only return the first match (see withFirst)
}
// defaults builds the baseline options: an error-level logger plus the
// standard user/group attribute names and search base DNs.
func defaults(t TestingT) options {
if v, ok := interface{}(t).(HelperT); ok {
v.Helper()
}
// Only log errors by default so test output stays quiet.
debugLogger := hclog.New(&hclog.LoggerOptions{
Name: "testdirectory-default-logger",
Level: hclog.Error,
})
return options{
withLogger: debugLogger,
withDefaults: &Defaults{
UserAttr: DefaultUserAttr,
GroupAttr: DefaultGroupAttr,
UserDN: DefaultUserDN,
GroupDN: DefaultGroupDN,
},
}
}
// Defaults define a type for composing all the defaults for Directory.Start(...)
type Defaults struct {
// UserAttr is the attribute name used to identify users.
UserAttr string
// GroupAttr is the attribute name used to identify groups.
GroupAttr string
// Users configures the user entries which are empty by default
Users []*gldap.Entry
// Groups configures the group entries which are empty by default
Groups []*gldap.Entry
// TokenGroups configures the tokenGroup entries which are empty by default
TokenGroups map[string][]*gldap.Entry
// UserDN is the base distinguished name to use when searching for users
// which is "ou=people,dc=example,dc=org" by default
UserDN string
// GroupDN is the base distinguished name to use when searching for groups
// which is "ou=groups,dc=example,dc=org" by default
GroupDN string
// AllowAnonymousBind determines if anon binds are allowed
AllowAnonymousBind bool
// UPNDomain is the userPrincipalName domain, which enables a
// userPrincipalDomain login with [username]@UPNDomain (optional)
UPNDomain string
}
// WithDefaults provides an option to provide a set of defaults to
// Directory.Start(...) which make it much more composable.
//
// Only non-zero fields of defaults override the baseline values; in
// particular AllowAnonymousBind can only be switched on here, never back
// off, and empty strings/nil slices leave the existing default in place.
func WithDefaults(t TestingT, defaults *Defaults) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
if defaults != nil {
if defaults.AllowAnonymousBind {
o.withDefaults.AllowAnonymousBind = true
}
if defaults.Users != nil {
o.withDefaults.Users = defaults.Users
}
if defaults.Groups != nil {
o.withDefaults.Groups = defaults.Groups
}
if defaults.UserDN != "" {
o.withDefaults.UserDN = defaults.UserDN
}
if defaults.GroupDN != "" {
o.withDefaults.GroupDN = defaults.GroupDN
}
if len(defaults.TokenGroups) > 0 {
o.withDefaults.TokenGroups = defaults.TokenGroups
}
if defaults.UserAttr != "" {
o.withDefaults.UserAttr = defaults.UserAttr
}
if defaults.GroupAttr != "" {
o.withDefaults.GroupAttr = defaults.GroupAttr
}
if defaults.UPNDomain != "" {
o.withDefaults.UPNDomain = defaults.UPNDomain
}
}
}
}
}
// WithMTLS provides the option to use mTLS for the directory.
func WithMTLS(t TestingT) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
o.withMTLS = true
}
}
}

// WithNoTLS provides the option to not use TLS for the directory.
func WithNoTLS(t TestingT) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
o.withNoTLS = true
}
}
}

// WithLogger provides the optional logger for the directory.
func WithLogger(t TestingT, l hclog.Logger) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
o.withLogger = l
}
}
}

// WithPort provides an optional port for the directory. 0 causes a
// started server with a random port. Any other value returns a started server
// on that port.
func WithPort(t TestingT, port int) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
o.withPort = port
}
}
}

// withFirst provides the option to only find the first match.
func withFirst(t TestingT) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
o.withFirst = true
}
}
}

// WithMembersOf specifies optional memberOf attributes for user
// entries
func WithMembersOf(t TestingT, membersOf ...string) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
o.withMembersOf = membersOf
}
}
}

// WithTokenGroups specifies optional test tokenGroups SID attributes for user
// entries
func WithTokenGroups(t TestingT, tokenGroupSID ...[]byte) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
o.withTokenGroupSIDs = tokenGroupSID
}
}
}

// WithDisablePanicRecovery provides the option to disable the directory's
// panic recovery, letting handler panics propagate (useful in tests).
func WithDisablePanicRecovery(t TestingT, disable bool) Option {
return func(o interface{}) {
if o, ok := o.(*options); ok {
o.withDisablePanicRecovery = disable
}
}
}
|
#!/usr/bin/env bash
set -e

# Build the distributable and create the npm package tarball.
npm run dist
npm run pack

# Install the freshly packed tarball into a throw-away directory and compile
# the smoke test against it, verifying the published package actually works.
TMPDIR="/tmp/npm-pack-testing.$$"
mkdir "$TMPDIR"
mv *-*.*.*.tgz "$TMPDIR"
cp tests/fixtures/smoke-testing.ts "$TMPDIR"

cd "$TMPDIR"
npm init -y
npm install *-*.*.*.tgz \
  @types/quick-lru \
  @types/node \
  @types/normalize-package-data \
  file-box \
  memory-card \
  typescript

# BUG FIX: a trailing backslash after "typescript" spliced the tsc invocation
# into the `npm install` argument list, so tsc never ran as its own command.
./node_modules/.bin/tsc \
  --esModuleInterop \
  --lib esnext \
  --noEmitOnError \
  --noImplicitAny \
  --target es6 \
  --module commonjs \
  smoke-testing.ts

node smoke-testing.js
<filename>arrows/core/triangle_scan_iterator.h
/*ckwg +29
* Copyright 2018 by Kitware, SAS.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither name of Kitware, Inc. nor the names of any contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* \file
* \brief Header for kwiver::arrows::triangle_scan_iterator
*/
#ifndef KWIVER_ARROWS_CORE_TRIANGLE_SCAN_ITERATOR_H
#define KWIVER_ARROWS_CORE_TRIANGLE_SCAN_ITERATOR_H
#include <arrows/core/kwiver_algo_core_export.h>
#include <vital/types/vector.h>
namespace kwiver {
namespace arrows {
namespace core {
/// Provides access to the pixels of a triangle using scanlines
/// Provides access to the pixels of a triangle using scanlines
///
/// Typical usage: call next() repeatedly; while it returns true the current
/// row scan_y() spans pixels start_x() .. end_x().
class KWIVER_ALGO_CORE_EXPORT triangle_scan_iterator
{
public:
/// Construct from the three triangle vertices.
/// NOTE(review): the vertices are held by reference, so pt1/pt2/pt3 must
/// outlive the iterator — confirm callers guarantee this.
triangle_scan_iterator(vital::vector_2d const & pt1,
vital::vector_2d const & pt2,
vital::vector_2d const & pt3) :
a(pt1), b(pt2), c(pt3)
{
reset();
}
/// Reset the iterator state
void reset();
/// Update the iterator to the next scanline
bool next();
/// Current scanline index
int scan_y() const { return scan_y_; }
/// Index of the first pixel of the current scanline
int start_x() const { return start_x_; }
/// Index of the last pixel of the current scanline
int end_x() const { return end_x_; }
private:
// Vertex references (not owned by this class).
vital::vector_2d const &a, &b, &c;
// NOTE(review): the semantics of g, (x0,y0)-(x1,y1) and data are defined by
// reset()/next() in the implementation file — document there, not here.
vital::vector_2d g;
int scan_y_;
int start_x_, end_x_;
int x0, y0, x1, y1;
double data[3][3];
};
}
}
}
#endif // KWIVER_ARROWS_CORE_TRIANGLE_SCAN_ITERATOR_H
|
#!/usr/bin/env bash
set -e

echo "--- yarn"
# Install exactly what yarn.lock pins; generous timeout for slow CI mirrors.
yarn --frozen-lockfile --network-timeout 60000

# Run each requested package script in order; set -e aborts on the first
# failure. BUG FIX: "$cmd" is now quoted so script names containing spaces
# are passed through intact instead of being word-split.
for cmd in "$@"
do
  echo "--- $cmd"
  yarn -s run "$cmd"
done
<reponame>maps90/hanu<filename>conversation_test.go
package hanu
import (
"testing"
"golang.org/x/net/websocket"
"github.com/sbstjn/allot"
)
// ConnectionMock is a stand-in for the conversation's connection that
// silently discards every outgoing message.
type ConnectionMock struct{}

// Send implements the connection interface and always reports success.
func (c ConnectionMock) Send(ws *websocket.Conn, v interface{}) (err error) {
return nil
}
// TestConversation verifies that a named command parameter can be read back
// from a conversation built from a matched message.
func TestConversation(t *testing.T) {
command := allot.New("cmd test <param>")
msg := Message{
ID: 0,
}
msg.SetText("cmd test value")
// Match the message text against the command definition to capture <param>.
match, _ := command.Match(msg.Text())
conv := NewConversation(match, msg, nil)
str, err := conv.String("param")
if err != nil {
t.Errorf("Failed to get correct value for param \"param\"")
}
if str != "value" {
t.Errorf("Failed to get correct value for param \"param\": %s != %s", str, "value")
}
// Reply with a nil connection must not panic.
conv.Reply("example")
}
// TestConnect verifies that the conversation's connection can be swapped
// for a mock and that replying through it does not panic.
func TestConnect(t *testing.T) {
cmd := allot.New("cmd test <param>")
msg := Message{
ID: 0,
}
msg.SetText("cmd test value")
match, _ := cmd.Match(msg.Text())
conv := NewConversation(match, msg, &websocket.Conn{})
// Replace the real websocket connection with the discarding mock.
conv.SetConnection(ConnectionMock{})
conv.Reply("example")
}
|
import React, {Component} from "react"
import PropTypes from "prop-types"
import {GridList, GridTile} from "material-ui/GridList"
import RaisedButton from "material-ui/RaisedButton"
import FloatingActionButton from "material-ui/FloatingActionButton"
import ContentAdd from "material-ui/svg-icons/content/add"
import DialogLanguageCreate from "../containers/dialogLanguageCreate"
import TileLanguage from "../containers/tileLanguage"
// Shared inline styles for the Languages grid screen.
const styles = {
  root: {
    display: "flex",
    flexWrap: "wrap",
    // BUG FIX: this key was misspelled "justifyContext", so the intended
    // space-around distribution was silently ignored.
    justifyContent: "space-around"
  },
  gridList: {
    margin: 0
  },
  raisedButton: {
    marginLeft: 12
  },
  floatingActionButton: {
    // Keep the add button pinned to the bottom-right corner of the viewport.
    position: "fixed",
    bottom: 32,
    right: 48
  }
}
class Languages extends Component {
constructor(props) {
super(props)
this.state = {
dialogShow: false
}
this.dialogToggle = this.dialogToggle.bind(this)
this.handleDelete = this.handleDelete.bind(this)
}
componentWillMount() {
const {
languagesGet
} = this.props
languagesGet()
}
dialogToggle() {
const {
dialogShow
} = this.state
this.setState({dialogShow: !dialogShow})
}
handleDelete() {
const {
languageIDsSelected,
languagesDelete
} = this.props
let URL = new URL(window.location.href)
languageIDsSelected.forEach(([i, v]) => {
if (i === 0)
URL.searchParams.set("IDs", v)
URL.searchParams.append("IDs", v)
})
languagesDelete({
URL,
data: {
value: languageIDsSelected
}
})
}
tilesLanguage(languages) {
const {
contexts,
languageIDsSelected
} = this.props
return Object.values(languages).map(v => <TileLanguage
key={v.ID}
language={v}
title={contexts[v.contextID].value || v.ID}
isChecked={languageIDsSelected.indexOf(v.ID) !== -1}
/>)
}
render() {
const {
root,
gridList,
raisedButton,
floatingActionButton
} = styles
const {
dialogShow
} = this.state
const {
contexts,
languages,
languageIDsSelected
} = this.props
return (
<div style={root}>
<GridList
cols={4}
cellHeight="auto"
style={gridList}
>
<GridTile cols={1} />
<GridTile cols={2}>
{
Object.keys(languages).length ?
<GridList
style={gridList}
cols={4}
padding={10}
cellHeight={333}
>
{this.tilesLanguage(languages)}
</GridList> :
<h3>{contexts["<KEY>"].value || "No Content"}</h3>
}
{
languageIDsSelected.length > 0 &&
<RaisedButton
label={contexts["aghkZXZ-Tm9uZXIT<KEY>IG<KEY>"].value || "Delete"}
style={raisedButton}
secondary={true}
onTouchTap={this.handleDelete}
/>
}
</GridTile>
<GridTile cols={1} />
</GridList>
{
!dialogShow &&
<FloatingActionButton
secondary={true}
style={floatingActionButton}
onTouchTap={this.dialogToggle}
>
<ContentAdd />
</FloatingActionButton>
}
<DialogLanguageCreate
contexts={contexts}
title={contexts["aghkZXZ-Tm9uZXIfCxIHQ29udGVudCISQWRkIEEgTmV3IExhbmd1YWdlDA"].value || "Add A New Language"}
dialogShow={dialogShow}
dialogToggle={this.dialogToggle}
/>
</div>
)
}
}
// Default for contexts guards early renders before the data arrives.
Languages.defaultProps = {
contexts: {}
}
Languages.propTypes = {
contexts: PropTypes.object.isRequired,
languages: PropTypes.object,
languageIDsSelected: PropTypes.array,
languagesGet: PropTypes.func.isRequired,
languagesDelete: PropTypes.func.isRequired
}
// NOTE(review): muiName appears intended to let material-ui treat this
// component like a GridList — confirm it is still needed.
Languages.muiName = "GridList"
export default Languages
|
<reponame>ghsecuritylab/bk7231_rtt_sdk<filename>test/test_pm.c
#include <rthw.h>
#include <rtthread.h>
#include <stdint.h>
#include <stdlib.h>
/* Externals implemented by the Beken WLAN power-management driver. */
extern int beken_pm_level;
extern int bk_wlan_dtim_rf_ps_mode_enable(void );
extern int bk_wlan_dtim_rf_ps_mode_need_disable(void);
extern int bk_wlan_mcu_ps_mode_enable(void);
extern int bk_wlan_mcu_ps_mode_disable(void);
extern int bk_wlan_dtim_with_normal_open(void);
extern int bk_wlan_dtim_with_normal_close(void);
extern void power_save_set_linger_time(uint32_t data_wakeup_time);

/*
 * Shell command "pm_level <n>": clamps the requested level to 99 and asks
 * the STA wlan device to enter that power-save level.
 * Returns 0 on success, -1 on bad argument count.
 */
static int pm_level(int argc, char **argv)
{
uint32_t level;
/* Exactly one argument (the level) is required. */
if(argc != 2)
{
rt_kprintf("input argc is err!\n");
return -1;
}
/* NOTE(review): atoi() of a negative string wraps to a huge uint32_t and
 * is then clamped to 99 — confirm that is acceptable. */
level = atoi(argv[1]);
if(level > 99)
{
level = 99;
}
rt_kprintf("power_save_level: %d\n", level);
//power_save_set_linger_time(level);
//beken_pm_level = level;
{
struct rt_wlan_device *sta_device = (struct rt_wlan_device *)rt_device_find(WIFI_DEVICE_STA_NAME);
if (NULL != sta_device)
{
rt_wlan_enter_powersave(sta_device, level);
rt_kprintf("rt_wlan_enter_powersave switch to %d\n", level);
}
}
return 0;
}
#ifdef RT_USING_FINSH
#include <finsh.h>
/* Register pm_level with the msh shell; the RF/MCU power-save toggles are
 * only exported when the corresponding features are compiled in. */
MSH_CMD_EXPORT(pm_level, pm_level 1);
#if CFG_USE_STA_PS
FINSH_FUNCTION_EXPORT_ALIAS(bk_wlan_dtim_rf_ps_mode_enable, __cmd_rf_ps, bk_wlan_dtim_rf_ps_mode_enable);
#endif
//FINSH_FUNCTION_EXPORT_ALIAS(bk_wlan_dtim_rf_ps_mode_need_disable, __cmd_rf_ps_disable, bk_wlan_dtim_rf_ps_mode_need_disable);
#if CFG_USE_MCU_PS
FINSH_FUNCTION_EXPORT_ALIAS(bk_wlan_mcu_ps_mode_enable, __cmd_mcu_ps, bk_wlan_mcu_ps_mode_enable);
FINSH_FUNCTION_EXPORT_ALIAS(bk_wlan_mcu_ps_mode_disable, __cmd_mcu_ps_disable, bk_wlan_mcu_ps_mode_disable);
#endif
//FINSH_FUNCTION_EXPORT_ALIAS(bk_wlan_dtim_with_normal_open, __cmd_dtim_normal_open, bk_wlan_dtim_with_normal_open);
//FINSH_FUNCTION_EXPORT_ALIAS(bk_wlan_dtim_with_normal_close, __cmd_dtim_normal_close, bk_wlan_dtim_with_normal_close);
#endif /* RT_USING_FINSH */
|
#!/bin/bash
echo "Executing ${BASH_SOURCE[0]}..."

# Target build directory is the first (required) argument.
DIR=$1
if [ -z "${DIR}" ]; then echo "No directory provided!"; exit 1; fi
# Resolve a path that is not a directory yet (e.g. relative) to an absolute
# one; realpath produces empty output on failure, caught by the next check.
# BUG FIX: the realpath argument is now quoted so paths with spaces survive.
if [ ! -d "${DIR}" ]; then DIR=$(realpath "${DIR}"); fi
if [ -z "${DIR}" ]; then echo "No directory provided!"; exit 1; fi

set -e
echo "Directory: ${DIR}..."
cd "${DIR}"

echo "Generating coverage..."
lcov --directory . --capture --output-file coverage.info
echo "Removing coverage..."
# Drop system headers, local caches and vendored externals from the report.
lcov --remove coverage.info '/usr/*' "${HOME}"'/.cache/*' '*/external/*' --output-file coverage.info
echo "Listing coverage..."
lcov --list coverage.info
echo "Submitting coverage..."
# Upload is best-effort: CI should not fail just because codecov is down.
bash <(curl -s https://codecov.io/bash) -f coverage.info || echo "Codecov did not collect coverage reports"
<reponame>dblabuofi/Mindreader
// Controller for the editor view: exposes submit(), which currently just
// logs the user's code to the console.
angular.module("myApp").controller('editerController', function ($scope, $timeout, url) {
// NOTE(review): $timeout and url are injected but unused here — confirm
// they are needed before removing.
$scope.submit = function() {
console.log($scope.userCode);
}
});
|
<gh_stars>10-100
import {
DEFAULT_LIST_PAGE_INDEX,
DEFAULT_LIST_PAGE_SIZE,
LOGOUT,
getMatchOtherDriverListTypes,
matchesOtherDriverListDownloadLinkShowHideTypes,
matchesOtherDriverGetHideListTypes,
matchesOtherDriverListSetInfoType,
matchesOtherDriverListHideExpiredType,
matchesOtherDriverListHideConfirmedType
} from '../actions/types';
const matchesOtherDriverInfo = (
state = {
showMatchList: false,
matches: [],
listPageIndex: DEFAULT_LIST_PAGE_INDEX,
listPageSize: DEFAULT_LIST_PAGE_SIZE,
hideExpiredCanceled: false,
hideConfirmed: false,
showCurrentMatchDetails: false,
currentMatch: {},
showDownloadLink: false,
urlDownloadBlob: ''
},
action
) => {
switch (action.type) {
case LOGOUT:
return {
...state,
showMatchList: false,
matches: [],
listPageIndex: DEFAULT_LIST_PAGE_INDEX,
showCurrentMatchDetails: false,
currentMatch: {}
};
case getMatchOtherDriverListTypes.success: {
const { data: matches } = action.payload;
return { ...state, showMatchList: true, matches };
}
case matchesOtherDriverGetHideListTypes.hide:
return { ...state, showMatchList: false, matches: [] };
case matchesOtherDriverListDownloadLinkShowHideTypes.show: {
const { urlDownloadBlob } = action.payload;
return { ...state, showDownloadLink: true, urlDownloadBlob };
}
case matchesOtherDriverListDownloadLinkShowHideTypes.hide:
return { ...state, showDownloadLink: false, urlDownloadBlob: '' };
case matchesOtherDriverListSetInfoType:
return {
...state,
listPageIndex: action.payload.listPageIndex,
listPageSize: action.payload.listPageSize
};
case matchesOtherDriverListHideExpiredType:
return { ...state, hideExpiredCanceled: !state.hideExpiredCanceled };
case matchesOtherDriverListHideConfirmedType:
return { ...state, hideConfirmed: !state.hideConfirmed };
default:
return state;
}
};
export default matchesOtherDriverInfo;
|
// Code generated by csi-proxy-api-gen. DO NOT EDIT.
package v1beta1
import (
unsafe "unsafe"
v1beta1 "github.com/kubernetes-csi/csi-proxy/client/api/volume/v1beta1"
impl "github.com/kubernetes-csi/csi-proxy/pkg/server/volume/impl"
)
// NOTE(review): generated by csi-proxy-api-gen — do not hand-edit; change
// the generator inputs and regenerate instead.

// --- DismountVolume request/response conversions ---
func autoConvert_v1beta1_DismountVolumeRequest_To_impl_DismountVolumeRequest(in *v1beta1.DismountVolumeRequest, out *impl.DismountVolumeRequest) error {
out.VolumeId = in.VolumeId
out.Path = in.Path
return nil
}

// Convert_v1beta1_DismountVolumeRequest_To_impl_DismountVolumeRequest is an autogenerated conversion function.
func Convert_v1beta1_DismountVolumeRequest_To_impl_DismountVolumeRequest(in *v1beta1.DismountVolumeRequest, out *impl.DismountVolumeRequest) error {
return autoConvert_v1beta1_DismountVolumeRequest_To_impl_DismountVolumeRequest(in, out)
}

func autoConvert_impl_DismountVolumeRequest_To_v1beta1_DismountVolumeRequest(in *impl.DismountVolumeRequest, out *v1beta1.DismountVolumeRequest) error {
out.VolumeId = in.VolumeId
out.Path = in.Path
return nil
}

// Convert_impl_DismountVolumeRequest_To_v1beta1_DismountVolumeRequest is an autogenerated conversion function.
func Convert_impl_DismountVolumeRequest_To_v1beta1_DismountVolumeRequest(in *impl.DismountVolumeRequest, out *v1beta1.DismountVolumeRequest) error {
return autoConvert_impl_DismountVolumeRequest_To_v1beta1_DismountVolumeRequest(in, out)
}

func autoConvert_v1beta1_DismountVolumeResponse_To_impl_DismountVolumeResponse(in *v1beta1.DismountVolumeResponse, out *impl.DismountVolumeResponse) error {
return nil
}

// Convert_v1beta1_DismountVolumeResponse_To_impl_DismountVolumeResponse is an autogenerated conversion function.
func Convert_v1beta1_DismountVolumeResponse_To_impl_DismountVolumeResponse(in *v1beta1.DismountVolumeResponse, out *impl.DismountVolumeResponse) error {
return autoConvert_v1beta1_DismountVolumeResponse_To_impl_DismountVolumeResponse(in, out)
}

func autoConvert_impl_DismountVolumeResponse_To_v1beta1_DismountVolumeResponse(in *impl.DismountVolumeResponse, out *v1beta1.DismountVolumeResponse) error {
return nil
}

// Convert_impl_DismountVolumeResponse_To_v1beta1_DismountVolumeResponse is an autogenerated conversion function.
func Convert_impl_DismountVolumeResponse_To_v1beta1_DismountVolumeResponse(in *impl.DismountVolumeResponse, out *v1beta1.DismountVolumeResponse) error {
return autoConvert_impl_DismountVolumeResponse_To_v1beta1_DismountVolumeResponse(in, out)
}

// --- FormatVolume request/response conversions ---
func autoConvert_v1beta1_FormatVolumeRequest_To_impl_FormatVolumeRequest(in *v1beta1.FormatVolumeRequest, out *impl.FormatVolumeRequest) error {
out.VolumeId = in.VolumeId
return nil
}

// Convert_v1beta1_FormatVolumeRequest_To_impl_FormatVolumeRequest is an autogenerated conversion function.
func Convert_v1beta1_FormatVolumeRequest_To_impl_FormatVolumeRequest(in *v1beta1.FormatVolumeRequest, out *impl.FormatVolumeRequest) error {
return autoConvert_v1beta1_FormatVolumeRequest_To_impl_FormatVolumeRequest(in, out)
}

func autoConvert_impl_FormatVolumeRequest_To_v1beta1_FormatVolumeRequest(in *impl.FormatVolumeRequest, out *v1beta1.FormatVolumeRequest) error {
out.VolumeId = in.VolumeId
return nil
}

// Convert_impl_FormatVolumeRequest_To_v1beta1_FormatVolumeRequest is an autogenerated conversion function.
func Convert_impl_FormatVolumeRequest_To_v1beta1_FormatVolumeRequest(in *impl.FormatVolumeRequest, out *v1beta1.FormatVolumeRequest) error {
return autoConvert_impl_FormatVolumeRequest_To_v1beta1_FormatVolumeRequest(in, out)
}

func autoConvert_v1beta1_FormatVolumeResponse_To_impl_FormatVolumeResponse(in *v1beta1.FormatVolumeResponse, out *impl.FormatVolumeResponse) error {
return nil
}

// Convert_v1beta1_FormatVolumeResponse_To_impl_FormatVolumeResponse is an autogenerated conversion function.
func Convert_v1beta1_FormatVolumeResponse_To_impl_FormatVolumeResponse(in *v1beta1.FormatVolumeResponse, out *impl.FormatVolumeResponse) error {
return autoConvert_v1beta1_FormatVolumeResponse_To_impl_FormatVolumeResponse(in, out)
}

func autoConvert_impl_FormatVolumeResponse_To_v1beta1_FormatVolumeResponse(in *impl.FormatVolumeResponse, out *v1beta1.FormatVolumeResponse) error {
return nil
}

// Convert_impl_FormatVolumeResponse_To_v1beta1_FormatVolumeResponse is an autogenerated conversion function.
func Convert_impl_FormatVolumeResponse_To_v1beta1_FormatVolumeResponse(in *impl.FormatVolumeResponse, out *v1beta1.FormatVolumeResponse) error {
return autoConvert_impl_FormatVolumeResponse_To_v1beta1_FormatVolumeResponse(in, out)
}

// --- IsVolumeFormatted request/response conversions ---
func autoConvert_v1beta1_IsVolumeFormattedRequest_To_impl_IsVolumeFormattedRequest(in *v1beta1.IsVolumeFormattedRequest, out *impl.IsVolumeFormattedRequest) error {
out.VolumeId = in.VolumeId
return nil
}

// Convert_v1beta1_IsVolumeFormattedRequest_To_impl_IsVolumeFormattedRequest is an autogenerated conversion function.
func Convert_v1beta1_IsVolumeFormattedRequest_To_impl_IsVolumeFormattedRequest(in *v1beta1.IsVolumeFormattedRequest, out *impl.IsVolumeFormattedRequest) error {
return autoConvert_v1beta1_IsVolumeFormattedRequest_To_impl_IsVolumeFormattedRequest(in, out)
}

func autoConvert_impl_IsVolumeFormattedRequest_To_v1beta1_IsVolumeFormattedRequest(in *impl.IsVolumeFormattedRequest, out *v1beta1.IsVolumeFormattedRequest) error {
out.VolumeId = in.VolumeId
return nil
}

// Convert_impl_IsVolumeFormattedRequest_To_v1beta1_IsVolumeFormattedRequest is an autogenerated conversion function.
func Convert_impl_IsVolumeFormattedRequest_To_v1beta1_IsVolumeFormattedRequest(in *impl.IsVolumeFormattedRequest, out *v1beta1.IsVolumeFormattedRequest) error {
return autoConvert_impl_IsVolumeFormattedRequest_To_v1beta1_IsVolumeFormattedRequest(in, out)
}

func autoConvert_v1beta1_IsVolumeFormattedResponse_To_impl_IsVolumeFormattedResponse(in *v1beta1.IsVolumeFormattedResponse, out *impl.IsVolumeFormattedResponse) error {
out.Formatted = in.Formatted
return nil
}

// Convert_v1beta1_IsVolumeFormattedResponse_To_impl_IsVolumeFormattedResponse is an autogenerated conversion function.
func Convert_v1beta1_IsVolumeFormattedResponse_To_impl_IsVolumeFormattedResponse(in *v1beta1.IsVolumeFormattedResponse, out *impl.IsVolumeFormattedResponse) error {
return autoConvert_v1beta1_IsVolumeFormattedResponse_To_impl_IsVolumeFormattedResponse(in, out)
}

func autoConvert_impl_IsVolumeFormattedResponse_To_v1beta1_IsVolumeFormattedResponse(in *impl.IsVolumeFormattedResponse, out *v1beta1.IsVolumeFormattedResponse) error {
out.Formatted = in.Formatted
return nil
}

// Convert_impl_IsVolumeFormattedResponse_To_v1beta1_IsVolumeFormattedResponse is an autogenerated conversion function.
func Convert_impl_IsVolumeFormattedResponse_To_v1beta1_IsVolumeFormattedResponse(in *impl.IsVolumeFormattedResponse, out *v1beta1.IsVolumeFormattedResponse) error {
return autoConvert_impl_IsVolumeFormattedResponse_To_v1beta1_IsVolumeFormattedResponse(in, out)
}
// ListVolumesOnDisk conversions. The request converters are hand-written
// elsewhere (see markers below); only the response converters are generated.
// detected external conversion function
// Convert_v1beta1_ListVolumesOnDiskRequest_To_impl_ListVolumesOnDiskRequest(in *v1beta1.ListVolumesOnDiskRequest, out *impl.ListVolumesOnDiskRequest) error
// skipping generation of the auto function
// detected external conversion function
// Convert_impl_ListVolumesOnDiskRequest_To_v1beta1_ListVolumesOnDiskRequest(in *impl.ListVolumesOnDiskRequest, out *v1beta1.ListVolumesOnDiskRequest) error
// skipping generation of the auto function
func autoConvert_v1beta1_ListVolumesOnDiskResponse_To_impl_ListVolumesOnDiskResponse(in *v1beta1.ListVolumesOnDiskResponse, out *impl.ListVolumesOnDiskResponse) error {
	// unsafe.Pointer reinterprets the slice header in place (both sides are
	// []string with identical layout), avoiding an element-by-element copy.
	out.VolumeIds = *(*[]string)(unsafe.Pointer(&in.VolumeIds))
	return nil
}

// Convert_v1beta1_ListVolumesOnDiskResponse_To_impl_ListVolumesOnDiskResponse is an autogenerated conversion function.
func Convert_v1beta1_ListVolumesOnDiskResponse_To_impl_ListVolumesOnDiskResponse(in *v1beta1.ListVolumesOnDiskResponse, out *impl.ListVolumesOnDiskResponse) error {
	return autoConvert_v1beta1_ListVolumesOnDiskResponse_To_impl_ListVolumesOnDiskResponse(in, out)
}

func autoConvert_impl_ListVolumesOnDiskResponse_To_v1beta1_ListVolumesOnDiskResponse(in *impl.ListVolumesOnDiskResponse, out *v1beta1.ListVolumesOnDiskResponse) error {
	// Same zero-copy slice reinterpretation as the reverse direction above.
	out.VolumeIds = *(*[]string)(unsafe.Pointer(&in.VolumeIds))
	return nil
}

// Convert_impl_ListVolumesOnDiskResponse_To_v1beta1_ListVolumesOnDiskResponse is an autogenerated conversion function.
func Convert_impl_ListVolumesOnDiskResponse_To_v1beta1_ListVolumesOnDiskResponse(in *impl.ListVolumesOnDiskResponse, out *v1beta1.ListVolumesOnDiskResponse) error {
	return autoConvert_impl_ListVolumesOnDiskResponse_To_v1beta1_ListVolumesOnDiskResponse(in, out)
}
// MountVolume conversions. Request converters are hand-written elsewhere;
// MountVolumeResponse has no fields, so its generated converters are no-ops.
// detected external conversion function
// Convert_v1beta1_MountVolumeRequest_To_impl_MountVolumeRequest(in *v1beta1.MountVolumeRequest, out *impl.MountVolumeRequest) error
// skipping generation of the auto function
// detected external conversion function
// Convert_impl_MountVolumeRequest_To_v1beta1_MountVolumeRequest(in *impl.MountVolumeRequest, out *v1beta1.MountVolumeRequest) error
// skipping generation of the auto function
func autoConvert_v1beta1_MountVolumeResponse_To_impl_MountVolumeResponse(in *v1beta1.MountVolumeResponse, out *impl.MountVolumeResponse) error {
	return nil
}

// Convert_v1beta1_MountVolumeResponse_To_impl_MountVolumeResponse is an autogenerated conversion function.
func Convert_v1beta1_MountVolumeResponse_To_impl_MountVolumeResponse(in *v1beta1.MountVolumeResponse, out *impl.MountVolumeResponse) error {
	return autoConvert_v1beta1_MountVolumeResponse_To_impl_MountVolumeResponse(in, out)
}

func autoConvert_impl_MountVolumeResponse_To_v1beta1_MountVolumeResponse(in *impl.MountVolumeResponse, out *v1beta1.MountVolumeResponse) error {
	return nil
}

// Convert_impl_MountVolumeResponse_To_v1beta1_MountVolumeResponse is an autogenerated conversion function.
func Convert_impl_MountVolumeResponse_To_v1beta1_MountVolumeResponse(in *impl.MountVolumeResponse, out *v1beta1.MountVolumeResponse) error {
	return autoConvert_impl_MountVolumeResponse_To_v1beta1_MountVolumeResponse(in, out)
}
// ResizeVolume conversions. Request converters are hand-written elsewhere;
// ResizeVolumeResponse has no fields, so its generated converters are no-ops.
// detected external conversion function
// Convert_v1beta1_ResizeVolumeRequest_To_impl_ResizeVolumeRequest(in *v1beta1.ResizeVolumeRequest, out *impl.ResizeVolumeRequest) error
// skipping generation of the auto function
// detected external conversion function
// Convert_impl_ResizeVolumeRequest_To_v1beta1_ResizeVolumeRequest(in *impl.ResizeVolumeRequest, out *v1beta1.ResizeVolumeRequest) error
// skipping generation of the auto function
func autoConvert_v1beta1_ResizeVolumeResponse_To_impl_ResizeVolumeResponse(in *v1beta1.ResizeVolumeResponse, out *impl.ResizeVolumeResponse) error {
	return nil
}

// Convert_v1beta1_ResizeVolumeResponse_To_impl_ResizeVolumeResponse is an autogenerated conversion function.
func Convert_v1beta1_ResizeVolumeResponse_To_impl_ResizeVolumeResponse(in *v1beta1.ResizeVolumeResponse, out *impl.ResizeVolumeResponse) error {
	return autoConvert_v1beta1_ResizeVolumeResponse_To_impl_ResizeVolumeResponse(in, out)
}

func autoConvert_impl_ResizeVolumeResponse_To_v1beta1_ResizeVolumeResponse(in *impl.ResizeVolumeResponse, out *v1beta1.ResizeVolumeResponse) error {
	return nil
}

// Convert_impl_ResizeVolumeResponse_To_v1beta1_ResizeVolumeResponse is an autogenerated conversion function.
func Convert_impl_ResizeVolumeResponse_To_v1beta1_ResizeVolumeResponse(in *impl.ResizeVolumeResponse, out *v1beta1.ResizeVolumeResponse) error {
	return autoConvert_impl_ResizeVolumeResponse_To_v1beta1_ResizeVolumeResponse(in, out)
}
// VolumeDiskNumber conversions. Generated code: request copies the volume
// id, response copies the disk number; both directions are symmetric.
func autoConvert_v1beta1_VolumeDiskNumberRequest_To_impl_VolumeDiskNumberRequest(in *v1beta1.VolumeDiskNumberRequest, out *impl.VolumeDiskNumberRequest) error {
	out.VolumeId = in.VolumeId
	return nil
}

// Convert_v1beta1_VolumeDiskNumberRequest_To_impl_VolumeDiskNumberRequest is an autogenerated conversion function.
func Convert_v1beta1_VolumeDiskNumberRequest_To_impl_VolumeDiskNumberRequest(in *v1beta1.VolumeDiskNumberRequest, out *impl.VolumeDiskNumberRequest) error {
	return autoConvert_v1beta1_VolumeDiskNumberRequest_To_impl_VolumeDiskNumberRequest(in, out)
}

func autoConvert_impl_VolumeDiskNumberRequest_To_v1beta1_VolumeDiskNumberRequest(in *impl.VolumeDiskNumberRequest, out *v1beta1.VolumeDiskNumberRequest) error {
	out.VolumeId = in.VolumeId
	return nil
}

// Convert_impl_VolumeDiskNumberRequest_To_v1beta1_VolumeDiskNumberRequest is an autogenerated conversion function.
func Convert_impl_VolumeDiskNumberRequest_To_v1beta1_VolumeDiskNumberRequest(in *impl.VolumeDiskNumberRequest, out *v1beta1.VolumeDiskNumberRequest) error {
	return autoConvert_impl_VolumeDiskNumberRequest_To_v1beta1_VolumeDiskNumberRequest(in, out)
}

func autoConvert_v1beta1_VolumeDiskNumberResponse_To_impl_VolumeDiskNumberResponse(in *v1beta1.VolumeDiskNumberResponse, out *impl.VolumeDiskNumberResponse) error {
	out.DiskNumber = in.DiskNumber
	return nil
}

// Convert_v1beta1_VolumeDiskNumberResponse_To_impl_VolumeDiskNumberResponse is an autogenerated conversion function.
func Convert_v1beta1_VolumeDiskNumberResponse_To_impl_VolumeDiskNumberResponse(in *v1beta1.VolumeDiskNumberResponse, out *impl.VolumeDiskNumberResponse) error {
	return autoConvert_v1beta1_VolumeDiskNumberResponse_To_impl_VolumeDiskNumberResponse(in, out)
}

func autoConvert_impl_VolumeDiskNumberResponse_To_v1beta1_VolumeDiskNumberResponse(in *impl.VolumeDiskNumberResponse, out *v1beta1.VolumeDiskNumberResponse) error {
	out.DiskNumber = in.DiskNumber
	return nil
}

// Convert_impl_VolumeDiskNumberResponse_To_v1beta1_VolumeDiskNumberResponse is an autogenerated conversion function.
func Convert_impl_VolumeDiskNumberResponse_To_v1beta1_VolumeDiskNumberResponse(in *impl.VolumeDiskNumberResponse, out *v1beta1.VolumeDiskNumberResponse) error {
	return autoConvert_impl_VolumeDiskNumberResponse_To_v1beta1_VolumeDiskNumberResponse(in, out)
}
// VolumeIDFromMount conversions. Generated code: request copies the mount
// path, response copies the resolved volume id; both directions symmetric.
func autoConvert_v1beta1_VolumeIDFromMountRequest_To_impl_VolumeIDFromMountRequest(in *v1beta1.VolumeIDFromMountRequest, out *impl.VolumeIDFromMountRequest) error {
	out.Mount = in.Mount
	return nil
}

// Convert_v1beta1_VolumeIDFromMountRequest_To_impl_VolumeIDFromMountRequest is an autogenerated conversion function.
func Convert_v1beta1_VolumeIDFromMountRequest_To_impl_VolumeIDFromMountRequest(in *v1beta1.VolumeIDFromMountRequest, out *impl.VolumeIDFromMountRequest) error {
	return autoConvert_v1beta1_VolumeIDFromMountRequest_To_impl_VolumeIDFromMountRequest(in, out)
}

func autoConvert_impl_VolumeIDFromMountRequest_To_v1beta1_VolumeIDFromMountRequest(in *impl.VolumeIDFromMountRequest, out *v1beta1.VolumeIDFromMountRequest) error {
	out.Mount = in.Mount
	return nil
}

// Convert_impl_VolumeIDFromMountRequest_To_v1beta1_VolumeIDFromMountRequest is an autogenerated conversion function.
func Convert_impl_VolumeIDFromMountRequest_To_v1beta1_VolumeIDFromMountRequest(in *impl.VolumeIDFromMountRequest, out *v1beta1.VolumeIDFromMountRequest) error {
	return autoConvert_impl_VolumeIDFromMountRequest_To_v1beta1_VolumeIDFromMountRequest(in, out)
}

func autoConvert_v1beta1_VolumeIDFromMountResponse_To_impl_VolumeIDFromMountResponse(in *v1beta1.VolumeIDFromMountResponse, out *impl.VolumeIDFromMountResponse) error {
	out.VolumeId = in.VolumeId
	return nil
}

// Convert_v1beta1_VolumeIDFromMountResponse_To_impl_VolumeIDFromMountResponse is an autogenerated conversion function.
func Convert_v1beta1_VolumeIDFromMountResponse_To_impl_VolumeIDFromMountResponse(in *v1beta1.VolumeIDFromMountResponse, out *impl.VolumeIDFromMountResponse) error {
	return autoConvert_v1beta1_VolumeIDFromMountResponse_To_impl_VolumeIDFromMountResponse(in, out)
}

func autoConvert_impl_VolumeIDFromMountResponse_To_v1beta1_VolumeIDFromMountResponse(in *impl.VolumeIDFromMountResponse, out *v1beta1.VolumeIDFromMountResponse) error {
	out.VolumeId = in.VolumeId
	return nil
}

// Convert_impl_VolumeIDFromMountResponse_To_v1beta1_VolumeIDFromMountResponse is an autogenerated conversion function.
func Convert_impl_VolumeIDFromMountResponse_To_v1beta1_VolumeIDFromMountResponse(in *impl.VolumeIDFromMountResponse, out *v1beta1.VolumeIDFromMountResponse) error {
	return autoConvert_impl_VolumeIDFromMountResponse_To_v1beta1_VolumeIDFromMountResponse(in, out)
}
// VolumeStats conversions. Generated code: request copies the volume id,
// response copies total and used sizes; both directions are symmetric.
func autoConvert_v1beta1_VolumeStatsRequest_To_impl_VolumeStatsRequest(in *v1beta1.VolumeStatsRequest, out *impl.VolumeStatsRequest) error {
	out.VolumeId = in.VolumeId
	return nil
}

// Convert_v1beta1_VolumeStatsRequest_To_impl_VolumeStatsRequest is an autogenerated conversion function.
func Convert_v1beta1_VolumeStatsRequest_To_impl_VolumeStatsRequest(in *v1beta1.VolumeStatsRequest, out *impl.VolumeStatsRequest) error {
	return autoConvert_v1beta1_VolumeStatsRequest_To_impl_VolumeStatsRequest(in, out)
}

func autoConvert_impl_VolumeStatsRequest_To_v1beta1_VolumeStatsRequest(in *impl.VolumeStatsRequest, out *v1beta1.VolumeStatsRequest) error {
	out.VolumeId = in.VolumeId
	return nil
}

// Convert_impl_VolumeStatsRequest_To_v1beta1_VolumeStatsRequest is an autogenerated conversion function.
func Convert_impl_VolumeStatsRequest_To_v1beta1_VolumeStatsRequest(in *impl.VolumeStatsRequest, out *v1beta1.VolumeStatsRequest) error {
	return autoConvert_impl_VolumeStatsRequest_To_v1beta1_VolumeStatsRequest(in, out)
}

func autoConvert_v1beta1_VolumeStatsResponse_To_impl_VolumeStatsResponse(in *v1beta1.VolumeStatsResponse, out *impl.VolumeStatsResponse) error {
	out.VolumeSize = in.VolumeSize
	out.VolumeUsedSize = in.VolumeUsedSize
	return nil
}

// Convert_v1beta1_VolumeStatsResponse_To_impl_VolumeStatsResponse is an autogenerated conversion function.
func Convert_v1beta1_VolumeStatsResponse_To_impl_VolumeStatsResponse(in *v1beta1.VolumeStatsResponse, out *impl.VolumeStatsResponse) error {
	return autoConvert_v1beta1_VolumeStatsResponse_To_impl_VolumeStatsResponse(in, out)
}

func autoConvert_impl_VolumeStatsResponse_To_v1beta1_VolumeStatsResponse(in *impl.VolumeStatsResponse, out *v1beta1.VolumeStatsResponse) error {
	out.VolumeSize = in.VolumeSize
	out.VolumeUsedSize = in.VolumeUsedSize
	return nil
}

// Convert_impl_VolumeStatsResponse_To_v1beta1_VolumeStatsResponse is an autogenerated conversion function.
func Convert_impl_VolumeStatsResponse_To_v1beta1_VolumeStatsResponse(in *impl.VolumeStatsResponse, out *v1beta1.VolumeStatsResponse) error {
	return autoConvert_impl_VolumeStatsResponse_To_v1beta1_VolumeStatsResponse(in, out)
}
|
<reponame>zmb3/om
package renderers

// Shell type identifiers used to select the output syntax.
const (
	ShellTypePowershell = "powershell"
	ShellTypePosix      = "posix"
)
|
#!/bin/bash
# sudo chmod +x wordpress.sh
# Deploys WordPress site archives: for each line of lamtim.txt
# ("file|dbName|domainName"), unpack the tarball into the web root, install
# wp-config from a master template, (re)create and import the database,
# rewrite site URLs, set up an nginx vhost, and add a /etc/hosts entry.
echo "START:"
IP='127.0.0.1'
domain=YOURDOMAIN
targetDirectory='/var/www'
targetNginx='/etc/nginx'
DBHOST=YOURDBHOST
DBUSER=YOURDBUSER
DBPASS=YOURDBPASS
# Split each manifest line on '|' directly instead of echo|sed into an array
# (the old form was subject to word splitting and globbing).
while IFS='|' read -r file dbName domainName; do
  location="${targetDirectory}/${domainName}.${domain}"
  # Unzip the archive and move the site into the target directory.
  echo "${file} START:------- unzipping"
  tar -zxf "${file}" --overwrite
  cp -R "${dbName}.com" "${targetDirectory}/${domainName}.${domain}"
  rm -rf "${targetDirectory}/${dbName}.com"
  mv "${targetDirectory}/${domainName}.${domain}/.user.ini" "${targetDirectory}/${domainName}.${domain}/.user.ini.backup"
  rm -rf "${dbName}.com"
  echo "${file} DONE:-------- unzipping"
  echo "${file} START:------- copy wp-config.php -> wp-config.php.backup"
  # NOTE: the old `echo cmd ...` backtick form executed echo and then re-ran
  # its unquoted output; commands are now executed directly.
  cp "${location}/wp-config.php" "${location}/wp-config.php.backup"
  echo "${file} DONE:-------- copy wp-config.php -> wp-config.php.backup"
  # Install the config template and substitute connection parameters.
  echo "${file} START:------- copy to target path and replace wp-config parameter"
  cp -rf wp-config.php.master "${location}/wp-config.php"
  sed -i "s/{DB_HOST}/${DBHOST}/g" "${location}/wp-config.php"
  sed -i "s/{DB_USER}/${DBUSER}/g" "${location}/wp-config.php"
  sed -i "s/{DB_PASSWORD}/${DBPASS}/g" "${location}/wp-config.php"
  sed -i "s/{DB_NAME}/${dbName}/g" "${location}/wp-config.php"
  echo "${file} DONE:-------- copy to target path and replace wp-config parameter"
  # Recreate the database and import the bundled SQL dump, if present.
  echo "${file} START:------- search sql file and execute"
  sqlFile=$(find "${location}" -maxdepth 1 -name "*.sql")
  echo "DROP DATABASE IF EXISTS ${dbName};" | mysql -h "${DBHOST}" -u "${DBUSER}" "-p${DBPASS}"
  echo "CREATE DATABASE ${dbName}" | mysql -h "${DBHOST}" -u "${DBUSER}" "-p${DBPASS}"
  if [ -f "$sqlFile" ]
  then
    echo "$sqlFile"
    # Fix: options must precede the positional db_name argument; the old
    # invocation passed -p after the database name.
    mysql -h "${DBHOST}" -u "${DBUSER}" "-p${DBPASS}" "${dbName}" < "$sqlFile"
    # Rewrite the old domain to the new subdomain in the standard WP tables.
    echo "UPDATE ${dbName}.wp_options SET option_value = REPLACE(option_value, 'http://${dbName}.com', 'http://${domainName}.${domain}') WHERE option_name = 'home' OR option_name = 'siteurl';" | mysql -h "${DBHOST}" -u "${DBUSER}" "-p${DBPASS}"
    echo "UPDATE ${dbName}.wp_posts SET guid = REPLACE(guid, 'http://${dbName}.com','http://${domainName}.${domain}');" | mysql -h "${DBHOST}" -u "${DBUSER}" "-p${DBPASS}"
    echo "UPDATE ${dbName}.wp_posts SET post_content = REPLACE(post_content, 'http://${dbName}.com', 'http://${domainName}.${domain}');" | mysql -h "${DBHOST}" -u "${DBUSER}" "-p${DBPASS}"
    echo "UPDATE ${dbName}.wp_postmeta SET meta_value = REPLACE(meta_value,'http://${dbName}.com','http://${domainName}.${domain}');" | mysql -h "${DBHOST}" -u "${DBUSER}" "-p${DBPASS}"
  fi
  echo "${file} DONE:-------- search sql file and execute"
  # Install the nginx vhost from the template and enable it.
  echo "${file} START:------- setup nginx "
  sudo cp -rf sites-available "${targetNginx}/sites-available/${domainName}.${domain}"
  sudo sed -i "s/{TARGET_PATH}/${domainName}.${domain}/g" "${targetNginx}/sites-available/${domainName}.${domain}"
  sudo sed -i "s/{DOMAIN}/${domainName}.${domain}/g" "${targetNginx}/sites-available/${domainName}.${domain}"
  sudo ln -sf "${targetNginx}/sites-available/${domainName}.${domain}" "${targetNginx}/sites-enabled/"
  echo "${file} DONE:------- setup nginx "
  # Map the new subdomain locally, once.
  echo "${file} START:------- insert to /etc/hosts "
  if grep -Fxq "${IP} ${domainName}.${domain}" /etc/hosts
  then
    echo "${file} /etc/hosts:------- found -> NOT INSERTING AGAIN"
  else
    sudo -- sh -c -e "echo '${IP} ${domainName}.${domain}' >> /etc/hosts"
  fi
  echo "${file} DONE:------- insert to /etc/hosts "
done < lamtim.txt
|
<filename>pkg/anonymize/anonymize_doOnJSON_test.go
package anonymize
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Test_doOnJSON anonymizes the example fixture and compares the result
// against the expected golden file, ignoring JSON formatting differences.
func Test_doOnJSON(t *testing.T) {
	rawConfig, err := os.ReadFile("./testdata/example.json")
	require.NoError(t, err)

	got := doOnJSON(string(rawConfig))

	want, err := os.ReadFile("./testdata/expected.json")
	require.NoError(t, err)

	assert.JSONEq(t, string(want), got)
}
// Test_doOnJSON_simple checks, byte-for-byte, that doOnJSON masks sensitive
// values while leaving the surrounding text untouched. The fixtures are kept
// inline; their exact whitespace matters because the comparison is a strict
// string equality, not a JSON-aware one.
func Test_doOnJSON_simple(t *testing.T) {
	testCases := []struct {
		name           string
		input          string
		expectedOutput string
	}{
		{
			// Email addresses in values are replaced by a fixed-length mask.
			name: "email",
			input: `{
 "email1": "<EMAIL>",
 "email2": "<EMAIL>",
 "email3": "<EMAIL>"
}`,
			expectedOutput: `{
 "email1": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
 "email2": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
 "email3": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
}`,
		},
		{
			// Domain names (any subdomain depth) embedded in values are
			// masked; the duplicate "URL" keys are intentional — the input is
			// treated as text, not parsed as JSON.
			name: "url",
			input: `{
 "URL": "foo domain.com foo",
 "URL": "foo sub.domain.com foo",
 "URL": "foo sub.sub.domain.com foo",
 "URL": "foo sub.sub.sub.domain.com.us foo"
}`,
			expectedOutput: `{
 "URL": "foo xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx foo",
 "URL": "foo xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx foo",
 "URL": "foo xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx foo",
 "URL": "foo xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx foo"
}`,
		},
	}

	for _, test := range testCases {
		t.Run(test.name, func(t *testing.T) {
			output := doOnJSON(test.input)
			assert.Equal(t, test.expectedOutput, output)
		})
	}
}
|
package evilcraft.items;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemDye;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import evilcraft.blocks.ExcrementPile;
import evilcraft.blocks.ExcrementPileConfig;
/**
* {@link ItemBlock} for the {@link ExcrementPile}.
* @author rubensworks
*
*/
public class ExcrementPileItemBlock extends ItemBlock {

    /**
     * Make a new instance.
     * @param block The block.
     */
    public ExcrementPileItemBlock(Block block) {
        super(block);
    }

    @Override
    public boolean onItemUse(ItemStack itemStack, EntityPlayer player, World world, int x, int y, int z, int side, float coordX, float coordY, float coordZ) {
        Block targetBlock = world.getBlock(x, y, z);
        if (player.isSneaking()) {
            // Sneak-use: act as bonemeal, applying it `effectiveness` times.
            // `|=` (non-short-circuit) guarantees every attempt runs even
            // after one succeeds.
            boolean applied = false;
            for (int attempt = 0; attempt < ExcrementPileConfig.effectiveness; attempt++) {
                applied |= ItemDye.applyBonemeal(itemStack.copy(), world, x, y, z, player);
            }
            if (applied) {
                itemStack.stackSize--;
                if (!world.isRemote) {
                    // Server side only: trigger the bonemeal world effect.
                    world.playAuxSFX(2005, x, y, z, 0);
                }
                return true;
            }
        } else if (targetBlock == ExcrementPile.getInstance() && itemStack != null) {
            // Normal use on an existing pile: try to stack it one level higher.
            if (ExcrementPile.canHeightenPileAt(world, x, y, z)) {
                ExcrementPile.heightenPileAt(world, x, y, z);
                itemStack.stackSize--;
                return true;
            }
            return false;
        }
        return super.onItemUse(itemStack, player, world, x, y, z, side, coordX, coordY, coordZ);
    }
}
|
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.datasets import load_iris

# Load the iris dataset: feature matrix and class labels.
iris = load_iris()
features = iris.data
labels = iris.target

# Hold out 25% of the samples for evaluation (seeded for reproducibility).
X_train, X_test, y_train, y_test = train_test_split(
    features, labels, test_size=0.25, random_state=0
)

# Fit a shallow decision tree (depth capped at 2).
classifier = DecisionTreeClassifier(max_depth=2).fit(X_train, y_train)

# Predict classes for the held-out split.
predictions = classifier.predict(X_test)

# Report the fraction of correct predictions.
print("Accuracy:", np.mean(predictions == y_test))
# Compile each built-in JavaScript library to C bytecode using the QuickJS
# compiler (macOS build). Flags (per qjsc usage): -c emit a C source file,
# -m compile as an ES module, -M <name> declare an external module dependency
# (resolved at runtime, not bundled), -N <name> set the generated C symbol,
# -o the output path. Each line's -M list mirrors what its src/*.js imports.
./qjsc_macos -c -m -M GPIO -N jslib_gpio -o bytecode/jslib_gpio.c src/gpio.js
./qjsc_macos -c -m -N jslib_events -o bytecode/jslib_events.c src/events.js
./qjsc_macos -c -m -M events -M CRYPTO -N jslib_crypto -o bytecode/jslib_crypto.c src/crypto.js
./qjsc_macos -c -m -M events -M CHECKSUM -N jslib_checksum -o bytecode/jslib_checksum.c src/checksum.js
./qjsc_macos -c -m -M events -M APPOTA -N jslib_appota -o bytecode/jslib_appota.c src/appota.js
./qjsc_macos -c -m -M events -M AIOT_DEVICE -M AIOT_GATEWAY -N jslib_iot -o bytecode/jslib_iot.c src/iot.js
./qjsc_macos -c -m -M events -M BT_HOST -N jslib_bt_host -o bytecode/jslib_bt_host.c src/bt_host.js
./qjsc_macos -c -m -M REPL -M os -M std -N jslib_repl -o bytecode/jslib_repl.c src/repl.js
./qjsc_macos -c -m -M events -M UART -N jslib_uart -o bytecode/jslib_uart.c src/uart.js
./qjsc_macos -c -m -M events -M FS -N jslib_fs -o bytecode/jslib_fs.c src/fs.js
./qjsc_macos -c -m -M events -M AUDIOPLAYER -N jslib_audioplayer -o bytecode/jslib_audioplayer.c src/audioplayer.js
./qjsc_macos -c -m -M kv -N jslib_device -o bytecode/jslib_device.c src/device.js
./qjsc_macos -c -m -M events -M NETMGR -N jslib_netmgr -o bytecode/jslib_netmgr.c src/netmgr.js
./qjsc_macos -c -m -M events -M NETWORK -M NETMGR -M CELLULAR -N jslib_network -o bytecode/jslib_network.c src/network.js
./qjsc_macos -c -m -M MQTT -M events -N jslib_mqtt -o bytecode/jslib_mqtt.c src/mqtt.js
./qjsc_macos -c -m -M events -M TCP -N jslib_tcp -o bytecode/jslib_tcp.c src/tcp.js
./qjsc_macos -c -m -M events -M UDP -N jslib_udp -o bytecode/jslib_udp.c src/udp.js
./qjsc_macos -c -m -M NETWORK -M NETMGR -M CELLULAR -N jslib_location -o bytecode/jslib_location.c src/location.js
./qjsc_macos -c -m -M events -M SPI -N jslib_spi -o bytecode/jslib_spi.c src/spi.js
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.