text stringlengths 1 1.05M |
|---|
import re
def process_regex(regex_list, input_string):
    """Apply each (converter, pattern) pair to input_string and collect
    named-group captures.

    Only the first match of each pattern is used.  The converter element
    of each pair is ignored (kept for interface compatibility with
    callers that supply it).

    Returns a dict mapping group name -> list of captured substrings, in
    the order the patterns produced them.  Optional groups that did not
    participate in a match contribute None entries, as re.groupdict does.
    """
    captures = {}
    for _converter, pattern in regex_list:
        found = re.search(pattern, input_string)
        if found is None:
            continue
        for name, text in found.groupdict().items():
            captures.setdefault(name, []).append(text)
    return captures
require 'fastlane_core/ui/ui'
require "fileutils"
module Fastlane
UI = FastlaneCore::UI unless Fastlane.const_defined?("UI")
module Helper
# Stores and retrieves named credentials as individual files inside a
# `credentials` directory under the given repository path.
class CredentialsRepo
  # Suffix appended to every credential file name.
  CREDENTIAL_EXTENSION = ".credential".freeze

  # repo - base directory; the `credentials` subdirectory is created if
  # it does not already exist.
  def initialize(repo)
    @repo = File.join(repo, "credentials")
    FileUtils.mkdir_p(@repo)
  end

  # Returns the stored value for `key`.
  # Raises (via UI.user_error!) when no credential exists for `key`.
  def get_credential(key)
    path = credential_path(key)
    unless File.exist?(path)
      UI.user_error!("No such key '#{key}' in the credentials repository")
    end
    File.read(path)
  end

  # Writes `value` for `key`, overwriting any existing credential.
  # Returns the path of the credential file.
  def set_credential(key, value)
    path = credential_path(key)
    # File.write is the idiomatic replacement for IO.write here.
    File.write(path, value)
    path
  end

  private

  # Filesystem location of the credential file for `key`.
  def credential_path(key)
    File.join(@repo, "#{key}#{CREDENTIAL_EXTENSION}")
  end
end
end
end
|
#!/bin/bash
# Tear down a Litmus chaos-testing setup: delete all chaos/workflow
# resources in every namespace, then remove the Litmus-Portal deployment.
source litmus/utils.sh
path=$(pwd)
version=${PORTAL_VERSION}
# Remove Argo workflows and Litmus chaos resources cluster-wide (-A = all namespaces).
kubectl delete workflows --all -A
kubectl delete cronworkflows --all -A
kubectl delete chaosengines --all -A
kubectl delete chaosresult --all -A
# Shutting down the Litmus-Portal Setup: fetch the portal manifest, pin its
# images to $version (manifest_image_update presumably comes from
# litmus/utils.sh — confirm), then delete the resources it describes.
curl https://raw.githubusercontent.com/litmuschaos/litmus/master/litmus-portal/cluster-k8s-manifest.yml --output litmus-portal-cleanup.yml
manifest_image_update $version litmus-portal-cleanup.yml
kubectl delete -f litmus-portal-cleanup.yml
|
# RESTful CRUD controller for FoodType records (classic Rails scaffold,
# serving both HTML and JSON).
#
# NOTE(review): uses pre-strong-parameters mass assignment
# (params[:food_type]) and update_attributes — valid on older Rails, but
# update_attributes was removed in Rails 6; confirm the app's Rails
# version before upgrading.
class FoodTypesController < ApplicationController
  # GET /food_types
  # GET /food_types.json
  def index
    @food_types = FoodType.all
    respond_to do |format|
      format.html # index.html.erb
      format.json { render json: @food_types }
    end
  end

  # GET /food_types/1
  # GET /food_types/1.json
  def show
    @food_type = FoodType.find(params[:id])
    respond_to do |format|
      format.html # show.html.erb
      format.json { render json: @food_type }
    end
  end

  # GET /food_types/new
  # GET /food_types/new.json
  def new
    @food_type = FoodType.new
    respond_to do |format|
      format.html # new.html.erb
      format.json { render json: @food_type }
    end
  end

  # GET /food_types/1/edit
  def edit
    @food_type = FoodType.find(params[:id])
  end

  # POST /food_types
  # POST /food_types.json
  def create
    @food_type = FoodType.new(params[:food_type])
    respond_to do |format|
      if @food_type.save
        format.html { redirect_to @food_type, notice: 'Food type was successfully created.' }
        format.json { render json: @food_type, status: :created, location: @food_type }
      else
        format.html { render action: "new" }
        format.json { render json: @food_type.errors, status: :unprocessable_entity }
      end
    end
  end

  # PUT /food_types/1
  # PUT /food_types/1.json
  def update
    @food_type = FoodType.find(params[:id])
    respond_to do |format|
      if @food_type.update_attributes(params[:food_type])
        format.html { redirect_to @food_type, notice: 'Food type was successfully updated.' }
        format.json { head :ok }
      else
        format.html { render action: "edit" }
        format.json { render json: @food_type.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /food_types/1
  # DELETE /food_types/1.json
  def destroy
    @food_type = FoodType.find(params[:id])
    @food_type.destroy
    respond_to do |format|
      format.html { redirect_to food_types_url }
      format.json { head :ok }
    end
  end
end
|
def sum_of_squares(n):
    """Return 0**2 + 1**2 + ... + n**2.

    Uses the closed form n(n+1)(2n+1)/6 — O(1) instead of the original
    O(n) loop.  For n < 0 the sum is empty, so 0 is returned (this matches
    the original range(n+1)-based behavior for all negative n).
    """
    if n < 0:
        return 0
    return n * (n + 1) * (2 * n + 1) // 6

print(sum_of_squares(10))  # Output: 385
class Graph:
    """Simple undirected graph stored as an adjacency list."""

    def __init__(self):
        # Maps each vertex to the list of its neighbors.
        self.vertices = {}

    def add_vertex(self, vertex):
        """Register `vertex` with no neighbors; no-op if already present."""
        self.vertices.setdefault(vertex, [])

    def add_edge(self, vertex1, vertex2):
        """Connect two existing vertices in both directions.

        Silently does nothing when either endpoint is unknown.
        """
        adjacency = self.vertices
        if vertex1 in adjacency and vertex2 in adjacency:
            adjacency[vertex1].append(vertex2)
            adjacency[vertex2].append(vertex1)

    def get_vertices(self):
        """Return all vertices as a list (insertion order)."""
        return list(self.vertices)
# Example usage: build a small undirected path graph A - B - C.
graph = Graph()
graph.add_vertex('A')
graph.add_vertex('B')
graph.add_vertex('C')
graph.add_edge('A', 'B')
graph.add_edge('B', 'C')
print(graph.get_vertices())  # Output: ['A', 'B', 'C'] (dict insertion order)
// Minimal example class: copies three options onto the instance and
// exposes one demo method.
class SomeClass {
  constructor(someOptions) {
    // Pull out exactly the three supported options; absent keys become
    // `undefined` properties, as with direct assignment.
    const { prop1, prop2, prop3 } = someOptions;
    this.prop1 = prop1;
    this.prop2 = prop2;
    this.prop3 = prop3;
  }

  someFunc() {
    console.log("This is some func of some class");
  }
}
#!/bin/sh
# Generate a SLURM job script (rodinia-euler-streamsub.sh) that runs every
# experiment listed in bins.json under HDEEM energy measurement.  This
# script only writes the file; nothing is submitted here.
FILENAME="rodinia-euler-streamsub.sh"
# Repeat count and working directory come from bins.json (requires jq).
REPEATS=`jq -r '.repeats' bins.json`
RUNDIR=`jq -r '.directory' bins.json`
SLEEPLEN=1
# Unquoted heredoc: ${REPEATS}/${RUNDIR} expand NOW, at generation time,
# while \$HOSTNAME is escaped so it expands when the job runs.
# NOTE(review): SUITE/BIN/INPUT in the -J line are not set in this script —
# presumably exported by a calling wrapper; confirm.
cat << PBSHEADER > $FILENAME
#!/bin/bash
#SBATCH -J ${SUITE}-${BIN##*/}-${INPUT##*/}
#SBATCH --nodes=1
#SBATCH --cpus-per-task=24
#SBATCH --exclusive
#SBATCH --time=00:15:00
#SBATCH --array=1-${REPEATS}
#SBATCH --partition=haswell
module load scorep/sync-2015-07-24-intel-xmpi-cuda6.5
module load hdeem
cd $RUNDIR
echo Hostname is \$HOSTNAME
cat /proc/meminfo
cat /proc/cpuinfo
PBSHEADER
# Append one measured run per experiment; each experiment is a
# "binary input" pair extracted from bins.json.
jq -r '.experiments[] | {binary, input} | join (" ")' bins.json | while read BINARY INPUT; do
cat << RUNTEXT >> $FILENAME
# $BINARY $INPUT
sleep $SLEEPLEN
CSVNAME="Bound-Rodinia-Euler-Binary-${BINARY##*/}-Input-${INPUT##*/}-Array-\${SLURM_ARRAY_TASK_ID}-Of-${REPEATS}.csv"
clearHdeem
startHdeem
OMP_NUM_THREADS=1 KMP_AFFINITY=compact KMP_PLACE_THREADS=1c,1t,0O srun ./${BINARY} ${INPUT}
stopHdeem
printHdeem -o \$CSVNAME
RUNTEXT
done
echo done
|
import java.util.HashSet;
import java.util.ArrayList;
public class UniqueValues
{
public static void main(String[] args)
{
int[] arr = {1, 2, 3, 2, 4, 1};
HashSet<Integer> set = new HashSet<Integer>();
// Traverse the input array
for (int i = 0; i < arr.length; i++)
{
// If not present, then put it in hash and print it
if (!set.contains(arr[i]))
{
set.add(arr[i]);
}
}
// Print the elements of hash set
System.out.println("The unique elements in the given array are : ");
for (Integer num : set)
System.out.println(num + " ");
}
} |
#!/usr/bin/env bash
# Fix: shebang changed from `sh` to `bash` — ${BASH_SOURCE[0]} below is a
# bash-only array and is undefined under a POSIX sh.
#
# Sets up Gazebo environment variables so resources and models from the
# model_pose_dataset_generation ROS package (and optional local model dirs)
# are found.
# Credit: https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PACKDIR="$( rospack find model_pose_dataset_generation )"
source /usr/share/gazebo/setup.sh
export GAZEBO_RESOURCE_PATH=$GAZEBO_RESOURCE_PATH:$PACKDIR
export GAZEBO_MODEL_PATH=$GAZEBO_MODEL_PATH:$PACKDIR/models
# Optionally, include bigbird models dir.
# See included script for setting those up.
if [ -d ~/.gazebo/models/bigbird ]; then
    export GAZEBO_MODEL_PATH=$GAZEBO_MODEL_PATH:~/.gazebo/models/bigbird
fi
# And just in case the default dir isn't there...
export GAZEBO_MODEL_PATH=$GAZEBO_MODEL_PATH:~/.gazebo/models
# NOTE(review): DIR is computed but never used below — confirm whether a
# consumer of this script relies on it.
|
#!/usr/bin/env node
const path = require('path');
const chalk = require('chalk');
const glob = require('glob');
const spawn = require('cross-spawn');
const getRawArgs = require('./utils/getRawArgs');
const logger = require('./utils/logger');
// Human-readable explanations for signals that commonly kill a child
// process.  (Fix: "run out" -> "ran out" in the SIGKILL message.)
const SIGNAL_MESSAGES = {
  SIGKILL:
    'Exited too early. Either the process ran out of memory or someone called "kill -9" on it.',
  SIGTERM:
    'Exited too early. Someone called "kill", "killall" or the system is shutting down.',
};
// Collect runnable script paths under `scriptsRoot`: every direct child
// except test directories, with any trailing `.js` extension stripped.
const getScriptsPathsForRoot = scriptsRoot => {
  const entries = glob.sync(path.join(scriptsRoot, '*')).map(path.normalize);
  return entries
    .filter(entry => !entry.includes('__tests__'))
    .map(entry => entry.replace(/\.js$/, ''));
};
// Log a friendly explanation when the child process was killed by a
// signal we know about; unknown signals are ignored.
const handleSignal = signal => {
  const message = SIGNAL_MESSAGES[signal];
  if (message !== undefined) {
    logger.info(message);
  }
};
// Report how the child process ended and propagate its exit status.
const handleResult = result => {
  if (result.signal) {
    handleSignal(result.signal);
  }
  if (result.status > 0) {
    logger.error(`Exit code: ${result.status}.`);
  }
  // Fix: when the child dies from a signal, `result.status` is null and
  // process.exit(null) would report success (0); exit 1 instead so
  // callers see a failure.
  process.exit(result.signal ? 1 : result.status);
};
// Resolve and run a named script found under the given package roots.
// Each root's `scripts/` directory is scanned; script basenames map to
// paths (later roots win on name collisions).  The process then re-invokes
// `node` as: [args before the script name] <script path> [args after it].
const runScript = roots => {
  const scriptsRoots = roots.map(root => path.join(root, 'scripts'));
  const scriptsPaths = scriptsRoots
    .map(scriptsRoot => getScriptsPathsForRoot(scriptsRoot))
    .reduce((a, b) => a.concat(b), []);
  // basename -> full path lookup table.
  const scriptsPathsForNames = scriptsPaths.reduce(
    (acc, scriptPath) => ({
      ...acc,
      [path.basename(scriptPath)]: scriptPath,
    }),
    {}
  );
  const scriptsNames = Object.keys(scriptsPathsForNames);
  const isValidScript = scriptName => scriptsNames.includes(scriptName);
  const args = getRawArgs();
  // First CLI arg that names a known script; everything before it is
  // forwarded to `node` itself, everything after to the script.
  const scriptIndex = args.findIndex(isValidScript);
  const scriptName = scriptIndex === -1 ? args[0] : args[scriptIndex];
  if (!isValidScript(scriptName)) {
    logger.error(`Unknown script: ${chalk.bold(scriptName)}.`);
    process.exit(1);
  }
  const nodeArgs = scriptIndex > 0 ? args.slice(0, scriptIndex) : [];
  const scriptArgs = args.slice(scriptIndex + 1);
  // Inherit stdio so the child's output (and colors) stream straight through.
  const result = spawn.sync(
    'node',
    [...nodeArgs, scriptsPathsForNames[scriptName], ...scriptArgs],
    {stdio: 'inherit'}
  );
  handleResult(result);
};
module.exports = runScript;
|
<reponame>dogballs/battle-city
/**
 * Abstraction over a physical input source (keyboard, gamepad, ...).
 * NOTE(review): per-method semantics below are inferred from the names —
 * confirm against the concrete implementations.
 */
export interface InputDevice {
  /** Whether the underlying device is currently available. */
  isConnected(): boolean;
  /** Start receiving events from the device. */
  listen(): void;
  /** Stop receiving events from the device. */
  unlisten(): void;
  /** Refresh the device state (presumably called once per tick). */
  update(): void;
  /** Codes that just transitioned to pressed. */
  getDownCodes(): number[];
  /** Codes currently held down. */
  getHoldCodes(): number[];
  /** Codes that were just released. */
  getUpCodes(): number[];
}
|
package com.example.demo.model;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.validator.constraints.NotEmpty;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.validation.constraints.NotNull;
import java.math.BigDecimal;
/**
 * JPA entity: a monetary account owned by a {@link PayUser}.
 *
 * Improvement: the original exposed accessors only for {@code name};
 * getters/setters for the remaining fields are added (purely additive,
 * backward compatible).
 */
@Entity
public class Account {

    /** Primary key, generated as a UUID string by Hibernate. */
    @Id
    @GeneratedValue(generator = "UUID")
    @GenericGenerator(
            name = "UUID",
            strategy = "org.hibernate.id.UUIDGenerator"
    )
    private String id;

    @NotEmpty
    private String name;

    private String accountNumber;

    /** Persisted as the enum constant's name, not its ordinal. */
    @Enumerated(EnumType.STRING)
    @NotNull
    private Currency currency;

    @NotNull
    private BigDecimal balance;

    /** Owning user; always loaded (EAGER) and mandatory. */
    @ManyToOne(fetch = FetchType.EAGER, optional = false)
    @JoinColumn(name = "user_id", nullable = false)
    private PayUser user;

    // NOTE(review): @NotNull on a primitive boolean is a no-op (primitives
    // can never be null); kept here to avoid changing the declared contract.
    @NotNull
    private boolean enable = true;

    public String getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getAccountNumber() {
        return accountNumber;
    }

    public void setAccountNumber(String accountNumber) {
        this.accountNumber = accountNumber;
    }

    public Currency getCurrency() {
        return currency;
    }

    public void setCurrency(Currency currency) {
        this.currency = currency;
    }

    public BigDecimal getBalance() {
        return balance;
    }

    public void setBalance(BigDecimal balance) {
        this.balance = balance;
    }

    public PayUser getUser() {
        return user;
    }

    public void setUser(PayUser user) {
        this.user = user;
    }

    public boolean isEnable() {
        return enable;
    }

    public void setEnable(boolean enable) {
        this.enable = enable;
    }
}
|
@app.route('/message', methods=['POST'])
def message():
    """Chatbot message endpoint.

    Expects a JSON body like {"message": "..."} and returns a JSON reply
    of the form {"message": "<response>"}.
    """
    data = request.get_json()
    # Robustness fix: the original indexed data['message'], which raises
    # (HTTP 500) when the body is not JSON (get_json() -> None) or lacks
    # the key; fall through to the default reply instead.
    incoming_message = (data or {}).get('message')
    if incoming_message == "Hi, I need help":
        response = "Hello! What do you need help with?"
    else:
        response = "Sorry, I'm not sure what you mean"
    response_data = {
        'message': response
    }
    return jsonify(response_data)
#!/bin/bash -e
# Run the LSTM_FCN time-series classification pipeline over a list of seed
# datasets, recording each dataset's score and wall-clock time in scores.txt.
Datasets=('66_chlorineConcentration_MIN_METADATA' 'LL1_Adiac_MIN_METADATA' 'LL1_ArrowHead_MIN_METADATA' 'LL1_CinC_ECG_torso_MIN_METADATA' 'LL1_Cricket_Y_MIN_METADATA' 'LL1_ECG200_MIN_METADATA' 'LL1_ElectricDevices_MIN_METADATA' 'LL1_FISH_MIN_METADATA' 'LL1_FaceFour_MIN_METADATA' 'LL1_FordA_MIN_METADATA' 'LL1_HandOutlines_MIN_METADATA' 'LL1_Haptics_MIN_METADATA' 'LL1_ItalyPowerDemand_MIN_METADATA' 'LL1_Meat_MIN_METADATA' 'LL1_OSULeaf_MIN_METADATA')
cd /primitives
# git pull upstream master
# git checkout classification_pipelines
cd /primitives/v2019.11.10/Distil/d3m.primitives.time_series_classification.convolutional_neural_net.LSTM_FCN/1.0.2
# mkdir pipelines
cd pipelines
# Regenerate the pipeline JSON description.
python3 "/src/timeseriesd3mwrappers/TimeSeriesD3MWrappers/pipelines/LSTM_FCN_pipeline.py"
cd ..
# mkdir pipeline_runs
cd pipeline_runs
#create text file to record scores and timing information
# touch scores.txt
# echo "DATASET, SCORE, EXECUTION TIME" >> scores.txt
for i in "${Datasets[@]}"; do
# generate pipeline run and time it (fit on TRAIN, score on SCORE split)
start=`date +%s`
python3 -m d3m runtime -d /datasets/ fit-score -p ../pipelines/*.json -i /datasets/seed_datasets_current/$i/TRAIN/dataset_TRAIN/datasetDoc.json -t /datasets/seed_datasets_current/$i/TEST/dataset_TEST/datasetDoc.json -a /datasets/seed_datasets_current/$i/SCORE/dataset_SCORE/datasetDoc.json -r /datasets/seed_datasets_current/$i/${i}_problem/problemDoc.json -c scores.csv -O ${i}_no_attention.yml
end=`date +%s`
runtime=$((end-start))
echo "----------$i took $runtime----------"
# save information: the second CSV column of scores.csv's last line is the score
IFS=, read col1 score col3 col4 < <(tail -n1 scores.csv)
echo "$i, $score, $runtime" >> scores.txt
# # cleanup temporary file
rm scores.csv
done
# zip pipeline runs individually
# cd ..
# gzip -r pipeline_runs
|
package com.github.robindevilliers.welcometohell.steps;
import com.github.robindevilliers.cascade.annotations.*;
import com.github.robindevilliers.welcometohell.Utilities;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import static org.junit.Assert.assertEquals;
/**
 * Cascade test step for the appeal question page (reached after one of the
 * SelectPayment steps listed in {@code @Step}).  Both variants answer the
 * question, submit the form, and expect to land on the "Thrall of Hell"
 * page.
 *
 * NOTE(review): YesToAppeal and NoToAppeal differ only in the option value
 * ("true"/"false"); consider extracting a shared base class.
 */
@Step({SelectPayment.HeavenlyHalfPennies.class, SelectPayment.Gold.class, SelectPayment.Nothing.class, SelectPayment.EarthlyFiat.class})
@Narrative("Appeal question.")
public interface AppealQuestion {
    @Narrative("Enter yes to appeal and go to thrall page.")
    class YesToAppeal implements AppealQuestion {
        @Demands
        private WebDriver webDriver;

        @When
        public void when() {
            // Choose "true" on the appeal form and submit it.
            Utilities.selectOption(webDriver, "input", "true");
            webDriver.findElement(By.cssSelector("button[type=submit]")).click();
            Utilities.waitForPage(webDriver);
        }

        @Then
        public void then() {
            assertEquals("Welcome to Hell | Thrall of Hell", webDriver.getTitle());
        }
    }

    @Narrative("Enter no to appeal and go to thrall page.")
    class NoToAppeal implements AppealQuestion {
        @Demands
        private WebDriver webDriver;

        @When
        public void when() {
            // Choose "false" on the appeal form and submit it.
            Utilities.selectOption(webDriver, "input", "false");
            webDriver.findElement(By.cssSelector("button[type=submit]")).click();
            Utilities.waitForPage(webDriver);
        }

        @Then
        public void then() {
            assertEquals("Welcome to Hell | Thrall of Hell", webDriver.getTitle());
        }
    }
}
|
use gtk::prelude::*;
use gtk::{Application, ApplicationWindow, Button, Entry, Box, Orientation};
/// View model backing the main window: holds the to-do list entries.
struct MainWindowViewModel {
    // Define the data model for the to-do list
    tasks: Vec<String>,
}

impl MainWindowViewModel {
    /// Append `task` (stored as an owned `String`) to the to-do list.
    fn add_task(&mut self, task: &str) {
        self.tasks.push(String::from(task));
    }
}
// GTK to-do list demo: a window with a text entry, an "Add Task" button,
// and a vertical list of added tasks.
//
// NOTE(review): this function does not compile as written — see the
// inline notes below; left byte-identical pending a decision on the
// intended gtk version/ownership model.
fn main() {
    let app_id = "store.relm4.example.todo-4";
    gtk::init().expect("Couldn't initialize gtk");
    let application = Application::builder()
        .application_id(app_id)
        .build();
    // NOTE(review): `model` is captured by the `move` closure below, but
    // `add_task` takes `&mut self` and `model` is neither declared `mut`
    // nor wrapped in Rc<RefCell<_>> for shared mutation across closures.
    let model = MainWindowViewModel {
        tasks: Vec::new(), // Initialize an empty to-do list
    };
    application.connect_activate(move |app| {
        // Create the application window
        let window = ApplicationWindow::builder()
            .application(app)
            .title("To-Do List")
            .default_width(400)
            .default_height(200)
            .build();
        // Create UI elements
        let entry = Entry::builder().build();
        let add_button = Button::with_label("Add Task");
        let task_list = Box::new(Orientation::Vertical, 5);
        // Add task when the button is clicked
        // NOTE(review): `entry` and `task_list` are moved into this inner
        // closure yet used again below (`main_box.append(...)`) — a
        // use-after-move.  Also, in gtk4 `Entry::text()` returns a GString
        // (not an Option), and `Box` has `append` rather than `add` (a
        // gtk3 method) — the API usage mixes versions; confirm the
        // intended gtk bindings.
        add_button.connect_clicked(move |_| {
            if let Some(text) = entry.text() {
                model.add_task(&text); // Add the task to the to-do list
                let task_label = gtk::Label::new(Some(&text));
                task_list.add(&task_label);
            }
        });
        // Add UI elements to the window
        let main_box = Box::new(Orientation::Vertical, 5);
        main_box.append(&entry);
        main_box.append(&add_button);
        main_box.append(&task_list);
        window.set_child(Some(&main_box));
        // Display the window
        window.present();
    });
    application.run();
}
#!/bin/bash
# Git post-merge hook: bump package.json's version depending on what was
# just merged:
#   - pulls and merges of release/production branches are ignored
#   - a merged branch named like the current version => minor bump
#   - any other merge while on master => patch bump
BRANCH=$(git rev-parse --abbrev-ref HEAD)
# The newest reflog entry describes the merge that triggered this hook,
# e.g. "abc123 HEAD@{0}: merge feature-x: Fast-forward".
reflog_message=$(git reflog -1)
merge_reason=$(echo $reflog_message | cut -d" " -f 3 | sed "s/://")
merged_branch_name=$(echo $reflog_message | cut -d" " -f 4 | sed "s/://")
# Leading [0-9a-zA-Z-_] segment of the merged branch name (its "folder",
# e.g. "release" for "release/1.2.0").
branch_folder=$(echo $merged_branch_name | awk -F: 'match($0, /^[0-9a-zA-Z\-\_]+/) { print substr( $0, RSTART, RLENGTH )}')
pre_version=$(node -p -e "require('./package.json').version")
# A plain `git pull` also writes a merge-like reflog entry — skip those.
if [[ $merge_reason == "pull" ]]; then
exit 0
fi
if [[ $branch_folder == "release" ]]; then
exit 0
elif [[ $merged_branch_name == "production" ]]; then
exit 0
elif [[ $merged_branch_name == $pre_version ]]; then
# when we make release, merge branch name equal last version, so we up minor version
npm version minor --no-git-tag-version
PACKAGE_VERSION=$(node -p -e "require('./package.json').version")
git add .
git commit -n -m 'Up minor version after release to '$PACKAGE_VERSION
elif [[ "$BRANCH" == "master" ]]; then
# when we merge some feature or some regular branch to master we, so we patch version
npm version patch --no-git-tag-version
PACKAGE_VERSION=$(node -p -e "require('./package.json').version")
git add .
git commit -n -m 'Bump version to '$PACKAGE_VERSION
fi
|
#!/bin/bash --login
# Container entrypoint: prepare the Django application (migrations, admin
# bootstrap, static files) and then start gunicorn.
# The --login ensures the bash configuration is loaded,
# enabling Conda.
# Enable strict mode.
set -euo pipefail
# ... Run whatever commands ...
# Temporarily disable strict mode and activate conda
# (likely because conda's activation scripts would trip -u/-e — confirm):
set +euo pipefail
conda activate django
# Re-enable strict mode:
set -euo pipefail
# echo "Waiting for MySql to start..."
# Block until the database is reachable at db:3306.
./wait-for db:3306
# Start the cron daemon (log level 8).
crond -l 8
python manage.py makemigrations --noinput
python manage.py migrate --noinput
python manage.py initadmin
# exec the final command:
# exec python manage.py runserver 9001
python manage.py collectstatic --no-input
# NOTE(review): gunicorn is not exec'd, so bash remains PID 1 and signals
# (e.g. docker stop's SIGTERM) may not reach gunicorn directly.
gunicorn mangepicfudan.wsgi:application --bind 0.0.0.0:80
|
#!/bin/bash
#===============================================
# Description: DIY script part 2
# File name: diy-part2.sh
# Lisence: MIT
# Author: P3TERX
# Blog: https://p3terx.com
#===============================================
# Change the default LAN IP
sed -i 's/192.168.1.1/10.10.10.10/g' package/base-files/files/bin/config_generate
# Remove duplicate packages (replaced by the versions cloned below)
rm -rf package/lean/luci-theme-argon
rm -rf package/lean/luci-theme-netgear
rm -rf package/lean/luci-app-netdata
rm -rf package/lean/luci-app-wrtbwmon
rm -rf package/lean/luci-app-dockerman
rm -rf package/lean/luci-app-jd-dailybonus
# Add extra packages
git clone https://github.com/jerrykuku/luci-app-jd-dailybonus.git package/luci-app-jd-dailybonus
git clone https://github.com/jerrykuku/lua-maxminddb.git package/lua-maxminddb
git clone https://github.com/jerrykuku/luci-app-vssr.git package/luci-app-vssr
git clone https://github.com/kongfl888/luci-app-adguardhome.git package/luci-app-adguardhome
git clone https://github.com/tty228/luci-app-serverchan.git package/luci-app-serverchan
git clone https://github.com/iwrt/luci-app-ikoolproxy.git package/luci-app-ikoolproxy
git clone https://github.com/sirpdboy/luci-app-advanced package/luci-app-advanced
git clone https://github.com/esirplayground/luci-app-poweroff package/luci-app-poweroff
git clone https://github.com/destan19/OpenAppFilter.git package/OpenAppFilter
svn co https://github.com/kiddin9/openwrt-packages/trunk/luci-app-eqos package/luci-app-eqos
svn co https://github.com/kiddin9/openwrt-bypass/trunk/luci-app-bypass package/luci-app-bypass
svn co https://github.com/lisaac/luci-app-dockerman/trunk/applications/luci-app-dockerman package/luci-app-dockerman
svn co https://github.com/sirpdboy/sirpdboy-package/trunk/luci-app-smartdns package/luci-app-smartdns
svn co https://github.com/sirpdboy/sirpdboy-package/trunk/luci-app-netdata package/luci-app-netdata
svn co https://github.com/sirpdboy/sirpdboy-package/trunk/luci-app-socat package/luci-app-socat
# Proxy plugins and their dependencies
svn co https://github.com/vernesong/OpenClash/trunk/luci-app-openclash package/luci-app-openclash
# Build po2lmo (skip if po2lmo is already available)
pushd package/luci-app-openclash/tools/po2lmo
make && sudo make install
popd
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/brook package/brook
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/chinadns-ng package/chinadns-ng
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/tcping package/tcping
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/trojan-go package/trojan-go
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/trojan-plus package/trojan-plus
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/luci-app-passwall package/luci-app-passwall
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/xray-core package/xray-core
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/xray-plugin package/xray-plugin
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/ssocks package/ssocks
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/hysteria package/hysteria
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/v2ray-plugin package/v2ray-plugin
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/v2ray-core package/v2ray-core
svn co https://github.com/fw876/helloworld/trunk/naiveproxy package/naiveproxy
svn co https://github.com/fw876/helloworld/trunk/shadowsocks-rust package/shadowsocks-rust
svn co https://github.com/fw876/helloworld/trunk/shadowsocksr-libev package/shadowsocksr-libev
svn co https://github.com/fw876/helloworld/trunk/luci-app-ssr-plus package/luci-app-ssr-plus
svn co https://github.com/fw876/helloworld/trunk/simple-obfs package/simple-obfs
svn co https://github.com/fw876/helloworld/trunk/trojan package/trojan
# Themes
svn co https://github.com/kenzok8/openwrt-packages/trunk/luci-theme-edge package/luci-theme-edge
svn co https://github.com/rosywrt/luci-theme-rosy/trunk/luci-theme-rosy package/luci-theme-rosy
svn co https://github.com/haiibo/packages/trunk/luci-theme-darkmatter package/luci-theme-darkmatter
svn co https://github.com/haiibo/packages/trunk/luci-theme-atmaterial package/luci-theme-atmaterial
svn co https://github.com/haiibo/packages/trunk/luci-theme-opentomcat package/luci-theme-opentomcat
svn co https://github.com/haiibo/packages/trunk/luci-theme-netgear package/luci-theme-netgear
git clone https://github.com/xiaoqingfengATGH/luci-theme-infinityfreedom package/luci-theme-infinityfreedom
git clone -b 18.06 https://github.com/jerrykuku/luci-theme-argon.git package/luci-theme-argon
git clone https://github.com/jerrykuku/luci-app-argon-config package/luci-app-argon-config
git clone https://github.com/sirpdboy/luci-theme-opentopd package/luci-theme-opentopd
# MosDNS
svn co https://github.com/QiuSimons/openwrt-mos/trunk/luci-app-mosdns package/luci-app-mosdns
svn co https://github.com/QiuSimons/openwrt-mos/trunk/mosdns package/mosdns
# DDNS.to
svn co https://github.com/linkease/nas-packages-luci/trunk/luci/luci-app-ddnsto package/luci-app-ddnsto
svn co https://github.com/linkease/nas-packages/trunk/network/services/ddnsto package/ddnsto
# LinkEase ("EasyCloud" NAS suite)
svn co https://github.com/linkease/nas-packages-luci/trunk/luci/luci-app-linkease package/luci-app-linkease
svn co https://github.com/linkease/nas-packages/trunk/network/services/linkease package/linkease
# Bandwidth/traffic monitoring
svn co https://github.com/sirpdboy/sirpdboy-package/trunk/luci-app-wrtbwmon package/luci-app-wrtbwmon
svn co https://github.com/sirpdboy/sirpdboy-package/trunk/wrtbwmon package/wrtbwmon
# Gost
svn co https://github.com/kenzok8/openwrt-packages/trunk/luci-app-gost package/luci-app-gost
svn co https://github.com/kenzok8/openwrt-packages/trunk/gost package/gost
# Fix Makefile include/source paths so out-of-tree packages build in this tree
find package/*/ -maxdepth 2 -path "*/Makefile" | xargs -i sed -i 's/include\ \.\.\/\.\.\/luci\.mk/include \$(TOPDIR)\/feeds\/luci\/luci\.mk/g' {}
find package/*/ -maxdepth 2 -path "*/Makefile" | xargs -i sed -i 's/include\ \.\.\/\.\.\/lang\/golang\/golang\-package\.mk/include \$(TOPDIR)\/feeds\/packages\/lang\/golang\/golang\-package\.mk/g' {}
find package/*/ -maxdepth 2 -path "*/Makefile" | xargs -i sed -i 's/PKG_SOURCE_URL:=\@GHREPO/PKG_SOURCE_URL:=https:\/\/github\.com/g' {}
find package/*/ -maxdepth 2 -path "*/Makefile" | xargs -i sed -i 's/PKG_SOURCE_URL:=\@GHCODELOAD/PKG_SOURCE_URL:=https:\/\/codeload\.github\.com/g' {}
# Move the V2Ray server app from the Services menu to the VPN menu
sed -i 's/services/vpn/g' package/lean/luci-app-v2ray-server/luasrc/controller/*.lua
sed -i 's/services/vpn/g' package/lean/luci-app-v2ray-server/luasrc/model/cbi/v2ray_server/*.lua
sed -i 's/services/vpn/g' package/lean/luci-app-v2ray-server/luasrc/view/v2ray_server/*.htm
# Move the AliyunDrive WebDAV app from the Services menu to the NAS menu
sed -i 's/services/nas/g' package/lean/luci-app-aliyundrive-webdav/luasrc/controller/*.lua
sed -i 's/services/nas/g' package/lean/luci-app-aliyundrive-webdav/luasrc/model/cbi/aliyundrive-webdav/*.lua
sed -i 's/services/nas/g' package/lean/luci-app-aliyundrive-webdav/luasrc/view/aliyundrive-webdav/*.htm
# Shorten plugin display names (the quoted strings are Chinese UI labels
# and must stay byte-identical — they are matched literally in the tree)
sed -i 's/"挂载 SMB 网络共享"/"挂载共享"/g' `grep "挂载 SMB 网络共享" -rl ./`
sed -i 's/"Argon 主题设置"/"Argon 设置"/g' `grep "Argon 主题设置" -rl ./`
sed -i 's/"阿里云盘 WebDAV"/"阿里云盘"/g' `grep "阿里云盘 WebDAV" -rl ./`
sed -i 's/"USB 打印服务器"/"USB 打印"/g' `grep "USB 打印服务器" -rl ./`
sed -i 's/"BaiduPCS Web"/"百度网盘"/g' `grep "BaiduPCS Web" -rl ./`
./scripts/feeds update -a
./scripts/feeds install -a
|
/**
 * Counts index pairs (i, j) with i < j and arr[i] + arr[j] == sum.
 *
 * Single pass with a frequency map: each element completes one pair for
 * every previously seen occurrence of (sum - element) — O(n) time and
 * O(n) space instead of the original O(n^2) double loop.  Fully-qualified
 * java.util names are used so no new imports are required.
 *
 * @param arr input values (may be empty)
 * @param sum target pair sum
 * @return number of qualifying pairs
 */
public static int countPairs(int arr[], int sum) {
    java.util.Map<Integer, Integer> seen = new java.util.HashMap<>();
    int pairs = 0;
    for (int value : arr) {
        // Every earlier occurrence of the complement forms one new pair.
        pairs += seen.getOrDefault(sum - value, 0);
        seen.merge(value, 1, Integer::sum);
    }
    return pairs;
}
# Output
2 |
#!/bin/bash
# Build a logstash OS package (rpm or deb) for a given <os> <release> pair
# using fpm.  ../.VERSION.mk presumably defines VERSION/RELEASE/REVISION
# used below — confirm.
[ ! -f ../.VERSION.mk ] && make -C .. .VERSION.mk
. ../.VERSION.mk
if ! git show-ref --tags | grep -q "$(git rev-parse HEAD)"; then
# HEAD is not tagged, add the date, time and commit hash to the revision
BUILD_TIME="$(date +%Y%m%d%H%M)"
DEB_REVISION="${BUILD_TIME}~${REVISION}"
RPM_REVISION=".${BUILD_TIME}.${REVISION}"
fi
URL="http://logstash.net"
DESCRIPTION="An extensible logging pipeline"
if [ "$#" -ne 2 ] ; then
echo "Usage: $0 <os> <release>"
echo
echo "Example: $0 ubuntu 12.10"
exit 1
fi
os=$1
release=$2
echo "Building package for $os $release"
# Per-OS staging directory (spaces in $os become underscores).
destdir=build/$(echo "$os" | tr ' ' '_')
prefix=/opt/logstash
# Clean any previous staging tree; the first test guards against
# accidentally rm -rf'ing "/" if both variables were empty.
if [ "$destdir/$prefix" != "/" -a -d "$destdir/$prefix" ] ; then
rm -rf "$destdir/$prefix"
fi
mkdir -p $destdir/$prefix
# install logstash.jar (must already have been built)
jar="$(dirname $0)/../build/logstash-$VERSION-flatjar.jar"
if [ ! -f "$jar" ] ; then
echo "Unable to find $jar"
exit 1
fi
cp $jar $destdir/$prefix/logstash.jar
# Stage OS-specific config, init and logrotate files into $destdir.
case $os@$release in
centos@*)
# RHEL-style layout: sysconfig + SysV init script.
mkdir -p $destdir/etc/logrotate.d
mkdir -p $destdir/etc/sysconfig
mkdir -p $destdir/etc/init.d
mkdir -p $destdir/etc/logstash/conf.d
mkdir -p $destdir/opt/logstash/tmp
mkdir -p $destdir/var/lib/logstash
mkdir -p $destdir/var/run/logstash
mkdir -p $destdir/var/log/logstash
cp $os/sysconfig $destdir/etc/sysconfig/logstash
install -m644 logrotate.conf $destdir/etc/logrotate.d/logstash
install -m755 logstash.sysv.redhat $destdir/etc/init.d/logstash
;;
ubuntu@*)
# Ubuntu: /etc/default + upstart jobs.
mkdir -p $destdir/etc/logstash/conf.d
mkdir -p $destdir/etc/logrotate.d
mkdir -p $destdir/etc/init
mkdir -p $destdir/var/lib/logstash
mkdir -p $destdir/var/log/logstash
mkdir -p $destdir/etc/default
touch $destdir/etc/default/logstash
install -m644 logrotate.conf $destdir/etc/logrotate.d/logstash
install -m644 logstash.default $destdir/etc/default/logstash
install -m644 logstash-web.default $destdir/etc/default/logstash-web
install -m755 logstash.upstart.ubuntu $destdir/etc/init/logstash.conf
install -m755 logstash-web.upstart.ubuntu $destdir/etc/init/logstash-web.conf
;;
debian@*)
# Debian: /etc/default + SysV init scripts.
mkdir -p $destdir/etc/logstash/conf.d
mkdir -p $destdir/etc/logrotate.d
mkdir -p $destdir/etc/init.d
mkdir -p $destdir/var/lib/logstash
mkdir -p $destdir/var/log/logstash
mkdir -p $destdir/etc/default
touch $destdir/etc/default/logstash
install -m644 logrotate.conf $destdir/etc/logrotate.d/logstash
install -m644 logstash.default $destdir/etc/default/logstash
install -m644 logstash-web.default $destdir/etc/default/logstash-web
install -m755 logstash.sysv.debian $destdir/etc/init.d/logstash
install -m755 logstash-web.sysv.debian $destdir/etc/init.d/logstash-web
;;
*)
echo "Unknown OS: $os $release"
exit 1
;;
esac
# NOTE(review): this lowercase `description` appears unused — the fpm
# calls below use $DESCRIPTION defined at the top of the script.
description="logstash is a system for managing and processing events and logs"
# Invoke fpm to assemble the final package from the staged tree.
case $os in
centos|fedora|redhat)
fpm -s dir -t rpm -n logstash -v "$RELEASE" \
  -a noarch --iteration "1_${os}${RPM_REVISION}" \
  --url "$URL" \
  --description "$DESCRIPTION" \
  -d "jre >= 1.6.0" \
  --vendor "Elasticsearch" \
  --license "Apache 2.0" \
  --before-install centos/before-install.sh \
  --before-remove centos/before-remove.sh \
  --after-install centos/after-install.sh \
  --config-files etc/sysconfig/logstash \
  --config-files etc/logrotate.d/logstash \
  -f -C $destdir .
;;
ubuntu|debian)
# Detect dev/RC versions (e.g. 1.2.2.dev, 1.2.2.rc1).
# Fix: the original ran `! grep -q '\.(dev\|rc.*)'` — in POSIX basic
# regular expressions the unescaped parentheses are literal, so the
# pattern never matched real versions and the `!` then ran the rewrite
# for every release (harmlessly, as the sed is a no-op without a match).
# Now the rewrite runs exactly when the version IS a dev/RC version.
if echo "$RELEASE" | grep -q '\.\(dev\|rc.*\)'; then
  # This is a dev or RC version... So change the upstream version
  # example: 1.2.2.dev => 1.2.2~dev
  # This ensures a clean upgrade path ("~" sorts before the release).
  RELEASE="$(echo $RELEASE | sed 's/\.\(dev\|rc.*\)/~\1/')"
fi
fpm -s dir -t deb -n logstash -v "$RELEASE" \
  -a all --iteration "1-${os}${DEB_REVISION}" \
  --url "$URL" \
  --description "$DESCRIPTION" \
  --vendor "Elasticsearch" \
  --license "Apache 2.0" \
  -d "java7-runtime-headless | java6-runtime-headless" \
  --deb-user root --deb-group root \
  --before-install $os/before-install.sh \
  --before-remove $os/before-remove.sh \
  --after-install $os/after-install.sh \
  --config-files /etc/default/logstash \
  --config-files /etc/default/logstash-web \
  --config-files /etc/logrotate.d/logstash \
  -f -C $destdir .
;;
esac
|
/**
* Created by <NAME> on 04.08.2014.
*/
import { IDType } from './IDType';
/** An object identifiable by a numeric id. */
export interface IHasUniqueId {
    id: number;
}
export declare class HasUniqueIdUtils {
    /** Extract the id of the given element. */
    static toId(elem: IHasUniqueId): number;
    /** Build a predicate matching elements with exactly this id. */
    static isId(id: number): (elem: IHasUniqueId) => boolean;
}
/**
 * IDType with an actual collection of entities.
 * Supports selections.
 */
export declare class ObjectManager<T extends IHasUniqueId> extends IDType {
    private readonly instances;
    private readonly pool;
    constructor(id: string, name: string);
    /** Next free id — presumably drawn from the internal pool; confirm. */
    nextId(item?: T): number;
    /** Register one or more items with the manager. */
    push(...items: T[]): void;
    /** Look up a managed item by its id. */
    byId(id: number): T;
    /** Iterate over all managed items. */
    forEach(callbackfn: (value: T) => void, thisArg?: any): void;
    /** All managed items. */
    get entries(): T[];
    remove(id: number): T;
    remove(item: T): T;
    /** Items in the current selection of `type` — semantics defined by IDType; confirm. */
    selectedObjects(type?: string): any;
}
|
<filename>Classes/EntrySense.h
//
// Sense.h
// DicionarioAberto
//
// Created by <NAME> on 21/12/2010.
//
#import "EntrySenseUsage.h"
// A single sense (meaning) of a DicionarioAberto dictionary entry.
@interface EntrySense : NSObject {
    NSInteger ast;              // NOTE(review): purpose unclear from here ("ast" marker?) — confirm
    NSMutableString *def;       // definition text — presumably; confirm against parser
    NSMutableString *gramGrp;   // grammatical group — presumably; confirm
    EntrySenseUsage *usg;       // usage information for this sense
}
@property (nonatomic, assign) NSInteger ast;
@property (nonatomic, strong) NSMutableString *def;
@property (nonatomic, strong) NSMutableString *gramGrp;
@property (nonatomic, strong) EntrySenseUsage *usg;
@end
|
<reponame>jsj14/python-docs-samples<gh_stars>0
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pathlib
import re
from typing import Generator
import uuid
from google.api_core.exceptions import NotFound
from google.cloud import dataproc_v1, storage
from google.cloud.dataproc_v1.types import LoggingConfig
from google.cloud.pubsublite import AdminClient, Subscription, Topic
from google.cloud.pubsublite.types import (
BacklogLocation,
CloudRegion,
CloudZone,
SubscriptionPath,
TopicPath,
)
import pytest
# A random alphanumeric string of length 32, used to keep cloud resource
# names unique across concurrent test runs.
UUID = uuid.uuid4().hex
# Required environment variables — direct [] indexing makes a missing
# variable fail fast with KeyError at import time.
PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
PROJECT_NUMBER = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
CLOUD_REGION = "us-west1"
ZONE_ID = "a"
BUCKET = os.environ["PUBSUBLITE_BUCKET_ID"]
CLUSTER_ID = os.environ["PUBSUBLITE_CLUSTER_ID"] + "-" + UUID
TOPIC_ID = "spark-streaming-topic-" + UUID
SUBSCRIPTION_ID = "spark-streaming-subscription-" + UUID
CURRENT_DIR = pathlib.Path(__file__).parent.resolve()
@pytest.fixture(scope="module")
def client() -> Generator[AdminClient, None, None]:
    # One Pub/Sub Lite admin client shared by all tests in this module.
    yield AdminClient(CLOUD_REGION)
@pytest.fixture(scope="module")
def topic(client: AdminClient) -> Generator[Topic, None, None]:
    """Module-scoped Pub/Sub Lite topic: reused if it already exists,
    created otherwise, and deleted on teardown (NotFound tolerated in
    case another process cleaned it up first)."""
    location = CloudZone(CloudRegion(CLOUD_REGION), ZONE_ID)
    topic_path = TopicPath(PROJECT_NUMBER, location, TOPIC_ID)
    # A topic of 2 partitions, each of size 30 GiB, publish throughput
    # capacity per partition to 4 MiB/s, and subscribe throughput
    # capacity per partition to 8 MiB/s.
    topic = Topic(
        name=str(topic_path),
        partition_config=Topic.PartitionConfig(
            count=2,
            capacity=Topic.PartitionConfig.Capacity(
                publish_mib_per_sec=4,
                subscribe_mib_per_sec=8,
            ),
        ),
        retention_config=Topic.RetentionConfig(
            per_partition_bytes=30 * 1024 * 1024 * 1024,
        ),
    )
    try:
        response = client.get_topic(topic.name)
    except NotFound:
        response = client.create_topic(topic)
    yield response
    try:
        client.delete_topic(response.name)
    except NotFound as e:
        print(e.message)
@pytest.fixture(scope="module")
def subscription(
    client: AdminClient, topic: Topic
) -> Generator[Subscription, None, None]:
    """Module-scoped subscription on the `topic` fixture with immediate
    delivery; created from the topic's beginning if absent, deleted on
    teardown (NotFound tolerated)."""
    location = CloudZone(CloudRegion(CLOUD_REGION), ZONE_ID)
    subscription_path = SubscriptionPath(PROJECT_NUMBER, location, SUBSCRIPTION_ID)
    subscription = Subscription(
        name=str(subscription_path),
        topic=topic.name,
        delivery_config=Subscription.DeliveryConfig(
            delivery_requirement=Subscription.DeliveryConfig.DeliveryRequirement.DELIVER_IMMEDIATELY,
        ),
    )
    try:
        response = client.get_subscription(subscription.name)
    except NotFound:
        # This subscription will start receiving the first message in the topic.
        response = client.create_subscription(subscription, BacklogLocation.BEGINNING)
    yield response
    try:
        client.delete_subscription(response.name)
    except NotFound as e:
        print(e.message)
@pytest.fixture(scope="module")
def dataproc_cluster() -> Generator[dataproc_v1.Cluster, None, None]:
    """Create a small Dataproc cluster for the module, delete it afterwards."""
    cluster_client = dataproc_v1.ClusterControllerClient(
        client_options={"api_endpoint": f"{CLOUD_REGION}-dataproc.googleapis.com:443"}
    )
    cluster = {
        "project_id": PROJECT_ID,
        "cluster_name": CLUSTER_ID,
        "config": {
            "master_config": {"num_instances": 1, "machine_type_uri": "n1-standard-2"},
            "worker_config": {"num_instances": 2, "machine_type_uri": "n1-standard-2"},
            "config_bucket": BUCKET,
            "temp_bucket": BUCKET,
            "software_config": {"image_version": "1.5-debian10"},
            "gce_cluster_config": {
                "service_account_scopes": [
                    "https://www.googleapis.com/auth/cloud-platform",
                ],
            },
            "lifecycle_config": {
                # Schedule cluster deletion after 1 hour (3600 s) of
                # inactivity, as a safety net if the explicit delete below
                # never runs.
                "idle_delete_ttl": {"seconds": 3600},
            },
        },
    }
    # Create the cluster.
    operation = cluster_client.create_cluster(
        request={"project_id": PROJECT_ID, "region": CLOUD_REGION, "cluster": cluster}
    )
    result = operation.result()
    yield result
    cluster_client.delete_cluster(
        request={
            "project_id": PROJECT_ID,
            "region": CLOUD_REGION,
            "cluster_name": result.cluster_name,
        }
    )
def pyfile(source_file: str) -> str:
    """Upload ``source_file`` to the test bucket and return its ``gs://`` URI.

    The blob is namespaced under the session UUID so parallel test runs do
    not overwrite each other's driver scripts.
    """
    blob = storage.Client().bucket(BUCKET).blob(os.path.join(UUID, source_file))
    blob.upload_from_filename(source_file)
    return f"gs://{blob.bucket.name}/{blob.name}"
def test_spark_streaming_to_pubsublite(
    topic: Topic, dataproc_cluster: dataproc_v1.Cluster
) -> None:
    """Submit the write-side PySpark job and assert it committed messages."""
    # Create a Dataproc job client.
    job_client = dataproc_v1.JobControllerClient(
        client_options={"api_endpoint": f"{CLOUD_REGION}-dataproc.googleapis.com:443"}
    )
    # Create the job config.
    job = {
        # Use the topic prefix and the first four alphanumeric
        # characters of the UUID as job ID
        "reference": {"job_id": topic.name.split("/")[-1][:-28]},
        "placement": {"cluster_name": dataproc_cluster.cluster_name},
        "pyspark_job": {
            "main_python_file_uri": pyfile("spark_streaming_to_pubsublite_example.py"),
            "jar_file_uris": [
                "gs://spark-lib/pubsublite/pubsublite-spark-sql-streaming-LATEST-with-dependencies.jar"
            ],
            "properties": {"spark.master": "yarn"},
            "logging_config": {"driver_log_levels": {"root": LoggingConfig.Level.INFO}},
            "args": [
                f"--project_number={PROJECT_NUMBER}",
                f"--location={CLOUD_REGION}-{ZONE_ID}",
                f"--topic_id={TOPIC_ID}",
            ],
        },
    }
    # Submitting as an operation blocks (via result()) until the job finishes.
    operation = job_client.submit_job_as_operation(
        request={
            "project_id": PROJECT_ID,
            "region": CLOUD_REGION,
            "job": job,
            "request_id": "write-" + UUID,
        }
    )
    response = operation.result()
    # Dataproc job output gets saved to the Google Cloud Storage bucket
    # allocated to the job. Use a regex to obtain the bucket and blob info.
    matches = re.match("gs://(.*?)/(.*)", response.driver_output_resource_uri)
    # ".000000000" is the first chunk of the driver log in GCS.
    output = (
        storage.Client()
        .get_bucket(matches.group(1))
        .blob(f"{matches.group(2)}.000000000")
        .download_as_text()
    )
    assert "Committed 1 messages for epochId" in output
def test_spark_streaming_from_pubsublite(
    subscription: Subscription, dataproc_cluster: dataproc_v1.Cluster
) -> None:
    """Submit the read-side PySpark job and assert it printed the first batch."""
    # Create a Dataproc job client.
    # (f-string used for consistency with test_spark_streaming_to_pubsublite.)
    job_client = dataproc_v1.JobControllerClient(
        client_options={"api_endpoint": f"{CLOUD_REGION}-dataproc.googleapis.com:443"}
    )
    # Create the job config.
    job = {
        # Use the subscription prefix and the first four alphanumeric
        # characters of the UUID as job ID
        "reference": {"job_id": subscription.name.split("/")[-1][:-28]},
        "placement": {"cluster_name": dataproc_cluster.cluster_name},
        "pyspark_job": {
            "main_python_file_uri": pyfile(
                "spark_streaming_from_pubsublite_example.py"
            ),
            "jar_file_uris": [
                "gs://spark-lib/pubsublite/pubsublite-spark-sql-streaming-LATEST-with-dependencies.jar"
            ],
            "properties": {"spark.master": "yarn"},
            "logging_config": {"driver_log_levels": {"root": LoggingConfig.Level.INFO}},
            "args": [
                f"--project_number={PROJECT_NUMBER}",
                f"--location={CLOUD_REGION}-{ZONE_ID}",
                f"--subscription_id={SUBSCRIPTION_ID}",
            ],
        },
    }
    # Submitting as an operation blocks (via result()) until the job finishes.
    operation = job_client.submit_job_as_operation(
        request={
            "project_id": PROJECT_ID,
            "region": CLOUD_REGION,
            "job": job,
            "request_id": "read-" + UUID,
        }
    )
    response = operation.result()
    # Dataproc job output gets saved to the Google Cloud Storage bucket
    # allocated to the job. Use a regex to obtain the bucket and blob info.
    matches = re.match("gs://(.*?)/(.*)", response.driver_output_resource_uri)
    # ".000000000" is the first chunk of the driver log in GCS.
    output = (
        storage.Client()
        .get_bucket(matches.group(1))
        .blob(f"{matches.group(2)}.000000000")
        .download_as_text()
    )
    assert "Batch: 0\n" in output
|
<reponame>SynthSys/BioDare2-BACK<filename>src/test/java/ed/biodare2/backend/services/recaptcha/ReCaptchaServiceTest.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.services.recaptcha;
import ed.biodare2.EnvironmentVariables;
import ed.biodare2.MockEnvironmentVariables;
import ed.biodare2.SimpleRepoTestConfig;
import ed.biodare2.backend.web.rest.ServerSideException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Ignore;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.junit4.SpringRunner;
/**
*
* @author Zielu
*/
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment=SpringBootTest.WebEnvironment.NONE)
@Import(SimpleRepoTestConfig.class)
public class ReCaptchaServiceTest {
    /*@Configuration
    @Import(EnvironmentConfiguration.class)
    @ComponentScan
    public static class Config {
    } */
    public ReCaptchaServiceTest() {
    }
    @Autowired
    ReCaptchaService service;
    @Before
    public void setUp() {
    }
    @After
    public void tearDown() {
    }
    /** A wrong challenge must be rejected (verify returns false), not throw. */
    @Test
    @Ignore("Tests no longer have valid captcha configuration")
    public void testGivesFalseOnWrongChallenge() {
        String challenge = "just testing response";
        boolean resp = service.verify(challenge);
        assertFalse(resp);
    }
    /** Misconfigured reCAPTCHA keys must surface as a ServerSideException. */
    @Test
    public void throwsExceptionOnCaptchaMisconfiguration() {
        MockEnvironmentVariables var = new MockEnvironmentVariables();
        var.recaptchaSiteKey = "wrongSiteKey";
        var.recaptchaSecretKey = "wrongSecretKey";
        EnvironmentVariables env = var.mock();
        service = new ReCaptchaService(env);
        String challenge = "just testing response";
        try {
            service.verify(challenge);
            fail("Exception expected");
        } catch (ServerSideException e) {
            // expected: misconfiguration is reported via ServerSideException
        }
    }
}
|
NSArray *myArray = @[@"Apple", @"Banana", @"Mango"];
NSString *searchString = @"Banana";
// -containsObject: compares via -isEqual:, which for NSString objects is
// equivalent to -isEqualToString: — no manual loop needed.
BOOL stringFound = [myArray containsObject:searchString];
if (stringFound) {
    NSLog(@"String found");
} else {
    NSLog(@"String not found");
}
<gh_stars>1000+
# function with large number of arguments
def fun(a, b, c, d, e, f, g):
    """Return a + b + c*d + e*f*g."""
    linear_part = a + b
    pair_product = c * d
    triple_product = e * f * g
    return linear_part + pair_product + triple_product
print(fun(1, 2, 3, 4, 5, 6, 7))
|
package net.synqg.qg.utils;
import lombok.experimental.UtilityClass;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Utility methods.
*
* @author kaustubhdholé.
*/
@UtilityClass
public class SynQgUtils {

    public static final String ANSI_RESET = "\u001B[0m";
    public static final String ANSI_GREEN = "\u001B[32m";
    public static final String ANSI_YELLOW = "\u001B[33m";
    public static final String ANSI_BLUE = "\u001B[34m";

    /**
     * English wh-question words.
     *
     * @return a mutable list (ArrayList), preserving the original contract;
     *         built via Arrays.asList for consistency with the other methods
     */
    public List<String> whQuestions() {
        return new ArrayList<>(Arrays.asList(
                "who", "which", "what", "why", "how", "when", "where"));
    }

    /** Demonstrative/relative pronouns (fixed-size list). */
    List<String> thatPronouns() {
        return Arrays.asList("that", "those", "it", "this", "these", "which");
    }

    /** Personal/possessive/reflexive pronouns, lower-cased (fixed-size list). */
    List<String> personalPronouns() {
        return Arrays.asList("i", "me", "myself",
                "we", "our", "ourself", "ourselves",
                "he", "she", "him", "her", "himself", "herself", "his",
                "they", "them", "their", "themselves");
    }
}
|
<gh_stars>1-10
require 'test_helper'
require 'simple_aws/ses'
# Spec for the SimpleAWS SES wrapper.
describe SimpleAWS::SES do
  before do
    @api = SimpleAWS::SES.new "key", "secret"
  end
  # SES has a single regional endpoint in us-east-1.
  it "points to the endpoint" do
    @api.uri.must_equal "https://email.us-east-1.amazonaws.com"
  end
  it "works with the current version" do
    @api.version.must_equal "2010-12-01"
  end
  describe "API calls" do
    # SES signs requests with Signature Version 3 via the
    # X-Amzn-Authorization header rather than query-string signing.
    it "builds and signs calls with Signature Version 3" do
      SimpleAWS::Connection.any_instance.expects(:call).with do |request|
        params = request.params
        params.wont_be_nil
        params["Action"].must_equal "SendEmail"
        request.headers["X-Amzn-Authorization"].wont_be_nil
        true
      end
      obj = SimpleAWS::SES.new "key", "secret"
      obj.send_email
    end
  end
end
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.taobao.weex.dom.action;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ArgbEvaluator;
import android.animation.ObjectAnimator;
import android.animation.PropertyValuesHolder;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.view.animation.PathInterpolatorCompat;
import android.text.TextUtils;
import android.util.Pair;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import com.alibaba.fastjson.JSONObject;
import com.taobao.weex.WXSDKInstance;
import com.taobao.weex.WXSDKManager;
import com.taobao.weex.dom.DOMAction;
import com.taobao.weex.dom.DOMActionContext;
import com.taobao.weex.dom.RenderAction;
import com.taobao.weex.dom.RenderActionContext;
import com.taobao.weex.dom.WXDomObject;
import com.taobao.weex.ui.animation.BackgroundColorProperty;
import com.taobao.weex.ui.animation.HeightProperty;
import com.taobao.weex.ui.animation.WXAnimationBean;
import com.taobao.weex.ui.animation.WXAnimationModule;
import com.taobao.weex.ui.animation.WidthProperty;
import com.taobao.weex.ui.component.WXComponent;
import com.taobao.weex.ui.view.border.BorderDrawable;
import com.taobao.weex.utils.SingleFunctionParser;
import com.taobao.weex.utils.WXLogUtils;
import com.taobao.weex.utils.WXResourceUtils;
import com.taobao.weex.utils.WXUtils;
import com.taobao.weex.utils.WXViewUtils;
import java.util.HashMap;
import java.util.List;
/**
 * Runs a CSS-style animation on a component: parses the animation JSON on the
 * DOM thread (executeDom), then builds and starts an ObjectAnimator on the
 * render thread (executeRender). Invokes an optional JS callback when done.
 */
class AnimationAction implements DOMAction, RenderAction {
  private final static String TAG = "AnimationAction";
  @NonNull
  private final String ref;
  @Nullable
  private
  final String animation;
  @Nullable
  private
  final String callback;
  // Parsed form of `animation`; set in executeDom (or directly by constructor).
  @Nullable
  private
  WXAnimationBean mAnimationBean;
  AnimationAction(@NonNull final String ref, @Nullable String animation,
                  @Nullable final String callBack) {
    this.ref = ref;
    this.animation = animation;
    this.callback = callBack;
  }
  AnimationAction(@NonNull String ref, @NonNull WXAnimationBean animationBean) {
    this(ref, animationBean, null);
  }
  AnimationAction(@NonNull String ref, @NonNull WXAnimationBean animationBean,
                  @Nullable final String callBack) {
    this.ref = ref;
    this.mAnimationBean = animationBean;
    this.callback = callBack;
    this.animation = null;
  }
  @Override
  public void executeDom(DOMActionContext context) {
    try {
      WXDomObject domObject;
      // Only proceed when the instance is alive, there is animation JSON,
      // and the referenced DOM node still exists.
      if (!context.isDestory() &&
          !TextUtils.isEmpty(animation) &&
          (domObject = context.getDomByRef(ref)) != null) {
        WXAnimationBean animationBean = JSONObject.parseObject(animation, WXAnimationBean.class);
        if (animationBean != null && animationBean.styles != null) {
          int width = (int) domObject.getLayoutWidth();
          int height = (int) domObject.getLayoutHeight();
          // Resolve transform/transform-origin against the node's layout size.
          animationBean.styles.init(animationBean.styles.transformOrigin,
              animationBean.styles.transform, width, height,
              context.getInstance().getInstanceViewPortWidth());
          mAnimationBean = animationBean;
          // Hand off to the render thread (executeRender).
          context.postRenderTask(this);
        }
      }
    } catch (RuntimeException e) {
      WXLogUtils.e(TAG, WXLogUtils.getStackTrace(e));
    }
  }
  @Override
  public void executeRender(RenderActionContext context) {
    WXSDKInstance instance;
    if (mAnimationBean != null && (instance = context.getInstance()) != null) {
      startAnimation(instance, context.getComponent(ref));
    }
  }
  private void startAnimation(@NonNull WXSDKInstance instance, @Nullable WXComponent component) {
    if (component != null) {
      if (mAnimationBean != null) {
        component.setNeedLayoutOnAnimation(mAnimationBean.needLayout);
      }
      if (component.getHostView() == null) {
        // View not created yet: defer the animation until the view exists.
        WXAnimationModule.AnimationHolder holder = new WXAnimationModule.AnimationHolder(mAnimationBean, callback);
        component.postAnimation(holder);
      } else {
        try {
          Animator animator = createAnimator(component.getHostView(), instance
              .getInstanceViewPortWidth());
          if (animator != null) {
            Animator.AnimatorListener animatorCallback = createAnimatorListener(instance, callback);
            // Pre-JB-MR2 devices need a hardware layer for smooth animation.
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2 && component
                .isLayerTypeEnabled() ) {
              component.getHostView().setLayerType(View.LAYER_TYPE_HARDWARE, null);
            }
            Interpolator interpolator = createTimeInterpolator();
            if (animatorCallback != null) {
              animator.addListener(animatorCallback);
            }
            if (interpolator != null) {
              animator.setInterpolator(interpolator);
            }
            animator.setDuration(mAnimationBean.duration);
            animator.start();
          }
        } catch (RuntimeException e) {
          WXLogUtils.e(TAG, WXLogUtils.getStackTrace(e));
        }
      }
    }
  }
  /**
   * Builds an ObjectAnimator from the parsed styles: background color,
   * width/height (layout params) and pivot, plus the transform holders
   * already computed by the style object. Returns null without styles.
   */
  private
  @Nullable
  ObjectAnimator createAnimator(final View target, final int viewPortWidth) {
    if (target == null) {
      return null;
    }
    WXAnimationBean.Style style = mAnimationBean.styles;
    if (style != null) {
      ObjectAnimator animator;
      List<PropertyValuesHolder> holders = style.getHolders();
      if (!TextUtils.isEmpty(style.backgroundColor)) {
        BorderDrawable borderDrawable;
        // Animate from the current background color, whether it comes from a
        // Weex border drawable or a plain ColorDrawable.
        if ((borderDrawable = WXViewUtils.getBorderDrawable(target)) != null) {
          holders.add(PropertyValuesHolder.ofObject(
              new BackgroundColorProperty(), new ArgbEvaluator(),
              borderDrawable.getColor(),
              WXResourceUtils.getColor(style.backgroundColor)));
        } else if (target.getBackground() instanceof ColorDrawable) {
          holders.add(PropertyValuesHolder.ofObject(
              new BackgroundColorProperty(), new ArgbEvaluator(),
              ((ColorDrawable) target.getBackground()).getColor(),
              WXResourceUtils.getColor(style.backgroundColor)));
        }
      }
      if (target.getLayoutParams() != null &&
          (!TextUtils.isEmpty(style.width) || !TextUtils.isEmpty(style.height))) {
        ViewGroup.LayoutParams layoutParams = target.getLayoutParams();
        // Width/height animate the layout params from their current values
        // to the style values converted into real pixels.
        if (!TextUtils.isEmpty(style.width)) {
          holders.add(PropertyValuesHolder.ofInt(new WidthProperty(), layoutParams.width,
              (int) WXViewUtils.getRealPxByWidth(WXUtils.getFloat(style.width), viewPortWidth)));
        }
        if (!TextUtils.isEmpty(style.height)) {
          holders.add(PropertyValuesHolder.ofInt(new HeightProperty(), layoutParams.height,
              (int) WXViewUtils.getRealPxByWidth(WXUtils.getFloat(style.height), viewPortWidth)));
        }
      }
      if (style.getPivot() != null) {
        Pair<Float, Float> pair = style.getPivot();
        target.setPivotX(pair.first);
        target.setPivotY(pair.second);
      }
      animator = ObjectAnimator.ofPropertyValuesHolder(
          target, holders.toArray(new PropertyValuesHolder[holders.size()]));
      animator.setStartDelay(mAnimationBean.delay);
      return animator;
    } else {
      return null;
    }
  }
  /**
   * Returns a listener that fires the JS callback when the animation ends,
   * or null when no callback was supplied.
   */
  private
  @Nullable
  Animator.AnimatorListener createAnimatorListener(final WXSDKInstance instance, @Nullable final String callBack) {
    if (!TextUtils.isEmpty(callBack)) {
      return new AnimatorListenerAdapter() {
        @Override
        public void onAnimationEnd(Animator animation) {
          if (instance == null || instance.isDestroy()) {
            WXLogUtils.e("RenderActionContextImpl-onAnimationEnd WXSDKInstance == null NPE or instance is destroyed");
          } else {
            WXSDKManager.getInstance().callback(instance.getInstanceId(),
                callBack,
                new HashMap<String, Object>());
          }
        }
      };
    } else {
      return null;
    }
  }
  /**
   * Maps the timing-function name to an Android interpolator; unknown names
   * are parsed as cubic-bezier(x1, y1, x2, y2). Returns null when no (or an
   * invalid) timing function is set.
   */
  private
  @Nullable
  Interpolator createTimeInterpolator() {
    String interpolator = mAnimationBean.timingFunction;
    if (!TextUtils.isEmpty(interpolator)) {
      switch (interpolator) {
        case WXAnimationBean.EASE_IN:
          return new AccelerateInterpolator();
        case WXAnimationBean.EASE_OUT:
          return new DecelerateInterpolator();
        case WXAnimationBean.EASE_IN_OUT:
          return new AccelerateDecelerateInterpolator();
        case WXAnimationBean.LINEAR:
          return new LinearInterpolator();
        default:
          //Parse cubic-bezier
          try {
            SingleFunctionParser<Float> parser = new SingleFunctionParser<>(
                mAnimationBean.timingFunction,
                new SingleFunctionParser.FlatMapper<Float>() {
                  @Override
                  public Float map(String raw) {
                    return Float.parseFloat(raw);
                  }
                });
            List<Float> params = parser.parse(WXAnimationBean.CUBIC_BEZIER);
            if (params != null && params.size() == WXAnimationBean.NUM_CUBIC_PARAM) {
              return PathInterpolatorCompat.create(
                  params.get(0), params.get(1), params.get(2), params.get(3));
            } else {
              return null;
            }
          } catch (RuntimeException e) {
            return null;
          }
      }
    }
    return null;
  }
}
|
#!/usr/bin/env bash
# Restores the solution, then packs each Veldrid project as a NuGet package
# with snupkg symbol packages, in Release configuration.
echo Restoring packages...
# NOTE(review): restore is invoked twice — presumably a workaround for a
# flaky first restore; confirm whether the duplicate call is still needed.
dotnet restore src/Veldrid.sln
dotnet restore src/Veldrid.sln
echo Building and packing...
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.OpenGLBindings/Veldrid.OpenGLBindings.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.MetalBindings/Veldrid.MetalBindings.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid/Veldrid.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.Utilities/Veldrid.Utilities.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.ImGui/Veldrid.ImGui.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.ImageSharp/Veldrid.ImageSharp.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.SDL2/Veldrid.SDL2.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.StartupUtilities/Veldrid.StartupUtilities.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.VirtualReality/Veldrid.VirtualReality.csproj
dotnet pack --include-symbols -p:SymbolPackageFormat=snupkg -c Release src/Veldrid.RenderDoc/Veldrid.RenderDoc.csproj
# Federated search requires us to append a prefix in the form of `<protocol><version>` to all URLs that get uploaded to Algolia. This script determines this prefix and saves it in the `PRODUCT_URL` environment variable.
# NOTE(review): VERSION appears unused in this script — confirm before removal.
VERSION=""
URL=""
PROTO="https://"
PROTO_AND_VERSION=""
# If one of the following environment variables is undefined, then we assume that this script runs on a development machine. We don't modify the values of the `VERSION` or `URL` variables. The prefix will be an empty string.
if [ -z "$TRAVIS_BRANCH" ] || [ -z "$BRANCH_VERSION_CONFIG" ] || [ -z "$VERSIONS_BASE_URL" ]
then
    # Message now names all three variables the guard actually checks
    # (it previously omitted $VERSIONS_BASE_URL).
    echo "One of the following environment variables is not defined: \$TRAVIS_BRANCH, \$BRANCH_VERSION_CONFIG, \$VERSIONS_BASE_URL."
# If these environment variables are defined, then we assume that this script runs in Travis.
else
    # The `BRANCH_VERSION_CONFIG` environment variable is a comma-separated list of branches and versions for which we build the deployment image, update the Algolia index and push the image to GCP. If `BRANCH_VERSION_CONFIG` contains `TRAVIS_BRANCH`, then we assume that this script runs in Travis, and begin constructing the URL by setting `URL` to the base URL (`VERSIONS_BASE_URL).
    if [[ $BRANCH_VERSION_CONFIG == *$TRAVIS_BRANCH* ]]
    then
        echo "\$TRAVIS_BRANCH = $TRAVIS_BRANCH is in \$BRANCH_VERSION_CONFIG = $BRANCH_VERSION_CONFIG"
        URL=$VERSIONS_BASE_URL
        echo "\$LATEST_VERSION is $LATEST_VERSION"
        # If we build the latest version, then we don't need to add the version to the URL
        if [[ "$LATEST_VERSION" == "$TRAVIS_BRANCH" ]]
        then
            PROTO_AND_VERSION="$PROTO"
        # If we are not building the latest version, then we add the version to the URL
        else
            PROTO_AND_VERSION="$PROTO$TRAVIS_BRANCH."
        fi
    # If `BRANCH_VERSION_CONFIG` doesn't contain `TRAVIS_BRANCH`, then we're probably building the `master` branch. We still build the URL because otherwise our link checker would fail, but we don't add the version to the URL.
    else
        echo "\$TRAVIS_BRANCH = $TRAVIS_BRANCH is not in \$BRANCH_VERSION_CONFIG = $BRANCH_VERSION_CONFIG"
        URL=$VERSIONS_BASE_URL
        PROTO_AND_VERSION="$PROTO"
    fi
fi
echo "\$PRODUCT_URL is $PROTO_AND_VERSION$URL"
export PRODUCT_URL=$PROTO_AND_VERSION$URL
|
import { getProperties } from '@ember/object';
import DaysComponent from '../power-calendar/days';
import { isBetween, isSame, diff } from 'ember-power-calendar-utils';
// Range-selection variant of the power-calendar days component: decorates
// each day with isSelected / isRangeStart / isRangeEnd based on the
// calendar's selected {start, end} range.
export default DaysComponent.extend({
  // Methods
  buildDay(date, today, calendar) {
    let day = this._super(...arguments);
    let { start, end } = getProperties(calendar.selected || { start: null, end: null }, 'start', 'end');
    if (start && end) {
      // Complete range: '[]' makes isBetween inclusive at both ends.
      day.isSelected = isBetween(date, start, end, 'day', '[]');
      day.isRangeStart = day.isSelected && isSame(date, start, 'day');
      day.isRangeEnd = day.isSelected && isSame(date, end, 'day');
    } else {
      // No end chosen yet: only the start day (if any) is selected.
      day.isRangeEnd = false;
      if (!start) {
        day.isRangeStart = false;
      } else {
        day.isRangeStart = day.isSelected = isSame(date, start, 'day');
        if (!day.isDisabled) {
          // Disable days whose distance from start violates min/max range.
          let diffInMs = Math.abs(diff(day.date, start));
          day.isDisabled = diffInMs < calendar.minRange
            || calendar.maxRange !== null && diffInMs > calendar.maxRange;
        }
      }
    }
    return day;
  },
  // Range selection computes selection per-day in buildDay; the base
  // class's single-date check is deliberately disabled.
  dayIsSelected() {
    return false;
  }
});
|
class Solution:
def findMax(arr):
max_value = -float("inf")
for n in arr:
if n > max_value:
max_value = n
return max_value |
<reponame>edanko/dxf-go
package entities
import (
"github.com/edanko/dxf-go/core"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"strings"
"testing"
)
// VertexTestSuite groups the Vertex DXF-parsing tests.
type VertexTestSuite struct {
	suite.Suite
}
// TestMinimalVertex parses the smallest valid VERTEX tag stream (handle,
// layer and 3D location) and checks defaults: On/Visible true, no SEQEND,
// no nested entities.
func (suite *VertexTestSuite) TestMinimalVertex() {
	expected := Vertex{
		BaseEntity: BaseEntity{
			Handle:    "LH",
			LayerName: "0",
			On:        true,
			Visible:   true,
		},
		Location: core.Point{X: 1.1, Y: 1.2, Z: 1.3},
	}
	next := core.Tagger(strings.NewReader(testMinimalVertex))
	vertex, err := NewVertex(core.TagSlice(core.AllTags(next)))
	suite.Nil(err)
	suite.True(expected.Equals(vertex))
	suite.False(vertex.IsSeqEnd())
	suite.False(vertex.HasNestedEntities())
}
// TestVertexAllAttribs parses a tag stream that sets every attribute
// NewVertex understands (base-entity fields plus vertex-specific widths,
// bulge, id and the flag bits from group code 70).
func (suite *VertexTestSuite) TestVertexAllAttribs() {
	expected := Vertex{
		BaseEntity: BaseEntity{
			Handle:        "ALL_ARGS",
			Owner:         "hb",
			Space:         PAPER,
			LayoutTabName: "layout",
			LayerName:     "L1",
			LineTypeName:  "CONTINUOUS",
			On:            true,
			Color:         2,
			LineWeight:    3,
			LineTypeScale: 2.5,
			Visible:       false,
			TrueColor:     core.TrueColor(0x684e45),
			ColorName:     "BROWN",
			Transparency:  5,
			ShadowMode:    CASTS,
		},
		Location:                 core.Point{X: 1.1, Y: 1.2, Z: 1.3},
		StartingWidth:            10.5,
		EndWidth:                 15.8,
		Bulge:                    11.2,
		Id:                       3,
		CreatedByCurveFitting:    true,
		CurveFitTangentDefined:   true,
		SplineVertex:             true,
		SplineFrameCtrlPoint:     true,
		Is3dPolylineVertex:       true,
		Is3dPolylineMesh:         true,
		IsPolyfaceMeshVertex:     true,
		CurveFitTangentDirection: 0.2,
	}
	next := core.Tagger(strings.NewReader(testVertexAllAttribs))
	vertex, err := NewVertex(core.TagSlice(core.AllTags(next)))
	suite.Nil(err)
	suite.True(expected.Equals(vertex))
}
// Equals must reject values that are not a Vertex at all.
func (suite *VertexTestSuite) TestVertexNotEqualToDifferentType() {
	suite.False(Vertex{}.Equals(core.NewIntegerValue(0)))
}
// TestVertexTestSuite is the go-test entry point for the suite above.
func TestVertexTestSuite(t *testing.T) {
	suite.Run(t, new(VertexTestSuite))
}
// TestVertexSliceEquality exercises VertexSlice.Equals over pairs of slices:
// empty vs empty, equal contents, different lengths, and same elements in a
// different order (order matters, so that pair is unequal).
func TestVertexSliceEquality(t *testing.T) {
	cases := []struct {
		p1     VertexSlice
		p2     VertexSlice
		equals bool
	}{
		{p1: VertexSlice{}, p2: VertexSlice{}, equals: true},
		{p1: VertexSlice{&Vertex{}}, p2: VertexSlice{&Vertex{}}, equals: true},
		{
			p1:     VertexSlice{&Vertex{Id: 1}, &Vertex{Id: 2}},
			p2:     VertexSlice{&Vertex{Id: 1}, &Vertex{Id: 2}},
			equals: true,
		},
		{p1: VertexSlice{&Vertex{Id: 1}}, p2: VertexSlice{}, equals: false},
		{
			p1:     VertexSlice{&Vertex{Id: 1}, &Vertex{Id: 2}},
			p2:     VertexSlice{&Vertex{Id: 2}, &Vertex{Id: 1}},
			equals: false,
		},
	}
	for i, tc := range cases {
		assert.Equal(t, tc.equals, tc.p1.Equals(tc.p2), "Test index %v", i)
	}
}
// testMinimalVertex is the smallest valid VERTEX tag stream: entity type (0),
// handle (5), layer (8) and a 3D location (group codes 10/20/30).
const testMinimalVertex = ` 0
VERTEX
5
LH
8
0
10
1.1
20
1.2
30
1.3
`
// testVertexAllAttribs sets every attribute NewVertex understands; see
// TestVertexAllAttribs for the expected decoded values.
const testVertexAllAttribs = ` 0
VERTEX
5
ALL_ARGS
8
L1
6
CONTINUOUS
48
2.5
60
1
62
2
67
1
284
1
330
hb
370
3
410
layout
420
6835781
430
BROWN
440
5
10
1.1
20
1.2
30
1.3
40
10.5
41
15.8
42
11.2
91
3
70
251
50
0.2
`
|
<gh_stars>10-100
// Package statexp is an experimental API for the gokrazy/stat package.
package statexp
import (
"github.com/gokrazy/stat"
"github.com/gokrazy/stat/internal/cpu"
"github.com/gokrazy/stat/internal/disk"
"github.com/gokrazy/stat/internal/mem"
"github.com/gokrazy/stat/internal/net"
"github.com/gokrazy/stat/internal/sys"
)
// ProcessAndFormatter turns a map of raw /proc-style file contents
// (keyed by name) into formatted stat columns.
type ProcessAndFormatter interface {
	ProcessAndFormat(map[string][]byte) []stat.Col
}
// DefaultModules returns the standard set of stat modules in display order:
// CPU, disk, system, network and memory.
func DefaultModules() []ProcessAndFormatter {
	return []ProcessAndFormatter{
		&cpu.Stats{},
		&disk.Stats{},
		&sys.Stats{},
		&net.Stats{},
		&mem.Stats{},
	}
}
|
<filename>examples/clients/oauth2-react-client-example/src/index.js<gh_stars>1-10
import React from 'react';
import ReactDOM from 'react-dom';
import './index.css';
import App from './App';
import reportWebVitals from './reportWebVitals';
import axios from 'axios';
// Dedicated axios instance that attaches the OAuth2 access token to every
// request. The token is read per-request from window.accessToken;
// 'dummy' is a placeholder used before a real token has been obtained.
const instance = axios.create();
instance.interceptors.request.use((config) => {
  const token = window.accessToken || 'dummy';
  config.headers['Authorization'] = 'Bearer ' + token;
  return config;
}, (error) => Promise.reject(error));
// Response interceptor is a pass-through; errors are propagated unchanged.
instance.interceptors.response.use(
  (response) => response,
  (error) => Promise.reject(error));
// NOTE(review): `instance` is never exported or used in this chunk — confirm
// whether other modules are expected to import it from here.
ReactDOM.render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
  document.getElementById('root'),
);
// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals();
|
import unittest
from unittest import mock
from your_module import NuageSubnetExtensionDriver, RESTProxyServer, VsdClientImpl
class TestNuageSubnetExtensionDriver(unittest.TestCase):
    """Unit test for NuageSubnetExtensionDriver initialization."""

    @mock.patch.object(RESTProxyServer, 'raise_rest_error')
    @mock.patch.object(VsdClientImpl, 'verify_cms')
    def test_init_nuage_subnet_extension_driver(self, mock_verify_cms, mock_raise_rest_error):
        """initialize() runs exactly once and takes no arguments.

        RESTProxyServer.raise_rest_error and VsdClientImpl.verify_cms are
        patched so no REST/VSD activity can happen while the driver is built.
        """
        # Patch initialize itself so constructing the driver has no side effects.
        with mock.patch('your_module.NuageSubnetExtensionDriver.initialize') as mock_initialize:
            NuageSubnetExtensionDriver().initialize()
            # initialize() is invoked with no arguments, so assert exactly that.
            # (The previous version asserted a `config_fixture` argument that
            # was never passed, and asserted calls on verify_cms /
            # raise_rest_error that the mocked-out initialize can never reach.)
            mock_initialize.assert_called_once_with()


if __name__ == '__main__':
    unittest.main()
# Jbuilder view: serializes the device's plugin version plus its related
# devices rendered through the devices/show partial.
# NOTE(review): no_cascade presumably stops the nested partial from recursing
# into its own related devices — confirm against devices/show.
json.version @device.plugin.version
json.connected_devices do
  json.array!(@device.related_devices) do |device|
    json.partial! partial: 'devices/show', locals: { device: device, no_cascade: 1 }
  end
end
|
package com.mera.callcenter;
import com.mera.callcenter.entities.Call;
import com.mera.callcenter.entities.Employee;
import com.mera.callcenter.entities.EmployeeType;
import org.jboss.logging.Logger;
import java.util.*;
import static com.mera.callcenter.entities.Call.MIN_DURATION;
/**
* A factory pattern implementation to create objects on demand
*/
/**
 * A factory pattern implementation to create objects on demand.
 */
public class CallCenterFactory {

    private static final Logger LOGGER = Logger.getLogger(CallCenterFactory.class);

    // Shared RNG: creating a new Random per call risks identical seeds when
    // invoked in quick succession, and allocates needlessly.
    private static final Random RANDOM = new Random();

    /** Creates a manager with a fresh random id. */
    public static Employee buildManager() {
        return new Employee(EmployeeType.MANAGER, UUID.randomUUID());
    }

    /** Creates a supervisor with a fresh random id. */
    public static Employee buildSupervisor() {
        return new Employee(EmployeeType.SUPERVISOR, UUID.randomUUID());
    }

    /** Creates an operator with a fresh random id. */
    public static Employee buildOperator() {
        return new Employee(EmployeeType.OPERATOR, UUID.randomUUID());
    }

    /**
     * Creates {@code maxNumberEmployees} employees of random types.
     *
     * @param maxNumberEmployees how many employees to create
     * @return a mutable list of random employees
     */
    public static List<Employee> buildRandomEmployees(int maxNumberEmployees) {
        LOGGER.debug("Building " + maxNumberEmployees + " employees");
        List<Employee> employees = new ArrayList<>();
        for (int i = 0; i < maxNumberEmployees; i++) {
            employees.add(buildRandomEmployee());
        }
        return employees;
    }

    /** A fixed four-person roster: operator, supervisor, operator, manager. */
    public static List<Employee> buildBasicHierarchyEmployees() {
        return Arrays.asList(
                buildOperator(),
                buildSupervisor(),
                buildOperator(),
                buildManager()
        );
    }

    /**
     * Creates one employee of a uniformly random type.
     *
     * <p>The type is picked from {@link EmployeeType#values()} instead of a
     * hard-coded bound of 3, so new enum constants are covered automatically;
     * the previous duplicate OPERATOR/default switch branches are merged.
     */
    public static Employee buildRandomEmployee() {
        EmployeeType[] types = EmployeeType.values();
        EmployeeType type = types[RANDOM.nextInt(types.length)];
        switch (type) {
            case MANAGER:
                return buildManager();
            case SUPERVISOR:
                return buildSupervisor();
            case OPERATOR:
            default:
                return buildOperator();
        }
    }

    /** Creates a call with a random duration in [MIN_DURATION, MIN_DURATION + 5]. */
    public static Call buildRandomCall() {
        long duration = (long) RANDOM.nextInt(6) + MIN_DURATION;
        return new Call(duration);
    }

    /**
     * Creates {@code numberOfCalls} random calls.
     *
     * @param numberOfCalls how many calls to create
     * @return a mutable list of random calls
     */
    public static List<Call> buildRandomCalls(int numberOfCalls) {
        List<Call> calls = new ArrayList<>();
        for (int i = 0; i < numberOfCalls; i++) {
            calls.add(buildRandomCall());
        }
        return calls;
    }
}
|
# Draper decorator for CamaleonCms::Post. Provides the "the_*" presentation
# helpers used by themes and views: titles, excerpts, content, thumbnails,
# frontend/admin URLs, breadcrumbs, prev/next navigation and related posts.
# Most helpers run a hook (h.hooks_run) so plugins can rewrite the value
# before it is returned.
class CamaleonCms::PostDecorator < CamaleonCms::ApplicationDecorator
  include CamaleonCms::CustomFieldsConcern
  delegate_all
  # return the title of this post translated into the given locale
  # (defaults to the current locale); filtered by the "post_the_title" hook
  def the_title(locale = nil)
    r = {title: object.title.to_s.translate(get_locale(locale)), post: object}
    h.hooks_run("post_the_title", r)
    r[:title]
  end
  # return the excerpt of this post
  # qty_chars: max length of the auto-generated excerpt used when no "summary"
  #   meta value exists (shortcodes and html tags are stripped first);
  # filtered by the "post_the_excerpt" hook
  def the_excerpt(qty_chars = 200)
    excerpt = object.get_meta("summary").to_s.translate(get_locale)
    # r = {content: (excerpt.present? ? excerpt : object.content_filtered.to_s.translate(get_locale).strip_tags.gsub(/ |\n/, " ").truncate(qty_chars)), post: object}
    r = {content: (excerpt.present? ? excerpt : h.cama_strip_shortcodes(object.content_filtered.to_s.translate(get_locale).strip_tags.gsub(/ |\n/, " ").truncate(qty_chars))), post: object}
    h.hooks_run("post_the_excerpt", r)
    r[:content]
  end
  # return the content of this post
  # (translated, hook-filtered and with shortcodes evaluated)
  def the_content
    r = {content: object.content.to_s.translate(get_locale), post: object}
    h.hooks_run("post_the_content", r)
    h.do_shortcode(r[:content], self)
  end
  # return thumbnail image for this post
  # default: default image if thumbnails not exist
  # if default is empty, post_type default thumb will be returned
  def the_thumb_url(default = nil)
    th = object.get_meta("thumb")
    th.present? ? th : (default || object.post_type.get_option('default_thumb', nil) || h.asset_url("camaleon_cms/image-not-found.png"))
  end
  alias_method :the_image_url, :the_thumb_url
  # check if this page has registered the thumbnail
  def has_thumb?
    object.get_meta("thumb").present?
  end
  # return the path for this page
  # sample: /my-page.html
  def the_path(*args)
    args = args.extract_options!
    args[:as_path] = true
    the_url(args)
  end
  # return front url for this post
  # sample: http://localhost.com/my-page.html
  # args:
  # locale: language (default current language)
  # as_path: return the path instead of full url, sample: /my-page.html
  # Also, you can pass extra attributes as params for the url, sample: page.the_url(my_param: 'value', other: "asd")
  # => http://localhost.com/my-page.html?my_param=value&other=asd
  # Return String URL
  def the_url(*args)
    args = args.extract_options!
    args[:locale] = get_locale unless args.include?(:locale)
    args[:format] = args[:format] || "html"
    args[:slug] = the_slug(args[:locale])
    p = args.delete(:as_path).present? ? "path" : "url"
    l = _calc_locale(args[:locale])
    ptype = object.post_type.decorate
    # route format is configured per post type; hierarchical post types
    # always use the "hierarchy_post" format regardless of configuration
    p_url_format = ptype.contents_route_format
    p_url_format = "hierarchy_post" if ptype.manage_hierarchy?
    case p_url_format
      when "post_of_post_type"
        args[:label] = I18n.t('routes.group', default: 'group')
        args[:post_type_id] = ptype.id
        args[:title] = ptype.the_title(args[:locale]).parameterize.presence || ptype.the_slug
      when "post_of_category"
        if ptype.manage_categories?
          # NOTE(review): inline rescue falls back to the post type's default
          # category when the post has no category assigned
          cat = object.categories.first.decorate rescue ptype.default_category.decorate
          args[:label_cat] = I18n.t("routes.category", default: "category")
          args[:category_id] = cat.id
          args[:title] = cat.the_title(args[:locale]).parameterize
          args[:title] = cat.the_slug unless args[:title].present?
        else
          p_url_format = "post"
        end
      when "post_of_posttype"
        args[:post_type_title] = ptype.the_title(args[:locale]).parameterize.presence || ptype.the_slug
      when "post_of_category_post_type"
        if ptype.manage_categories?
          cat = object.categories.first.decorate rescue ptype.default_category.decorate
          args[:label_cat] = I18n.t("routes.category", default: "category")
          args[:post_type_title] = ptype.the_title(args[:locale]).parameterize.presence || ptype.the_slug
          args[:category_id] = cat.id
          args[:title] = cat.the_title(args[:locale]).parameterize
          args[:title] = cat.the_slug unless args[:title].present?
        else
          p_url_format = "post"
        end
      when 'hierarchy_post'
        if object.post_parent.present?
          # prepend the slugs of all ancestors: ".../parent-slug/child-slug"
          slugs = ([args[:slug]]+object.parents.map{|parent| parent.decorate.the_slug(args[:locale]) }).reverse
          args[:slug], args[:parent_title] = slugs.slice(1..-1).join("/"), slugs.first
        else
          p_url_format = "post"
        end
    end
    h.cama_url_to_fixed("cama_#{p_url_format}_#{p}", args)
  end
  # return a hash of frontend urls for this post
  # sample: {es: 'http://mydomain.com/es/articulo-3.html', en: 'http://mydomain.com/en/post-3.html'}
  def the_urls(*args)
    args = args.extract_options!
    res = {}
    h.current_site.the_languages.each do |l|
      args[:locale] = l
      res[l] = the_url(args.clone)
    end
    res
  end
  # return edit url for this post
  def the_edit_url
    args = h.cama_current_site_host_port({})
    h.edit_cama_admin_post_type_post_url(object.post_type.id, object, args)
  end
  # create the html link with edit link
  # return html link (empty string when no admin user is signed in)
  # attrs: Hash of link tag attributes, sample: {id: "myid", class: "sss" }
  def the_edit_link(title = nil, attrs = { })
    return '' unless h.cama_current_user.present?
    attrs = {target: "_blank", style: "font-size:11px !important;cursor:pointer;"}.merge(attrs)
    h.link_to("→ #{title || h.ct("edit", default: 'Edit')}".html_safe, the_edit_url, attrs)
  end
  # show thumbnail image as html
  # img_args: html attributes for the <img> tag; filtered by "post_the_thumb" hook
  def the_thumb(img_args = {})
    r = {image: h.image_tag(the_thumb_url, img_args), post: object}
    h.hooks_run("post_the_thumb", r)
    r[:image]
  end
  # show link and thumbnail included as html
  # link_args: html attributes for link
  # img_args: html attributes for image
  def the_link_thumb(link_args = {}, img_args = {})
    h.link_to(the_thumb(img_args), the_url, link_args)
  end
  # return an html label badge for the post status, colored per status
  # NOTE(review): the returned string is not marked html_safe here — callers
  # appear expected to do that; confirm before changing.
  def the_status
    case self.status
      when "published"
        color = "info"
        status = I18n.t('camaleon_cms.admin.post_type.published', default: 'Published')
      when "draft", "draft_child"
        color = "warning"
        status = I18n.t('camaleon_cms.admin.table.draft', default: 'Draft')
      when "trash"
        color = "danger"
        status = I18n.t('camaleon_cms.admin.table.trash', default: 'Trash')
      when "pending"
        color = "default"
        status = I18n.t('camaleon_cms.admin.table.pending', default: 'Pending')
      else
        color = "default"
        status = self.status
    end
    "<span class='label label-#{color} label-form'>#{status.titleize}</span>"
  end
  # return the user object who created this post
  def the_author
    object.author.decorate
  end
  # return all categories assigned for this post filtered by permissions + hidden posts + roles + etc...
  def the_categories
    object.categories
  end
  # return all post_tags assigned for this post
  def the_tags
    object.post_tags
  end
  # return all comments for this post filtered by permissions + hidden posts + roles + etc...
  def the_comments
    object.comments.main.approveds.eager_load(:user)
  end
  # check if the post can be visited by current visitor
  # (published status AND no plugin vetoed via the "post_can_visit" hook)
  def can_visit?
    r = {flag: true, post: object}
    h.hooks_run("post_can_visit", r)
    r[:flag] && object.status == 'published'
  end
  # add_post_type: true/false to include post type link
  # children: true/false (show/hide last item link)
  # show_categories: true/false, true: add categories tree to the breadcrumb
  def generate_breadcrumb(show_categories = true, add_post_type = true)
    p_type = object.post_type
    f_cat = object.categories.first
    if f_cat.present? && show_categories
      f_cat.decorate.generate_breadcrumb(add_post_type, true)
    else
      p_type.decorate.generate_breadcrumb(add_post_type, true)
    end
    # hierarchical post types also list all ancestor posts in the breadcrumb
    object.parents.reverse.each{|p| p=p.decorate; h.breadcrumb_add(p.the_title, p.published? ? p.the_url : nil) } if object.post_parent.present? && p_type.manage_hierarchy?
    h.breadcrumb_add(self.the_title)
  end
  # return the post type of this post
  def the_post_type
    object.post_type.decorate
  end
  # looks for the next post item related to parent element based on post_order attribute
  # @param _parent: parent decorated model, like: (PostType *default), Category, PostTag, Site
  # @samples: my_post.the_next_post(), my_post.the_next_post(@category), my_post.the_next_post(current_site)
  def the_next_post(_parent = nil)
    (_parent.presence || the_post_type).the_posts.where("#{CamaleonCms::Post.table_name}.post_order > :position OR (#{CamaleonCms::Post.table_name}.post_order = :position and #{CamaleonCms::Post.table_name}.created_at > :created_at)", {position: object.post_order, created_at: object.created_at}).where.not(id: object.id).take.try(:decorate)
  end
  # looks for the previous post item related to parent element based on post_order attribute
  # @param _parent: parent decorated model, like: (PostType *default), Category, PostTag, Site
  # @samples: my_post.the_prev_post(), my_post.the_prev_post(@category), my_post.the_prev_post(current_site)
  def the_prev_post(_parent = nil)
    (_parent.presence || the_post_type).the_posts.where("#{CamaleonCms::Post.table_name}.post_order < :position OR (#{CamaleonCms::Post.table_name}.post_order = :position and #{CamaleonCms::Post.table_name}.created_at < :created_at)", {position: object.post_order, created_at: object.created_at}).where.not(id: object.id).reorder(post_order: :asc, created_at: :asc).last.try(:decorate)
  end
  # return the title with hierarchy prefixed
  # sample: title parent 1 - title parent 2 -.. -...
  # if add_parent_title: true will add parent title like: —— item 1.1.1 | item 1.1
  def the_hierarchy_title
    return the_title unless object.post_parent.present?
    res = '—' * object.parents.count
    res << " " + the_title
    res << " | #{object.parent.decorate.the_title}" if object.show_title_with_parent
    res.html_safe
  end
  # return all related posts of current post
  # (posts of the same post type sharing at least one category)
  def the_related_posts
    ptype = self.the_post_type
    ptype.the_posts.joins(:categories).where("#{CamaleonCms::TermRelationship.table_name}" => {term_taxonomy_id: the_categories.pluck(:id)}).distinct
  end
  # fix for "Using Draper::Decorator without inferred source class"
  def self.object_class_name
    'CamaleonCms::Post'
  end
end
|
import { useEffect, useState } from "react";
import * as emailValidator from "email-validator";
import { Box, Text, Input, Checkbox, Flex, Button, useToast } from "@chakra-ui/react";
import { _get, _post } from "../../../../common/httpClient";
import { Check } from "../../../../theme/components/icons";
/**
* @description Updates all widgetParameters to updates referrers.
* @returns {JSX.Element}
*/
const ActivateAllCfaFormations = () => {
const [loading, setLoading] = useState(false);
const [submitLoading, setSubmitLoading] = useState(false);
const [isSubmitDisabled, setSubmitDisabled] = useState(true);
const [referrers, setReferrers] = useState();
const [email, setEmail] = useState("");
const [siret, setSiret] = useState("");
const toast = useToast();
/**
* @description Get all parameters.
*/
useEffect(() => {
async function fetchData() {
try {
setLoading(true);
const referrersResponse = await getReferrers();
setReferrers(referrersResponse.map((referrer) => ({ ...referrer, isChecked: false })));
} catch (error) {
toast({
title: "Une erreur est survenue durant la récupération des informations.",
status: "error",
isClosable: true,
position: "bottom-right",
});
} finally {
setLoading(false);
}
}
fetchData();
}, [toast]);
/**
* @description Returns all referrers.
* @returns {Promise<{code: {number}, name: {string}, full_name: {string}, url: {string}[]}>}
*/
const getReferrers = async () => {
const { referrers } = await _get(`/api/constants`);
return referrers;
};
/**
* @description Toggles checkboxes.
* @param {Number} referrerCode
* @param {Boolean} isChecked
* @returns {void}
*/
const toggleReferrer = (referrerCode, isChecked) => {
const referrersUpdated = referrers.map((referrer) => {
if (referrer.code === referrerCode) {
referrer.isChecked = isChecked;
}
return referrer;
});
setReferrers(referrersUpdated);
toggleDisableButton();
};
/**
* @description Disable submit button if no one of checkbox is checked.
* @returns {void}
*/
const toggleDisableButton = () => {
const uncheckedReferrers = referrers.filter((referrer) => !referrer.isChecked);
setSubmitDisabled(
uncheckedReferrers.length === referrers.length || !emailValidator.validate(email) || siret.length !== 14
);
};
/**
* @description Handle "email" changes.
* @param {Event} event
* @returns {void}
*/
const onChangeEmail = (event) => {
setEmail(event.target.value);
toggleDisableButton();
};
/**
* @description Handle "siret" changes.
* @param {Event} event
* @returns {void}
*/
const onChangeSiret = (event) => {
const value = event.target.value;
if (/^\d+$/.test(value)) {
setSiret(value);
}
toggleDisableButton();
};
/**
* @description Submit.
* @returns {Promise<void>}
*/
const submit = async () => {
try {
setSubmitLoading(true);
const { result } = await _post("/api/widget-parameters/import", {
parameters: [
{
siret_formateur: siret,
email: email,
referrers: referrers.filter((referrer) => referrer.isChecked).map((referrer) => referrer.code),
},
],
});
if (result[0].error) {
toast({
title: result[0].error,
status: "error",
isClosable: true,
position: "bottom-right",
});
} else {
setEmail("");
setSiret("");
if (result[0].formations.length > 0) {
toast({
title: "Enregistrement effectué avec succès.",
status: "success",
isClosable: true,
position: "bottom-right",
});
} else {
toast({
title: "Aucune modification n'a été apportée.",
status: "info",
isClosable: true,
position: "bottom-right",
});
}
}
toggleDisableButton();
} catch (error) {
toast({
title: "Une erreur est survenue.",
status: "error",
isClosable: true,
position: "bottom-right",
});
} finally {
setSubmitLoading(false);
}
};
return (
<Box
w={["100%", "100%", "40%", "40%"]}
boxShadow="0 1px 2px 0 rgb(0 0 0 / 5%)"
border="1px solid rgba(0,40,100,.12)"
border-radius="3px"
mt={10}
ml={[0, 0, 5, 5]}
>
<Text fontSize="15px" p={5} borderBottom="1px solid rgba(0,40,100,.12)" border-radius="3px">
Activer toutes les formations d'un CFA
</Text>
<Box active={loading} loader p={5}>
<Text>
Veuillez cocher l'ensemble des plateformes de diffusion sur lesquelles vous souhaitez que les formations du
SIRET formateur fournies soient activés. Ne sont affecté que les formations sans configurations.
<br />
<br />
{referrers &&
referrers.map((referrer) => (
<Flex>
<Checkbox
key={referrer.code}
checked={referrer.checked}
label={referrer.full_name}
icon={<Check w="20px" h="18px" />}
onChange={() => toggleReferrer(referrer.code, !referrer.isChecked)}
>
<Text ml={2}>{referrer.full_name}</Text>
</Checkbox>
</Flex>
))}
<Box mt={5}>
<Text fontWeight="700" textStyle="sm">
Siret formateur
</Text>
<Input
mt={3}
name="siret_formateur"
placeholder="48398606300012"
maxLength={14}
onChange={onChangeSiret}
value={siret}
/>
</Box>
<Box mt={5}>
<Text fontWeight="700" textStyle="sm">
Email de contact
</Text>
<Input mt={3} name="email_contact" placeholder="<EMAIL>" onChange={onChangeEmail} value={email} />
</Box>
</Text>
</Box>
<Flex justifyContent="flex-end" borderTop="1px solid rgba(0,40,100,.12)" border-radius="3px" p={5}>
<Button
bg={isSubmitDisabled === true ? "tomato" : "#467fcf"}
disabled={isSubmitDisabled}
loading={submitLoading}
onClick={submit}
variant="primary"
mr="3rem"
_hover={{ bg: "#3057BE" }}
>
Enregistrer
</Button>
</Flex>
</Box>
);
};
export { ActivateAllCfaFormations };
|
/*
Siesta 4.2.2
Copyright(c) 2009-2016 Bryntum AB
http://bryntum.com/contact
http://bryntum.com/products/siesta/license
*/
Ext.define('Siesta.Recorder.UI.Editor.Array', {
    extend : 'Ext.form.field.Text',
    alias  : 'widget.arrayeditor',

    // Returns the field value; a string containing a "<digits>,<digits>"
    // sequence is converted to an array whose first two entries are parsed
    // as base-10 integers (e.g. "12,34" -> [12, 34]). Any extra
    // comma-separated parts are kept as strings.
    // NOTE(review): the pattern is unanchored and \d* matches the empty
    // string, so inputs like "," or "ab1,2cd" are converted too — preserved
    // as-is to keep the original behavior.
    getValue : function () {
        var raw = this.callParent(arguments);

        if (typeof raw !== 'string' || !/\d*,\d*/.test(raw)) {
            return raw;
        }

        var parts = raw.split(',');

        return [ parseInt(parts[ 0 ], 10), parseInt(parts[ 1 ], 10) ].concat(parts.slice(2));
    }
});
|
<reponame>popa-raluca/egeria-ui
/* SPDX-License-Identifier: Apache-2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
import { mixinBehaviors } from "../../node_modules/@polymer/polymer/lib/legacy/class.js";
import { AppLocalizeBehavior } from "../../node_modules/@polymer/app-localize-behavior/app-localize-behavior.js";
import { PolymerElement, html } from "../../node_modules/@polymer/polymer/polymer-element.js";
import "../../node_modules/@polymer/paper-checkbox/paper-checkbox.js";
import "../../node_modules/@polymer/paper-button/paper-button.js";
import "../../node_modules/@polymer/paper-input/paper-input.js";
import '../shared-styles.js';
import '../token-ajax.js';
/**
*
* NetworkDiagram implements a web component for drawing a network graph of
* interconnected entities and relationships.
*
* This component visualizes a graph of the relationships and entities selected
* by the user.
* It is possible to click on an entity of relationship to 'select' it - meaning
* that the selected entity or relationship becomes the focus for the next
* operation and may be highlighted or displayed by other components. In the
* NetworkDiagram, the focus instance is highlighted.
*
 * This component uses the D3 force layout by Mike Bostock
*
* A node has:
* id - the guid of the entity this node represents
* gen - which gen it is from
* x - horiz coord - starts at center of diagram
* y - vert coord - starts at center of diagram
* label - derived from entity properties, type, etc.
* metadataCollectionName - the name of the metadataCollection this instance originated from
* nodeId() is an accessor function that retrieves id - needed for links
*
* A link has
* id - guid of the relationship this link represents
* source - end1 node
* target - end2 node
* gen - which gen it is from
* metadataCollectionName - the name of the metadataCollection this instance originated from
*/
class NetworkDiagram extends PolymerElement {
static get template() {
    // Shadow-DOM template: mode radio group (Temporal vs Proximal), a
    // "Save Image" button, two hidden divs plus a hidden canvas used by
    // saveImage() for SVG->PNG export, and the '#ins' div that hosts the
    // D3-rendered SVG network diagram.
    return html`
        <style include="rex-styles">
             .node {
               stroke: var(--egeria-primary-color);
               fill: #FFF;
               stroke-width: 2px;
               radius: 10px;
             }
             .link {
               stroke: var(--egeria-primary-color);
               stroke-width: 2px;
             }
        </style>
        <body>
            <div>
                <paper-radio-group  id="mode-selection-group"  selected="{{selectedMode}}">
                    <paper-radio-button name="Temporal" selected>Time-based</paper-radio-button>
                    <paper-radio-button name="Proximal" >Proximity-based</paper-radio-button>
                </paper-radio-group>
                <paper-button
                    class="inline-element"
                    style="padding:10px; text-transform:none;"
                    id = "saveButton"
                    raised
                    on-click="saveImage">
                    Save Image
                </paper-button>
            </div>
            <!--<h2>SVG dataurl:</h2>-->
            <div id="svgdataurl" style="display:none"></div>
            <!--<h2>SVG converted to PNG dataurl via HTML5 CANVAS:</h2>-->
            <div id="pngdataurl" style="display:none"></div>
            <canvas id="canvas" width="1200" height="1200" style="display:none"></canvas>
            <div id="ins" style="position:relative; overflow: auto; background-color:#FFFFFF; padding:0px;">
                <p>
                Placeholder for network diagram...
                </p>
            </div>
        </body>
    `;
}
static get properties() {
    return {
        // The instance-retriever component; injected via setInstanceRetriever()
        // and queried for the latest generation whenever the graph changes.
        instanceRetriever: Object,

        /*
         * selectedMode allows the user to relax the diagram or enforce temporal ordering.
         * It is set by the radio buttons at the top of the diagram.
         */
        selectedMode: {
            type     : String,
            value    : "Temporal",           // possible values: "Temporal" (default) and "Proximal"
            observer : 'selectedModeChanged' // Observer called when this property changes
        },

        // Diagram drawing area width, in pixels (matches the hidden canvas).
        width : {
            type  : Number,
            value : 1200
        },

        // Diagram drawing area height, in pixels (matches the hidden canvas).
        height : {
            type  : Number,
            value : 1200
        },

        // Radius of each entity node circle, in pixels.
        node_radius : {
            type  : Number,
            value : 15
        },

        // Preferred relationship link length used by the d3 link force.
        link_distance : {
            type  : Number,
            value : 200
        },

        /* Nodes represent entities. Each node has:
         * - id - set to the entityGUID because it is unique and used by relationships
         * - x,y positions
         * - label - derived from entity properties
         */
        nodeArray : {
            type  : Array,
            value : []
        },

        /*
         * Links represent relationships. The source and target are the nodes representing the
         * entities connected by the relationship. Source is always 'entityOne', target is
         * always 'entityTwo'
         */
        linkArray : {
            type  : Array,
            value : []
        },

        /*
         * Map of entityGuid -> node for all known nodes.
         * This is needed to be able to efficiently reference a node by GUID, e.g. because
         * it is referenced by a relationship.
         */
        allNodes : {
            type  : Map,
            value : {}
        },

        /*
         * Property for tracking number of gens as graph is extended and reduced.
         * This property is used for layout calculations - for the inter-gen
         * spacing for example
         */
        numberOfGens : {
            type  : Number,
            value : 0
        },

        /*
         * Anchor for the D3 force simulation
         */
        sim : {
            type  : Object,
            value : null
        },

        /*
         * Anchor for top level SVG element
         */
        svg : {
            type  : Object,
            value : []
        },

        /*
         * Properties for selected nodes and links
         */
        node : {
            type  : Object,
            value : []
        },

        link : {
            type  : Object,
            value : []
        },

        /*
         * Properties for handling color themes. These are defined in CSS styles
         * but because nodes and links are dynamically re-colored (on selection for
         * example) these are needed as string variables.
         */
        egeria_primary_color_string : {
            type  : String,
            value : ""
        },

        /*
         * For text labels (which are rendered against white background) the default
         * primary color (aqua) may be too light. Use a darker shade of the primary
         * color for labels. This affects the focus entity or relationship label.
         * If primary is aqua a shade similar to "#50aaba" works well.
         * The color is initialised to empty here and set up as a relative shade
         * when cpt ready.
         */
        egeria_text_color_string : {
            type  : String,
            value : ""
        },

        /*
         * As different home repositories are discovered, assign each a 'color'.
         * For accessibility these are generally not 'colors' but actually a shades of gray.
         */
        repositoryToColor : {
            type  : Map,
            value : {}
        },

        // Reverse lookup of the map above (color string -> repository name).
        colorToRepository : {
            type  : Map,
            value : {}
        },

        // Pool of color strings still available for newly discovered repositories.
        possibleColors : {
            type  : Array,
            value : []
        },
    };
}
/*
* Element is ready
*/
ready() {
    // Call super.ready() to initialise node hash...
    super.ready();

    /* Here's some starter data:
    if (false) {
        this.nodeArray = [ {id:1, x:100, y:100, label:"alice", gen:1},
                           {id:2, x:200, y:100, label:"bob", gen:1},
                           {id:3, x:200, y:200, label:"charlie", gen:1}
                         ];
        // Here's some starter data:
        this.linkArray = [ { id:1, source: 1, target: 2, idx: 0, label:"sibling", gen:1 },
                           { id:2, source: 1, target: 3, idx: 0, label:"manages", gen:1 },
                           { id:3, source: 1, target: 3, idx: 1, label:"knows", gen:1 },
                           { id:4, source: 2, target: 2, idx: 0, label:"knows", gen:1 },
                           { id:5, source: 2, target: 2, idx: 1, label:"feeds", gen:1 },
                           { id:6, source: 2, target: 1, idx: 0, label:"likes", gen:1 }
                         ];
        this.allNodes = {};
        this.nodeArray.forEach(node => {
            this.allNodes[node.id] = node;
        });
    }
    // */

    // Reset the repository->color assignments; earlier entries in the pool are
    // handed out first (shades of gray, then shades of the 0xx cyan family).
    this.repositoryToColor = {};
    this.colorToRepository = {};
    this.possibleColors    = ['#EEE','#CCC','#AAA','#888','#666','#444','#222',
                              '#0EE','#0CC','#0AA','#088','#066','#044','#022' ];

    /*
     * To support dynamic theming of colors we need to detect what the primary color has been
     * set to - this is done via a CSS variable. For most purposes the CSS variable is
     * sufficient - but the network-diagram will dynamically color switch as elements are
     * selected - so we need the primary color accessible at runtime. We also need to
     * set up a slightly dark shade of the primary color for text labels against white
     * background.
     */
    const styles = window.getComputedStyle(this);
    this.egeria_primary_color_string = styles.getPropertyValue('--egeria-primary-color');
    // The CSS value may carry leading tokens; keep only the last space-separated token.
    var splitPrimary = this.egeria_primary_color_string.split(' ');
    var strippedPrimary = splitPrimary[splitPrimary.length-1];
    // alterShade is presumably defined elsewhere in this class (outside this view);
    // -20 darkens the primary color for readable text labels — TODO confirm.
    this.egeria_text_color_string = this.alterShade(strippedPrimary, -20);

    /*
     * Finally, render the diagram...
     */
    this.render();
}
setInstanceRetriever(instanceRetriever) {
this.instanceRetriever = instanceRetriever;
}
/*
 * Observer for the 'selectedMode' property (set by the radio buttons).
 * "Temporal" pins each node's vertical position to its generation via
 * yPlacement(); any other value ("Proximal") relaxes this to a very weak
 * pull towards mid-height. The simulation is then gently re-heated so the
 * layout adjusts to the new forces.
 */
selectedModeChanged(newValue,oldValue) {
    if (this.sim === null) {
        // Can be called during initialisation prior to render and the sim being created. In this case ignore.
        return;
    }
    if (this.selectedMode === "Temporal") {
        // Bind yPlacement so 'this' resolves to the component inside the d3 callback.
        var yPlacement = this.yPlacement.bind(this);
        this.sim.force('vert', d3.forceY().strength(0.1).y(function(d) {return yPlacement(d);}));
    }
    else {
        //this.sim.force('vert', null);  -- this allows the graph to sink down the page (or up)
        this.sim.force('vert', d3.forceY(this.height/2).strength(0.001))
    }
    this.sim.alpha(0.1);
    this.sim.restart();
}
// Input events
/*
* Handle the input event indicating that the focus is now the entity specified.
*/
inEvtFocusEntityChanged(entityGUID) {
    /* The focus is now the entity with the given GUID.
     * Highlighting will be handled asynchronously but it may be possible
     * to upgrade the label. This situation occurs if the entity was
     * originally loaded as a result of discovering a proxy attached to
     * a relationship, then its label will have been set using the
     * restricted set of properties available on the proxy - i.e. just
     * the unique properties. If this is the case then, now that we have the
     * full entity detail (it is the focus entity) we can update the label to
     * the more preferred label derived from the full entity detail.
     */
    var expEntity = this.instanceRetriever.getFocusEntity();
    if (expEntity !== null) {
        var entityDigest = expEntity.entityDigest;
        var label        = entityDigest.label;
        /*
         * Locate the node
         */
        var nodeToUpdate = this.allNodes[entityGUID];
        var idx = this.nodeArray.indexOf(nodeToUpdate);
        if (idx !== -1) {
            /*
             * Update the node in place in the nodeArray array and allNodes map
             * (both entries reference the same object; the double write is benign).
             */
            this.nodeArray[idx].label = label;
            this.allNodes[entityGUID].label = label;
            /*
             * Finally, update the diagram - this will update the nodes and links
             * You could leave this to tick() but that would make tick() less efficient.
             */
            this.update_diagram();
        }
    }
}
/*
* Handle the input event indicating that the focus is now the relationship specified.
*/
inEvtFocusRelationshipChanged(relationshipGUID) {
    // The focus is now the relationship given.
    // There is nothing to do here - highlighting will be handled asynchronously and the
    // display of details is not the responsibility of this diagram component.
    // The parameter is kept so all inEvt* handlers share the same call shape.
}
/*
* Handle the input event indicating that the graph has additional objects.
*/
inEvtGraphExtended() {
    // The graph has been added to. We could convey information on the change in the event
    // but for the time being assuming that diagram will ask i-r for the latest gen and
    // redraw.
    var genInfo        = this.instanceRetriever.getLatestGen();
    this.numberOfGens  = genInfo.numberOfGens;
    var rexTraversal   = genInfo.currentGen;

    // Extract the entity and relationship information from the traversal's digests and add them to the
    // existing arrays of nodes and links...
    // e.g. {id:1, x:100, y:100, label:"alice", gen:1}
    var entityDigests = rexTraversal.entities;
    if (entityDigests != null) {
        for (var e in entityDigests) {
            var entityDigest = entityDigests[e];
            var newNode = {};
            newNode.id                     = entityDigest.entityGUID;
            newNode.label                  = entityDigest.label;
            newNode.gen                    = entityDigest.gen;
            newNode.metadataCollectionName = entityDigest.metadataCollectionName;
            // Add nodes in the current vertical displacement according to gen
            newNode.x = this.width/2;
            newNode.y = this.yPlacement(newNode);
            this.nodeArray.push(newNode);
            this.allNodes[newNode.id] = newNode;
        }
    }

    var relationshipDigests = rexTraversal.relationships;
    if (relationshipDigests != null) {
        for (var r in relationshipDigests) {
            var relationshipDigest = relationshipDigests[r];
            var newLink = {};
            newLink.id     = relationshipDigest.relationshipGUID;
            newLink.label  = relationshipDigest.label;
            // NOTE(review): assumes both end GUIDs were delivered in this or an
            // earlier gen, so the allNodes lookups succeed — TODO confirm.
            newLink.source = this.allNodes[relationshipDigest.end1GUID];
            newLink.target = this.allNodes[relationshipDigest.end2GUID];
            // Attempt to bias nodes so that sources are to the left, targets to the right...
            if (newLink.source.gen === relationshipDigest.gen) {
                newLink.source.x = newLink.source.x - this.width/4;
            }
            if (newLink.target.gen === relationshipDigest.gen) {
                newLink.target.x = newLink.target.x + this.width/4;
            }
            // Look through existing links (linkArray) to find multi-edges and set idx accordingly
            // (idx distinguishes parallel links between the same pair of nodes).
            var count = 0;
            for (var l in this.linkArray) {
                var link = this.linkArray[l];
                if (link.source === newLink.source && link.target === newLink.target) {
                    count = count+1;
                }
            }
            newLink.idx                    = count;
            newLink.gen                    = relationshipDigest.gen;
            newLink.metadataCollectionName = relationshipDigest.metadataCollectionName;
            // Once the force layout has started we need to specify nodes (not array indexes or ids)
            this.linkArray.push(newLink);
        }
    }
    this.update_diagram();
}
/*
* This is a fairly subtle approach - it clears only the objects that have been removed - this should
* provide more seamless update of the diagram as existing SVG nodes and locations are retained.
*/
inEvtGraphBeingReduced() {
// The graph has been modified by removing something.
// Identify just what has been removed and extract it from the visual graph data.
//
var genInfo = this.instanceRetriever.getLatestGen();
this.numberOfGens = genInfo.numberOfGens;
var rexTraversal = genInfo.currentGen;
// Process the traversal - this time removing the entities and relationships from the graph.
var entityDigests = rexTraversal.entities;
if (entityDigests != null) {
for (var e in entityDigests) {
var nodeToRemove = this.allNodes[e];
var idx = this.nodeArray.indexOf(nodeToRemove);
if (idx !== -1) {
// Remove node from nodeArray array
this.nodeArray.splice(idx, 1);
}
// Remove node from allNodes map
this.allNodes[e] = undefined;
}
}
var relationshipDigests = rexTraversal.relationships;
if (relationshipDigests != null) {
for (var r in relationshipDigests) {
var linkFound = false;
for (var idx=0; idx<this.linkArray.length; idx++) {
if (this.linkArray[idx].id === r) {
// Note the idx so the link can be removed
linkFound = true;
break;
}
}
if (linkFound) {
this.linkArray.splice(idx,1);
}
}
}
this.update_diagram();
this.outEvtGraphReduced();
}
clearGraph() {
this.nodeArray = [];
this.linkArray = [];
this.allNodes = {};
this.repositoryToColor = {};
this.colorToRepository = {};
this.update_diagram();
}
// Output events
outEvtChangeFocusEntity(entityGUID) {
var customEvent = new CustomEvent('change-focus-entity', { bubbles: true, composed: true,
detail: {entityGUID: entityGUID, source: "network-diagram"} });
this.dispatchEvent(customEvent);
}
outEvtChangeFocusRelationship(relationshipGUID) {
var customEvent = new CustomEvent('change-focus-relationship', { bubbles: true, composed: true,
detail: {relationshipGUID: relationshipGUID, source: "network-diagram"} });
this.dispatchEvent(customEvent);
}
outEvtGraphReduced() {
var customEvent = new CustomEvent('graph-reduced', { bubbles: true, composed: true,
detail: {source: "network-diagram"} });
this.dispatchEvent(customEvent);
}
/*
* This method initialises the diagram, creates the graph data and renders the graph
*/
render() {
    // Clear any startup text or previous diagram content
    this.clearNetworkDiagram();
    // Rebuild the SVG container and force simulation from scratch, then draw.
    this.initialize_diagram();
    this.update_diagram();
}
/*
* This method clears any introductory text or previous rendering of the diagram
*/
clearNetworkDiagram() {
var myDiv = this.shadowRoot.querySelector('#ins');
// Clear SVG objects from div
d3.select(this.shadowRoot.querySelector('#ins')).selectAll("svg").remove();
// Clear the introductory text...
this.$.ins.innerHTML = "";
}
initialize_diagram() {
var width = this.width;
var height = this.height;
this.nodeArray = [];
this.linkArray = [];
this.allNodes = {};
this.repositoryToColor = {};
this.colorToRepository = {};
this.svg = d3.select(this.shadowRoot.querySelector('#ins'))
.append("svg")
.attr('width', width)
.attr('height', height)
;
// For placement of nodes vertically within diagram use the yPlacement function.
var yPlacement = this.yPlacement.bind(this);
var ls = this.ls.bind(this);
var egeria_primary_color_string = this.egeria_primary_color_string;
this.sim = d3.forceSimulation(this.nodeArray)
.force('horiz', d3.forceX(width/2).strength(0.01))
.force('vert', d3.forceY().strength(0.1).y(function(d) {return yPlacement(d);}))
.velocityDecay(0.6)
.force('repulsion', d3.forceManyBody().strength(-500))
.alphaDecay(.0005)
.velocityDecay(0.6)
.on('tick',this.tick.bind(this))
.force('link', d3.forceLink().links(this.linkArray)
.id(this.nodeId)
.distance(this.link_distance)
.strength(function(d) { return ls(d);})) ;
// Define arrowhead for links
this.svg.append("svg:defs").selectAll("marker")
.data(["end"])
.enter().append("svg:marker")
.attr("id", String)
.attr("viewBox", "0 -5 10 10")
.attr("refX", 25) // The marker is 10 units long (in x dir) and nodes have radius 15.
.attr("refY", 0)
.attr("markerWidth", 4)
.attr("markerHeight", 4)
.style("stroke", egeria_primary_color_string)
.style("fill", egeria_primary_color_string)
.attr("orient", "auto")
.append("svg:path")
.attr("d", "M0,-5L10,0L0,5") ;
}
/*
* Experiment with link-specific link-strength (ls)
*/
ls(d) {
var gen_s = d.source.gen;
var gen_t = d.target.gen;
var gen_diff = gen_t - gen_s;
var mag_diff = Math.max(1,Math.abs(gen_diff));
return 1.0 / mag_diff;
}
yPlacement(d) {
var y = 0;
var perGen = 0;
var ymin = this.height / 6;
var ymax = 5 * (this.height / 6);
/*
* Displayable area is limited to between ymin and ymax (default to one-sixth and five sixths of height).
* diagram knows the number of gens - it retrieves it with the gen info whenever a graph-changed event is handled.
* Starting gen is indexed as 1.
* For gens up to and including 5 use an additional sixth of the height; after that shrink the perGen gaps.
* Vertical placement of a node in gen g is therefore:
* If numGens <= 5, perGen = (ymax - ymin) / 4;
* If numGens > 5. perGen = (ymax - ymin) / (numGens - 1) // because 1 is the starting gen
* In either case y = ymin + (g -1) * perGen
*/
if (this.numberOfGens <= 5) {
perGen = (ymax - ymin) / 4;
}
else {
perGen = (ymax - ymin) / (this.numberOfGens - 1);
}
y = ymin + (d.gen-1) * perGen;
return y;
}
update_diagram() {
    /*
     * In order to get the nodes always in front of the links, the nodes need to be re-added to the svg
     * So whenever links changes we need to re-generate all the nodes. Or you do select and re-gen of all
     * nodes in the tick function. The former is probably more efficient.
     */

    /*
     * Refresh the sim's data
     * Refresh the nodes first - this will cause the positions to refresh. Then refresh the links
     */
    this.sim.nodes(this.nodeArray);
    this.sim.force('link').links(this.linkArray);

    this.updateLinks();
    this.updateNodes();

    // Gently re-heat the simulation so the new/changed elements settle into place.
    this.sim.alpha(0.1);
    this.sim.restart();
}
/*
* Generic accessor function for nodes
*/
nodeId(d) {
    // Nodes are keyed by entity GUID; used as the d3 forceLink id accessor.
    return d.id;
}
/*
* Function to retrieve a specified node
*/
getNode(id) {
return ( this.nodeArray.filter(obj => { return obj.id === id })[0] );
}
/*
* This function saves the current diagram as a PNG image file.
*/
saveImage() {
    var shadowRoot = this.shadowRoot;
    var canvas     = this.$.canvas;
    var context    = canvas.getContext("2d");
    var image      = new Image;

    // Serialise the live SVG (with namespace attributes) to markup, then
    // base64-encode it into a data URL that can be loaded as an image.
    var html = this.svg
        .attr("version", 1.1)
        .attr("xmlns", "http://www.w3.org/2000/svg")
        .node().parentNode.innerHTML;

    var imgsrc = 'data:image/svg+xml;base64,'+ btoa(html);

    /*
     * It is possible to render the SVG data but this is only retained for interest..
     *   var img = '<img src="'+imgsrc+'">';
     *   var svgDataForImage = d3.select(this.shadowRoot.querySelector('#svgdataurl')).html(img);
     *   svgDataForImage.html(img);
     *
     * Instead, this function renders a (hidden) canvas and uses that to generate a PNG. If desired
     * the canvas could be revealed - e.g. for user annotation - at this point by setting display to block
     * i.e. canvas.style.display="block";
     * However, the canvas will display inline and displace other elements - so be careful where you
     * locate it, e.g. at the end of the network-diagram's DOM.
     */
    image.src = imgsrc;
    // Once the SVG image has loaded, rasterise it via the hidden canvas and
    // trigger a download of the resulting PNG through a synthetic <a> click.
    image.onload = function() {
        context.drawImage(image, 0, 0);
        var canvasData = canvas.toDataURL("image/png");
        var pngImage = '<img src="'+canvasData+'">';
        d3.select(shadowRoot.querySelector('#pngdataurl')).html(pngImage);
        var a = document.createElement("a");
        a.download = "EgeriaRepositoryExplorerImageCapture.png";
        a.href = canvasData;
        a.click();
    };
}
/*
* This function performs the continuous update of the diagram to cope with
* updates to the content or layout and drag and drop operations
*/
tick() {
// TODO investigate what can be refined to maximise performance..
// Try to prevent nodes from drifting off the edge of the diagram area - this will not guarantee that labels
// stay on board but it will be close.
// In the cx, cy attribute calculation it allows double node_margin on the right hand side so accommodate a label
// length roughly equal to 3 times the node_radius, to try to keep labels in view.
var node_margin = 2 * this.node_radius; // Allow a safety margin so that edges are less likely to stray
var width = this.width;
var height = this.height;
// Keep nodes in the viewbox, with a safety margin so that (curved) links are unlikely to stray...
this.node.attr('cx',function(d) { return d.x = Math.max(node_margin, Math.min(width - 4 * node_margin, d.x)); });
this.node.attr('cy',function(d) { return d.y = Math.max(node_margin, Math.min(height - node_margin, d.y)); });
this.node.attr('transform', function(d) { return "translate(" + d.x + "," + d.y + ")";});
// Highlight a selected node, if it is the instance that has been selected or just loaded (in which case it is selected)
this.node.selectAll('circle')
.attr("fill", d => this.nodeColor(d) );
this.node.selectAll('text')
.attr("fill", d => this.highlighted(d) ? this.egeria_text_color_string : "#444" );
// For curved paths use the following...
var path_func = this.path_func.bind(this);
this.link.selectAll('path')
.attr('d', function(d) { return path_func(d).path; })
.lower();
this.link.selectAll('text')
.attr("x", function(d) { return d.x = path_func(d).midpoint.x; } )
.attr("y", function(d) { return d.y = path_func(d).midpoint.y; } )
.attr("fill", d => this.highlighted(d) ? this.egeria_text_color_string : "#888" )
.attr("dominant-baseline", function(d) { return (d.source.x > d.target.x) ? "baseline" : "hanging"; } )
.attr("transform" , d => `rotate(${180/Math.PI * Math.atan((d.target.y-d.source.y)/(d.target.x-d.source.x))}, ${d.x}, ${d.y})`)
.attr("dx", d => { ((d.target.y-d.source.y)<0)? 100.0 : -100.0; })
.attr("dy", d => {
((d.target.x-d.source.x)>0)?
20.0 * (d.target.x-d.source.x)/(d.target.y-d.source.y) :
20.0 * (d.source.x-d.target.x)/(d.target.y-d.source.y) ;
});
// For straight lines use the following...
// selectAll('line') and then set...
// .attr('x1', function(d) { return d.source.x; })
// .attr('y1', function(d) { return d.source.y; })
// .attr('x2', function(d) { return d.target.x; })
// .attr('y2', function(d) { return d.target.y; });
// You will need to set the lineLabel x and y attrs to the mid point (s.x+t.x)/2 etc...
}
/*
 * Re-render the node set after a graph change. Nodes are removed and
 * re-added (rather than patched) so they always sit above the links in
 * SVG paint order, and so visual attributes such as colour are refreshed.
 */
updateNodes() {
var node_radius = this.node_radius;
var width = this.width;
var height = this.height;
var svg = this.svg;
// update the visual rendering of the nodes
// Keep nodes 'on top' of links
// Re-drawing nodes allows us to change visual attributes like colour too
svg.selectAll(".node").remove()
this.node = svg.selectAll(".node")
.data(this.nodeArray)
this.node.exit().remove();
// Bind the drag handlers to this component instance before wiring them up.
var dragstarted = this.dragstarted.bind(this);
var dragged = this.dragged.bind(this);
var dragended = this.dragended.bind(this);
var enter_set = this.node.enter()
.append("g")
.attr('class', 'node')
.attr("cursor", "pointer")
.call(d3.drag()
.container(this)
.on("start", dragstarted)
.on("drag", dragged)
.on("end", dragended) );
// Circle glyph for each new node.
enter_set.append('circle')
.attr('r',node_radius)
.attr('stroke',this.egeria_primary_color_string)
.attr('stroke-width','2px')
.attr('fill','white')
.on("click", d => { this.nodeClicked(d.id); }) // The node's id is the entityGUID
;
// Text label placed beside each new node.
enter_set.append('text')
.attr("fill","#444")
.text( function(d) { return d.label; } )
.attr("font-family","sans-serif")
.attr("font-size","12px")
.attr("stroke-width","0")
.attr("dx",20)
.attr("dy",".35em")
.on("click", d => { this.nodeClicked(d.id);}) // The node's id is the entityGUID
;
// Check all labels are up to date -- this does not yet include the enter_set as they have only just been added
// and are known to have correct labels
this.node.select('text')
.text( function(d) { return d.label; } ) ;
this.node = this.node.merge(enter_set);
}
dragstarted(d) {
if (!d3.event.active)
this.sim.alphaTarget(0.3).restart(); // this provides smooth drag behaviour
d.fx = d.x, d.fy = d.y;
}
dragged(d) {
d.fx = d3.event.x, d.fy = d3.event.y;
}
dragended(d) {
if (!d3.event.active) {
this.sim.alphaTarget(0);
}
d.fx = null, d.fy = null;
}
// update the visual rendering of the links
// Re-render the link set after a graph change: join linkArray to the '.link'
// groups, add a white-haloed label and a curved path (with an arrowhead
// marker for non-reflexive links) for each new link.
updateLinks() {
var svg = this.svg;
var path_func = this.path_func.bind(this);
this.link = svg.selectAll(".link")
.data(this.linkArray)
.attr('x1', function(d) { return d.source.x; })
.attr('y1', function(d) { return d.source.y; })
.attr('x2', function(d) { return d.target.x; })
.attr('y2', function(d) { return d.target.y; });
this.link.exit().remove();
var enter_set = this.link.enter()
.append("g")
.attr('class', 'link')
.attr("cursor", "pointer") ;
// Label anchored at the path midpoint; the clone below it acts as a white
// halo so the text stays readable over crossing lines.
enter_set.append('text')
.attr('class', 'edgeLabel')
.attr("fill","#CCC")
.attr("stroke", "none")
.attr("font-family","sans-serif")
.attr("font-size","10px")
.attr("stroke-width", 0)
//.attr("alignment-baseline","middle")
.attr("dominant-baseline", function(d) { return (d.source.x > d.target.x) ? "baseline" : "hanging"; } )
.attr("x", function(d) { return path_func(d).midpoint.x; } )
.attr("y", function(d) { return path_func(d).midpoint.y; } )
// For straight paths you would want the following:
// .attr("x", d => (d.source.x+d.target.x)/2)
// .attr("y", d => (d.source.y+d.target.y)/2)
.attr('text-anchor', 'middle')
.text( function(d) { return d.label; } )
.on("click", d => { this.edgeClicked(d.id); }) // The edge's id is the relationshipGUID
.clone(true)
.lower()
.attr("stroke-linejoin", "round")
.attr("stroke-width", 3)
.attr("stroke", "white") ;
// For straight lines use the following...
// enter_set.append('line')
// .attr('class', 'link')
// .attr('x1', function(d) { return d.source.x; })
// .attr('y1', function(d) { return d.source.y; })
// .attr('x2', function(d) { return d.target.x; })
// .attr('y2', function(d) { return d.target.y; })
// .merge(u);
// For curved paths use the following...
enter_set.append('path')
.attr('class', 'line')
.attr("cursor", "pointer")
.attr('d', function(d) { return path_func(d).path; }) // d => { path_func(d); }) does not work
.attr("fill", "none")
.attr('stroke',this.egeria_primary_color_string)
.attr('stroke-width','2px')
// Only place a marker if the link is not reflexive
.attr("marker-end", function(d) { return (d.source===d.target)?"none":"url(#end)";})
.on("click", d => { this.edgeClicked(d.id); }) // The edge's id is the relationshipGUID
.lower()
;
this.link = this.link.merge(enter_set);
}
/*
* This function path_func computes the path for either a reflexive or non-reflexive link.
* It also calculates the mid-point which can be used as an anchor point for the edge label.
* Returns a map containing:
* {
* path : <path-as-a-string> ,
* midpoint : <midpoint-as-map{x:<x>,y:<y>}
* }
*
*/
path_func(d) {
var returnMap = {};
var midpoint = {};
var path = d3.path();
if ( d.source.id == d.target.id ) {
/*
 * Reflexive link - drawn as a small arc looping off the node.
 */
var cp_offset = 0.15 + (d.idx * 0.1); /* Indexing is for separation of multi-links */
var base_rad = this.link_distance; /* Sets base_rad for innermost link = link_distance for
 * non-reflexive links; this is subjective but results
 * in sensible radii for reflexive links
 */
var link_rad = base_rad * cp_offset;
// An almost-full arc (PI .. 0.999*PI) so the loop visibly leaves and re-enters the node.
path.moveTo(d.source.x,d.source.y);
path.arc(d.source.x+link_rad,d.source.y,link_rad,Math.PI,0.999*Math.PI);
midpoint.x = d.source.x+1.7*link_rad; /* Place the label away from the node and its label... */
midpoint.y = d.source.y-0.7*link_rad;;
}
else {
/*
 * Non-reflexive link - a quadratic curve bowed away from the straight line
 * between the endpoints, so parallel links between the same pair separate.
 */
var dx = d.target.x - d.source.x;
var dy = d.target.y - d.source.y;
var mid = {};
mid.x = d.source.x + dx/2.0;
mid.y = d.source.y + dy/2.0;
// Unit normal to the source->target line; denom is clamped away from zero
// to avoid a division blow-up for (near-)horizontal links.
var denom = dy > 0 ? Math.max(0.001,dy) : Math.min(-0.001,dy);
var gNormal = 1.0 * (dx) / denom;
var unit = {};
unit.x = Math.sign(dy) * Math.sqrt( 1.0 / (1.0 + gNormal**2) );
unit.y = -1.0 * gNormal * unit.x;
var mag = this.link_distance ;
var cp_offset = 0.2 * (d.idx+1); /* Indexing is for separation of multi-links */
// Control point displaced from the midpoint along the normal.
var cp = {};
cp.x = mid.x + cp_offset * mag * unit.x;
cp.y = mid.y + cp_offset * mag * unit.y;
path.moveTo(d.source.x,d.source.y);
path.quadraticCurveTo(cp.x,cp.y,d.target.x,d.target.y);
// Label anchor: halfway between the chord midpoint and the control point.
midpoint.x = mid.x + 0.5 * cp_offset * mag * unit.x;
midpoint.y = mid.y + 0.5 * cp_offset * mag * unit.y;
}
returnMap.path = path.toString();
returnMap.midpoint = midpoint;
return returnMap;
}
/*
* Issue request to change focus when node clicked
*/
nodeClicked(guid) {
// Request a focus change to the entity with this GUID...
this.outEvtChangeFocusEntity(guid);
}
/*
* Issue request to change focus when edge clicked
*/
edgeClicked(guid) {
// Request a focus change to the relationship with this GUID...
this.outEvtChangeFocusRelationship(guid);
}
/*
* This function is called to determine whether the instance is the currently selected item (and therefore
* should be visually highlighted).
*/
highlighted(d) {
var focusGUID = this.instanceRetriever.getFocusGUID();
if (focusGUID !== undefined) {
if (d.id === focusGUID)
return true;
}
return false;
}
/*
* This function is called to determine whether the color of a node - it may be selected or not, and the user may
* have opted to show nodes from different repositories in different colors.
* The selected node always appears in egeria-primary-color.
*/
nodeColor(d) {
/*
* If instance is the current focus, paint it in egeria primary color
*/
if (this.highlighted(d)) {
return this.egeria_primary_color_string;
}
else {
/*
* Look up repository name in repositoryColor map, if not found assign next color.
* This actually assigns gray-shades, starting with the #EEE and darkening by two
* stops for each new repository found - e.g. #AAA -> #888. There are therefore 8 shades
* that can be allocated, by which time we are at 100% black. If this number proves to
* be insufficient, we can shorten the two-stops or assign a single hue, e.g. green.
*/
var colorString = this.repositoryToColor[d.metadataCollectionName];
if (colorString !== undefined) {
return colorString;
}
else {
// Assign first available color
var assigned = false;
for (var col in this.possibleColors) {
var colorString = this.possibleColors[col];
if (this.colorToRepository[colorString] === undefined) {
// Color is available
this.repositoryToColor[d.metadataCollectionName] = colorString;
this.colorToRepository[colorString] = d.metadataCollectionName;
return colorString;
}
}
if (!assigned) {
/*
* Ran out of available colors for repositories!
*
* Assign a color that we know is not in the possible colors to this
* repo and any further ones we discover. Remember this for consistency
* - i.e. this repository will use this color for the remainder of this
* exploration. There may be multiple repositories sharing this same color
* so do not update the colorToRepository map. If a color frees up it will
* be allocated to a new repository, but not to repositories remembered below.
*/
var col = '#000';
this.repositoryToColor[d.metadataCollectionName] = col;
return col;
}
}
}
}
alterShade(color, percent) {
var R = parseInt(color.substring(1,3),16);
var G = parseInt(color.substring(3,5),16);
var B = parseInt(color.substring(5,7),16);
R = parseInt(R * (100 + percent) / 100);
G = parseInt(G * (100 + percent) / 100);
B = parseInt(B * (100 + percent) / 100);
R = (R<255)?R:255;
G = (G<255)?G:255;
B = (B<255)?B:255;
var RR = ((R.toString(16).length==1)?"0"+R.toString(16):R.toString(16));
var GG = ((G.toString(16).length==1)?"0"+G.toString(16):G.toString(16));
var BB = ((B.toString(16).length==1)?"0"+B.toString(16):B.toString(16));
return "#"+RR+GG+BB;
}
}
// Register the custom element so <network-diagram> tags use this class.
window.customElements.define('network-diagram', NetworkDiagram);
#!/bin/sh
# Benchmark sweep: runs 'make ... time' across combinations of compile-time
# flags (MODEL_SIZE, TAU_MAX, KEEP_TD, CALC_TTR, USE_TEMP_FILE, FILE_DUMP),
# all with OpenMP enabled. Commented-out lines are combinations deliberately
# excluded from this run.
# Initial delay - presumably to let the machine settle before timing; confirm.
sleep 30
# --- MODEL_SIZE=0, TAU_MAX=1: all KEEP_TD/CALC_TTR/USE_TEMP_FILE/FILE_DUMP combos ---
#make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=0 time
#make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
# --- MODEL_SIZE=0, TAU_MAX=10: same flag combinations as above ---
#make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=0 time
#make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=10 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
# --- MODEL_SIZE=1, TAU_MAX=10: same flag combinations ---
#make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=0 time
#make OPENMP_ON=Y MODEL_SIZE=0 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=10 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
# --- MODEL_SIZE=1: TAU_MAX swept over 1,2,4,8,16 for selected flag combos ---
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=1 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=2 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=4 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=8 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=16 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=1 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=2 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=4 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=8 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=16 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=1 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=2 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=4 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=8 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=16 KEEP_TD=1 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=1 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=2 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=4 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=8 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=16 KEEP_TD=0 CALC_TTR=0 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=1 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=2 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=4 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=8 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=16 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=1 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=2 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=4 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=8 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=16 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=1 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=2 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=4 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=8 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=1 TAU_MAX=16 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
# --- MODEL_SIZE=2: larger model, selected combinations only ---
make OPENMP_ON=Y MODEL_SIZE=2 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=2 TAU_MAX=10 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=2 TAU_MAX=15 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=0 time
make OPENMP_ON=Y MODEL_SIZE=2 TAU_MAX=15 KEEP_TD=0 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=2 TAU_MAX=15 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=0 FILE_DUMP=1 time
make OPENMP_ON=Y MODEL_SIZE=2 TAU_MAX=15 KEEP_TD=1 CALC_TTR=1 USE_TEMP_FILE=1 FILE_DUMP=1 time
|
// src/instance.ts (repo: ericraj/nextjs-auth0)
import { GetSession, GetAccessToken } from './session';
import { WithApiAuthRequired, WithPageAuthRequired } from './helpers';
import { HandleAuth, HandleCallback, HandleLogin, HandleLogout, HandleProfile } from './handlers';
import { ConfigParameters, CookieStore, SessionCache } from './auth0-session';
/**
* The SDK server instance.
*
* This is created for you when you use the named exports, or you can create your own using {@link InitAuth0}
*
 * See {@link Config} for more info.
*
* @category Server
*/
export interface SignInWithAuth0 {
  /**
   * Session getter.
   */
  getSession: GetSession;
  /**
   * Access Token getter.
   */
  getAccessToken: GetAccessToken;
  /**
   * Login handler which will redirect the user to Auth0.
   */
  handleLogin: HandleLogin;
  /**
   * Callback handler which will complete the transaction and create a local session.
   */
  handleCallback: HandleCallback;
  /**
   * Logout handler which will clear the local session and the Auth0 session.
   */
  handleLogout: HandleLogout;
  /**
   * Profile handler which returns profile information about the user.
   */
  handleProfile: HandleProfile;
  /**
   * Helper that adds auth to an API Route.
   */
  withApiAuthRequired: WithApiAuthRequired;
  /**
   * Helper that adds auth to a Page Route.
   */
  withPageAuthRequired: WithPageAuthRequired;
  /**
   * Create the main handlers for your api routes.
   */
  handleAuth: HandleAuth;
  /**
   * Cookie store backing the session (NOTE(review): appears to be null until
   * the instance is initialised - confirm against the InitAuth0 implementation).
   */
  cookieStore: CookieStore | null;
  /**
   * Session cache (NOTE(review): same lifecycle caveat as cookieStore).
   */
  sessionCache: SessionCache | null;
}
/**
 * Initialise your own instance of the SDK, optionally overriding the
 * environment-driven configuration, and receive a {@link SignInWithAuth0}.
 *
 * See {@link Config}
 *
 * @category Server
 */
export type InitAuth0 = (params?: ConfigParameters) => SignInWithAuth0;
|
const mongoose = require('mongoose');

// NOTE(review): the original connection string was credential-redacted
// ("<PASSWORD>" / "<EMAIL>" placeholders) and did not even parse. Secrets
// should not live in source; read the full URI (or at least the password)
// from the environment instead.
const dbname = "crud";

/**
 * Open the MongoDB connection for the application.
 * Uses MONGODB_URI if set; otherwise builds an Atlas URI from
 * MONGODB_PASSWORD (TODO confirm the cluster hostname - it was redacted
 * out of the original source). Exits the process with status 1 on failure.
 */
const connectDB = async () => {
    try {
        const uri = process.env.MONGODB_URI ||
            `mongodb+srv://MubashirAhmed:${process.env.MONGODB_PASSWORD}@cluster0.mongodb.net/${dbname}?retryWrites=true&w=majority`;
        const con = await mongoose.connect(uri, {
            useNewUrlParser: true,
            useUnifiedTopology: true,
            useFindAndModify: false
        });
        console.log(`MongoDB connected : ${con.connection.host}`);
    } catch (err) {
        console.log(`mongo error: ${err}`);
        process.exit(1);
    }
};

module.exports = connectDB;
// src/org/usfirst/frc/team2706/robot/commands/SpinCubeInIntake.java (gh_stars: 1-10)
package org.usfirst.frc.team2706.robot.commands;
import org.usfirst.frc.team2706.robot.Robot;
import edu.wpi.first.wpilibj.command.Command;
/**
 * Spins the cube in the intake by running the two intake motors in opposite
 * directions at full speed. Never finishes on its own - it runs until the
 * command is interrupted or cancelled.
 */
public class SpinCubeInIntake extends Command {
    /** This command has no natural end condition. */
    @Override
    protected boolean isFinished() {
        return false;
    }

    @Override
    protected void initialize() {
        // Ensure that any intake ratios are ignored and the intakes spin at full speed
        // (magnitude 2.0 is presumably clamped by the speed controller - TODO confirm).
        Robot.intake.leftCube(2.0);
        Robot.intake.rightCube(-2.0);
    }

    /** Stop both intake motors when the command ends (or is interrupted). */
    @Override
    protected void end() {
        Robot.intake.stopMotors();
    }
}
|
# Boot file for the Sinatra application: load gems via Bundler, Sinatra
# itself, then the app's local environment file alongside this one.
require 'bundler/setup'
require 'sinatra'
require File.join(File.dirname(__FILE__), 'environment')

# Development-only conveniences: code reloading and the Pry debugger.
configure(:development) do
  require 'sinatra/reloader'
  require 'pry'
end

helpers do
  # add your helpers here
end

# index page
get '/' do
  erb :index
end
|
// Re-export './html' as the module's main export and attach the './poly'
// variant as a secondary entry point (require('pkg').poly).
module.exports = require('./html')
module.exports.poly = require('./poly')
// (gh_stars: 10-100)
package io.lindstrom.mpd.data;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import java.util.Objects;
/**
 * Descriptive metadata (title, source, copyright) about a presentation.
 * Serialised to/from XML in the MPD namespace via Jackson (NOTE(review):
 * appears to model the DASH MPD ProgramInformation element - confirm
 * against the schema). Instances are immutable; build them with
 * {@link #builder()} or derive from an existing one with {@link #buildUpon()}.
 */
@JsonPropertyOrder({
    "title",
    "source",
    "copyright",
    "any"
})
public class ProgramInformation {
    /** Presentation title, serialised as a <Title> child element. */
    @JacksonXmlProperty(localName = "Title", namespace = MPD.NAMESPACE)
    private final String title;

    /** Information source, serialised as a <Source> child element. */
    @JacksonXmlProperty(localName = "Source", namespace = MPD.NAMESPACE)
    private final String source;

    /** Copyright statement, serialised as a <Copyright> child element. */
    @JacksonXmlProperty(localName = "Copyright", namespace = MPD.NAMESPACE)
    private final String copyright;

    /** Language code, serialised as an XML attribute. */
    @JacksonXmlProperty(isAttribute = true)
    private final String lang;

    /** URL with further information, serialised as an XML attribute. */
    @JacksonXmlProperty(isAttribute = true)
    private final String moreInformationURL;

    /** All-args constructor used by the {@link Builder}. */
    private ProgramInformation(String title, String source, String copyright, String lang, String moreInformationURL) {
        this.title = title;
        this.source = source;
        this.copyright = copyright;
        this.lang = lang;
        this.moreInformationURL = moreInformationURL;
    }

    /** No-arg constructor for Jackson deserialisation only. */
    @SuppressWarnings("unused")
    private ProgramInformation() {
        this.title = null;
        this.source = null;
        this.copyright = null;
        this.lang = null;
        this.moreInformationURL = null;
    }

    public String getTitle() {
        return title;
    }

    public String getSource() {
        return source;
    }

    public String getCopyright() {
        return copyright;
    }

    public String getLang() {
        return lang;
    }

    public String getMoreInformationURL() {
        return moreInformationURL;
    }

    /** Value equality over all five fields (paired with {@link #hashCode()}). */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ProgramInformation that = (ProgramInformation) o;
        return Objects.equals(title, that.title) &&
            Objects.equals(source, that.source) &&
            Objects.equals(copyright, that.copyright) &&
            Objects.equals(lang, that.lang) &&
            Objects.equals(moreInformationURL, that.moreInformationURL);
    }

    @Override
    public int hashCode() {
        return Objects.hash(title, source, copyright, lang, moreInformationURL);
    }

    @Override
    public String toString() {
        return "ProgramInformation{" +
            "title='" + title + '\'' +
            ", source='" + source + '\'' +
            ", copyright='" + copyright + '\'' +
            ", lang='" + lang + '\'' +
            ", moreInformationURL='" + moreInformationURL + '\'' +
            '}';
    }

    /** Start a new {@link Builder} pre-populated with this instance's values. */
    public Builder buildUpon() {
        return new Builder()
            .withTitle(title)
            .withSource(source)
            .withCopyright(copyright)
            .withLang(lang)
            .withMoreInformationURL(moreInformationURL);
    }

    public static Builder builder() {
        return new Builder();
    }

    /** Fluent builder for {@link ProgramInformation}. */
    public static class Builder {
        private String title;
        private String source;
        private String copyright;
        private String lang;
        private String moreInformationURL;

        public Builder withTitle(String title) {
            this.title = title;
            return this;
        }

        public Builder withSource(String source) {
            this.source = source;
            return this;
        }

        public Builder withCopyright(String copyright) {
            this.copyright = copyright;
            return this;
        }

        public Builder withLang(String lang) {
            this.lang = lang;
            return this;
        }

        public Builder withMoreInformationURL(String moreInformationURL) {
            this.moreInformationURL = moreInformationURL;
            return this;
        }

        public ProgramInformation build() {
            return new ProgramInformation(title, source, copyright, lang, moreInformationURL);
        }
    }
}
|
def contains_duplicates(arr):
    """Return True if ``arr`` contains any duplicate values, else False.

    Compares the input length against the size of its set, so elements must
    be hashable. Runs in O(n) time and space. (The original if/else around
    the same comparison was redundant - the comparison itself is the answer.)
    """
    return len(arr) != len(set(arr))
// src/index.js (gh_stars: 1-10)
const crawl = require('./crawl');
const mark = require('./mark');
const sweep = require('./sweep');
const { calcSavings } = require('./util');
/**
 * Recursively tidy up a directory: crawl it, mark a random subset of files
 * as discards, report the (potential) space savings, and - unless this is a
 * dry run - delete the discards.
 *
 * @param {string} path         Root directory to tidy
 * @param {number} probability  Chance (0-1) that any given file is discarded
 * @param {boolean} dry         When true (default), report only; delete nothing
 */
function tidyUp(path, probability = 0.05, dry = true) {
    const files = crawl(path);
    const discards = mark(files, probability);
    const savings = calcSavings(discards);
    // Phrase the report in the conditional when nothing is actually deleted.
    const savePhrase = dry ? 'would have ' : '';
    console.log(
        `The following ${discards.length} files have failed to spark joy and must go`
    );
    console.log(discards.join('\r\n'));
    console.log(`This action ${savePhrase}reclaimed ${savings} bytes over ${discards.length} files!`);
    // Only touch the filesystem on a non-dry run.
    if (!dry) sweep(discards);
}
module.exports = tidyUp;
|
# Build the notebook image on top of the freshly built base (app) image.
# Fail fast: without this, the notebook image would still be built (against
# a stale base) even when the base-image build fails.
set -e

# build the base image
bash scripts/build-scripts/build-app-dockerfile.sh

# build the notebook on top of the base image
docker build -f dockerfiles/dockerfile-notebook -t sandbox-editor-notebook .
/**
* Copyright 2014, Yahoo! Inc.
* Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
'use strict';
module.exports = function (context, payload, done) {
var todo = payload;
todo.pending = true;
context.dispatch('UPDATE_TODO_START', todo);
context.service.update('todo', todo, {}, function (err, theTodo) {
if (err) {
context.dispatch('UPDATE_TODO_FAILURE', todo);
done();
return;
}
context.dispatch('UPDATE_TODO_SUCCESS', theTodo);
done();
});
};
|
package cn.celess.blog.service;
import cn.celess.blog.BaseTest;
import cn.celess.blog.entity.PartnerSite;
import cn.celess.blog.entity.model.PageData;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import java.util.stream.Stream;
import static org.junit.Assert.*;
/**
 * Exercises the 'deleted' filter of PartnerSiteService.partnerSitePages:
 * true returns only deleted sites, false returns only live sites, and
 * null returns a mixture of both.
 */
public class PartnerSiteServiceTest extends BaseTest {
    @Autowired
    PartnerSiteService partnerSiteService;

    @Test
    public void partnerSitePages() {
        // Test the 'deleted' parameter (comment translated from Chinese)
        PageData<PartnerSite> pageData = partnerSiteService.partnerSitePages(1, 10, true);
        assertTrue(pageData.getList().stream().allMatch(PartnerSite::getDelete));
        pageData = partnerSiteService.partnerSitePages(1, 10, false);
        assertTrue(pageData.getList().stream().noneMatch(PartnerSite::getDelete));
        // A null filter should yield both deleted and non-deleted entries.
        pageData = partnerSiteService.partnerSitePages(1, 10, null);
        List<PartnerSite> list = pageData.getList();
        assertNotEquals(0, list.stream().filter(PartnerSite::getDelete).count());
        assertNotEquals(0, list.stream().filter(partnerSite -> !partnerSite.getDelete()).count());
    }
}
# Simple TF-IDF retrieval: find the corpus document most similar to a query.
#
# BUG FIXES relative to the original script:
#  * The query's own TF-IDF vector was never computed - the cosine loop
#    multiplied every document vector by `final_vector`, which after the loop
#    was simply the LAST document's vector, so every document was scored
#    against document 4 rather than against the query.
#  * The corpus was never lower-cased or punctuation-stripped while the query
#    was, so tokens such as 'database?' never matched the query's 'database'.
#  * The cosine was an unnormalised dot product; it is now divided by the
#    vector norms, and the unused Counter import and raw frequency table are
#    gone.
import math
import string


def tokenize(text):
    """Lower-case ``text``, strip punctuation and split into word tokens."""
    cleaned = text.translate(str.maketrans('', '', string.punctuation))
    return cleaned.lower().split()


def build_idf(tokenized_docs):
    """Smoothed inverse document frequency for every term in the corpus.

    Keeps the original formula log(N / (df + 1)); note terms present in all
    documents therefore get a slightly negative weight.
    """
    vocabulary = set(word for doc in tokenized_docs for word in doc)
    idf = {}
    for word in vocabulary:
        doc_count = sum(1 for doc in tokenized_docs if word in doc)
        idf[word] = math.log(len(tokenized_docs) / float(doc_count + 1))
    return idf


def tfidf_vector(query_tokens, doc_tokens, idf):
    """TF-IDF weights of the query's terms within one tokenized document."""
    length = float(len(doc_tokens))
    tf = {word: doc_tokens.count(word) / length for word in doc_tokens}
    return [tf.get(word, 0.0) * idf.get(word, 0.0) for word in query_tokens]


def best_match_index(query, corpus):
    """Index of the corpus document with the highest cosine similarity to ``query``."""
    query_tokens = tokenize(query)
    tokenized_docs = [tokenize(doc) for doc in corpus]
    idf = build_idf(tokenized_docs)
    # The query is scored against itself to obtain its TF-IDF representation.
    query_vec = tfidf_vector(query_tokens, query_tokens, idf)
    q_norm = math.sqrt(sum(q * q for q in query_vec))
    scores = []
    for doc_tokens in tokenized_docs:
        doc_vec = tfidf_vector(query_tokens, doc_tokens, idf)
        d_norm = math.sqrt(sum(d * d for d in doc_vec))
        dot = sum(q * d for q, d in zip(query_vec, doc_vec))
        scores.append(dot / (q_norm * d_norm) if q_norm and d_norm else 0.0)
    return scores.index(max(scores))


# Define Question and Corpus
query = "How can I search a term in a database?"
corpus = ['How to search a term in a database?',
          'Looking for a database search engine',
          'How to find term quickly in database?',
          'What are the most effective database search engine algorithms?']

# Retrieving the output
print('The best matching phrase for this query is: {}'.format(corpus[best_match_index(query, corpus)]))
# Output: The best matching phrase for this query is: How to search a term in a database?
#!/usr/bin/env bash
# Copyright 2019 Antrea Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Abort on any error, including failures inside pipelines.
set -eo pipefail

# Print all arguments to stderr (the manifest itself goes to stdout).
function echoerr {
    >&2 echo "$@"
}

_usage="Usage: $0 [--mode (dev|release)] [--encap-mode] [--kind] [--ipsec] [--no-proxy] [--no-np] [--k8s-1.15] [--keep] [--tun (geneve|vxlan|gre|stt)] [--verbose-log] [--help|-h]
Generate a YAML manifest for Antrea using Kustomize and print it to stdout.
--mode (dev|release) Choose the configuration variant that you need (default is 'dev')
--encap-mode Traffic encapsulation mode. (default is 'encap')
--kind Generate a manifest appropriate for running Antrea in a Kind cluster
--cloud Generate a manifest appropriate for running Antrea in Public Cloud
--ipsec Generate a manifest with IPSec encryption of tunnel traffic enabled
--all-features Generate a manifest with all alpha features enabled
--no-proxy Generate a manifest with Antrea proxy disabled
--no-legacy-crd Generate a manifest without legacy CRD mirroring support enabled
--endpointslice Generate a manifest with EndpointSlice support enabled
--no-np Generate a manifest with Antrea-native policies disabled
--k8s-1.15 Generates a manifest which supports Kubernetes 1.15.
--keep Debug flag which will preserve the generated kustomization.yml
--tun (geneve|vxlan|gre|stt) Choose encap tunnel type from geneve, gre, stt and vxlan (default is geneve)
--verbose-log Generate a manifest with increased log-level (level 4) for Antrea agent and controller.
This option will work only with 'dev' mode.
--on-delete Generate a manifest with antrea-agent's update strategy set to OnDelete.
This option will work only for Kind clusters (when using '--kind').
--coverage Generates a manifest which supports measuring code coverage of Antrea binaries.
--simulator Generates a manifest with antrea-agent simulator included
--custom-adm-controller Generates a manifest with custom Antrea admission controller to validate/mutate resources.
--hw-offload Generates a manifest with hw-offload enabled in the antrea-ovs container.
--help, -h Print this message and exit
In 'release' mode, environment variables IMG_NAME and IMG_TAG must be set.
In 'dev' mode, environment variable IMG_NAME can be set to use a custom image.
This tool uses kustomize (https://github.com/kubernetes-sigs/kustomize) to generate manifests for
Antrea. You can set the KUSTOMIZE environment variable to the path of the kustomize binary you want
us to use. Otherwise we will download the appropriate version of the kustomize binary and use
it (this is the recommended approach since different versions of kustomize may create different
output YAMLs)."

# Print the full usage text to stderr.
function print_usage {
    echoerr "$_usage"
}

# Short pointer to --help; used after reporting a bad flag combination.
function print_help {
    echoerr "Try '$0 --help' for more information."
}
# Default option values; each command-line flag below overrides one of these.
MODE="dev"
KIND=false
IPSEC=false
ALLFEATURES=false
PROXY=true
LEGACY_CRD=true
ENDPOINTSLICE=false
NP=true
KEEP=false
ENCAP_MODE=""
CLOUD=""
TUN_TYPE="geneve"
VERBOSE_LOG=false
ON_DELETE=false
COVERAGE=false
K8S_115=false
SIMULATOR=false
CUSTOM_ADM_CONTROLLER=false
HW_OFFLOAD=false
# Parse command-line flags; value-taking flags consume two positional args.
while [[ $# -gt 0 ]]
do
key="$1"

case $key in
    --mode)
    MODE="$2"
    shift 2
    ;;
    --encap-mode)
    ENCAP_MODE="$2"
    shift 2
    ;;
    --cloud)
    CLOUD="$2"
    shift 2
    ;;
    --kind)
    KIND=true
    shift
    ;;
    --ipsec)
    IPSEC=true
    shift
    ;;
    --all-features)
    ALLFEATURES=true
    shift
    ;;
    --no-proxy)
    PROXY=false
    shift
    ;;
    --no-legacy-crd)
    LEGACY_CRD=false
    shift
    ;;
    --endpointslice)
    # EndpointSlice support implies AntreaProxy.
    PROXY=true
    ENDPOINTSLICE=true
    shift
    ;;
    --no-np)
    NP=false
    shift
    ;;
    --k8s-1.15)
    K8S_115=true
    shift
    ;;
    --keep)
    KEEP=true
    shift
    ;;
    --tun)
    TUN_TYPE="$2"
    shift 2
    ;;
    --verbose-log)
    VERBOSE_LOG=true
    shift
    ;;
    --on-delete)
    ON_DELETE=true
    shift
    ;;
    --coverage)
    COVERAGE=true
    shift
    ;;
    --simulator)
    SIMULATOR=true
    shift
    ;;
    --custom-adm-controller)
    CUSTOM_ADM_CONTROLLER=true
    shift
    ;;
    --hw-offload)
    HW_OFFLOAD=true
    shift
    ;;
    -h|--help)
    print_usage
    exit 0
    ;;
    *) # unknown option
    echoerr "Unknown option $1"
    exit 1
    ;;
esac
done
# --- Flag validation: fail fast on inconsistent combinations. ---
if [ "$PROXY" == false ] && [ "$ENDPOINTSLICE" == true ]; then
    echoerr "--endpointslice requires AntreaProxy and therefore cannot be used with --no-proxy"
    print_help
    exit 1
fi

if [ "$MODE" != "dev" ] && [ "$MODE" != "release" ]; then
    echoerr "--mode must be one of 'dev' or 'release'"
    print_help
    exit 1
fi

if [ "$TUN_TYPE" != "geneve" ] && [ "$TUN_TYPE" != "vxlan" ] && [ "$TUN_TYPE" != "gre" ] && [ "$TUN_TYPE" != "stt" ]; then
    echoerr "--tun must be one of 'geneve', 'gre', 'stt' or 'vxlan'"
    print_help
    exit 1
fi

# Release builds must name the image explicitly via IMG_NAME / IMG_TAG.
if [ "$MODE" == "release" ] && [ -z "$IMG_NAME" ]; then
    echoerr "In 'release' mode, environment variable IMG_NAME must be set"
    print_help
    exit 1
fi

if [ "$MODE" == "release" ] && [ -z "$IMG_TAG" ]; then
    echoerr "In 'release' mode, environment variable IMG_TAG must be set"
    print_help
    exit 1
fi

if [ "$MODE" == "release" ] && $VERBOSE_LOG; then
    echoerr "--verbose-log works only with 'dev' mode"
    print_help
    exit 1
fi

if ! $KIND && $ON_DELETE; then
    echoerr "--on-delete works only for Kind clusters"
    print_help
    exit 1
fi

if [[ "$ENCAP_MODE" != "" ]] && [[ "$ENCAP_MODE" != "encap" ]] && ! $PROXY; then
    echoerr "Cannot use '--no-proxy' when '--encap-mode' is not 'encap'"
    exit 1
fi

# Locate (or download via verify-kustomize.sh) a usable kustomize binary.
THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

source $THIS_DIR/verify-kustomize.sh

if [ -z "$KUSTOMIZE" ]; then
    KUSTOMIZE="$(verify_kustomize)"
elif ! $KUSTOMIZE version > /dev/null 2>&1; then
    echoerr "$KUSTOMIZE does not appear to be a valid kustomize binary"
    print_help
    exit 1
fi
KUSTOMIZATION_DIR=$THIS_DIR/../build/yamls
# All overlays are generated in a throwaway directory (removed unless --keep).
TMP_DIR=$(mktemp -d $KUSTOMIZATION_DIR/overlays.XXXXXXXX)

pushd $TMP_DIR > /dev/null

BASE=../../base

# do all ConfigMap edits
mkdir configMap && cd configMap
# user is not expected to make changes directly to antrea-agent.conf and antrea-controller.conf,
# but instead to the generated YAML manifest, so our regexs need not be too robust.
cp $KUSTOMIZATION_DIR/base/conf/antrea-agent.conf antrea-agent.conf
cp $KUSTOMIZATION_DIR/base/conf/antrea-controller.conf antrea-controller.conf

# Each sed below uncomments (and sets) one option in the copied conf files.
if $KIND; then
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*ovsDatapathType[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ovsDatapathType: netdev/" antrea-agent.conf
fi

if $IPSEC; then
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*enableIPSecTunnel[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/enableIPSecTunnel: true/" antrea-agent.conf
    # change the tunnel type to GRE which works better with IPSec encryption than other types.
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*tunnelType[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/tunnelType: gre/" antrea-agent.conf
fi

if $ALLFEATURES; then
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*AntreaPolicy[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ AntreaPolicy: true/" antrea-agent.conf
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*FlowExporter[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ FlowExporter: true/" antrea-agent.conf
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*NetworkPolicyStats[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ NetworkPolicyStats: true/" antrea-agent.conf
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*EndpointSlice[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ EndpointSlice: true/" antrea-agent.conf
fi

if ! $PROXY; then
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*AntreaProxy[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ AntreaProxy: false/" antrea-agent.conf
fi

if ! $LEGACY_CRD; then
    sed -i.bak -E "s/^#legacyCRDMirroring[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/legacyCRDMirroring: false/" antrea-controller.conf
fi

if $ENDPOINTSLICE; then
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*EndpointSlice[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ EndpointSlice: true/" antrea-agent.conf
fi

if ! $NP; then
    # Antrea-native policies are gated in both the controller and the agent.
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*AntreaPolicy[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ AntreaPolicy: false/" antrea-controller.conf
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*AntreaPolicy[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/ AntreaPolicy: false/" antrea-agent.conf
fi

if [[ $ENCAP_MODE != "" ]]; then
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*trafficEncapMode[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/trafficEncapMode: $ENCAP_MODE/" antrea-agent.conf
fi

if [[ $TUN_TYPE != "geneve" ]]; then
    sed -i.bak -E "s/^[[:space:]]*#[[:space:]]*tunnelType[[:space:]]*:[[:space:]]*[a-z]+[[:space:]]*$/tunnelType: $TUN_TYPE/" antrea-agent.conf
fi

if [[ $CLOUD != "" ]]; then
    # Delete the serviceCIDR parameter for the cloud (AKS, EKS, GKE) deployment yamls, because
    # AntreaProxy is always enabled for the cloud managed K8s clusters, and the serviceCIDR
    # parameter is not needed in this case.
    # delete all blank lines after "#serviceCIDR:"
    sed -i.bak '/#serviceCIDR:/,/^$/{/^$/d;}' antrea-agent.conf
    # delete lines from "# ClusterIP CIDR range for Services" to "#serviceCIDR:"
    sed -i.bak '/# ClusterIP CIDR range for Services/,/#serviceCIDR:/d' antrea-agent.conf
fi

# unfortunately 'kustomize edit add configmap' does not support specifying 'merge' as the behavior,
# which is why we use a template kustomization file.
sed -e "s/<AGENT_CONF_FILE>/antrea-agent.conf/; s/<CONTROLLER_CONF_FILE>/antrea-controller.conf/" ../../patches/kustomization.configMap.tpl.yml > kustomization.yml
$KUSTOMIZE edit add base $BASE
BASE=../configMap
cd ..
# --- Optional kustomize overlays; each one chains onto the previous BASE. ---
if $IPSEC; then
    mkdir ipsec && cd ipsec
    # we copy the patch files to avoid having to use the '--load-restrictor' flag when calling
    # 'kustomize build'. See https://github.com/kubernetes-sigs/kustomize/blob/master/docs/FAQ.md#security-file-foo-is-not-in-or-below-bar
    cp ../../patches/ipsec/*.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    # create a K8s Secret to save the PSK (pre-shared key) for IKE authentication.
    $KUSTOMIZE edit add resource ipsecSecret.yml
    # add a container to the Agent DaemonSet that runs the OVS IPSec and strongSwan daemons.
    $KUSTOMIZE edit add patch --path ipsecContainer.yml
    # add an environment variable to the antrea-agent container for passing the PSK to Agent.
    $KUSTOMIZE edit add patch --path pskEnv.yml
    BASE=../ipsec
    cd ..
fi

if $COVERAGE; then
    mkdir coverage && cd coverage
    cp ../../patches/coverage/*.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    # this runs antrea-controller via the instrumented binary.
    $KUSTOMIZE edit add patch --path startControllerCov.yml
    # this runs antrea-agent via the instrumented binary.
    $KUSTOMIZE edit add patch --path startAgentCov.yml
    BASE=../coverage
    cd ..
fi

if [[ $ENCAP_MODE == "networkPolicyOnly" ]] ; then
    mkdir chaining && cd chaining
    cp ../../patches/chaining/*.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    # change initContainer script and add antrea to CNI chain
    $KUSTOMIZE edit add patch --path installCni.yml
    BASE=../chaining
    cd ..
fi

if [[ $CLOUD == "GKE" ]]; then
    mkdir gke && cd gke
    cp ../../patches/gke/*.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    $KUSTOMIZE edit add patch --path cniPath.yml
    BASE=../gke
    cd ..
fi

if [[ $CLOUD == "EKS" ]]; then
    mkdir eks && cd eks
    cp ../../patches/eks/*.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    $KUSTOMIZE edit add patch --path eksEnv.yml
    BASE=../eks
    cd ..
fi

if $SIMULATOR; then
    mkdir simulator && cd simulator
    cp ../../patches/simulator/*.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    # keep real agents and simulators on disjoint node sets.
    $KUSTOMIZE edit add patch --path agentNodeAffinity.yml
    $KUSTOMIZE edit add patch --path controllerNodeAffinity.yml
    $KUSTOMIZE edit add resource antrea-agent-simulator.yml
    BASE=../simulator
    cd ..
fi

if $KIND; then
    mkdir kind && cd kind
    cp ../../patches/kind/*.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    # add tun device to antrea OVS container
    $KUSTOMIZE edit add patch --path tunDevice.yml
    # antrea-ovs should use start_ovs_netdev instead of start_ovs to ensure that the br-phy bridge
    # is created.
    $KUSTOMIZE edit add patch --path startOvs.yml
    # this adds a small delay before running the antrea-agent process, to give the antrea-ovs
    # container enough time to set up the br-phy bridge.
    # workaround for https://github.com/antrea-io/antrea/issues/801
    if $COVERAGE; then
        cp ../../patches/coverage/startAgentCov.yml .
        $KUSTOMIZE edit add patch --path startAgentCov.yml
    else
        $KUSTOMIZE edit add patch --path startAgent.yml
    fi
    # change initContainer script and remove SYS_MODULE capability
    $KUSTOMIZE edit add patch --path installCni.yml
    if $ON_DELETE; then
        $KUSTOMIZE edit add patch --path onDeleteUpdateStrategy.yml
    fi
    BASE=../kind
    cd ..
fi

if $CUSTOM_ADM_CONTROLLER; then
    mkdir admissioncontroller && cd admissioncontroller
    cp ../../patches/admissioncontroller/*.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    $KUSTOMIZE edit add resource webhook.yml
    BASE=../admissioncontroller
    cd ..
fi

if $HW_OFFLOAD; then
    mkdir hwoffload && cd hwoffload
    cp ../../patches/hwoffload/hwOffload.yml .
    touch kustomization.yml
    $KUSTOMIZE edit add base $BASE
    $KUSTOMIZE edit add patch --path hwOffload.yml
    BASE=../hwoffload
    cd ..
fi
# Final overlay: the dev/release variant, then build and print the manifest.
mkdir $MODE && cd $MODE
touch kustomization.yml
$KUSTOMIZE edit add base $BASE
# ../../patches/$MODE may be empty so we use find and not simply cp
find ../../patches/$MODE -name \*.yml -exec cp {} . \;

if [ "$MODE" == "dev" ]; then
    if [[ -z "$IMG_NAME" ]]; then
        if $COVERAGE; then
            IMG_NAME="antrea/antrea-ubuntu-coverage:latest"
        else
            IMG_NAME="projects.registry.vmware.com/antrea/antrea-ubuntu:latest"
        fi
    fi
    $KUSTOMIZE edit set image antrea=$IMG_NAME
    $KUSTOMIZE edit add patch --path agentImagePullPolicy.yml
    $KUSTOMIZE edit add patch --path controllerImagePullPolicy.yml
    if $VERBOSE_LOG; then
        $KUSTOMIZE edit add patch --path agentVerboseLog.yml
        $KUSTOMIZE edit add patch --path controllerVerboseLog.yml
    fi
    # only required because there is no good way at the moment to update the imagePullPolicy for all
    # containers. See https://github.com/kubernetes-sigs/kustomize/issues/1493
    if $IPSEC; then
        $KUSTOMIZE edit add patch --path agentIpsecImagePullPolicy.yml
    fi
fi

if [ "$MODE" == "release" ]; then
    $KUSTOMIZE edit set image antrea=$IMG_NAME:$IMG_TAG
fi

# If --k8s-1.15 flag is set, then we have to patch certain resources.
# For instance, the apiVersion/schema of CustomResourceDefinition and admission webhooks
if $K8S_115; then
    cp -a ../../patches/legacy ./
    # Patch for controller.yml
    $KUSTOMIZE edit add patch --path legacy/controller.json --kind MutatingWebhookConfiguration
    $KUSTOMIZE edit add patch --path legacy/controller.json --kind ValidatingWebhookConfiguration
    # Patch for all CustomResourceDefinition
    $KUSTOMIZE edit add patch --path legacy/crdVersion.json --kind CustomResourceDefinition
    $KUSTOMIZE edit add patch --path legacy/crdClusterInformation.json --kind CustomResourceDefinition --name antreaagentinfos.clusterinformation.antrea.tanzu.vmware.com
    $KUSTOMIZE edit add patch --path legacy/crdClusterInformation.json --kind CustomResourceDefinition --name antreacontrollerinfos.clusterinformation.antrea.tanzu.vmware.com
    $KUSTOMIZE edit add patch --path legacy/crdTraceflow.json --kind CustomResourceDefinition --name traceflows.ops.antrea.tanzu.vmware.com
    $KUSTOMIZE edit add patch --path legacy/crdTier.json --kind CustomResourceDefinition --name tiers.security.antrea.tanzu.vmware.com
    $KUSTOMIZE edit add patch --path legacy/crdClusterNetworkPolicy.json --kind CustomResourceDefinition --name clusternetworkpolicies.security.antrea.tanzu.vmware.com
    $KUSTOMIZE edit add patch --path legacy/crdNetworkPolicy.json --kind CustomResourceDefinition --name networkpolicies.security.antrea.tanzu.vmware.com
    $KUSTOMIZE edit add patch --path legacy/crdExternalEntity.json --kind CustomResourceDefinition --name externalentities.core.antrea.tanzu.vmware.com
fi

# Emit the final manifest to stdout.
$KUSTOMIZE build

popd > /dev/null

if $KEEP; then
    echoerr "Kustomization file is at $TMP_DIR/$MODE/kustomization.yml"
else
    rm -rf $TMP_DIR
fi
|
#!/bin/sh
# Load the PKCS#11 environment (defines JAVA, CP, JAVA_LIBRARY_PATH, ...).
. setPkcs11Environment.sh
# Run the demo, forwarding all script arguments.
# BUG FIX: quote "$@" so arguments containing whitespace keep their
# boundaries, and quote the classpath/library path to survive spaces.
$JAVA -classpath "$CP" -Djava.library.path="$JAVA_LIBRARY_PATH" demo.smime.pkcs11.ImplicitSignedMailDemo "$@"
|
// A cached page, identified by a content hash used to detect staleness.
class Page {
    // Content hash; compared against the current hash to decide whether
    // the cached copy is out of date.
    let hash: String
    // Other properties and methods related to the page

    init(hash: String) {
        self.hash = hash
    }
}
/// Process-wide in-memory cache mapping cache keys to pages.
class PageCache {
    /// Shared singleton instance.
    static let shared = PageCache()

    /// Backing storage. Access is not synchronized.
    private var cache: [String: Page] = [:]

    /// Returns the cached page for `cacheKey`, or nil when no entry exists.
    func getEntry(cacheKey: String) -> Page? {
        guard let page = cache[cacheKey] else {
            return nil
        }
        return page
    }

    /// Stores `page` under `cacheKey`, replacing any previous entry.
    func cachePage(page: Page, for cacheKey: String) {
        cache.updateValue(page, forKey: cacheKey)
    }
}
// A resumable application module. Currently a stub.
class Module {
    // Other properties and methods related to the module

    // Called when the application resumes; the body is not implemented yet.
    func didResume() {
        // Logic to handle module resume
    }
}
// Usage
let pageJson = "examplePage"
let currentPageHash = "currentHashValue"

// Refresh the page when the cached copy's hash differs from the current one.
// NOTE(review): `didUpdatePage(page:)` is not defined in this file -- confirm
// it is provided elsewhere, otherwise this snippet does not compile.
if let cachedPage = PageCache.shared.getEntry(cacheKey: pageJson) {
    if cachedPage.hash != currentPageHash {
        didUpdatePage(page: cachedPage)
    }
}

// Resume modules
let modules: [Module] = [Module(), Module(), Module()] // Example modules
for module in modules {
    module.didResume()
}

// Check for page updates
func checkPageLoad() {
    // Logic to check for page updates
}
checkPageLoad() |
<filename>testing/endpoint/http/append.go<gh_stars>100-1000
package http
import (
"github.com/viant/endly"
"github.com/viant/toolbox/url"
)
// append registers additional recorded HTTP round trips with the mock server
// listening on req.Port. If req.BaseDirectory is set, it is first expanded
// against the context state and normalized to a local filesystem path. The
// trips are built using the server's rotation flag and index keys,
// initialized from the server's request/response templates, then appended.
func (s *service) append(context *endly.Context, req *AppendRequest) (*AppendResponse, error) {
	state := context.State()
	if req.BaseDirectory != "" {
		// Expand $variables from the workflow state, then resolve to a path.
		req.BaseDirectory = url.NewResource(state.ExpandAsText(req.BaseDirectory)).ParsedURL.Path
	}

	server := s.servers[req.Port]
	trips := req.AsHTTPServerTrips(server.rotate, server.indexKeys)
	if err := trips.Init(server.requestTemplate, server.responseTemplate); err != nil {
		return nil, err
	}
	server.Append(trips)

	return &AppendResponse{Trips: trips.Trips}, nil
}
|
#!/bin/bash
set -eu

# Pull IAM credentials and the RDS password out of the terraform state.
# NOTE(review): this scrapes `terraform state show` text output, which is
# fragile across terraform versions -- verify against the pinned CLI version.
aws_access_key_id=`terraform state show -state terraform-state/terraform.tfstate aws_iam_access_key.pcf_iam_user_access_key | grep ^id | awk '{print $3}'`
aws_secret_access_key=`terraform state show -state terraform-state/terraform.tfstate aws_iam_access_key.pcf_iam_user_access_key | grep ^secret | awk '{print $3}'`
rds_password=`terraform state show -state terraform-state/terraform.tfstate aws_db_instance.pcf_rds | grep ^password | awk '{print $3}'`

# Export every terraform output as a shell variable: the command substitution
# expands to `export NAME=value`, which the shell then executes.
# NOTE(review): values containing whitespace would break this word-splitting.
while read -r line
do
    `echo "$line" | awk '{print "export "$1"="$3}'`
done < <(terraform output -state terraform-state/terraform.tfstate)

# `read -r -d ''` below returns non-zero at end of heredoc, which would abort
# the script under `set -e`; disable it while the configs are read.
set +e
# IaaS settings for the BOSH director (ssh_private_key is injected later via jq).
read -r -d '' iaas_configuration <<EOF
{
  "access_key_id": "$aws_access_key_id",
  "secret_access_key": "$aws_secret_access_key",
  "vpc_id": "$vpc_id",
  "security_group": "$pcf_security_group",
  "key_pair_name": "$AWS_KEY_NAME",
  "ssh_private_key": "",
  "region": "$AWS_REGION",
  "encrypted": false
}
EOF

# Director settings: external MySQL database plus S3 blobstore.
read -r -d '' director_configuration <<EOF
{
  "ntp_servers_string": "0.amazon.pool.ntp.org,1.amazon.pool.ntp.org,2.amazon.pool.ntp.org,3.amazon.pool.ntp.org",
  "resurrector_enabled": true,
  "max_threads": 30,
  "database_type": "external",
  "external_database_options": {
    "host": "$db_host",
    "port": 3306,
    "user": "$db_username",
    "password": "$rds_password",
    "database": "$db_database"
  },
  "blobstore_type": "s3",
  "s3_blobstore_options": {
    "endpoint": "$S3_ENDPOINT",
    "bucket_name": "$s3_pcf_bosh",
    "access_key": "$aws_access_key_id",
    "secret_key": "$aws_secret_access_key",
    "signature_version": "4",
    "region": "$AWS_REGION"
  }
}
EOF

# VM size for the director itself.
resource_configuration=$(cat <<-EOF
{
  "director": {
    "instance_type": {
      "id": "m4.large"
    }
  }
}
EOF
)

# The three availability zones exported from terraform output.
read -r -d '' az_configuration <<EOF
{
  "availability_zones": [
    { "name": "$az1" },
    { "name": "$az2" },
    { "name": "$az3" }
  ]
}
EOF

# Subnet layout: deployment (ERT), infrastructure, services and
# dynamic-services networks across the three AZs.
read -r -d '' networks_configuration <<EOF
{
  "icmp_checks_enabled": false,
  "networks": [
    {
      "name": "deployment",
      "service_network": false,
      "subnets": [
        {
          "iaas_identifier": "$ert_subnet_id_az1",
          "cidr": "$ert_subnet_cidr_az1",
          "reserved_ip_ranges": "$ert_subnet_reserved_ranges_z1",
          "dns": "$dns",
          "gateway": "$ert_subnet_gw_az1",
          "availability_zones": ["$az1"]
        },
        {
          "iaas_identifier": "$ert_subnet_id_az2",
          "cidr": "$ert_subnet_cidr_az2",
          "reserved_ip_ranges": "$ert_subnet_reserved_ranges_z2",
          "dns": "$dns",
          "gateway": "$ert_subnet_gw_az2",
          "availability_zones": ["$az2"]
        },
        {
          "iaas_identifier": "$ert_subnet_id_az3",
          "cidr": "$ert_subnet_cidr_az3",
          "reserved_ip_ranges": "$ert_subnet_reserved_ranges_z3",
          "dns": "$dns",
          "gateway": "$ert_subnet_gw_az3",
          "availability_zones": ["$az3"]
        }
      ]
    },
    {
      "name": "infrastructure",
      "service_network": false,
      "subnets": [
        {
          "iaas_identifier": "$infra_subnet_id_az1",
          "cidr": "$infra_subnet_cidr_az1",
          "reserved_ip_ranges": "$infra_subnet_reserved_ranges_z1",
          "dns": "$dns",
          "gateway": "$infra_subnet_gw_az1",
          "availability_zones": ["$az1"]
        }
      ]
    },
    {
      "name": "services",
      "service_network": false,
      "subnets": [
        {
          "iaas_identifier": "$services_subnet_id_az1",
          "cidr": "$services_subnet_cidr_az1",
          "reserved_ip_ranges": "$services_subnet_reserved_ranges_z1",
          "dns": "$dns",
          "gateway": "$services_subnet_gw_az1",
          "availability_zones": ["$az1"]
        },
        {
          "iaas_identifier": "$services_subnet_id_az2",
          "cidr": "$services_subnet_cidr_az2",
          "reserved_ip_ranges": "$services_subnet_reserved_ranges_z2",
          "dns": "$dns",
          "gateway": "$services_subnet_gw_az2",
          "availability_zones": ["$az2"]
        },
        {
          "iaas_identifier": "$services_subnet_id_az3",
          "cidr": "$services_subnet_cidr_az3",
          "reserved_ip_ranges": "$services_subnet_reserved_ranges_z3",
          "dns": "$dns",
          "gateway": "$services_subnet_gw_az3",
          "availability_zones": ["$az3"]
        }
      ]
    },
    {
      "name": "dynamic-services",
      "service_network": true,
      "subnets": [
        {
          "iaas_identifier": "$dynamic_services_subnet_id_az1",
          "cidr": "$dynamic_services_subnet_cidr_az1",
          "reserved_ip_ranges": "$dynamic_services_subnet_reserved_ranges_z1",
          "dns": "$dns",
          "gateway": "$dynamic_services_subnet_gw_az1",
          "availability_zones": ["$az1"]
        },
        {
          "iaas_identifier": "$dynamic_services_subnet_id_az2",
          "cidr": "$dynamic_services_subnet_cidr_az2",
          "reserved_ip_ranges": "$dynamic_services_subnet_reserved_ranges_z2",
          "dns": "$dns",
          "gateway": "$dynamic_services_subnet_gw_az2",
          "availability_zones": ["$az2"]
        },
        {
          "iaas_identifier": "$dynamic_services_subnet_id_az3",
          "cidr": "$dynamic_services_subnet_cidr_az3",
          "reserved_ip_ranges": "$dynamic_services_subnet_reserved_ranges_z3",
          "dns": "$dns",
          "gateway": "$dynamic_services_subnet_gw_az3",
          "availability_zones": ["$az3"]
        }
      ]
    }
  ]
}
EOF

# The director's singleton jobs go into AZ1 on the infrastructure network.
read -r -d '' network_assignment <<EOF
{
  "singleton_availability_zone": "$az1",
  "network": "infrastructure"
}
EOF

# Security settings; trusted_certificates is injected later via jq.
read -r -d '' security_configuration <<EOF
{
  "trusted_certificates": "",
  "vm_password_type": "generate"
}
EOF
set -e  # heredoc reads are done; restore abort-on-error

# Inject multi-line secrets with jq so newlines and quotes are escaped
# correctly inside the JSON payloads.
iaas_configuration=$(
    echo "$iaas_configuration" |
    jq --arg ssh_private_key "$PEM" '.ssh_private_key = $ssh_private_key'
)
security_configuration=$(
    echo "$security_configuration" |
    jq --arg certs "$TRUSTED_CERTIFICATES" '.trusted_certificates = $certs'
)

jsons=(
    "$iaas_configuration"
    "$director_configuration"
    "$az_configuration"
    "$networks_configuration"
    "$network_assignment"
    "$security_configuration"
    "$resource_configuration"
)
for json in "${jsons[@]}"; do
    # ensure JSON is valid (jq exits non-zero on a parse error, aborting here)
    echo "$json" | jq '.'
done

# Push the assembled configuration to the Ops Manager director.
om-linux \
    --target https://${OPSMAN_DOMAIN_OR_IP_ADDRESS} \
    --skip-ssl-validation \
    --client-id "${OPSMAN_CLIENT_ID}" \
    --client-secret "${OPSMAN_CLIENT_SECRET}" \
    --username "$OPSMAN_USER" \
    --password "$OPSMAN_PASSWORD" \
    configure-bosh \
    --iaas-configuration "$iaas_configuration" \
    --director-configuration "$director_configuration" \
    --az-configuration "$az_configuration" \
    --networks-configuration "$networks_configuration" \
    --network-assignment "$network_assignment" \
    --security-configuration "$security_configuration" \
    --resource-configuration "$resource_configuration"
|
using System;
namespace FinancialApplication.Models
{
/// <summary>Supported bank account categories.</summary>
public enum TypeOfAccount
{
    Checking,
    Savings
}

/// <summary>
/// A simple bank account with a unique id, a fixed type and a mutable
/// balance. Deposits and withdrawals validate their amounts; withdrawals
/// additionally guard against overdrawing the current balance.
/// </summary>
public class Account
{
    /// <summary>Unique identifier assigned at construction.</summary>
    public Guid Id { get; }

    /// <summary>The account category; immutable after construction.</summary>
    public TypeOfAccount Type { get; }

    /// <summary>Current balance; mutated only via Deposit/Withdraw.</summary>
    public decimal Balance { get; private set; }

    public Account(TypeOfAccount type, decimal initialBalance)
    {
        Id = Guid.NewGuid();
        Type = type;
        Balance = initialBalance;
    }

    /// <summary>Adds <paramref name="amount"/> to the balance.</summary>
    /// <exception cref="ArgumentException">If the amount is not positive.</exception>
    public void Deposit(decimal amount)
    {
        if (amount <= 0)
            throw new ArgumentException("Deposit amount must be greater than zero");

        Balance += amount;
    }

    /// <summary>Removes <paramref name="amount"/> from the balance.</summary>
    /// <exception cref="ArgumentException">If the amount is not positive.</exception>
    /// <exception cref="InvalidOperationException">If the amount exceeds the balance.</exception>
    public void Withdraw(decimal amount)
    {
        // Validation order matters: a non-positive amount is reported before
        // an insufficient-funds condition, matching the original behavior.
        if (amount <= 0)
            throw new ArgumentException("Withdrawal amount must be greater than zero");
        if (amount > Balance)
            throw new InvalidOperationException("Insufficient funds for withdrawal");

        Balance -= amount;
    }
}
} |
#!/usr/bin/env bash
# Run a source file symbolically with KLEE inside the official Docker image,
# then pretty-print the ktest file of the first assertion failure.
# Usage: <script> [file.cc]        analyze file (creates a demo if missing)
#        <script> replay <ktest>   print an existing ktest file
set -euo pipefail

KLEE_IMAGE="klee/klee"
docker pull "${KLEE_IMAGE}"
# Every tool below runs in a throwaway container with the CWD mounted in,
# so input/output paths must be absolute ($(pwd)-prefixed).
RUN="docker run -v $(pwd):$(pwd) --rm ${KLEE_IMAGE}"
CLANG_BIN=/tmp/llvm-60-install_O_D_A/bin
CLANGXX="${RUN} ${CLANG_BIN}/clang++"
CLANG="${RUN} ${CLANG_BIN}/clang"
KLEE_INCLUDE=/home/klee/klee_src/include
KLEE_BUILD=/home/klee/klee_build
KLEE_BIN="${KLEE_BUILD}/bin"
KLEE="${RUN} ${KLEE_BIN}/klee"
KTEST="${RUN} ${KLEE_BIN}/ktest-tool"

if [[ "${1}" != "replay" ]]
then
    # Analysis mode: first argument (default main.cc) is the file to analyze.
    FILE=${1-main.cc}
    if [ ! -f "${FILE}" ]
    then
        # Seed a demo program: KLEE should find a float f with f * f == 2.0f.
        cat << EOF > "${FILE}"
#include <klee/klee.h>
#include <assert.h>
constexpr size_t N = 2;
int main(int argc, char *argv[])
{
float f;
klee_make_symbolic(&f, sizeof f, "f");
klee_assert(f * f != 2.0f);
return 0;
}
EOF
    fi
    # Compile to LLVM bitcode with the KLEE headers on the include path.
    ${CLANG} -g -emit-llvm -DKLEE -I"${KLEE_INCLUDE}" -c "$(pwd)/${FILE}" -o "$(pwd)/${FILE%%.*}.bc"
    #${KLEE} --libc=uclibc --posix-runtime "$(pwd)/${FILE%%.*}.bc" -sym-files 1 64
    time ${KLEE} "$(pwd)/${FILE%%.*}.bc"
    #time ${KLEE} -emit-all-errors "$(pwd)/${FILE%%.*}.bc"
    # Show the concrete input for the first assertion failure KLEE found.
    ERROR_FILE="$(basename "$(find -L klee-last -name '*.assert.err')")"
    ${KTEST} "$(pwd)/klee-last/${ERROR_FILE%%.*}.ktest"
else
    # Replay mode: second argument is an existing .ktest file to print.
    KTEST_FILE="${2}"
    ${KTEST} "$(pwd)/${KTEST_FILE}"
fi
|
<filename>docs/components/components/SpinnerDocumentation.js
import React from 'react';
import { Button, Card, Spinner } from 'belle';
import Code from '../Code';
import { propertyNameStyle, propertyDescriptionStyle } from '../../style';

// Source snippets rendered verbatim (via <Code />) next to the live examples.
const basicCodeExample = `<!-- basic spinner example -->
<Spinner />`;

const buttonCodeExample = `<!-- loading button example -->
<Button primary disabled>
Saving <Spinner characterStyle={{ fontSize: 18, color: '#fff' }} />
</Button>
<Button disabled style={{ marginLeft: 10 }}>
Saving <Spinner characterStyle={{ fontSize: 18, color: '#C5C4C4' }} />
</Button>`;

const cardCodeExample = `<!-- loading example -->
<Card style={{ fontSize: 20,
color: '#666',
textAlign: 'center',
borderTop: '1px solid #f2f2f2',
}}>
Loading <Spinner characterStyle={{ fontSize: 20 }} />
</Card>`;

// Documentation page for the Spinner component: a live example, the property
// table, and further usage examples paired with their source snippets.
const SpinnerDocumentation = () => (
  <div>
    <h2 style={{ marginTop: 0, marginBottom: 40 }}>Spinner</h2>
    <Spinner />
    <Code value={ basicCodeExample } style={{ marginTop: 40 }} />
    <h3>Properties</h3>
    <table><tbody>
      <tr>
        <td style={ propertyNameStyle }>
          characterStyle
        </td>
      </tr>
      <tr>
        <td style={ propertyDescriptionStyle }>
          <p>
            <i>Object</i>
            <br />
            optional
          </p>
          <p>
            The property can be used to specify styling for the spans wrapping
            the dots. Behaves like Reacts built-in style property.
          </p>
        </td>
      </tr>
    </tbody></table>
    <p>
      Any property valid for a HTML div like
      <span style={{ color: 'grey' }}> style, id, className, …</span>
    </p>
    <h3>More Examples</h3>
    <h4>Button while loading</h4>
    <Button primary disabled>
      Saving <Spinner characterStyle={{ fontSize: 18, color: '#fff' }} />
    </Button>
    <Button disabled style={{ marginLeft: 10 }}>
      Saving <Spinner characterStyle={{ fontSize: 18, color: '#C5C4C4' }} />
    </Button>
    <Code value={ buttonCodeExample } style={{ marginTop: 40 }} />
    <h4>Card with a loading indicator</h4>
    <Card style={{ fontSize: 20,
                   color: '#666',
                   textAlign: 'center',
                   borderTop: '1px solid #f2f2f2',
                 }}
    >
      Loading <Spinner characterStyle={{ fontSize: 20 }} />
    </Card>
    <Code value={ cardCodeExample } style={{ marginTop: 40 }} />
  </div>
);

export default SpinnerDocumentation;
|
<reponame>Joed11/FEC-CatWalk<gh_stars>0
import React from 'react';
import ReactDOM from 'react-dom';
import { render, cleanup } from '@testing-library/react';
import "@testing-library/jest-dom/extend-expect";
import AddAnswerModal from './AddAnswerModal';
import sampleResponse from '../../../sampleData/QandAsampleResponse';

// Smoke tests for the AddAnswerModal component.
describe('Add Answer Modal Component', () => {
  // Rebuilt before every test so cases cannot leak state into each other.
  var props = {}
  beforeEach(() => {
    props = {
      product: sampleResponse.primaryProduct
    }
  })
  // Unmount anything rendered by @testing-library between tests.
  afterEach(cleanup);

  it('renders without crashing', () => {
    const div = document.createElement("div");
    ReactDOM.render(<AddAnswerModal product={props.product} />, div);
  });
});
/**
 * Length of the longest common subsequence of two strings.
 *
 * Classic O(a.length * b.length) dynamic programming: dp[i][j] is the LCS
 * length of the prefixes a[0..i) and b[0..j).
 *
 * @param {string} a first string
 * @param {string} b second string
 * @returns {number} length of the longest common subsequence
 */
function LCS(a, b) {
    const aLen = a.length;
    const bLen = b.length;
    // (aLen + 1) x (bLen + 1) table, pre-filled with 0 so the empty-prefix
    // row/column need no special-case branch (original filled with null and
    // branched on i == 0 || j == 0).
    const dp = Array.from({ length: aLen + 1 }, () => new Array(bLen + 1).fill(0));

    for (let i = 1; i <= aLen; i++) {
        for (let j = 1; j <= bLen; j++) {
            if (a.charAt(i - 1) === b.charAt(j - 1)) {
                // Matching characters extend the LCS of both shorter prefixes.
                dp[i][j] = dp[i - 1][j - 1] + 1;
            } else {
                // Otherwise take the better of dropping one character.
                dp[i][j] = Math.max(dp[i - 1][j], dp[i][j - 1]);
            }
        }
    }
    return dp[aLen][bLen];
}
/**
 * Breaks a URL string into host, path and query-string parameters.
 *
 * @param {string} urlString an absolute URL
 * @returns {{hostname: string, path: string, parameters: Array<[string, string]>}}
 *          `parameters` is an array of [name, value] pairs in query order.
 */
const parseUrl = (urlString) => {
    const url = new URL(urlString);
    return {
        hostname: url.hostname,
        path: url.pathname,
        parameters: [...url.searchParams]
    };
};

// BUG FIX: the demo URL literal was encoding-corrupted ("¶m2" -- the
// mojibake form of "&param2"); the expected-output comment below confirms
// param2 was intended.
const result = parseUrl('http://www.example.com/path/to/page?param1=value1&param2=value2');
console.log(result);
// hostname : 'www.example.com',
// path: '/path/to/page',
// parameters: [['param1','value1'],['param2','value2']]
// Prints the matrix elements in fully reversed order -- last row first and,
// within each row, last column first -- one row per line, each value
// followed by a single space.
void printMatrixInReverseOrder(int[,] matrix)
{
    int lastRow = matrix.GetLength(0) - 1;
    int lastCol = matrix.GetLength(1) - 1;

    for (int row = lastRow; row >= 0; row--)
    {
        for (int col = lastCol; col >= 0; col--)
        {
            Console.Write(matrix[row, col] + " ");
        }
        Console.WriteLine();
    }
}
# https://code.visualstudio.com/docs/setup/network#_proxy-server-support
# Launch Chromium with all traffic routed through a local SOCKS5 proxy.
chromium-browser --proxy-server="socks5://127.0.0.1:1080"
# vs code proxy: VS Code is Chromium-based and accepts the same flag.
code --proxy-server="socks5://127.0.0.1:1080"
|
package org.ednovo.gooru.mail.serializer;
import org.springframework.web.servlet.ModelAndView;
public class MailSerializer {
public static ModelAndView toModelAndView(Object object) {
ModelAndView jsonmodel = new ModelAndView("rest/model");
jsonmodel.addObject("model", object);
return jsonmodel;
}
} |
<reponame>khepherer/java_lleida_01_06_2017
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.mycompany.concweb.dao;
import com.mycompany.concweb.modelo.Vehiculo;
import javax.annotation.PostConstruct;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
/**
*
* @author iconotc
*/
/**
 * Stateless session bean providing JPA access to {@code Vehiculo} entities.
 */
@Stateless
public class VehiculoDao {

    /** Container-managed entity manager for the default persistence unit. */
    @PersistenceContext
    private EntityManager em;

    /**
     * Seeds the database with nine demo vehicles ("Ford", prices 1000..9000).
     * NOTE(review): {@code @PostConstruct} on a {@code @Stateless} bean runs
     * once per pooled instance, so these rows may be inserted multiple times;
     * confirm that is intended or add a duplicate guard.
     */
    @PostConstruct
    private void cargarBd() {
        for (int i = 1; i < 10; i++) {
            em.persist(new Vehiculo("Ford", i * 1000));
        }
    }

    /**
     * Counts vehicles matching both the brand and the price of the example.
     *
     * @param v example vehicle carrying the brand (marca) and price (precio)
     * @return the number of matching rows
     */
    public Integer contarVehiculosMarcaPrecio(Vehiculo v) {
        final String query = "select v from Vehiculo v where v.marca=:marca and v.precio=:precio";
        Query q = em.createQuery(query);
        q.setParameter("marca", v.getMarca());
        q.setParameter("precio", v.getPrecio());
        return q.getResultList().size();
    }
}
|
#!/bin/sh
# Copyright (c) 2014-present, Facebook, Inc. All rights reserved.
#
# You are hereby granted a non-exclusive, worldwide, royalty-free license to use,
# copy, modify, and distribute this software in source code or binary form for use
# in connection with the web services and APIs provided by Facebook.
#
# As with any software that integrates with the Facebook platform, your use of
# this software is subject to the Facebook Developer Principles and Policies
# [http://developers.facebook.com/policy/]. This copyright notice shall be
# included in all copies or substantial portions of the software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

# Packages the built SDK frameworks, samples, scripts, and docsets into a
# single distributable FacebookSDKs-<version>.zip.
. "${FB_SDK_SCRIPT:-$(dirname $0)}/common.sh"

# option s to skip build
SKIPBUILD=""
while getopts "s" OPTNAME
do
  case "$OPTNAME" in
    s)
      SKIPBUILD="YES"
      ;;
  esac
done

FB_SDK_ZIP=$FB_SDK_BUILD/FacebookSDKs-${FB_SDK_VERSION_SHORT}.zip
FB_SDK_BUILD_PACKAGE=$FB_SDK_BUILD/package
FB_SDK_BUILD_PACKAGE_SAMPLES=$FB_SDK_BUILD_PACKAGE/Samples
FB_SDK_BUILD_PACKAGE_SCRIPTS=$FB_SDK_BUILD/Scripts
FB_SDK_BUILD_PACKAGE_DOCSETS_FOLDER=$FB_SDK_BUILD_PACKAGE/DocSets/

# -----------------------------------------------------------------------------
# Build package directory structure
#
progress_message "Building package directory structure."
\rm -rf "$FB_SDK_BUILD_PACKAGE" "$FB_SDK_BUILD_PACKAGE_SCRIPTS"
mkdir -p "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not create directory $FB_SDK_BUILD_PACKAGE"
mkdir -p "$FB_SDK_BUILD_PACKAGE_SAMPLES"
mkdir -p "$FB_SDK_BUILD_PACKAGE_SCRIPTS"
mkdir -p "$FB_SDK_BUILD_PACKAGE_DOCSETS_FOLDER"

# -----------------------------------------------------------------------------
# Call out to build prerequisites.
#
if is_outermost_build; then
  if [ -z "$SKIPBUILD" ]; then
    . "$FB_SDK_SCRIPT/build_framework.sh" -c Release
  fi
fi
echo Building Distribution.

# -----------------------------------------------------------------------------
# Install required dependencies
#
(gem list naturally -i > /dev/null) || die "Run 'gem install naturally' first"
(gem list xcpretty -i > /dev/null) || die "Run 'gem install xcpretty' first"
(gem list rake -i > /dev/null) || die "Run 'gem install rake' first"

# -----------------------------------------------------------------------------
# Copy over stuff
#
\cp -R "$FB_SDK_BUILD"/FBSDKCoreKit.framework "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy FBSDKCoreKit.framework"
\cp -R "$FB_SDK_BUILD"/FBSDKLoginKit.framework "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy FBSDKLoginKit.framework"
\cp -R "$FB_SDK_BUILD"/FBSDKShareKit.framework "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy FBSDKShareKit.framework"
\cp -R "$FB_SDK_BUILD"/Bolts.framework "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy Bolts.framework"
# (fixed: the original had a stray `$` before the quoted root path)
\cp -R "$FB_SDK_ROOT"/FacebookSDKStrings.bundle "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy FacebookSDKStrings.bundle"
for SAMPLE in Configurations Iconicus RPSSample Scrumptious ShareIt SwitchUserSample; do
  \rsync -avmc --exclude "${SAMPLE}.xcworkspace" "$FB_SDK_SAMPLES/$SAMPLE" "$FB_SDK_BUILD_PACKAGE_SAMPLES" \
    || die "Could not copy $SAMPLE"
done
\cp "$FB_SDK_ROOT/README.txt" "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy README"
\cp "$FB_SDK_ROOT/LICENSE" "$FB_SDK_BUILD_PACKAGE"/LICENSE.txt \
  || die "Could not copy LICENSE"

# -----------------------------------------------------------------------------
# Fixup projects to point to the SDK framework
#
for fname in $(find "$FB_SDK_BUILD_PACKAGE_SAMPLES" -name "Project.xcconfig" -print); do \
  sed 's|\(\.\.\(/\.\.\)*\)/build|\1|g;s|\.\.\(/\.\.\)*/Bolts-IOS/build/ios||g' \
    ${fname} > ${fname}.tmpfile && mv ${fname}.tmpfile ${fname}; \
done
for fname in $(find "$FB_SDK_BUILD_PACKAGE_SAMPLES" -name "project.pbxproj" -print); do \
  sed 's|\(path[[:space:]]*=[[:space:]]*\.\.\(/\.\.\)*\)/build|\1|g' \
    ${fname} > ${fname}.tmpfile && mv ${fname}.tmpfile ${fname}; \
done

# -----------------------------------------------------------------------------
# Build AKFAccountKit framework
#
if [ -z "$SKIPBUILD" ]; then
  ("$XCTOOL" -project "${FB_SDK_ROOT}"/AccountKit/AccountKit.xcodeproj -scheme "AccountKit-Universal" -configuration Release clean build) || die "Failed to build account kit"
fi
\cp -R "$FB_SDK_BUILD"/AccountKit.framework "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy AccountKit.framework"
\cp -R "$FB_SDK_BUILD"/AccountKitStrings.bundle "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy AccountKitStrings.bundle"

# -----------------------------------------------------------------------------
# Build FBNotifications framework
#
\rake -f "$FB_SDK_ROOT/FBNotifications/iOS/Rakefile" package:frameworks || die "Could not build FBNotifications.framework"
\unzip "$FB_SDK_ROOT/FBNotifications/iOS/build/release/FBNotifications-iOS.zip" -d "$FB_SDK_BUILD"
\cp -R "$FB_SDK_BUILD"/FBNotifications.framework "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy FBNotifications.framework"

# -----------------------------------------------------------------------------
# Build FBAudienceNetwork framework
#
if [ -z "$SKIPBUILD" ]; then
  ("$XCTOOL" -workspace "${FB_SDK_ROOT}"/ads/src/FBAudienceNetwork.xcworkspace -scheme "BuildAll-Universal" -configuration Release clean build) || die "Failed to build FBAudienceNetwork"
fi
FBAN_SAMPLES=$FB_SDK_BUILD_PACKAGE/Samples/FBAudienceNetwork
\cp -R "$FB_SDK_ROOT"/ads/build/FBAudienceNetwork.framework "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy FBAudienceNetwork.framework"
\mkdir -p "$FB_SDK_BUILD_PACKAGE/Samples/FBAudienceNetwork"
\cp -R "$FB_SDK_ROOT"/ads/samples/ "$FBAN_SAMPLES" \
  || die "Could not copy FBAudienceNetwork samples"
# Fix up samples.
# NOTE(review): the original had two near-identical loops here, the second
# iterating over an undefined $FBADSDK_SAMPLES (so it matched nothing).
# They are merged into one pass over $FBAN_SAMPLES that applies the full
# expression list from the second loop.
for fname in $(find "$FBAN_SAMPLES" -name "project.pbxproj" -print); do \
  sed "s|../../build|../../../|g;s|../../../../ads/build|../../../|g;" \
    ${fname} > ${fname}.tmpfile && mv ${fname}.tmpfile ${fname}; \
done

# -----------------------------------------------------------------------------
# Build Messenger Kit
#
if [ -z "$SKIPBUILD" ]; then
  ("$XCTOOL" -project "${FB_SDK_ROOT}"/FBSDKMessengerShareKit/FBSDKMessengerShareKit.xcodeproj -scheme "FBSDKMessengerShareKit-universal" -configuration Release clean build) || die "Failed to build messenger kit"
fi
\cp -R "$FB_SDK_BUILD"/FBSDKMessengerShareKit.framework "$FB_SDK_BUILD_PACKAGE" \
  || die "Could not copy FBSDKMessengerShareKit.framework"

# -----------------------------------------------------------------------------
# Build docs
#
if [ -z "$SKIPBUILD" ]; then
  . "$FB_SDK_SCRIPT/build_documentation.sh"
fi
\ls -d "$FB_SDK_BUILD"/*.docset | xargs -I {} cp -R {} "$FB_SDK_BUILD_PACKAGE_DOCSETS_FOLDER" \
  || die "Could not copy docsets"
\cp "$FB_SDK_SCRIPT/install_docsets.sh" "$FB_SDK_BUILD_PACKAGE_DOCSETS_FOLDER" \
  || die "Could not copy install_docset"

# -----------------------------------------------------------------------------
# Build .zip from package directory
#
progress_message "Building .zip from package directory."
(
  cd "$FB_SDK_BUILD"
  ditto -ck --sequesterRsrc "$FB_SDK_BUILD_PACKAGE" "$FB_SDK_ZIP"
)

# -----------------------------------------------------------------------------
# Done
#
progress_message "Successfully built SDK zip: $FB_SDK_ZIP"
common_success
|
import React from 'react';
class App extends React.Component {
state = {
tasks: [],
newTask: ''
}
handleChange = e => {
this.setState({
newTask: e.target.value
})
}
handleSubmit = e => {
e.preventDefault();
this.setState({
tasks: [...this.state.tasks, this.state.newTask],
newTask: ''
})
}
handleEdit = (e, i) => {
const newTasks = [...this.state.tasks];
newTasks[i] = e.target.value;
this.setState({ tasks: newTasks });
}
handleDelete = i => {
const newTasks = [...this.state.tasks];
newTasks.splice(i, 1);
this.setState({ tasks: newTasks });
}
render() {
return (
<div>
<h1>ToDo List</h1>
<form onSubmit={this.handleSubmit}>
<input
type="text"
placeholder="Add new task"
value={this.state.newTask}
onChange={this.handleChange}
/>
<button type="submit">Add</button>
</form>
<ul>
{this.state.tasks.map((task, i) => (
<li key={i}>
<input
type="text"
value={task}
onChange={e => this.handleEdit(e, i)}
/>
<button onClick={() => this.handleDelete(i)}>X</button>
</li>
))}
</ul>
</div>
);
}
} |
import * as React from 'react';
import {Notification} from 'spaceweb/notification';
export default () => (
<Notification>Default info notification</Notification>
);
|
#!/usr/bin/env bash
koopa_locate_xargs() {
    # Resolve the 'xargs' binary: prefer the findutils opt install,
    # but fall back to whatever is already on PATH.
    local -a args
    args=(
        '--allow-in-path'
        '--app-name=xargs'
        '--opt-name=findutils'
    )
    koopa_locate_app "${args[@]}"
}
|
#!/bin/sh
# Recolour xshado.vim in a single in-place pass: one sed invocation with
# the substitutions applied in the original order, instead of rewriting
# the file sixteen times.
#
# NOTE(review): several source colours appear twice (#6272a4, #8897F4,
# #bd93f9, #ff79c6, #8be9fd, #bfaae3). The first substitution for each
# already consumes every occurrence, so the later duplicate never
# matches — kept verbatim to preserve behaviour; confirm which of the
# two replacement palettes was actually intended.
sed -i \
    -e 's/#6272a4/#222222/g' \
    -e 's/#F37F97/#ff3236/g' \
    -e 's/#5ADECD/#2ccc14/g' \
    -e 's/#8897F4/#ffc84c/g' \
    -e 's/#bd93f9/#6b83f9/g' \
    -e 's/#ff79c6/#A828FF/g' \
    -e 's/#8be9fd/#26B3FF/g' \
    -e 's/#bfaae3/#eeeeee/g' \
    -e 's/#6272a4/#666666/g' \
    -e 's/#FF4971/#ff3d33/g' \
    -e 's/#18E3C8/#98FF88/g' \
    -e 's/#8897F4/#e0d40f/g' \
    -e 's/#bd93f9/#5c5cff/g' \
    -e 's/#ff79c6/#d505ff/g' \
    -e 's/#8be9fd/#00e1f5/g' \
    -e 's/#bfaae3/#ffffff/g' \
    xshado.vim
|
<gh_stars>1-10
/**
* @author <NAME> <<EMAIL>>
* @copyright 2020 Photon Storm Ltd.
* @license {@link https://opensource.org/licenses/MIT|MIT License}
*/
/**
* Keyboard Codes.
*
* @namespace Phaser.Input.Keyboard.KeyCodes
* @memberof Phaser.Input.Keyboard
* @since 3.0.0
*/
/**
 * Keyboard Codes.
 *
 * Maps key names to the legacy `keyCode` values reported by keyboard
 * events. The table is assembled below: the regular families (top-row
 * digits, numpad digits, letters, function keys) are generated from
 * their contiguous keyCode ranges, while irregular keys are listed
 * explicitly. Insertion order matches the original hand-written table.
 *
 * @namespace Phaser.Input.Keyboard.KeyCodes
 * @memberof Phaser.Input.Keyboard
 * @since 3.0.0
 */
var KeyCodes = {
    BACKSPACE: 8,
    TAB: 9,
    ENTER: 13,
    SHIFT: 16,
    CTRL: 17,
    ALT: 18,
    PAUSE: 19,
    CAPS_LOCK: 20,
    ESC: 27,
    SPACE: 32,
    PAGE_UP: 33,
    PAGE_DOWN: 34,
    END: 35,
    HOME: 36,
    LEFT: 37,
    UP: 38,
    RIGHT: 39,
    DOWN: 40,
    PRINT_SCREEN: 42,
    INSERT: 45,
    DELETE: 46
};

(function ()
{
    var digitNames = [ 'ZERO', 'ONE', 'TWO', 'THREE', 'FOUR', 'FIVE', 'SIX', 'SEVEN', 'EIGHT', 'NINE' ];
    var i;

    //  Top-row digit keys: keyCodes 48..57.
    for (i = 0; i < digitNames.length; i++)
    {
        KeyCodes[digitNames[i]] = 48 + i;
    }

    //  Numpad digit keys: keyCodes 96..105.
    for (i = 0; i < digitNames.length; i++)
    {
        KeyCodes['NUMPAD_' + digitNames[i]] = 96 + i;
    }

    //  Numpad operator keys (added in 3.21.0).
    KeyCodes.NUMPAD_ADD = 107;
    KeyCodes.NUMPAD_SUBTRACT = 109;

    //  Letter keys A..Z: keyCodes 65..90 (same as the ASCII uppercase codes).
    for (i = 0; i < 26; i++)
    {
        KeyCodes[String.fromCharCode(65 + i)] = 65 + i;
    }

    //  Function keys F1..F12: keyCodes 112..123.
    for (i = 1; i <= 12; i++)
    {
        KeyCodes['F' + i] = 111 + i;
    }

    //  Punctuation keys (values as reported on a US layout).
    KeyCodes.SEMICOLON = 186;
    KeyCodes.PLUS = 187;
    KeyCodes.COMMA = 188;
    KeyCodes.MINUS = 189;
    KeyCodes.PERIOD = 190;
    KeyCodes.FORWARD_SLASH = 191;
    KeyCodes.BACK_SLASH = 220;
    KeyCodes.QUOTES = 222;
    KeyCodes.BACKTICK = 192;
    KeyCodes.OPEN_BRACKET = 219;
    KeyCodes.CLOSED_BRACKET = 221;

    //  Firefox-specific legacy codes for punctuation keys.
    KeyCodes.SEMICOLON_FIREFOX = 59;
    KeyCodes.COLON = 58;
    KeyCodes.COMMA_FIREFOX_WINDOWS = 60;
    KeyCodes.COMMA_FIREFOX = 62;
    KeyCodes.BRACKET_RIGHT_FIREFOX = 174;
    KeyCodes.BRACKET_LEFT_FIREFOX = 175;
})();

module.exports = KeyCodes;
|
import os
import subprocess

# Target host passed as the single argument to the helper script.
arg0 = 'www.youtube.com'

# Run the helper with an argument vector instead of os.system() string
# concatenation: no shell is involved, so the argument cannot be
# reinterpreted or injected. The exit status is intentionally not
# checked, matching the original best-effort behaviour.
subprocess.run(['./doittest.sh', arg0])
|
#!/bin/sh
#
# Print the cluster-wide health summary (status, node and shard counts)
# of a locally running Elasticsearch instance.
# -s silences curl's progress meter; pretty=true formats the JSON reply.
# http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/cluster-health.html
#
curl -s 'http://localhost:9200/_cluster/health?pretty=true'
|
<filename>u-boot/arch/arm/cpu/armv7/sun8iw11p1/spl/efuse_spl.c
/*
**********************************************************************************************************************
*
* the Embedded Secure Bootloader System
*
*
* Copyright(C), 2006-2014, Allwinnertech Co., Ltd.
* All Rights Reserved
*
* File :
*
* By :
*
* Version : V2.00
*
* Date :
*
* Descript:
**********************************************************************************************************************
*/
#include "common.h"
#include "asm/io.h"
//#include "asm/arch/efuse.h"
#define SID_OP_LOCK (0xAC)
/*
************************************************************************************************************
*
* function
*
* name :
*
* parmeters :
*
* return :
*
* note :
*
*
************************************************************************************************************
*/
/*
 * Read one 32-bit word from the SID (chip security ID / e-fuse) key area.
 * key_index - word offset of the fuse word to read.
 * Returns the fuse word; currently always 0 because the whole
 * implementation is compiled out with #if 0 on this platform.
 */
uint sid_read_key(uint key_index)
{
#if 0
uint reg_val;
/* Select the key index in the probe-control register. */
reg_val = readl(SID_PRCTL);
reg_val &= ~((0x1ff<<16)|0x3);
reg_val |= key_index<<16;
writel(reg_val, SID_PRCTL);
/* Write the operation-lock magic and start the read (bit 1). */
reg_val &= ~((0xff<<8)|0x3);
reg_val |= (SID_OP_LOCK<<8) | 0x2;
writel(reg_val, SID_PRCTL);
/* Busy-wait until the hardware clears the start bit. */
while(readl(SID_PRCTL)&0x2){};
/* Clear the index/lock fields, then fetch the result word. */
reg_val &= ~((0x1ff<<16)|(0xff<<8)|0x3);
writel(reg_val, SID_PRCTL);
reg_val = readl(SID_RDKEY);
return reg_val;
#endif
return 0;
}
/*
*
************************************************************************************************************
*
* function
*
* name :
*
* parmeters :
*
* return :
*
* note :
*
*
************************************************************************************************************
*/
/*
 * Copy the 32-byte ROTPK (root-of-trust public key) hash out of the
 * e-fuse area into dst, one 32-bit word at a time.
 * Currently a no-op: the body is compiled out with #if 0.
 */
void sid_read_rotpk(void *dst)
{
#if 0
uint chipid_index = 0x64; /* word offset of the ROTPK area in the fuse map */
uint id_length = 32; /* bytes to copy */
uint i = 0;
for(i = 0 ; i < id_length ;i+=4 )
{
*(u32*)dst = sid_read_key(chipid_index + i );
dst += 4 ;
}
#endif
return ;
}
|
#!/bin/bash -e
# Resolve the full set of image dependencies for the given compose
# services, then build each one in order.

# Print the space-separated list of images the compose file depends on.
# Runs inside the docker-compose-helper container with the current
# directory mounted at /mount.
get_images () {
  docker run --rm -v "$PWD:/mount" docker-compose-helper \
    get-images "$@"
}

# Build every image returned by get_images, one at a time.
build () {
  # Declare and assign separately: `local x=$(cmd)` would mask the
  # command's exit status and defeat `bash -e` (ShellCheck SC2155).
  local OUTPUT
  OUTPUT=$(get_images "$@")
  echo "All image dependencies: ${OUTPUT}"
  local -a IMAGES
  IFS=' ' read -ra IMAGES <<< "$OUTPUT"
  for image in "${IMAGES[@]}"
  do
    ./scripts/host/docker-build-r.sh "$image"
  done
}

build "$@"
|
<reponame>lgoldstein/communitychest
package net.community.chest.net.proto.text.imap4;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import net.community.chest.CoVariantReturn;
import net.community.chest.ParsableString;
import net.community.chest.net.TextNetConnection;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Mar 27, 2008 11:15:36 AM
*/
/**
 * Accumulates un-tagged LIST/LSUB responses into an {@link IMAP4FoldersListInfo},
 * parsing each response line into flags, hierarchy separator, and folder name.
 */
public class IMAP4MbRefUntaggedRspHandler extends AbstractIMAP4UntaggedResponseHandlerHelper {
// Aggregate response object returned to the caller once parsing completes.
private final IMAP4FoldersListInfo _listInfo;
/**
* Original command used to ask for the references (LIST/LSUB) - we need it filter other un-tagged responses
*/
private final char[] _refCmd;
/**
* Collection (lazy-allocated) used to accumulated the actual folders (since we do not know in advance how many
* responses we will get). Once the tagged response has been found, we need to update the actual response object.
* @see #updateFolders(IMAP4FoldersListInfo rsp)
*/
private Collection<IMAP4FolderInfo> _foldersInfo /* =null */;
// constructor
/**
* @param refCmd the LIST/LSUB command text used to filter matching un-tagged responses (must be non-empty)
* @param conn connection the parse helper reads from
* @throws IllegalArgumentException if no command is supplied
*/
public IMAP4MbRefUntaggedRspHandler (final char[] refCmd, final TextNetConnection conn)
{
super(conn);
if ((null == (_refCmd=refCmd)) || (refCmd.length <= 0))
throw new IllegalArgumentException("No MBRef command specified");
_listInfo = new IMAP4FoldersListInfo();
}
/*
* @see net.community.chest.net.proto.text.imap4.AbstractIMAP4UntaggedResponseHandler#getResponse()
*/
@Override
@CoVariantReturn
protected IMAP4FoldersListInfo getResponse ()
{
return _listInfo;
}
/**
* Updates the array of folders in the response - up till now they have been accumulated in a
* collection since we do not know in advance how many responses we will get
* @param rsp response to be updated
* @return number of folders updated (<0 if error)
* @see #_foldersInfo
*/
protected int updateFolders (final IMAP4FoldersListInfo rsp)
{
final int numFolders=(null == _foldersInfo) ? 0 : _foldersInfo.size();
if (numFolders <= 0) // OK if no folders returned
return 0;
rsp.setFolders(_foldersInfo);
return numFolders;
}
/**
* Extracts the hierarchy separator response
* @param startPos index in parse buffer where to start looking for separator (inclusive)
* @param hierSep (IN/OUT) hierarchy separator (ONLY at index=0)
* @return next index in parse buffer (<0 if error)
* @throws IOException if I/O errors encountered
*/
private int extractHierarchySeparator (final int startPos, final char[] hierSep) throws IOException
{
for (int index=0; index < hierSep.length; index++)
hierSep[index] = '\0'; // assume unknown separator
final IMAP4ParseAtomValue aVal=extractStringHdrVal(startPos, false);
// copy all separators (if any)
for (int index=0, maxIndex=Math.min(aVal.length(), hierSep.length); index < maxIndex; index++)
hierSep[index] = aVal.charAt(index);
// check if escaped character
// e.g. a "\\" + char sequence: collapse the escape so index 0 holds the real separator
if (('\\' == hierSep[0]) && (hierSep.length > 1) && (hierSep[1] != '\0'))
{
hierSep[0] = hierSep[1];
hierSep[1] = '\0';
}
return aVal.startPos;
}
/*
* Parses one un-tagged response line: verifies it belongs to the issued
* LIST/LSUB command, then extracts flags, hierarchy separator, and folder
* name, appending an IMAP4FolderInfo to the lazily-created accumulator.
* @see net.community.chest.net.proto.text.imap4.AbstractIMAP4UntaggedResponseHandler#handleUntaggedResponse(net.community.chest.ParsableString, int)
*/
@Override
public int handleUntaggedResponse (final ParsableString ps, final int startPos) throws IOException
{
int maxIndex=_psHelper.getMaxIndex(),
curPos=_psHelper.findNonEmptyDataStart(startPos),
nextPos=_psHelper.findNonEmptyDataEnd(curPos+1);
if ((curPos < startPos) || (curPos >= maxIndex) || (nextPos <= curPos) || (nextPos >= maxIndex))
return 0; // ignore failure to find un-tagged data start or nothing follows it
if (!_psHelper.compareTo(curPos, nextPos, _refCmd, true))
return 0; // ignore if this is not the command we seek
// each member is a String
final Collection<String> flagsInfo=new LinkedList<String>();
if ((curPos=extractFlagsList(nextPos, flagsInfo)) < 0)
return curPos;
final int numFlags=flagsInfo.size();
final Collection<IMAP4FolderFlag> objFlags=
(numFlags <= 0) ? null : new ArrayList<IMAP4FolderFlag>(numFlags);
// extract flags objects from their corresponding strings
if (numFlags > 0)
{
for (final String flgVal : flagsInfo)
{
if ((null == flgVal) || (flgVal.length() <= 0))
continue; // should not happen
objFlags.add(new IMAP4FolderFlag(flgVal));
}
}
final char[] hierSep=new char[2]; // we allocate 2 for escaping purpose
if ((curPos=extractHierarchySeparator(curPos, hierSep)) < 0)
return curPos;
final IMAP4ParseAtomValue aVal=extractStringHdrVal(curPos, true);
final String folderName=(aVal.length() <= 0) ? null : aVal.toString();
// -502: distinct error code signalling a missing/empty folder name
if ((null == folderName) || (folderName.length() <= 0))
return (-502);
if (null == _foldersInfo)
_foldersInfo = new LinkedList<IMAP4FolderInfo>();
_foldersInfo.add(new IMAP4FolderInfo(folderName, hierSep[0], objFlags));
return 0;
}
}
|
#!/bin/bash
set -e

# Prepare squid's log directory: squid-owned, world-readable.
# All path/user expansions are quoted so values containing spaces
# cannot word-split (the original left them unquoted).
create_log_dir() {
  mkdir -p "${SQUID_LOG_DIR}"
  chmod -R 755 "${SQUID_LOG_DIR}"
  chown -R "${SQUID_USER}:${SQUID_USER}" "${SQUID_LOG_DIR}"
}

# Prepare squid's cache directory, owned by the squid user.
create_cache_dir() {
  mkdir -p "${SQUID_CACHE_DIR}"
  chown -R "${SQUID_USER}:${SQUID_USER}" "${SQUID_CACHE_DIR}"
}

create_log_dir
create_cache_dir

# allow arguments to be passed to squid
if [[ ${1:0:1} = '-' ]]; then
  EXTRA_ARGS="$@"
  set --
elif [[ ${1} == squid || ${1} == $(which squid) ]]; then
  EXTRA_ARGS="${@:2}"
  set --
fi

# Launch lighttpd to serve squid log files
"$(which lighttpd)" -f /etc/lighttpd/lighttpd.conf

# default behaviour is to launch squid
if [[ -z ${1} ]]; then
  if [[ ! -d "${SQUID_CACHE_DIR}/00" ]]; then
    echo "Initializing cache..."
    "$(which squid)" -N -f /etc/squid/squid.conf -z
  fi
  echo "Starting squid..."
  # EXTRA_ARGS is deliberately unquoted: it holds multiple words that
  # must word-split back into separate squid arguments.
  exec "$(which squid)" -f /etc/squid/squid.conf -NYCd 1 ${EXTRA_ARGS}
else
  exec "$@"
fi
<filename>SoundCloud/src/Exceptions/PasswordIncorretaException.java
package Exceptions;
/**
 * Thrown when a supplied password does not match the stored one.
 */
public class PasswordIncorretaException extends Exception {

    // Exceptions are Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * @param s detail message describing the failed password check
     */
    public PasswordIncorretaException(String s) {
        super(s);
    }
}
|
"""
Generate an algorithm for solving the Rubik's cube
"""
def solve(cube):
    """Solve a Rubik's cube in three phases.

    Phase 1 orients all edges, phase 2 fixes the corner permutation,
    and phase 3 positions/orients the corners. Each phase repeatedly
    asks a resolver for the next twist and applies it until the
    phase's completion predicate holds.
    """
    # Step 1: Orient Edges
    while not all_edges_are_oriented(cube):
        for edge in cube.edges:
            # `is None` rather than `== None` (PEP 8): identity test is
            # what is meant, and it avoids invoking a custom __eq__ on
            # whatever stripe() returns.
            if edge.stripe() is None:
                twist = resolve_edge(edge)
                cube.twist(twist)
    # Step 2: Get Corner Permutation
    while not all_corners_correct_permutation(cube):
        twist = resolve_corner_permutation(cube)
        cube.twist(twist)
    # Step 3: Position/Orient Corners
    while not all_corners_positioned_and_oriented(cube):
        twist = resolve_corner_position_and_orientation(cube)
        cube.twist(twist)
# other helper functions have been omitted for brevity
# (e.g. all_edges_are_oriented, resolve_edge, resolve_corner_permutation,
# resolve_corner_position_and_orientation, etc.) |
# Download and unpack the KITTI 2015 stereo / scene-flow dataset.
# mkdir -p creates parents and does not fail if the directories already
# exist (the original bare mkdir aborted on re-runs).
mkdir -p /storage/datasets/kitti2015
# -c resumes a partial download instead of starting over.
wget -c https://s3.eu-central-1.amazonaws.com/avg-kitti/data_scene_flow.zip -O /storage/datasets/kitti2015.zip
unzip /storage/datasets/kitti2015.zip -d /storage/datasets/kitti2015
<filename>fs/default.js
// We want to load this code once.
//////////////////////////////////////////////////////////////////////////////
// Some global vars
var run_status = 0; // global run-state flag; not referenced in this chunk — confirm usage elsewhere before removing
//////////////////////////////////////////////////////////////////////////////
// Pool of completed XMLHttpRequest objects available for reuse;
// send() pushes finished requests back here.
var XMLHttpRequests = new Array();

// Hand out an XMLHttpRequest, reusing a pooled instance when one is
// available; otherwise construct a fresh one (alerting on failure).
function XMLHttpRequestFactory() {
    if (XMLHttpRequests.length > 0) {
        return XMLHttpRequests.pop();
    }
    var request = false;
    try {
        request = new XMLHttpRequest();
    } catch (failed) {
        request = false;
    }
    if (!request) {
        alert("Error initializing XMLHttpRequest!");
    }
    return request;
}
// This is a uniq number for all requests, to avoid cache hit problems.
uq = 0;
// Issue an async GET to `msg` with query-string `args`, evaluating the
// server's response text as JavaScript when it arrives. The request is
// aborted after 60s without a response, and the XHR object is returned
// to the XMLHttpRequests pool in every completion path.
function send(msg,args) {
// progressbar(true);
var i = 0;
var request = XMLHttpRequestFactory();
var argMsg = msg + "?uq=" + uq++;
if (args != "") {
argMsg += "&" + args;
}
request.open("GET", argMsg,true);
// Abort the request if there is no response in a timely manner. (1 minute).
var timeout = setTimeout(function () {
alert("request did not get response, aborting");
request.abort();
XMLHttpRequests.push(request);
},60000);
request.onreadystatechange = function () {
if (request.readyState == 4) {
// Please do not cancel this request; we will return it to the factory.
clearTimeout(timeout);
// progressbar(false);
try {
if (request.status == 200) {
// alert(request.responseText);
// Frightfully simple; evaluate the text in the content of top.status
// NOTE(review): eval() of the raw response executes whatever the
// server sends — acceptable only if the endpoint is fully trusted.
try {
eval(request.responseText);
} catch (problem) {
alert('callback failure : ' + problem + "\n" + request.responseText);
}
} else {
// Should *never* happen.
alert('status != 200 ' + request.status);
}
} catch (problem) {
alert('status wrong or callback aborted:' + problem);
request.abort();
}
// Return the completed xml
XMLHttpRequests.push(request);
}
};
request.send(null);
}
//////////////////////////////////////////////////////////////////////////////
// Write `val` into the status-frame element `name`, but only when the
// content actually changed (avoids needless DOM churn).
function setField(name,val) {
    var w = top.status.document.getElementById(name);
    if (w.innerHTML == val) {
        return;
    }
    w.innerHTML = val;
}
//////////////////////////////////////////////////////////////////////////////
// Show functions
// Render a source span as "line:col-col" when it sits on one line, or
// "line:col-line:col" otherwise; "-" when there is no location at all.
function showLocation(loc) {
    if (loc == null) {
        return "-";
    }
    var head = loc.startLine + ":" + loc.startCol + "-";
    if (loc.startLine == loc.endLine) {
        return head + loc.endCol;
    }
    return head + loc.endLine + ":" + loc.endCol;
}
//////////////////////////////////////////////////////////////////////////////
// Local State
// Single mutable record of everything the debugger UI currently displays.
var state = new Object();
state.module = ''; // which module is the cursor at?
state.location = ''; // HTML of location
state.event = ''; // simple string of event
state.eventtext = ''; // HTML of event description
state.counter = ''; // global step-counter display
state.threadid = ''; // current thread id display
state.tixboxNo = ''; // tick box under the cursor (local id)
state.tixboxType = ''; // ... and its type
state.lineno = ''; // ... and its source line (used for scrolling)
state.viewedModule = ''; // which module is the user looking at?
state.modNames = new Object(); // module name -> <option> node in the pulldown
state.fontsize = '10'; // code-frame font size, in points
state.linespacing = '13'; // pixels per source line, used by mark()
// Debug helper: dump the cursor/viewed module pair.
function stateAlert () {
    alert("state.module = " + state.module + "\n" +
          "state.viewedModule = " + state.viewedModule + "\n" +
          "");
}
// These functions both modify the state, and also
// modify any viewer on the data, like the status box.
// Record `mod` as the module holding the cursor, mirror it into the
// status bar, and move the yellow highlight in the module pulldown from
// the previous module to the new one.
function setModule(mod) {
    setField("module",mod);
    if (state.module != mod) {
        var prev = state.modNames["mod_" + state.module];
        var next = state.modNames["mod_" + mod];
        if (prev != undefined) {
            prev.setAttribute("style","background: white");
        }
        if (next != undefined) {
            next.setAttribute("style","background: yellow");
        }
    }
    state.module = mod;
}
// Remember the current source location and mirror it into the status bar.
function setLocation(loc) {
    setField("location",showLocation(loc));
    state.location = loc;
}
// Record the current event: `e` is the plain-string form, `et` is the
// HTML description shown in the status bar.
function setEvent2(e,et) {
    state.event = e;
    // BUG FIX: state.eventtext is documented as the "HTML of event
    // description", but the original stored the plain string `e` here.
    state.eventtext = et;
    setField("event",et);
}
// Update the displayed global step counter.
function setCounter(c) {
    setField("counter",c);
    state.counter = c;
}
// Update the displayed thread id.
function setThreadID(tid) {
    // BUG FIX: the original wrote the thread id into state.counter,
    // clobbering the value maintained by setCounter(); state.threadid is
    // the field declared for it.
    state.threadid = tid;
    setField("thread_no",tid);
}
// Remember which tick box the cursor is on, plus its type and line.
function setTickInfo(tick,ty,lineno) {
    state.lineno = lineno;
    state.tixboxType = ty;
    state.tixboxNo = tick;
}
// Forget the current tick box and drop its highlight.
function clearTickInfo() {
    state.lineno = '';
    state.tixboxType = '';
    state.tixboxNo = '';
    unmark();
}
// Record which module's source is being viewed and sync the pulldown.
function setViewedModule(modName) {
    top.status.document.getElementById('allmodules').value = modName;
    state.viewedModule = modName;
}
// Names (strings) of active breakpoints; parallel to `breakpoints2`,
// which holds the breakpoint objects themselves.
var breakpoints = new Array(0);
var breakpoints2 = new Array(0);
// Register breakpoint object `o`: dedupe, hide its entry in the "Create
// Breakpoints" list, and append a row (with an enable checkbox) to the
// "Active Breakpoints" table.
function setBreakpoint(o) {
    var name = breakPointToName(o);
    var fullName = breakPointToText(o);
    // Do not add if we already have it.
    for(var i = 0;i < breakpoints2.length;i++) {
        // TODO: We could cache the JSON string inside the breakpoints
        if (breakPointToName(breakpoints2[i]) == name) {
            return;
        }
    }
    // BUG FIX: the original pushed onto `breakpoints` before the duplicate
    // check (and computed `name` twice), so duplicate names accumulated.
    breakpoints.push(name);
    breakpoints2.push(o); // Adding this to the bottom of the breakpoint list
    // showLog("set " + breakpoints.length);
    var w = top.status.document.getElementById(name);
    if (w != undefined) {
        // If this value is ticked, then do not display it in the
        // Create Breakpoints list.
        w.style.display = 'none';
    }
    // Now we look for this exception in the Active Breakpoints list.
    var w = top.status.document.getElementById(name + '_active');
    if (w != undefined) {
        var rows = w.parentNode;
        rows.removeChild(w);
    }
    var tab = top.status.document.getElementById('Breakpoints');
    var newRow = tab.insertRow(-1); // insert at bottom
    // Zebra-stripe alternating rows.
    if (breakpoints2.length % 2 == 0) {
        newRow.setAttribute("style","background: #f0f0f0");
    } else {
        newRow.setAttribute("style","background: #f8f8f8");
    }
    newRow.setAttribute("id",name + '_active');
    var newCell = newRow.insertCell(0);
    var newText = top.status.document.createTextNode(fullName);
    newCell.setAttribute("align","right");
    newCell.appendChild(newText);
    var newCell = newRow.insertCell(1);
    var newElem = top.status.document.createElement("input");
    newElem.setAttribute("type","checkbox");
    newElem.checked = true;
    newElem.onchange = function () {
        breakpoint(o,newElem.checked);
    };
    newCell.appendChild(newElem);
}
// Deactivate breakpoint object `o`: drop it from the active lists, untick
// its checkbox, re-show its entry in the "Create Breakpoints" list and
// remove its row from the "Active Breakpoints" table.
function clearBreakpoint(o) {
    var name = breakPointToName(o);
    // BUG FIX: iterate backwards so splice() does not skip elements, and
    // also prune the parallel `breakpoints` name array, which the
    // original left stale after every clear.
    for(var i = breakpoints2.length - 1;i >= 0;i--) {
        if (breakPointToName(breakpoints2[i]) == name) {
            breakpoints2.splice(i,1);
        }
    }
    for(var i = breakpoints.length - 1;i >= 0;i--) {
        if (breakpoints[i] == name) {
            breakpoints.splice(i,1);
        }
    }
    //showLog("clear " + breakpoints + " " + breakpoints.length);
    var w = top.status.document.getElementById(name);
    if (w != undefined) {
        w.style.display = '';
    }
    var tb = top.status.document.getElementById(name + '_checkbox');
    if (tb != undefined) {
        tb.checked = false;
    }
    // Now we look for this exception in the Active Breakpoints list.
    var w = top.status.document.getElementById(name + '_active');
    if (w != undefined) {
        var rows = w.parentNode;
        rows.removeChild(w);
    }
}
// Human-readable label for a breakpoint object (breakPointToName gives
// the id-safe variant). Falls through to an alert on unknown tags.
function breakPointToText(o) {
    if (o === "AllExceptions") {
        return 'All Exceptions';
    }
    if (o === "ThreadChange") {
        return 'All Thread Changes';
    }
    if (o === "ThreadTermination") {
        return 'All Thread Terminations';
    }
    switch(o.tag) {
    case "CounterAt":
        return 'Counter # ' + o.count;
    case "ThreadChangeTo":
        return 'Thread Change To TID# ' + o.tid;
    case "TickBox":
        var loc = o.tickInfo.location;
        return o.tickInfo.module + " " +
            loc.startLine + ":" + loc.startCol + "-" +
            loc.endLine + ":" + loc.endCol;
    default:
        alert("switch problem: breakPointToText" + o.toJSONString());
    }
}
// Id-safe name for a breakpoint object, used as a DOM element id prefix
// and as the dedup key in the breakpoint lists.
function breakPointToName(o) {
    if (o === "AllExceptions" || o === "ThreadChange" || o === "ThreadTermination") {
        return o;
    }
    switch(o.tag) {
    case "CounterAt":
        return 'CounterAt_' + o.count;
    case "ThreadChangeTo":
        return 'ThreadChangeTo_' + o.tid;
    case "TickBox":
        return 'TickBox_' + o.tickInfo.global;
    default:
        alert("switch problem: breakPointToName" + o.toJSONString());
    }
}
//////////////////////////////////////////////////////////////////////////////
// All the modules in this section have an entry in AjaxAPI.hs
// Translate a server event value (string or tagged object) into the
// status-bar event display via setEvent2().
function setEvent(event) {
    if (event === "Raise") {
        setEvent2("Exception","<div style='color: red'>Exception!</div>");
    } else if (event === "Finished") {
        setEvent2("Finished","<div style='color: green'>Program Finished</div>");
    } else if (event === "ThreadFinished") {
        setEvent2("Finished","<div style='color: green'>Thread Finished</div>");
    } else if (event.tag === "Tick") {
        var tick = event.tick;
        setEvent2(tick,"#" + tick);
    } else {
        alert("switch problem " + event.toJSONString());
    }
}
// Move the cursor display to `tickInfo`; clear everything when null.
function setTicked(tickInfo) {
    //alert(tickInfo.toJSONString());
    if (tickInfo == null) {
        setModule("-");
        setLocation(null);
        clearTickInfo();
        unmark();
        return;
    }
    setLocation(tickInfo.location);
    setModule(tickInfo.module);
    setTickInfo(tickInfo.local,tickInfo.tickType,tickInfo.location.startLine);
    addMarkings();
}
// Populate the "allmodules" pulldown in the status frame with one
// <option> per module name, remembering each option node for later
// highlighting by setModule().
function setModules(modNames) {
    // BUG FIX: create the <option>/text nodes with the status frame's
    // own document. The original used the top-level `document`, and
    // appending a node created by a different document raises
    // WRONG_DOCUMENT_ERR in strict DOM implementations.
    var d = top.status.document;
    var w = d.getElementById("allmodules");
    for(var i = 0; i < modNames.length;i++) {
        var modName = modNames[i];
        var o = d.createElement("option");
        o.setAttribute("value",modName);
        w.appendChild(o);
        o.appendChild(d.createTextNode(modName));
        state.modNames["mod_" + modName] = o;
    }
}
// Remove every active breakpoint, always deleting the current head until
// the list drains.
function clearBreakPoints () {
    for (;breakpoints2.length > 0;) {
        clearBreakpoint(breakpoints2[0]);
    }
}
// TODO: rename this back
// Thin alias kept for the server-side callback API.
function setBreakPoint2(o) {
    setBreakpoint(o);
}
// Light up (orange) the Active Breakpoints rows whose corresponding
// entry in `lights` is true; reset the others.
function setBreakPointLights(lights) {
    for(var i = 0;i < breakpoints2.length;i++) {
        var row = top.status.document.getElementById(breakPointToName(breakpoints2[i]) + '_active');
        if (row == undefined) {
            continue;
        }
        var bg = lights[i] ? "background: orange" : "background: ";
        row.setAttribute("style",bg);
    }
}
// Enable/disable the run buttons to match the interpreter run state and
// keep the progress bar plus the polling loop in step.
function setRunning(run) {
    // Hmm, async issues,
    var d = top.heading.document;
    if (run === "Stopped") {
        progressbar(false);
        d.getElementById("runbackbutton").disabled = false;
        d.getElementById("runbutton").disabled = false;
        d.getElementById("stopbutton").disabled = true;
    } else if (run === "Forward" || run === "Backward") {
        progressbar(true);
        d.getElementById("runbackbutton").disabled = true;
        d.getElementById("runbutton").disabled = true;
        d.getElementById("stopbutton").disabled = false;
        please_continue();
    }
}
//////////////////////////////////////////////////////////////////////////////
// Advance the counter display while the interpreter is running, then ask
// the server whether to keep going.
function running(count) {
    setCounter(count);
    // Check to see if the breakpoint has been found
    // The result is either
    //  - (re)calling running
    //  - calling setState
    // This is where any animation might live.
    // NOTE(review): setRunning() above calls please_continue() (with an
    // underscore) while this calls pleasecontinue() — confirm which
    // spelling the server-generated code actually defines; one of the two
    // call sites is almost certainly a typo.
    pleasecontinue();
}
//////////////////////////////////////////////////////////////////////////////
// Replace the global message shown in the footer frame.
function showMessage(msg) {
    top.footing.document.getElementById("global_message").innerHTML = msg;
}
// Append `msg` (plus a line break) to the scratch log in the status frame.
function showLog(msg) {
    var doc = top.status.document;
    var scratch = doc.getElementById("scratch");
    scratch.appendChild(doc.createTextNode(msg));
    scratch.appendChild(doc.createElement("BR"));
}
// Combine these below
// BUG FIX (all four draw* helpers): they called setEvent(), which takes a
// single server event value; none of these strings match its cases, so
// every call fell through to its "switch problem" alert. The two-argument
// (plain string, HTML) form is setEvent2().
// Display the booting state at program start.
function drawStart(count) {
    setEvent2("Booting","booting...");
    clearTickInfo();
    unmark();
}
// Display an exception event.
function drawRaise(count,tid) {
    setEvent2("Exception","<div style='color: red'>Exception!</div>");
    clearTickInfo();
    unmark();
}
// Display a change to thread `tid`.
function drawThread(count,tid) {
    setEvent2("ThreadChange","<div style='color: green'>Change to Thread # " + tid + "</div>");
    clearTickInfo();
    unmark();
}
// Display the termination of thread `tid`.
function drawThreadFinished(count,tid) {
    setEvent2("ThreadFinished","<div style='color: green'>Thread# " + tid + " finished</div>");
    clearTickInfo();
    unmark();
}
// This makes sure that the currently marked module is
// highlighted in yellow
function markModule(modName) {
    // o.setAttribute("style","background: green");
}
// Pulldown-menu dispatcher; only "Next Exception" is wired up so far.
function menu(m) {
    if (m.value == 'Next Exception') {
        // BUG FIX: send() expects (msg, args); with args omitted the query
        // string became "...&undefined". Pass an empty argument string, as
        // the other call sites do.
        send("/next_exception","");
    }
    m.value = '...';
}
theLineSpacing = 16;
// For some reason, when you ask for font size 12, you get 14, etc, etc.
// The argument is the font size you asked for, 'theLineSpacing' contains
// the real size of the font. Perhaps this is just spacing between lines?
function fontsize(fs) {
    // (leftover debug alert removed)
    if (fs == 12) {
        theLineSpacing = 16;
    } else if (fs == 10) {
        theLineSpacing = 13;
    } else {
        alert("strange font size");
        return;
    }
    // BUG FIX: mark() scrolls using state.linespacing, which the original
    // never updated; keep both spacing records in step.
    state.linespacing = theLineSpacing;
}
// Currently highlighted code element (null when nothing is marked).
var mark_elem = null;
// Remove any highlight left behind by mark().
function unmark() {
    if (mark_elem == null) {
        return;
    }
    mark_elem.style.border = '';
    mark_elem.style.background = '';
}
// Scratch vars from an abandoned scroll-position experiment (unused).
var ps = null;
var oldScroll = 0;
var when = null;
// Highlight tick box `n` (element "t_<n>") in the code frame according to
// its tick type `ty`, then scroll so `lineno` sits near the top.
function mark(n,ty,lineno) {
    unmark();
    mark_elem = top.code.document.getElementById("t_" + n);
    if (mark_elem != null) {
        switch (ty) {
        case "ExpBox":
        case "AltBox":
            // mark_elem.style.border = '1px solid orange';
            mark_elem.style.background = '#f0f000';
            break;
        default:
            switch(ty.tag) {
            case "TopLevelBox":
            case "LocalBox":
                mark_elem.style.background = '#f0f000';
                break;
            case "GuardBinBox":
            case "CondBinBox":
            case "QualBinBox":
                // Boolean tick boxes: green underline for true, red for false.
                if (ty.value) {
                    mark_elem.style.borderBottom = '3px double green';
                } else {
                    mark_elem.style.borderBottom = '3px double red';
                }
                break;
            default:
                alert("switch problem " + ty.toJSONString());
            }
        }
        // var viewerHeight = top.code.window.innerHeight;
        // window.status = top.code.window.innerHeight;
        // NOTE(review): fontsize() updates theLineSpacing, but this uses
        // state.linespacing — confirm which record is authoritative.
        var newScroll = (lineno - 10) * state.linespacing;
        // Dont scroll if your newScoll is on your viewer.
        top.code.scroll(0,newScroll);
    }
}
}
// Add any markup onto the current page. If the module holding the cursor
// is not the page being viewed, load it first; codeloaded() re-enters
// here once the page arrives.
// TODO: rename this function better.
function addMarkings() {
    if (state.module == state.viewedModule) {
        mark(state.tixboxNo,state.tixboxType,state.lineno);
        return;
    }
    viewModule(state.module);
}
//////////////////////////////////////////////////////////////////////////////
// Called after every loading the code page.
//
// This is a chance to set the font, move the scroll bar,
// markup the text, restore callbacks, etc, etc.
function codeloaded (modName) {
    setViewedModule(modName);
    var code = top.code;
    code.document.body.style.fontSize = "" + state.fontsize + "pt";
    code.onmousedown = mousedownCode;
    code.onmousemove = mousemoveCode;
    code.onmouseup = mouseupCode;
    // This page contains the cursor, so it needs markup. We do not just
    // call addMarkings unconditionally, because loading the cursor page
    // happened as a side-effect of addMarkings itself.
    if (state.module == state.viewedModule) {
        addMarkings();
    }
}
//////////////////////////////////////////////////////////////////////////////
// Callbacks from Javascript buttons, etc, to perform actions.

// Navigate the code frame to the rendered page for `modName`.
function viewModule(modName) {
    // TODO: perhaps some sort of visual cue of action pending.
    top.code.location.href = modulecode(modName);
}
//////////////////////////////////////////////////////////////////////////////
// Keypress handler for the numeric breakpoint entry fields. On Enter,
// turn the field's value into a breakpoint object { tag: name, <field>: n }
// and activate it. Returns false to swallow the handled keystroke.
function numberEnterFrom(myfield,e,name,field) {
    var keycode;
    if (window.event) {
        keycode = window.event.keyCode;
    } else if (e) {
        keycode = e.which;
    } else {
        return true;
    }
    if (keycode != 13) {
        return true;
    }
    // Only accept whole numbers (resolves the old "check for number value
    // only" TODO); leave the text in place so the user can correct it.
    if (!/^\d+$/.test(myfield.value)) {
        return false;
    }
    var obj = { "tag" : name };
    obj[field] = myfield.value;
    breakpoint(obj,true);
    myfield.value = "";
    return false;
}
//////////////////////////////////////////////////////////////////////////////
// Recursively collect the text content under DOM node `w`. A text node
// with no value renders as the placeholder "{{*}}".
function getTextFromCodeNode(w) {
    if (w.nodeType != Node.TEXT_NODE) {
        var text = "";
        var children = w.childNodes;
        for(var i = 0;i < children.length;i++) {
            text += getTextFromCodeNode(children[i]);
        }
        return text;
    }
    if (w.nodeValue == undefined) {
        return "{{*}}";
    }
    return w.nodeValue;
}
// Mouse-down in the code frame: walk up from the clicked node collecting
// every enclosing tick box ("t_<id>" element), then pop up a
// "Set Breakpoint" menu listing them at the click position.
function mousedownCode(mouseEvent) {
    var x2 = mouseEvent.pageX;
    var y2 = mouseEvent.pageY;
    var target = mouseEvent.target;
    // Collect the chain of tick boxes enclosing the click target, with a
    // truncated text preview of each.
    var re = /t_(\d+)/;
    var menuItems = new Array(0);
    var w = target;
    while (w != null && w.parentNode != undefined) {
        if (w.id != undefined) {
            var o = re.exec(w.id);
            if (o != null) {
                var text = getTextFromCodeNode(w);
                if (text.length > 30) {
                    text = text.substr(0,28) + " ...";
                }
                menuItems.push({ id : o[1], text : text });
            }
        }
        w = w.parentNode;
    }
    if (menuItems.length > 0) {
        // BUG FIX: the menu is appended into the code frame's document, so
        // every node must be created by that document too. The original
        // mixed top.status.document and the top-level `document`, which
        // raises WRONG_DOCUMENT_ERR in strict DOM implementations.
        var doc = top.code.document;
        var newElem = doc.createElement("div");
        newElem.setAttribute("id","codemenu");
        newElem.style.position='absolute';
        newElem.style.top = "" + (y2 - 5) + 'px';
        newElem.style.left = "" + (x2 - 5) + 'px';
        newElem.style.width = "200px";
        newElem.style.opacity = '0.95';
        newElem.style.backgroundColor = 'white';
        var table = doc.createElement("table");
        table.setAttribute("border", "1");
        var tbody = doc.createElement("tbody");
        var tr = doc.createElement("tr");
        var th = doc.createElement("th");
        th.appendChild(doc.createTextNode("Set Breakpoint"));
        tr.appendChild(th);
        tbody.appendChild(tr);
        for(var i = 0;i < menuItems.length;i++) {
            var tr = doc.createElement("tr");
            var td = doc.createElement("td");
            td.appendChild(doc.createTextNode(menuItems[i].text));
            td.id = 'menu_' + i;
            td.onmouseover = function(event) {
                event.target.style.background = 'orange';
            }
            td.onmouseout = function(event) {
                event.target.style.background = 'white';
            }
            td.onmouseup = function(event) {
                // Recover the menu index from the row id, then ask the
                // server to set a tick-box breakpoint there.
                var ix = /menu_(\d+)/.exec(event.target.id)[1];
                breakpoint({ tag : "ReqTickBox",
                             module : state.viewedModule,
                             id : parseInt(menuItems[ix].id, 10)
                           },true);
                return true; // so the widget above can remove the menu
            }
            tr.appendChild(td);
            tbody.appendChild(tr);
        }
        table.appendChild(tbody);
        newElem.appendChild(table);
        doc.body.appendChild(newElem);
    }
    return false;
}
// Tick box highlighted during hover (write-only; see disabled code below).
var bp_highlighted = null;
// Mouse-move over the code frame. The ctrl-hover highlight experiment is
// commented out (its draft even contains a syntax error), so this is
// currently a no-op that lets the event propagate.
function mousemoveCode(mouseEvent) {
    /*
    var x = mouseEvent.clientX;
    var y = mouseEvent.clientY;
    var x2 = mouseEvent.pageX;
    var y2 = mouseEvent.pageY;
    var target = mouseEvent.target;
    if (mouseEvent.ctrlKey) { // For now, untill we debug it
        var id = mouseEvent.target.id;
        // Perhaps dig for the
        if (id != undefined) {
        } else {
            if (bp_highlighted = null;
        }
        mousedownCode(mouseEvent);
    }
    */
    return true;
}
// Dismiss the breakpoint popup menu (if present) on mouse-up anywhere in
// the code frame.
function mouseupCode(mouseEvent) {
    var menuNode = top.code.document.getElementById('codemenu');
    if (menuNode != undefined) {
        menuNode.parentNode.removeChild(menuNode);
    }
    return true; // someone else might want to see the mouseup button.
}
//////////////////////////////////////////////////////////////////////////////
// Token marking that the progress indicator is currently shown
// (null = hidden).
var progressTid = null;
// Show or hide the progress indicator in the status frame; redundant
// calls in the same direction are ignored.
function progressbar(show) {
    if (show == true) {
        if (progressTid != null) {
            return; // already visible
        }
        progressTid = {};
        var w = top.status.document.getElementById("progress");
        w.style.visibility = "";
    } else {
        if (progressTid == null) {
            return; // already hidden
        }
        progressTid = null;
        var w = top.status.document.getElementById("progress");
        w.style.visibility = "hidden";
    }
}
/*
if (w == null) {
var newElem = d.createElement("div");
newElem.setAttribute("id","progress");
newElem.style.position='absolute';
newElem.style.top = "0px";
newElem.style.left = "100px"
newElem.style.opacity = '0.95';
newElem.style.backgroundColor = 'white';
var newImage = d.createElement("img");
newImage.src = "progress.gif";
newElem.appendChild(newImage);
d.body.appendChild(newElem);
w = newElem;
}
if (show) {
w.style.visibility = "";
} else {
w.style.visibility = "hidden";
}
}
*/
//////////////////////////////////////////////////////////////////////////////
// Boot sequence: wait 500ms for the frames to exist, run local
// initialisation, then ask the server to boot the debugged program.
setTimeout(function () {
    // alert(escape(({ "Hello World": "This is a %32 Test", "World": 2.0 }).toString()));
    // alert(true.toJSONString());
    init_please();
    send("/boot","") },500);
// Placeholder for client-side initialisation hooks (currently empty).
function init_please() {
    // top.code.onclick = function () { alert("loaded"); }
    // onclick = function () { alert("loaded2"); }
    // top.onclick = function () { alert("loaded3"); }
}
//////////////////////////////////////////////////////////////////////////////
/* AJG: from http://www.json.org/json.js
*/
/*
json.js
2006-12-06
This file adds these methods to JavaScript:
array.toJSONString()
boolean.toJSONString()
date.toJSONString()
number.toJSONString()
object.toJSONString()
string.toJSONString()
These methods produce a JSON text from a JavaScript value.
It must not contain any cyclical references. Illegal values
will be excluded.
The default conversion for dates is to an ISO string. You can
add a toJSONString method to any date object to get a different
representation.
string.parseJSON(hook)
This method parses a JSON text to produce an object or
array. It can throw a SyntaxError exception.
The optional hook parameter is a function which can filter and
transform the results. It receives each of the values, and its
return value is used instead. If it returns what it received, then
structure is not modified.
Example:
// Parse the text. If it contains any "NaN" strings, replace them
// with the NaN value. All other values are left alone.
myData = text.parseJSON(function (value) {
if (value === 'NaN') {
return NaN;
}
return value;
});
It is expected that these methods will formally become part of the
JavaScript Programming Language in the Fourth Edition of the
ECMAScript standard in 2007.
*/
// Vendored json.org json.js (2006-12-06) — see the header comment above.
// The guard keeps it from clobbering a preexisting implementation.
if (!Object.prototype.toJSONString) {
    // Serialise an array; undefined/function elements are silently skipped.
    Array.prototype.toJSONString = function () {
        var a = ['['], b, i, l = this.length, v;
        function p(s) {
            if (b) {
                a.push(',');
            }
            a.push(s);
            b = true;
        }
        for (i = 0; i < l; i += 1) {
            v = this[i];
            switch (typeof v) {
            case 'undefined':
            case 'function':
            case 'unknown':
                break;
            case 'object':
                if (v) {
                    if (typeof v.toJSONString === 'function') {
                        p(v.toJSONString());
                    }
                } else {
                    p("null");
                }
                break;
            default:
                p(v.toJSONString());
            }
        }
        a.push(']');
        return a.join('');
    };
    Boolean.prototype.toJSONString = function () {
        return String(this);
    };
    // Dates serialise to a quoted ISO-8601-style local timestamp.
    Date.prototype.toJSONString = function () {
        function f(n) {
            return n < 10 ? '0' + n : n;
        }
        return '"' + this.getFullYear() + '-' +
                f(this.getMonth() + 1) + '-' +
                f(this.getDate()) + 'T' +
                f(this.getHours()) + ':' +
                f(this.getMinutes()) + ':' +
                f(this.getSeconds()) + '"';
    };
    Number.prototype.toJSONString = function () {
        return isFinite(this) ? String(this) : "null";
    };
    // Serialise own enumerable properties; unserialisable values skipped.
    Object.prototype.toJSONString = function () {
        var a = ['{'], b, i, v;
        function p(s) {
            if (b) {
                a.push(',');
            }
            a.push(i.toJSONString(), ':', s);
            b = true;
        }
        for (i in this) {
            if (this.hasOwnProperty(i)) {
                v = this[i];
                switch (typeof v) {
                case 'undefined':
                case 'function':
                case 'unknown':
                    break;
                case 'object':
                    if (v) {
                        if (typeof v.toJSONString === 'function') {
                            p(v.toJSONString());
                        }
                    } else {
                        p("null");
                    }
                    break;
                default:
                    p(v.toJSONString());
                }
            }
        }
        a.push('}');
        return a.join('');
    };
    (function (s) {
        // Escape table for control/special characters inside strings.
        var m = {
            '\b': '\\b',
            '\t': '\\t',
            '\n': '\\n',
            '\f': '\\f',
            '\r': '\\r',
            '"' : '\\"',
            '\\': '\\\\'
        };
        // NOTE(review): parseJSON validates with a regexp then eval()s the
        // text; treat its input as trusted.
        s.parseJSON = function (hook) {
            try {
                if (/^("(\\.|[^"\\\n\r])*?"|[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t])+?$/.
                        test(this)) {
                    var j = eval('(' + this + ')');
                    if (typeof hook === 'function') {
                        function walk(v) {
                            if (v && typeof v === 'object') {
                                for (var i in v) {
                                    if (v.hasOwnProperty(i)) {
                                        v[i] = walk(v[i]);
                                    }
                                }
                            }
                            return hook(v);
                        }
                        return walk(j);
                    }
                    return j;
                }
            } catch (e) {
            }
            throw new SyntaxError("parseJSON");
        };
        s.toJSONString = function () {
            if (/["\\\x00-\x1f]/.test(this)) {
                return '"' + this.replace(/([\x00-\x1f\\"])/g, function(a, b) {
                    var c = m[b];
                    if (c) {
                        return c;
                    }
                    c = b.charCodeAt();
                    return '\\u00' +
                        Math.floor(c / 16).toString(16) +
                        (c % 16).toString(16);
                }) + '"';
            }
            return '"' + this + '"';
        };
    })(String.prototype);
}
|
#!/bin/sh
#Krzysztof.M.Sywula@intel.com
# Mount point for the persistent "sketch" jffs2 filesystem.
SKETCH=/sketch
SKETCH_SIZE=256 #KB
ERASE_SIZE=32 #KB
# Backing image file and the mtd block device it is loaded into.
IMAGE=/opt/cln/galileo/jffs2.bin
DEVICE=/dev/mtdblock0
# Load the saved jffs2 image into the mtd device and mount the sketch fs.
start()
{
dd if=$IMAGE of=$DEVICE
mount -t jffs2 $DEVICE $SKETCH
}
# Unmount the sketch fs and persist the device contents back to the image.
stop()
{
umount $SKETCH
dd if=$DEVICE of=$IMAGE
}
# Prepare the RAM-backed mtd device, kernel modules and mount point, and
# build the jffs2 image on first run.
setup()
{
ulimit -c unlimited
# RAM-backed MTD device sized to the sketch partition.
modprobe mtdram total_size=$SKETCH_SIZE erase_size=$ERASE_SIZE
modprobe mtdchar
modprobe mtdblock
mkdir -p $SKETCH
# IF IMAGE DOES NOT EXIST - CREATE ONE
test -f $IMAGE || mkfs.jffs2 --pad=$(($SKETCH_SIZE * 1024)) -r /tmp --eraseblock=$ERASE_SIZE -o $IMAGE
}
# Print usage (caller passes the script name as $1) and abort.
usage()
{
printf 'Use: %s [start/stop]\n' "$1"
exit 1
}
# Dispatch on the single start/stop argument.
main()
{
echo "$1"
if [ "x$1" = "xstart" ]; then
#	setup
#	start
	mkdir -p /sketch
elif [ "x$1" = "xstop" ]; then
#	stop
	# BUG FIX: an elif branch containing only comments is a shell syntax
	# error ("then" immediately followed by "else"); keep a no-op command
	# while the real stop logic stays commented out.
	:
else
	usage "$0"
fi
}
main "$@"
|
package com.cannolicatfish.rankine.blocks;
import com.cannolicatfish.rankine.init.RankineBlocks;
import net.minecraft.block.Block;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.server.ServerWorld;
import java.util.Random;
/**
 * Fumarole deposit block: on random ticks it occasionally grows a
 * gwihabaite crystal in the air space directly above it.
 */
public class FumaroleDepositBlock extends Block {
    public FumaroleDepositBlock(Properties properties) {
        super(properties);
    }

    /** Always receive random ticks so the crystal growth below can fire. */
    @Override
    public boolean ticksRandomly(BlockState state) {
        return true;
    }

    /**
     * With 10% probability per random tick, place a gwihabaite crystal in
     * the block above — but only when that block is currently air.
     */
    @Override
    public void randomTick(BlockState state, ServerWorld worldIn, BlockPos pos, Random random) {
        if (random.nextFloat()<0.1 && worldIn.getBlockState(pos.up()).matchesBlock(Blocks.AIR)) {
            // Flag 2: presumably "send to clients without neighbor updates"
            // per vanilla setBlockState conventions — NOTE(review): confirm.
            worldIn.setBlockState(pos.up(), RankineBlocks.GWIHABAITE_CRYSTAL.get().getDefaultState(),2);
        }
        super.randomTick(state, worldIn, pos, random);
    }
}
|
<gh_stars>1-10
"use strict";

// Auto-generated (Babel CommonJS output): SVG path data for a
// "credit card" icon, shaped for an icon-renderer component.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.creditCard = void 0;
var creditCard = {
  "viewBox": "0 0 1920 1792",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M1760 128q66 0 113 47t47 113v1216q0 66-47 113t-113 47h-1600q-66 0-113-47t-47-113v-1216q0-66 47-113t113-47h1600zM160 256q-13 0-22.5 9.5t-9.5 22.5v224h1664v-224q0-13-9.5-22.5t-22.5-9.5h-1600zM1760 1536q13 0 22.5-9.5t9.5-22.5v-608h-1664v608q0 13 9.5 22.5t22.5 9.5h1600zM256 1408v-128h256v128h-256zM640 1408v-128h384v128h-384z"
    }
  }]
};
exports.creditCard = creditCard;
<filename>synthetic/img_scale.py
#
# Written by <NAME>
# Department of Astrophysical Sciences, Princeton University
#
# You can freely use the code.
#
import numpy
import math
def sky_median_sig_clip(input_arr, sig_fract, percent_fract, max_iter=100):
	"""Estimate the sky value by iterative median sigma-clipping.

	@type input_arr: numpy array
	@param input_arr: image data array (flattened internally)
	@type sig_fract: float
	@param sig_fract: clip at median +/- sig_fract * std
	@type percent_fract: float
	@param percent_fract: stop once the relative change falls below this
	@type max_iter: max. of iterations
	@rtype: tuple
	@return: (sky value, number of iteration)
	"""
	pixels = numpy.ravel(input_arr)
	estimate = numpy.median(pixels)
	iteration = 0
	while True:
		# Clip everything outside estimate +/- sig_fract * std, then
		# re-estimate the sky from the survivors.
		sigma = pixels.std()
		keep = numpy.where((pixels < estimate + sig_fract * sigma) &
		                   (pixels > estimate - sig_fract * sigma))
		pixels = pixels[keep]
		new_estimate = numpy.median(pixels)
		converged = math.fabs(estimate - new_estimate) / new_estimate <= percent_fract
		if converged or iteration >= max_iter:
			return (new_estimate, iteration)
		iteration += 1
		estimate = new_estimate
def sky_mean_sig_clip(input_arr, sig_fract, percent_fract, max_iter=100):
	"""Estimate the sky value by iterative mean sigma-clipping.

	@type input_arr: numpy array
	@param input_arr: image data array (flattened internally)
	@type sig_fract: float
	@param sig_fract: clip at mean +/- sig_fract * std
	@type percent_fract: float
	@param percent_fract: stop once the relative change falls below this
	@type max_iter: max. of iterations
	@rtype: tuple
	@return: (sky value, number of iteration)
	"""
	pixels = numpy.ravel(input_arr)
	estimate = numpy.mean(pixels)
	iteration = 0
	while True:
		# Clip everything outside estimate +/- sig_fract * std, then
		# re-estimate the sky from the survivors.
		sigma = pixels.std()
		keep = numpy.where((pixels < estimate + sig_fract * sigma) &
		                   (pixels > estimate - sig_fract * sigma))
		pixels = pixels[keep]
		new_estimate = numpy.mean(pixels)
		converged = math.fabs(estimate - new_estimate) / new_estimate <= percent_fract
		if converged or iteration >= max_iter:
			return (new_estimate, iteration)
		iteration += 1
		estimate = new_estimate
def linear(inputArray, scale_min=None, scale_max=None):
	"""Linearly rescale image data into [0, 1].

	@type inputArray: numpy array
	@param inputArray: image data array (never modified; a copy is scaled)
	@type scale_min: float
	@param scale_min: minimum data value (defaults to the array minimum)
	@type scale_max: float
	@param scale_max: maximum data value (defaults to the array maximum)
	@rtype: numpy array
	@return: image data array
	"""
	imageData = numpy.array(inputArray, copy=True)
	# Idiom fix: compare to None with `is`, not `==` (which can trigger
	# elementwise comparison on array-like arguments).
	if scale_min is None:
		scale_min = imageData.min()
	if scale_max is None:
		scale_max = imageData.max()
	# NOTE(review): scale_max == scale_min divides by zero here — confirm
	# callers never pass a flat range.
	imageData = imageData.clip(min=scale_min, max=scale_max)
	imageData = (imageData - scale_min) / (scale_max - scale_min)
	# Defensive clamp; the clip above already bounds the data, kept for
	# parity with the other scaling helpers.
	indices = numpy.where(imageData < 0)
	imageData[indices] = 0.0
	indices = numpy.where(imageData > 1)
	imageData[indices] = 1.0
	return imageData
def sqrt(inputArray, scale_min=None, scale_max=None):
	"""Square-root rescale image data into [0, 1].

	@type inputArray: numpy array
	@param inputArray: image data array (never modified; a copy is scaled)
	@type scale_min: float
	@param scale_min: minimum data value (defaults to the array minimum)
	@type scale_max: float
	@param scale_max: maximum data value (defaults to the array maximum)
	@rtype: numpy array
	@return: image data array
	"""
	imageData = numpy.array(inputArray, copy=True)
	# Idiom fix: compare to None with `is`, not `==`.
	if scale_min is None:
		scale_min = imageData.min()
	if scale_max is None:
		scale_max = imageData.max()
	imageData = imageData.clip(min=scale_min, max=scale_max)
	imageData = imageData - scale_min
	# Defensive: after the clip/shift above no negatives remain; kept so a
	# pathological scale_min > scale_max input still cannot reach sqrt().
	indices = numpy.where(imageData < 0)
	imageData[indices] = 0.0
	imageData = numpy.sqrt(imageData)
	imageData = imageData / math.sqrt(scale_max - scale_min)
	return imageData
def log(inputArray, scale_min=None, scale_max=None):
	"""Log10 rescale image data.

	@type inputArray: numpy array
	@param inputArray: image data array (never modified; a copy is scaled)
	@type scale_min: float
	@param scale_min: minimum data value (defaults to the array minimum)
	@type scale_max: float
	@param scale_max: maximum data value (defaults to the array maximum)
	@rtype: numpy array
	@return: image data array
	"""
	imageData = numpy.array(inputArray, copy=True)
	# Idiom fix: compare to None with `is`, not `==`.
	if scale_min is None:
		scale_min = imageData.min()
	if scale_max is None:
		scale_max = imageData.max()
	factor = math.log10(scale_max - scale_min)
	indices0 = numpy.where(imageData < scale_min)
	indices1 = numpy.where((imageData >= scale_min) & (imageData <= scale_max))
	indices2 = numpy.where(imageData > scale_max)
	imageData[indices0] = 0.0
	imageData[indices2] = 1.0
	# NOTE(review): in-range values are log10'd WITHOUT shifting by
	# scale_min, so the output is not guaranteed to stay in [0, 1], and
	# scale_min <= 0 makes log10 blow up on zero/negative pixels. Kept
	# as-is to preserve existing output values — confirm intent upstream.
	imageData[indices1] = numpy.log10(imageData[indices1])/factor
	return imageData
def asinh(inputArray, scale_min=None, scale_max=None, non_linear=2.0):
	"""Inverse-hyperbolic-sine rescale image data into [0, 1].

	@type inputArray: numpy array
	@param inputArray: image data array (never modified; a copy is scaled)
	@type scale_min: float
	@param scale_min: minimum data value (defaults to the array minimum)
	@type scale_max: float
	@param scale_max: maximum data value (defaults to the array maximum)
	@type non_linear: float
	@param non_linear: non-linearity factor (softening scale)
	@rtype: numpy array
	@return: image data array
	"""
	imageData = numpy.array(inputArray, copy=True)
	# Idiom fix: compare to None with `is`, not `==`.
	if scale_min is None:
		scale_min = imageData.min()
	if scale_max is None:
		scale_max = imageData.max()
	factor = numpy.arcsinh((scale_max - scale_min)/non_linear)
	indices0 = numpy.where(imageData < scale_min)
	indices1 = numpy.where((imageData >= scale_min) & (imageData <= scale_max))
	indices2 = numpy.where(imageData > scale_max)
	# Below-range pixels map to 0, above-range to 1; in-range pixels are
	# shifted to scale_min then asinh-compressed and normalised by factor.
	imageData[indices0] = 0.0
	imageData[indices2] = 1.0
	imageData[indices1] = numpy.arcsinh((imageData[indices1] - scale_min)/non_linear)/factor
	return imageData
|
#!/bin/bash
# Skip the current test when k8s testing is explicitly disabled.
test_skip_if_k8s_disabled() {
	# Idiom fix: [[ -n ... ]] instead of the double negative [[ ! -z ... ]].
	if [[ -n "$WERF_TEST_K8S_DISABLED" ]]; then
		skip "k8s test was disabled by \$WERF_TEST_K8S_DISABLED"
	fi
}
# Skip the current test unless all three docker-registry credentials for
# k8s testing are set in the environment.
test_requires_k8s_docker_registry() {
	if [[ -z "$WERF_TEST_K8S_DOCKER_REGISTRY" ]] || [[ -z "$WERF_TEST_K8S_DOCKER_REGISTRY_USERNAME" ]] || [[ -z "$WERF_TEST_K8S_DOCKER_REGISTRY_PASSWORD" ]]; then
		skip "\$WERF_TEST_K8S_DOCKER_REGISTRY, \$WERF_TEST_K8S_DOCKER_REGISTRY_USERNAME and \$WERF_TEST_K8S_DOCKER_REGISTRY_PASSWORD are required"
	fi
}
# Derive a unique per-run project name; $1 optionally overrides the
# default "project" prefix.
test_k8s_init_project_name() {
	export WERF_TEST_K8S_PROJECT_NAME=${1:-project}-$(generate_random_string)
}
|
import test from 'ava'
import { isObject } from '../'

// isObject should accept only plain-object values: string/number
// primitives, arrays and functions are all rejected.
test('is value an `Object` primitive', assert => {
  assert.false(isObject('foo'))
  assert.false(isObject(0))
  assert.false(isObject([1, 2, 3]))
  assert.false(isObject(function () {}))
  assert.true(isObject({ foo: 'bar' }))
  assert.true(isObject({}))
})
|
#!/bin/bash
# COPY THIS SCRIPT AND SET ALL OF THE VARIABLES
###############
# PRETRAINING #
###############
# shortname: the short name used to reference the dataset
export shortname="chest_xray_kids"
# the RGB pixel means/std of the dataset,
# DON'T just use the default if you don't know it!
# Compute using: ./compute-dataset-pixel-mean-std.py
export pixel_means="0.4815, 0.4815, 0.4815"
export pixel_stds="0.2377, 0.2377, 0.2377"
# how many iterations of pretraining? (each iter is one minibatch of 256)
# with basetraining
export bt_iters="50,500,5000,50000"
# without basetraining
export no_bt_iters="5000,50000,100000"
# We pretrain on both training and validation sets
# BUT NOT TEST!!!
# This file should have a list of all images for pretraining
# i.e. the train and val set
# it should NOT have labels (you'll get an error if it does)
export train_val_combined_list_path="data/chest_xray_kids/meta/train-val.txt"
# the list of images just for training (used for the linear evaluation)
# NEEDS labels
export train_list_path="data/chest_xray_kids/meta/train-labeled.txt"
# the list of images just for validation (used for val on the linear evaluation)
# NEEDS labels
export val_list_path="data/chest_xray_kids/meta/val-labeled.txt"
# the list of images just for testing (used for test on the linear evaluation)
# NEEDS labels
export test_list_path="data/chest_xray_kids/meta/test-labeled.txt"
# the base data path that the image lists reference
export base_data_path="data/chest_xray_kids"
#
# OPTIONAL - only change if you know what you're doing ;)
#
# 224 is a standard, but may not be appropriate for your data
export crop_size="224"
# basetrain weights, update this array with experiments you're running
export basetrain_weights=(
# standard moco basetrain
"data/basetrain_chkpts/moco_v2_800ep.pth"
# supervised basetrain
"data/basetrain_chkpts/imagenet_r50_supervised.pth"
# no basetrain
""
)
########
# Eval #
########
# COMMENT OUT THIS SECTION IF YOU DO NOT WANT TO GEN EVAL CONFIGS
# assuming the linear/semi-sup is a image classification
# (need to create your own config for other eval problems)
# NOTE(review): verify this matches the dataset's actual class count
export num_classes="10" # eg 10 for cifar-10
# resize images to this size before taking a center crop of crop_size (defined above)
export test_precrop_size="256"
## NOTE: only change these if you know what you're doing
export linear_iters="5000"
export linear_lr_drop_iters="1651,3333" # when we drop the LR by 10 (1/3 and 2/3)
export linear_lr='30' # linear layer learning rate
# number of times to run the linear eval
export linear_reruns=3
############
# Finetune #
############
export ft_num_train_labels="1000,all" # do 1000 label and all train finetune evaluation
# TODO(cjrd) add number of finetune reruns (use different datasets of 100 and 1k labels)
# learning rates
export ft_lrs="0.01,0.001"
# finetuning amount when done by epochs
export ft_by_epoch="90"
export ft_by_epoch_lr_steps="30,60"
# finetuning amount when done by iters
export ft_by_iter="2500"
export ft_by_iter_lr_steps="833,1667"
##########
# Extras #
##########
# you may need to reduce this number if your cpu load is too high
export workers_per_gpu=2
|
using System.Data.Entity.Migrations;
/// <summary>
/// EF6 code-first migration that adds the dbo.Addresses table.
/// NOTE(review): the class name "AddedAdresses" misspells "Addresses";
/// it is left as-is because renaming an already-applied migration
/// desynchronizes it from the __MigrationHistory table.
/// </summary>
public partial class AddedAdresses : DbMigration
{
    /// <summary>
    /// Creates dbo.Addresses: identity PK, required Street/City/ZipCode
    /// columns, and an indexed FK to dbo.Users with cascade delete.
    /// </summary>
    public override void Up()
    {
        CreateTable(
            "dbo.Addresses",
            c => new
                {
                    Id = c.Int(nullable: false, identity: true),
                    UserId = c.Int(nullable: false),
                    Street = c.String(nullable: false, maxLength: 100),
                    City = c.String(nullable: false, maxLength: 100),
                    ZipCode = c.String(nullable: false, maxLength: 10),
                })
            .PrimaryKey(t => t.Id)
            .ForeignKey("dbo.Users", t => t.UserId, cascadeDelete: true)
            .Index(t => t.UserId);
    }

    /// <summary>
    /// Reverts the migration: drops the FK, then the index, then the table.
    /// </summary>
    public override void Down()
    {
        DropForeignKey("dbo.Addresses", "UserId", "dbo.Users");
        DropIndex("dbo.Addresses", new[] { "UserId" });
        DropTable("dbo.Addresses");
    }
}
/*
https://leetcode.com/problems/maximum-subarray/
53. Maximum Subarray (Easy)
*/
class Solution {
    /**
     * Returns the largest sum over all contiguous, non-empty subarrays of
     * {@code nums} (which is assumed non-empty, per the problem statement).
     *
     * Uses Kadane's algorithm: at each index keep the best sum of a subarray
     * ending there, which is either the element alone or the element appended
     * to the previous best. This replaces the original version's O(n) scratch
     * array (sized n+1 with the last slot never used) with O(1) extra space,
     * and drops the redundant n == 1 special case.
     *
     * @param nums non-empty array of integers
     * @return maximum subarray sum
     */
    public int maxSubArray(int[] nums) {
        int best = nums[0];     // best sum seen over any subarray so far
        int current = nums[0];  // best sum of a subarray ending at index i
        for (int i = 1; i < nums.length; i++) {
            // Either extend the running subarray or start fresh at nums[i].
            current = Math.max(nums[i], current + nums[i]);
            best = Math.max(best, current);
        }
        return best;
    }
}
<gh_stars>1-10
import {
ChromosomePromise,
EvolveOptions,
Logger,
Population,
Problem,
ReinsertionFnOptions,
} from "../types.ts";
import * as strategies from "./reinsertion_strategies.ts";
export const reinsertionStrategies = Object.keys(strategies);
import { evaluate } from "./evaluate.ts";
import { select } from "./select.ts";
import { crossover } from "./crossover.ts";
import { mutation } from "./mutation.ts";
/**
 * Runs one generation of the genetic algorithm — evaluate, select, crossover,
 * mutate, reinsert — then recurses until `problem.shouldTerminate` returns
 * true. Resolves with the best chromosome of the final generation.
 *
 * @param population     current generation of chromosomes
 * @param problem        supplies `fitnessFn` and `shouldTerminate`
 * @param generation     generation counter, incremented on each recursion
 * @param lastMaxFitness best fitness of the previous generation
 * @param temperature    annealing-style temperature, decayed by `coolingRate`
 * @param options        evolution tuning knobs (selection, crossover, mutation,
 *                       reinsertion)
 * @param logUpdate      optional logger; receives one JSON line per generation
 */
export const evolve = async <T>(
  population: Population<T>,
  problem: Problem<T>,
  generation = 0,
  lastMaxFitness = 0,
  temperature = 0,
  options: EvolveOptions<T>,
  logUpdate: Logger | null = null,
): ChromosomePromise<T> => {
  const {
    fitnessSortDirection = "DESC",
    coolingRate = 0.8,
    selectionType,
    selectionRate,
  } = options;
  // Score and sort the population so the best chromosome sits at index 0.
  population = await evaluate(
    population,
    problem.fitnessFn,
    { fitnessSortDirection },
  );
  const best = population[0];
  // const fitnessScore = await problem.fitnessFn(best);
  // Sign flip for descending sorts; feeds the temperature update below.
  // NOTE(review): for "DESC" this ADDS lastMaxFitness to the delta rather
  // than subtracting it — confirm this asymmetry is intentional.
  const fitnessSign = fitnessSortDirection === "ASC" ? 1 : -1;
  temperature = coolingRate *
    (temperature + (best.fitness - (lastMaxFitness * fitnessSign)));
  if (logUpdate) {
    await logUpdate(
      `{"population":${population.length},"generation":${generation},"temperature":${
        temperature.toFixed(2)
      },"fitness":${best.fitness.toFixed(4)}}`,
    );
  }
  if (problem.shouldTerminate(population, generation, temperature)) {
    return best;
  }
  // Partition into breeding parents and leftovers kept aside for reinsertion.
  const { parents, leftovers } = select(
    population,
    { selectionType, selectionRate },
    temperature,
  );
  // Offspring pool is crossover children plus mutants of the full population.
  const children = crossover(parents, options);
  const mutants = mutation(population, options);
  const offsprings = children.concat(mutants);
  // Recurse with the reinserted population as the next generation.
  return evolve(
    reinsertion<T>(parents.flat(), offsprings, leftovers, options),
    problem,
    generation + 1,
    best.fitness,
    temperature,
    options,
    logUpdate,
  );
};
/**
 * Builds the next generation by handing parents, offspring, and leftovers to
 * the configured reinsertion strategy, then capping the result at the
 * population-size limit (static `maxPopulation`, or dynamic via
 * `maxPopulationFn` when provided).
 *
 * @throws Error when the resolved `reinsertionFn` is not callable or
 *         `reinsertionType` is not one of the bundled strategy names.
 */
function reinsertion<T>(
  parents: Population<T>,
  offsprings: Population<T>,
  leftovers: Population<T>,
  options: EvolveOptions<T>,
) {
  const {
    populationSize = 100,
    reinsertionType = "pure",
    survivalRate = 0.2,
    survivorCount = 0,
    reinsertionFn = strategies[reinsertionType],
    maxPopulationFn,
  } = options;
  // Separate destructuring: maxPopulation's default depends on populationSize
  // resolved above.
  const {
    maxPopulation = populationSize,
  } = options;
  const reinsertionOptions: ReinsertionFnOptions<T> = {
    survivalRate,
    survivorCount,
    maxPopulation,
  };
  // NOTE(review): a caller-supplied custom `reinsertionFn` is rejected here
  // unless `reinsertionType` also names a bundled strategy — confirm this
  // coupling is intentional.
  if (
    typeof reinsertionFn !== "function" ||
    !reinsertionStrategies.includes(reinsertionType)
  ) {
    throw new Error(
      `options.reinsertionType ${reinsertionType} is not a valid reinsertion function. Valid function are: ${
        reinsertionStrategies.join(", ")
      }`,
    );
  }
  // 0 means "no cap"; non-positive maxPopulation disables the static limit.
  let populationSizeLimit = maxPopulation && maxPopulation > 0
    ? maxPopulation
    : 0;
  const newPopulation = reinsertionFn(
    parents,
    offsprings,
    leftovers,
    reinsertionOptions,
  );
  // A dynamic limit function, when given, overrides the static limit.
  if (typeof maxPopulationFn === "function") {
    populationSizeLimit = maxPopulationFn(newPopulation);
  }
  if (populationSizeLimit > 0) {
    return newPopulation.slice(0, populationSizeLimit);
  }
  return newPopulation;
}
|
package com.meylism.provider;
import com.dslplatform.json.DslJson;
import com.dslplatform.json.runtime.Settings;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
/**
 * Supplies pre-configured JSON parser/serializer instances (Jackson,
 * Jackson + Afterburner, Gson, dsl-json) behind the {@code JsonProvider}
 * interface. Each instance is created once and reused by the accessors, so
 * construction cost stays out of the callers' hot paths.
 */
public class StringsJsonProvider implements JsonProvider {
  private final ObjectMapper jackson = new ObjectMapper();
  private final ObjectMapper jacksonAfterburner = new ObjectMapper().
      registerModule(new AfterburnerModule());
  // final for consistency with the sibling fields (was the only mutable one).
  private final JsonFactory jacksonFactory = new JsonFactory();
  private final Gson gson = new GsonBuilder().create();
  // dsl-json with runtime reflection plus ServiceLoader-registered converters.
  private final DslJson<Object> dslJson = new DslJson<>(
      Settings.withRuntime().includeServiceLoader());
  // dsl-json with runtime reflection only (no ServiceLoader lookup).
  private final DslJson<Object> dsljson_reflection = new DslJson<>(Settings.withRuntime());

  public StringsJsonProvider() {}

  /** @return the shared plain Jackson mapper */
  @Override
  public ObjectMapper jackson() {
    return jackson;
  }

  /** @return the shared low-level Jackson streaming factory */
  @Override
  public JsonFactory jacksonFactory() {
    return jacksonFactory;
  }

  /** @return the shared Jackson mapper with the Afterburner module */
  @Override
  public ObjectMapper jacksonAfterburner() {
    return jacksonAfterburner;
  }

  /** @return the shared Gson instance */
  @Override
  public Gson gson() {
    return gson;
  }

  /** @return the shared dsl-json instance (runtime + ServiceLoader) */
  @Override
  public DslJson<Object> dsljson() {
    return dslJson;
  }

  /** @return the shared dsl-json instance (runtime reflection only) */
  @Override
  public DslJson<Object> dsljson_reflection() {
    return dsljson_reflection;
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.