text stringlengths 1 1.05M |
|---|
# Install and load the libraries needed for preprocessing and classification.
# NOTE(review): install.packages() on every run is slow; consider guarding it.
install.packages('e1071')
library('e1071')
library('tm')        # VCorpus / tm_map / DocumentTermMatrix
library('magrittr')  # the %>% pipe used below

# Read in emails
emails <- readxl::read_excel("emails.xlsx")

# Create vector of classification labels
labels <- as.factor(c(rep('spam', length(emails$spam)), rep('not_spam', length(emails$not_spam))))

# Combine spam and not spam emails into corpus
corpus <- c(emails$spam, emails$not_spam)

# Shared preprocessing pipeline so training and prediction apply the exact
# same transformations. Corpus() needs a Source object, hence VectorSource();
# tolower is not a tm transformation, hence content_transformer().
preprocess_corpus <- function(texts) {
  VCorpus(VectorSource(texts)) %>%
    tm_map(content_transformer(tolower)) %>%
    tm_map(removeWords, c("subject")) %>%
    tm_map(removePunctuation) %>%
    tm_map(removeNumbers) %>%
    tm_map(stemDocument, language = 'english') %>%
    tm_map(removeWords, stopwords('english'))
}

# Create a document-term matrix for the training corpus.
train_dtm <- DocumentTermMatrix(preprocess_corpus(corpus))
sparse_matrix <- as.matrix(train_dtm)

# Perform SVM classification
clf <- svm(x = sparse_matrix,
           y = labels,
           kernel = 'linear',
           cost = 10)

# Predict class for a new email. It must go through the same preprocessing and
# be projected onto the training vocabulary (dictionary = training terms);
# predicting directly on the raw string (as the original did) cannot work.
new_email <- "This is a great offer for you"
new_dtm <- DocumentTermMatrix(preprocess_corpus(new_email),
                              control = list(dictionary = Terms(train_dtm)))
test_email_vector <- predict(clf, as.matrix(new_dtm))
print(test_email_vector)
package io.ph.bot.commands.fun;
import io.ph.bot.commands.Command;
import io.ph.bot.commands.CommandCategory;
import io.ph.bot.commands.CommandData;
import io.ph.bot.model.Permission;
import io.ph.util.Util;
import net.dv8tion.jda.core.entities.Message;
/**
 * Make the bot say something
 * @author Paul
 *
 */
@CommandData (
		defaultSyntax = "say",
		aliases = {"echo"},
		category = CommandCategory.FUN,
		permission = Permission.NONE,
		description = "Have the bot say something",
		example = "Hi, it's me!"
		)
public class Say extends Command {

	/**
	 * Echoes the command contents back to the channel, then deletes the
	 * invoking message once the echo has been sent.
	 */
	@Override
	public void executeCommand(Message msg) {
		// Strip '<', '@' and '>' so the echoed text cannot contain mention
		// markup. The original class "[<|@|>]" also stripped every literal
		// '|' because '|' has no alternation meaning inside [...].
		msg.getChannel().sendMessage(Util.getCommandContents(msg).replaceAll("[<@>]", "")).queue(success -> {
			msg.delete().queue();
		});
	}
}
|
<filename>src/js/Templates/Shared/ScrollableMessage/ScrollableMessageHTMLBody.js
import React from 'react'
export default class ScrollableMessageHTMLBody extends React.Component {
render() {
return (
<div className="scrollable-message-text">
<p>{this.props.scrollableMessageBody}</p>
</div>
)
}
} |
package com.huatuo.citylist;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.text.TextUtils;
import com.huatuo.util.CommonUtil;
import com.huatuo.util.JsonUtil;
public class GetCityListUtil {

	private static GetCityListUtil instance;

	/**
	 * Returns the process-wide singleton, using double-checked locking so the
	 * instance is created at most once even under concurrent access.
	 */
	public static GetCityListUtil getInstance() {
		if (instance == null) {
			synchronized (GetCityListUtil.class) {
				if (instance == null) {
					instance = new GetCityListUtil();
				}
			}
		}
		return instance;
	}

	/**
	 * Parses the server's city-list JSON into a flat list of {@link Content}
	 * entries, one per city, each tagged with its group's initial character.
	 *
	 * @param cityListStr raw JSON; expected shape (from the fields read below):
	 *                    {"cityList":[{"initialCharacter":"B",
	 *                      "groupList":[{"name":..,"code":..,"servingStatus":..}]}]}
	 * @return the flattened city list, or null when the input is empty,
	 *         unparseable, or contains no cities (preserves original contract)
	 */
	public List<Content> addCityToList(String cityListStr) {
		// Parse into a local variable instead of an instance field: the class
		// is a shared singleton, and a field would let concurrent calls
		// clobber each other's parse results.
		ArrayList<JSONObject> cityList = null;
		if (!TextUtils.isEmpty(cityListStr)) {
			try {
				// new JSONObject(...) either returns non-null or throws, so no
				// null check is needed afterwards.
				JSONObject jsonObject = new JSONObject(cityListStr);
				cityList = JsonUtil.jsonArray2List(jsonObject.optJSONArray("cityList"));
			} catch (JSONException e) {
				e.printStackTrace();
			}
		}
		List<Content> list = new ArrayList<Content>();
		if (!CommonUtil.emptyListToString3(cityList)) {
			for (int j = 0; j < cityList.size(); j++) {
				JSONObject group = cityList.get(j);
				String initialCharacter = group.optString("initialCharacter", "");
				ArrayList<JSONObject> areaList = JsonUtil.jsonArray2List(group.optJSONArray("groupList"));
				if (!CommonUtil.emptyListToString3(areaList)) {
					for (int k = 0; k < areaList.size(); k++) {
						JSONObject city = areaList.get(k);
						list.add(new Content(initialCharacter,
								city.optString("name", ""),
								city.optString("code", ""),
								city.optString("servingStatus", "")));
					}
				}
			}
			return list;
		}
		return null;
	}
}
|
import React, { useState, useEffect } from 'react';
// Return the current wall-clock time as a locale-formatted string.
function getCurrentTime() {
    const now = new Date();
    return now.toLocaleTimeString();
}
function Clock() {
const [time, setTime] = useState(getCurrentTime());
const updateTime = () => {
setTime(getCurrentTime());
};
useEffect(() => {
setInterval(updateTime, 1000);
});
return (
<div>
<h1>{time}</h1>
</div>
);
}
export default Clock; |
<filename>extern/glow/src/glow/common/string_map.hh
#pragma once
#include <map>
#include <string>
#include <string_view>
namespace glow::util
{
namespace detail
{
// Transparent comparator: is_transparent enables heterogeneous lookup, so
// find() accepts anything that compares with std::string (e.g. string_view).
struct void_less
{
    using is_transparent = void;

    template <class T, class U>
    constexpr auto operator()(T&& lhs, U&& rhs) const noexcept
    {
        return std::forward<T>(lhs) < std::forward<U>(rhs);
    }
};
}

// A string to T map that supports heterogenous lookup via std::string_view,
// and only creates a std::string key if none exists yet
template <class T>
struct string_map : std::map<std::string, T, detail::void_less>
{
    using super_t = std::map<std::string, T, detail::void_less>;
    using super_t::operator[];

    // Look up without allocating; materialize a std::string key only on insert.
    T& operator[](std::string_view key)
    {
        auto const it = this->find(key);
        return it != this->end() ? it->second : super_t::operator[](std::string(key));
    }

    T& operator[](char const* key)
    {
        auto const it = this->find(key);
        return it != this->end() ? it->second : super_t::operator[](std::string(key));
    }
};
}
|
<reponame>abhineet123/river_ice_segmentation
# Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Converts PASCAL VOC 2012 data to TFRecord file format with Example protos.
PASCAL VOC 2012 dataset is expected to have the following directory structure:
+ pascal_voc_seg
- build_data.py
- build_voc2012_data.py (current working directory).
+ VOCdevkit
+ VOC2012
+ JPEGImages
+ SegmentationClass
+ ImageSets
+ Segmentation
+ tfrecord
Image folder:
./VOCdevkit/VOC2012/JPEGImages
Semantic segmentation annotations:
./VOCdevkit/VOC2012/SegmentationClass
list folder:
./VOCdevkit/VOC2012/ImageSets/Segmentation
This script converts data into sharded data files and save at tfrecord folder.
The Example proto contains the following fields:
image/encoded: encoded image content.
image/filename: image filename.
image/format: image file format.
image/height: image height.
image/width: image width.
image/channels: image channels.
image/segmentation/class/encoded: encoded semantic segmentation content.
image/segmentation/class/format: semantic segmentation file format.
"""
import glob
import math
import os.path
import sys
import build_data
import tensorflow as tf
# from scipy.misc import imread, imsave
from scipy import misc
import numpy as np
# Command-line flags controlling dataset location, file formats and output.
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('db_root_dir',
                           '/data/617/images/',
                           'Folder containing the 617 datasets')
tf.app.flags.DEFINE_string('db_dir',
                           'training_256_256_25_100_rot_15_90_flip',
                           'folder containing the dataset files')
tf.app.flags.DEFINE_string('image_format',
                           'png',
                           'image format')
tf.app.flags.DEFINE_string('label_format',
                           'png',
                           'label format')
tf.app.flags.DEFINE_string('output_dir',
                           '',
                           'Path to save converted SSTable of TensorFlow examples.')
tf.app.flags.DEFINE_integer('create_dummy_labels',
                            0,
                            'Flag to specify that no labels are available and so '
                            'dummy ones should be created.')
tf.app.flags.DEFINE_integer('selective_loss',
                            0,
                            'Use only the specified number of pixels per class per image')
# Number of TFRecord shards each split is written across.
_NUM_SHARDS = 4
def _convert_dataset(db_name):
  """Converts the specified dataset split to TFRecord format.

  Images and labels are paired by sorted filename order and cross-checked by
  basename before being written into _NUM_SHARDS shard files.

  Args:
    db_name: The dataset split (e.g., train, test).

  Raises:
    SystemError: If no images/labels are found, or their counts or basenames
      do not match.
    RuntimeError: If loaded image and label have different shape.
  """
  output_dir = os.path.join(FLAGS.db_root_dir, FLAGS.output_dir, 'tfrecord')
  sys.stdout.write('Processing {}\n\n'.format(db_name))
  images = os.path.join(FLAGS.db_root_dir, db_name, 'images', '*.{}'.format(FLAGS.image_format))
  print('Reading images from: {}'.format(images))
  # glob.glob never returns None; it returns an arbitrarily-ordered (possibly
  # empty) list. Sort so images and labels line up pairwise, and treat an
  # empty result as an error (the original None check could never fire).
  image_filenames = sorted(glob.glob(images))
  if not image_filenames:
    raise SystemError('No images found at {}'.format(images))
  if FLAGS.create_dummy_labels:
    labels_path = os.path.join(FLAGS.db_root_dir, db_name, 'labels')
    if not os.path.isdir(labels_path):
      os.makedirs(labels_path)
    print('Creating dummy labels at: {}'.format(labels_path))
    # One all-zero label per image, same basename, label_format extension.
    for image_filename in image_filenames:
      image = misc.imread(image_filename)
      height, width, _ = image.shape
      dummy_label = np.zeros((height, width), dtype=np.uint8)
      out_fname = os.path.splitext(os.path.basename(image_filename))[0] + '.{}'.format(FLAGS.label_format)
      misc.imsave(os.path.join(labels_path, out_fname), dummy_label)
    print('Done')
  labels = os.path.join(FLAGS.db_root_dir, db_name, 'labels', '*.{}'.format(FLAGS.label_format))
  print('Reading labels from: {}'.format(labels))
  seg_filenames = sorted(glob.glob(labels))
  if not seg_filenames:
    raise SystemError('No labels found at {}'.format(labels))
  num_images = len(image_filenames)
  num_labels = len(seg_filenames)
  if num_images != num_labels:
    raise SystemError('Mismatch between image and label file counts: {}, {}'.format(
      num_images, num_labels))
  num_per_shard = int(math.ceil(num_images / float(_NUM_SHARDS)))
  image_reader = build_data.ImageReader('png', channels=3)
  label_reader = build_data.ImageReader('png', channels=3)
  if not os.path.isdir(output_dir):
    os.makedirs(output_dir)
  print('Writing tfrecords to: {}'.format(output_dir))
  for shard_id in range(_NUM_SHARDS):
    output_filename = os.path.join(
      output_dir,
      '%s-%05d-of-%05d.tfrecord' % (db_name, shard_id, _NUM_SHARDS))
    with tf.python_io.TFRecordWriter(output_filename) as tfrecord_writer:
      start_idx = shard_id * num_per_shard
      end_idx = min((shard_id + 1) * num_per_shard, num_images)
      for i in range(start_idx, end_idx):
        sys.stdout.write('\r>> Converting image %d/%d shard %d' % (
          i + 1, num_images, shard_id))
        sys.stdout.flush()
        image_filename = image_filenames[i]
        f1 = os.path.basename(image_filename)[:-4]
        seg_filename = seg_filenames[i]
        # Fix: the original computed f2 from image_filename, so the basename
        # cross-check below could never detect a mismatch.
        f2 = os.path.basename(seg_filename)[:-4]
        if f1 != f2:
          raise SystemError('Mismatch between image and label filenames: {}, {}'.format(
            f1, f2))
        # Read the image in binary mode ('r' breaks on Python 3 for PNG bytes).
        image_data = tf.gfile.FastGFile(image_filename, 'rb').read()
        height, width = image_reader.read_image_dims(image_data)
        # Read the semantic segmentation annotation.
        seg_data = tf.gfile.FastGFile(seg_filename, 'rb').read()
        seg_height, seg_width = label_reader.read_image_dims(seg_data)
        if height != seg_height or width != seg_width:
          raise RuntimeError('Shape mismatched between image and label.')
        # Convert to tf example.
        example = build_data.image_seg_to_tfexample(
          image_data, image_filename, height, width, seg_data)
        tfrecord_writer.write(example.SerializeToString())
    sys.stdout.write('\n')
    sys.stdout.flush()
def main(unused_argv):
  # Convert the single split named by --db_dir.
  _convert_dataset(FLAGS.db_dir)

if __name__ == '__main__':
  tf.app.run()
|
import os
import logging
import configparser
import sys
import smtplib
import csv
import time
from pycoingecko import CoinGeckoAPI
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
# ------------------------------------------------------------------
# Logging Setup
# ------------------------------------------------------------------
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(lineno)d - %(message)s',
                              datefmt='%d-%m-%y %H:%M:%S')
# Use a forward slash so the log lands inside settings/ on every OS; the old
# "settings\\logs.log" created a file literally named 'settings\logs.log'
# on Linux (the rest of the file already uses forward slashes).
file_handler = logging.FileHandler("settings/logs.log", encoding='utf8')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
# ------------------------------------------------------------------
# Config Setup
config = configparser.RawConfigParser()
configFilePath = r"settings/config.txt"
config.read(configFilePath, encoding="utf-8")
# Global Variables
geckoAPI = CoinGeckoAPI()
def get_coins():
    """
    Gets user inputted coins and adds them to a csv file.
    Only gets run on the first run of the program, when settings/coins.csv
    does not exist yet.

    Asks the user for comma-separated coin names, validates each one against
    CoinGecko (exiting with a message on the first unknown coin), then writes
    the validated names as a single CSV row to settings/coins.csv.
    """
    print("\nPlease enter the FULL NAME of the coins you want to get the price of (if getting multiple, seperate them with commas):")
    coins = input().split(",")
    for i in range(len(coins)):
        coins[i] = coins[i].strip()
        # An empty response dict means CoinGecko does not know this id.
        response = geckoAPI.get_price(ids=coins[i], vs_currencies=config.get("CONFIG", "VS_CURRENCY"))
        if len(response) == 0:
            print(coins[i] + " doesn't exist on CoinGecko, please try again!")
            sys.exit()
    # newline='' stops the csv module writing blank rows on Windows.
    with open("settings/coins.csv", "w", newline='') as file:
        csvwriter = csv.writer(file)
        csvwriter.writerow(coins)
def get_crypto_price():
    """
    Reads the coins and returns the price and 24hr change data back to the main function.

    Reads the coin names from settings/coins.csv, queries CoinGecko for each
    coin's price (in the configured vs-currency) plus its 24hr change, and
    collects the results.

    Returns
    -------
    list of dict
        One {"coin", "price", "change"} dict per coin. On any logged error an
        empty (or partial) list is returned instead of None, so callers can
        always iterate the result.

    Raises
    ------
    (all caught and logged)
    TypeError
        Raised when syntax is wrong in code
    KeyError
        Raised when the coin name in the csv is different than the name returned from CoinGecko; happens normally with coins with a space ie. Shiba Inu
    Exception
        Just there to catch any extra exceptions that I may have missed
    """
    crypto_list = []
    try:
        with open("settings/coins.csv", "r") as file:
            reader = csv.reader(file)
            coins = next(reader)
        for crypto in coins:
            response = geckoAPI.get_price(ids=crypto, vs_currencies=config.get("CONFIG", "VS_CURRENCY"), include_24hr_change=True,)
            listings = {
                "coin": crypto,
                "price": "{:,}".format(response[crypto.replace(" ", "")][config.get("CONFIG", "VS_CURRENCY").lower()]),
                "change": round(response[crypto.replace(" ", "")][config.get("CONFIG", "VS_CURRENCY").lower() + "_24h_change"], 2)
            }
            crypto_list.append(listings)
        return crypto_list
    except TypeError as e:
        # Lazy %s formatting: the original passed e as a positional logging
        # arg with no placeholder, so the exception text was dropped.
        logger.error("Type Error: %s", e)
    except KeyError as e:
        logger.error("Key Error for: %s", e)
    except Exception as e:
        # Fix: was logger.erorr(...) — an AttributeError that masked the
        # original exception entirely.
        logger.error("General Error: %s", e)
    return crypto_list
def send_email(listings):
    """
    Sends an email to the user with the data for each coin.

    Sets up the SMTP connection to the user's email with the details given in
    config.txt, builds the message body from the listings, and sends it.

    Parameters
    -----------
    listings : list, required
        The data for each coin, their price and 24hr change

    Raises
    ------
    Exception
        Raised when something goes wrong with the email being sent. Output gets sent to the logger
    """
    # Initialize up-front: if SMTP() itself raises, the original code hit an
    # UnboundLocalError in the finally block instead of logging the real error.
    smtp = None
    try:
        email_content = "Here is your crypto updates:"
        for i in range(len(listings)):
            email_content += "\n" + listings[i]["coin"].upper() + " - >Price: " + str(listings[i]["price"]) + config.get("CONFIG", "VS_CURRENCY") + "-> Change: " + str(listings[i]["change"]) + "%"
        smtp = smtplib.SMTP(config.get('CONFIG', 'SMTP_SERVER'), int(config.get('CONFIG', 'SMTP_PORT')))
        smtp.ehlo()
        smtp.starttls()
        smtp.login(config.get('CONFIG', 'SMTP_SENDING_EMAIL'), config.get('CONFIG', 'SMTP_PASSWORD'))
        message = MIMEMultipart()
        message["Subject"] = "Crypto Price Updates"
        message.attach(MIMEText(email_content))
        smtp.sendmail(
            from_addr=config.get('CONFIG', 'SMTP_SENDING_EMAIL'),
            to_addrs=config.get('CONFIG', 'SMTP_RECEIVING_EMAIL'),
            msg=message.as_string()
        )
        logger.info("Email successfully sent to " + config.get('CONFIG', 'SMTP_RECEIVING_EMAIL'))
    except Exception as e:
        logger.error(e)
    finally:
        # Only quit a connection that was actually opened.
        if smtp is not None:
            smtp.quit()
def main():
    """Poll prices and email them forever, pausing TIME_INTERVAL seconds between rounds."""
    # On the very first run there is no saved coin list yet: ask for one.
    if not os.path.isfile("settings/coins.csv"):
        get_coins()
    # infinite loop
    while True:
        pricelistings = get_crypto_price()
        send_email(pricelistings)
        # config values are strings; time.sleep() needs a number (the original
        # raised TypeError here on the first iteration).
        time.sleep(float(config.get("CONFIG", "TIME_INTERVAL")))

if __name__ == '__main__':
    main()
|
# Listing: docker ps as a compact table — keep the header line, sort rows by name.
alias dps='docker ps --format "table {{.Names}}\t{{.Ports}}\t{{.Status}}\t{{.Image}}" | (read -r; printf "%s\n" "$REPLY"; sort -k 1 )'
alias dpsa='docker ps -a --format "table {{.Names}}\t{{.Ports}}\t{{.Status}}\t{{.Image}}" | (read -r; printf "%s\n" "$REPLY"; sort -k 1 )'
# Logs (dlf follows).
alias dlf='docker logs -f'
alias dl='docker logs'
# Prune unused resources (system / volumes / networks / images).
alias dspr='docker system prune'
alias dvpr='docker volume prune'
alias dnpr='docker network prune'
alias dipr='docker image prune'
# Remove containers / images.
alias drm='docker rm'
alias drmi='docker rmi'
# Run variants: detached, interactive, auto-remove, and combinations.
alias drund='docker run -d'
alias drunit='docker run -it'
alias drunrm='docker run --rm'
alias drunrmit='docker run --rm -it'
alias drunrmd='docker run --rm -d'
# Exec into containers; build with a tag.
alias dex='docker exec'
alias dexit='docker exec -it'
alias dbu='docker build -t'
|
<gh_stars>0
package com.bullhornsdk.data.model.response.list.customobject;
import com.bullhornsdk.data.model.entity.core.customobject.ClientCorporationCustomObjectInstance15;
import com.bullhornsdk.data.model.response.list.StandardListWrapper;
/**
 * {@link StandardListWrapper} specialization for list responses of
 * {@link ClientCorporationCustomObjectInstance15} entities.
 */
public class ClientCorporationCustomObjectInstance15ListWrapper extends StandardListWrapper<ClientCorporationCustomObjectInstance15> {
}
|
import re
from collections import Counter
# Words to exclude from the frequency count.
stop_words = {"the", "and", "of", "in", "to", "you", "may", "not", "use", "this", "file", "at", "http://www.apache.org/licenses/license-2.0", "unless", "required", "by", "applicable", "law", "or", "agreed", "writing", "software"}

# Load the whole sample file into memory.
with open("sample_text.txt", "r") as fh:
    text = fh.read()

# Tokenize: lowercase everything and keep runs of word characters only.
tokens = re.findall(r'\b\w+\b', text.lower())

# Tally every token that is not a stop word (Counter preserves first-seen order).
counts = Counter(token for token in tokens if token not in stop_words)

# Report each word with its frequency.
for w, c in counts.items():
    print(f"Word: {w}, Count: {c}")
<filename>broker/nats/options.go
package nats

import (
	"github.com/micro/go-micro/v2/broker"
	nats "github.com/nats-io/nats.go"
)

// Unexported empty-struct context keys; distinct types avoid collisions.
type optionsKey struct{}
type drainConnectionKey struct{}
type drainSubscriptionKey struct{}

// Options accepts nats.Options
func Options(opts nats.Options) broker.Option {
	return setBrokerOption(optionsKey{}, opts)
}

// DrainConnection will drain subscription on close
func DrainConnection() broker.Option {
	return setBrokerOption(drainConnectionKey{}, true)
}

// DrainSubscription will drain pending messages when unsubscribe
func DrainSubscription() broker.SubscribeOption {
	return setSubscribeOption(drainSubscriptionKey{}, true)
}
|
#
# Copyright 2010 The Apache Software Foundation
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Resolve a credential named $1: use the existing shell variable if set,
# otherwise look it up in $bin/credentials.sh, otherwise prompt the user and
# persist the answer for next time. The result is exported into $name.
function getCredentialSetting {
  name=$1
  eval "val=\$$name"
  if [ -z "$val" ] ; then
    # Try the credentials file first. Anchor the match to the start of the
    # line so e.g. "USER" cannot also match "USERNAME" (the original
    # unanchored grep could return several lines and corrupt val).
    if [ -f "$bin"/credentials.sh ] ; then
      val=`grep "^$name=" "$bin"/credentials.sh | head -n 1 | awk 'BEGIN { FS="=" } { print $2; }'`
    fi
    # Still unset: ask interactively and append to the credentials file
    # (this collapses the two duplicated prompt branches of the original).
    if [ -z "$val" ] ; then
      echo -n "$name: "
      read -e val
      echo "$name=$val" >> "$bin"/credentials.sh
    fi
    eval "$name=$val"
  fi
}
|
use std::process::Command;
use std::io;
fn main() {
    // Hard-coded local checkout location and remote URL.
    let repodir = "path/to/local/repository";
    let url = "https://github.com/username/repository.git";
    // A directory containing .git is treated as an existing clone: pull it;
    // otherwise clone fresh. Failures are reported to stderr, not fatal.
    if !std::path::Path::new(repodir).join(".git").exists() {
        if let Err(err) = clone(url, repodir) {
            eprintln!("Error cloning repository: {}", err);
        }
    } else {
        if let Err(err) = pull(repodir) {
            eprintln!("Error pulling repository: {}", err);
        }
    }
}
/// Clone `url` into `repodir` using the system `git` binary.
///
/// On a non-zero exit status the error now carries git's own stderr output,
/// which the original fixed message discarded.
fn clone(url: &str, repodir: &str) -> Result<(), io::Error> {
    let output = Command::new("git")
        .args(&["clone", url, repodir])
        .output()?;
    if output.status.success() {
        Ok(())
    } else {
        let stderr = String::from_utf8_lossy(&output.stderr);
        Err(io::Error::new(
            io::ErrorKind::Other,
            format!("Failed to clone repository: {}", stderr.trim()),
        ))
    }
}
/// Run `git pull` inside `repodir`.
///
/// On a non-zero exit status the error now carries git's own stderr output,
/// which the original fixed message discarded.
fn pull(repodir: &str) -> Result<(), io::Error> {
    let output = Command::new("git")
        .current_dir(repodir)
        .args(&["pull"])
        .output()?;
    if output.status.success() {
        Ok(())
    } else {
        let stderr = String::from_utf8_lossy(&output.stderr);
        Err(io::Error::new(
            io::ErrorKind::Other,
            format!("Failed to pull repository: {}", stderr.trim()),
        ))
    }
}
import subprocess
def run_fsx_test(options, file_size, num_operations, prealloc_size, file_length):
    """Run one ./fsx stress-test invocation and capture its output.

    Args:
        options: extra flag string passed through verbatim (e.g. "-z").
        file_size: positional size argument handed to fsx.
        num_operations: value for -N (number of operations).
        prealloc_size: value for -p (preallocation size).
        file_length: value for -l (file length).

    Returns:
        (stdout, stderr) of the fsx process, both as text.
    """
    command = f"./fsx {options} {file_size} -N {num_operations} -p {prealloc_size} -l {file_length}"
    # subprocess.run replaces the manual Popen/communicate pair; shell=True is
    # kept so the flag string is split exactly as before.
    result = subprocess.run(command, shell=True, capture_output=True, text=True)
    return result.stdout, result.stderr
def analyze_test_results(stdout, stderr):
    """Classify a test run: any 'error'/'warning' in stderr (case-insensitive)
    counts as an issue. stdout is accepted for interface symmetry but unused."""
    lowered = stderr.lower()
    has_issue = "error" in lowered or "warning" in lowered
    return "Issues detected" if has_issue else "No issues detected"
def main():
    # Run one fsx pass per configured size and print the verdict for each.
    options = "-z" # don't use zero range calls; not supported by cephfs
    file_sizes = ["1MB", "10MB", "100MB"]
    num_operations = 50000
    prealloc_size = 10000
    # Byte lengths matching file_sizes (1 MiB, 10 MiB, 100 MiB).
    file_length = [1048576, 10485760, 104857600]
    for i in range(len(file_sizes)):
        stdout, stderr = run_fsx_test(options, file_sizes[i], num_operations, prealloc_size, file_length[i])
        print(f"Test {i+1} - File size: {file_sizes[i]}")
        print(analyze_test_results(stdout, stderr))

if __name__ == "__main__":
    main()
<gh_stars>100-1000
# Get twilio-ruby from twilio.com/docs/ruby/install
require 'twilio-ruby'
# Get your Account SID and Auth Token from twilio.com/console
# To set up environmental variables, see http://twil.io/secure
account_sid = ENV['TWILIO_ACCOUNT_SID']
auth_token = ENV['TWILIO_AUTH_TOKEN']
api_key = ENV['TWILIO_API_KEY']
# Initialize Twilio Client
@client = Twilio::REST::Client.new(account_sid, auth_token)
# Fetch the API Key resource by SID and print its human-readable name.
@key = @client.api.keys(api_key).fetch
puts @key.friendly_name
|
# get password from eck elastic cluster
# PASSWORD=$(kubectl get secret quickstart-es-elastic-user -o go-template='{{.data.elastic | base64decode}}')
# Exported so child processes inherit it; echoed for quick verification.
export PASSWORD=$(kubectl get secret quickstart-es-elastic-user -o go-template='{{.data.elastic | base64decode}}')
echo $PASSWORD
|
#!/bin/bash
dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source "${dir}/helpers.bash"
# dir might have been overwritten by helpers.bash
dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TEST_NAME=$(get_filename_without_extension $0)
LOGS_DIR="${dir}/cilium-files/${TEST_NAME}/logs"
redirect_debug_logs ${LOGS_DIR}
set -ex
function cleanup {
  log "beginning cleanup for ${TEST_NAME}"
  # Best-effort teardown: suppress errors so cleanup always runs to the end.
  cilium policy delete --all 2> /dev/null || true
  docker rm -f server client 2> /dev/null || true
  monitor_stop
  log "finished cleanup for ${TEST_NAME}"
}
function finish_test {
  log "beginning finish_test for ${TEST_NAME}"
  # Collect artifacts first, then tear everything down (invoked via EXIT trap).
  gather_files ${TEST_NAME} ${TEST_SUITE}
  cleanup
  log "done with finish_test for ${TEST_NAME}"
}
trap finish_test EXIT
cleanup
SERVER_LABEL="id.server"
CLIENT_LABEL="id.client"
NETPERF_IMAGE="tgraf/netperf"
monitor_start
logs_clear
create_cilium_docker_network
log "starting containers"
docker run -d -i --net=$TEST_NET --name server -l $SERVER_LABEL $NETPERF_IMAGE
docker run -d -i --net=$TEST_NET --name client -l $CLIENT_LABEL $NETPERF_IMAGE
log "done starting containers"
CLIENT_IP=$(docker inspect --format '{{ .NetworkSettings.Networks.cilium.GlobalIPv6Address }}' client)
CLIENT_IP4=$(docker inspect --format '{{ .NetworkSettings.Networks.cilium.IPAddress }}' client)
CLIENT_ID=$(cilium endpoint list | grep $CLIENT_IP | awk '{ print $1}')
SERVER_IP=$(docker inspect --format '{{ .NetworkSettings.Networks.cilium.GlobalIPv6Address }}' server)
SERVER_IP4=$(docker inspect --format '{{ .NetworkSettings.Networks.cilium.IPAddress }}' server)
SERVER_ID=$(cilium endpoint list | grep $SERVER_IP | awk '{ print $1}')
log "CLIENT_IP=$CLIENT_IP"
log "CLIENT_IP4=$CLIENT_IP4"
log "CLIENT_ID=$CLIENT_ID"
log "SERVER_IP=$SERVER_IP"
log "SERVER_IP4=$SERVER_IP4"
log "SERVER_ID=$SERVER_ID"
wait_for_docker_ipv6_addr client
wait_for_docker_ipv6_addr server
cat <<EOF | policy_import_and_wait -
[{
"endpointSelector": {"matchLabels":{"${SERVER_LABEL}":""}},
"ingress": [{
"fromEndpoints": [
{"matchLabels":{"${CLIENT_LABEL}":""}}
]
}]
}]
EOF
log "updating client endpoint configuration: NAT46=true"
cilium endpoint config ${CLIENT_ID} NAT46=true
function connectivity_test46() {
  log "beginning connectivity_test46"
  # ICMPv4 echo request from client to server should succeed
  monitor_clear
  log "pinging NAT46 address of server from client (should work)"
  # ::FFFF:<ipv4> is the NAT46-mapped form of the server's IPv4 address.
  docker exec -i client ping6 -c 10 ::FFFF:$SERVER_IP4 || {
    abort "Error: Could not ping nat46 address of server from client"
  }
  log "finished connectivity_test46"
}
connectivity_test46
log "deleting all policies from Cilium"
cilium -D policy delete --all
test_succeeded "${TEST_NAME}"
|
export var smallDate = function (date, locale) {
if (locale === void 0) { locale = 'en-us'; }
return ((new Date(date)).toLocaleDateString(locale, {
year: 'numeric',
month: '2-digit',
day: '2-digit',
}));
};
export var shortDate = function (date, locale) {
if (locale === void 0) { locale = 'en-us'; }
return ((new Date(date)).toLocaleDateString(locale, {
year: 'numeric',
month: 'long',
day: 'numeric',
}));
};
export var longDate = function (date, locale) {
if (locale === void 0) { locale = 'en-us'; }
return ((new Date(date)).toLocaleDateString(locale, {
weekday: 'short',
year: 'numeric',
month: 'short',
day: 'numeric',
hour: '2-digit',
minute: '2-digit',
}));
};
|
# List the proof-of-vulnerability (pov_*) entries for the challenge named $1.
POVs=`ls challenges/$1/ | grep pov_`
echo $POVs
# Echo the full path of each matching file, plus the challenge/pov pair.
for f in $POVs
do
echo `ls challenges/$1/*$f`
echo $1$f
done
#for f in pov/challenges/$1/Sensr_pov_*
#do
# echo $f
#done
|
import { assertEquals } from 'https://deno.land/std/testing/asserts.ts';
import { Ord } from '../types/mod.ts';
import { compare } from '../lib/compare/mod.ts';
import { iterator } from '../mod.ts';

// Deliberately inverted ordering (larger values compare as "Less"), used to
// verify that minBy honours a caller-supplied comparator.
function _compare<T>(a: T, b: T): Ord {
    if (a > b) {
        return Ord.Less;
    }
    if (a < b) {
        return Ord.Greater;
    }
    return Ord.Equal;
}

// Standard comparator: minimum of 1..5 is 1 (the Promise element is awaited).
Deno.test('minBy() [1,2,3,4,5]', async () => {
    const a = iterator([1, 2, 3, Promise.resolve(4), 5]);
    const actual = await a.minBy(compare);
    const expected = 1;
    assertEquals(actual, expected);
});

// With the inverted comparator the reported "minimum" is the largest value.
Deno.test('minBy() custom compare', async () => {
    const i = iterator([1, 2, 3, Promise.resolve(4), 5]);
    const actual = await i.minBy(_compare);
    const expected = 5;
    assertEquals(actual, expected);
});

// An empty iterator has no minimum: minBy resolves to undefined.
Deno.test('minBy() empty iter', async () => {
    const a = iterator<number>([]);
    const actual = await a.minBy(compare);
    const expected = undefined;
    assertEquals(actual, expected);
});
|
import { IsNotEmpty, MaxLength } from "class-validator";
import { Field, InputType } from "type-graphql";
/** GraphQL input type for creating an exercise. */
@InputType()
export class CreateExerciseInput {
    // Creator identifier; capped at 100 characters by class-validator.
    @Field()
    @MaxLength(100)
    createdBy: string;

    // Human-readable description; must be non-empty.
    @Field()
    @IsNotEmpty()
    description: string;

    // Score multiplier. NOTE(review): no validator here — presumably it
    // should be constrained (e.g. positive); confirm the intended range.
    @Field()
    multiplier: number;
}
|
#!/usr/bin/env bash

# True when the `kind` binary is available on PATH.
have_kind() {
    [[ -n "$(command -v kind)" ]]
}

if ! have_kind; then
    echo "Please install kind first:"
    echo "  https://kind.sigs.k8s.io/docs/user/quick-start/#installation"
    # The original only warned and then ran the commands below anyway;
    # without kind they can only fail, so bail out here.
    exit 1
fi

# Tear down the kind cluster and its local registry container.
kind delete clusters kind && \
docker kill kind-registry && \
docker rm kind-registry
|
<reponame>IVAS-TECH/orders-app-backend
// Placeholder download endpoint: log the route params and acknowledge with 200.
const downloadFile = (req, res) => {
    console.log(req.params);
    res.sendStatus(200);
};

module.exports = downloadFile;
#!/bin/sh
# Copyright (C) 2000-2020 Free Software Foundation, Inc.
# This file is part of the GNU C Library.
# Contributed by Bruno Haible <haible@clisp.cons.org>, 2000.
#
# The GNU C Library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# The GNU C Library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with the GNU C Library; if not, see
# <https://www.gnu.org/licenses/>.
# Checks that the iconv() implementation (in both directions) for a
# stateless encoding agrees with the charmap table.
# Positional parameters handed over by the glibc test harness.
common_objpfx=$1
objpfx=$2
test_program_prefix=$3
charset=$4
charmap=$5

# sort is used on the build system.
LC_ALL=C
export LC_ALL

set -e

# Get the charmap.
./tst-table-charmap.sh ${charmap:-$charset} \
  < ../localedata/charmaps/${charmap:-$charset} \
  > ${objpfx}tst-${charset}.charmap.table

# When the charset is GB18030, truncate this table because for this encoding,
# the tst-table-from and tst-table-to programs scan the Unicode BMP only.
if test ${charset} = GB18030; then
  grep '0x....$' < ${objpfx}tst-${charset}.charmap.table \
    > ${objpfx}tst-${charset}.truncated.table
  mv ${objpfx}tst-${charset}.truncated.table ${objpfx}tst-${charset}.charmap.table
fi

# Precompute expected differences between the charmap and iconv forward.
precomposed=${charset}.precomposed

# Precompute expected differences between the charmap and iconv backward.
if test ${charset} = EUC-TW; then
  # For EUC-TW, rows with lead bytes 0x8EA1 are also expected to be irreversible.
  irreversible=${objpfx}tst-${charset}.irreversible
  (grep '^0x8EA1' ${objpfx}tst-${charset}.charmap.table
   cat ${charset}.irreversible
  ) > ${irreversible}
else
  irreversible=${charset}.irreversible
fi

# iconv in one direction.
${test_program_prefix} \
${objpfx}tst-table-from ${charset} \
  > ${objpfx}tst-${charset}.table

# iconv in the other direction.
${test_program_prefix} \
${objpfx}tst-table-to ${charset} | sort \
  > ${objpfx}tst-${charset}.inverse.table

# Difference between the charmap and iconv backward.
diff ${objpfx}tst-${charset}.charmap.table ${objpfx}tst-${charset}.inverse.table | \
  grep '^[<>]' | sed -e 's,^. ,,' > ${objpfx}tst-${charset}.irreversible.table

# Check 1: charmap and iconv forward should be identical, except for
# precomposed characters.
if test -f ${precomposed}; then
  cat ${objpfx}tst-${charset}.table ${precomposed} | sort | uniq -u \
    > ${objpfx}tst-${charset}.tmp.table
  cmp -s ${objpfx}tst-${charset}.charmap.table ${objpfx}tst-${charset}.tmp.table ||
    exit 1
else
  cmp -s ${objpfx}tst-${charset}.charmap.table ${objpfx}tst-${charset}.table ||
    exit 1
fi

# Check 2: the difference between the charmap and iconv backward.
if test -f ${irreversible}; then
  cat ${objpfx}tst-${charset}.charmap.table ${irreversible} | sort | uniq -u \
    > ${objpfx}tst-${charset}.tmp.table
  cmp -s ${objpfx}tst-${charset}.tmp.table ${objpfx}tst-${charset}.inverse.table ||
    exit 1
else
  cmp -s ${objpfx}tst-${charset}.charmap.table ${objpfx}tst-${charset}.inverse.table ||
    exit 1
fi

exit 0
|
<reponame>guzhongren/CMS<filename>src/layouts/index.tsx
// Barrel file re-exporting the shared layout components.
import Header from './Header'
import Footer from './Footer'
import AdminHeader from './AdminHeader'

export {
    Header,
    AdminHeader,
    Footer
}
<gh_stars>10-100
//
// ATUSettingsViewController.h
// AutotypeURL
//
// Created by <NAME> on 7/10/19.
// Copyright © 2019 <NAME> All rights reserved.
//
#import <Cocoa/Cocoa.h>

@class ATUAutotypeURL;

// View controller backing the AutotypeURL plugin's settings pane.
// No public API is declared here; behavior lives in the implementation file.
// NOTE(review): the forward-declared ATUAutotypeURL class is not referenced
// in this header — presumably used by the implementation; confirm before
// removing the @class declaration.
@interface ATUSettingsViewController : NSViewController

@end
|
const express = require('express');
const app = express();

// GET /redirect?url=<path>
// Only site-relative paths are accepted. The original code redirected to
// whatever ?url= contained, which is an open-redirect vulnerability
// (attackers could craft links through this host to arbitrary sites).
app.get('/redirect', (req, res) => {
  const url = req.query.url;
  // Accept only strings beginning with a single "/": rejects absolute
  // URLs ("http://…"), protocol-relative targets ("//evil.example") and
  // backslash variants ("/\evil.example") that browsers treat like "//".
  if (typeof url === 'string' && url.startsWith('/') &&
      !url.startsWith('//') && !url.includes('\\')) {
    res.redirect(url);
  } else {
    res.status(400).send('Invalid redirect target');
  }
});

app.listen(3000, () => {
  console.log('Example app listening on port 3000!');
});
<reponame>NajibAdan/kitsu-server
# rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: anime_staff
#
# id :integer not null, primary key
# role :string
# created_at :datetime
# updated_at :datetime
# anime_id :integer not null, indexed, indexed => [person_id]
# person_id :integer not null, indexed => [anime_id], indexed
#
# Indexes
#
# index_anime_staff_on_anime_id (anime_id)
# index_anime_staff_on_anime_id_and_person_id (anime_id,person_id) UNIQUE
# index_anime_staff_on_person_id (person_id)
#
# Foreign Keys
#
# fk_rails_cdd9599b2a (person_id => people.id)
# fk_rails_f8b16cdc79 (anime_id => anime.id)
#
# rubocop:enable Metrics/LineLength
# Join model linking an Anime to a Person who worked on it, with the role
# they performed (see the schema comment above for columns and the unique
# (anime_id, person_id) index).
class AnimeStaff < ApplicationRecord
  # `role` is free-form text from upstream sources; capped at 140 chars.
  validates :role, length: { maximum: 140 }
  # `required: true` validates presence of both associated records.
  belongs_to :anime, required: true
  belongs_to :person, required: true
end
|
package main
import (
	"encoding/json"
	"fmt"
	"net/http"
)
// main registers the API handler and serves HTTP on :8080. If the server
// fails to start (e.g. the port is already bound), the error is printed
// and the process exits normally.
func main() {
	http.HandleFunc("/api/", apiHandler)
	fmt.Println("starting server on :8080")
	err := http.ListenAndServe(":8080", nil)
	if err != nil {
		fmt.Println(err)
	}
}
// apiHandler answers every /api/ request with a fixed one-entry JSON
// object ({"key":"value"}) and a JSON content type. The Encode error is
// ignored; at this point headers are already sent, so there is little to
// do beyond logging anyway.
func apiHandler(rw http.ResponseWriter, r *http.Request) {
	rw.Header().Set("Content-Type", "application/json")
	jsonData := map[string]string{"key": "value"}
	json.NewEncoder(rw).Encode(jsonData)
}
#!/bin/bash
# Build the project via Polly's build.py with the Xcode toolchain.
TOOLCHAIN=xcode
CONFIG=MinSizeRel

# Arguments for build.py. HUNTER_CONFIGURATION_TYPES is forwarded so Hunter
# builds dependency packages with the same configuration as the project.
ARGS=(
  --verbose
  --config "${CONFIG}"
  --fwd "HUNTER_CONFIGURATION_TYPES=${CONFIG}"
  XGBOOSTER_SERIALIZE_WITH_CEREAL=ON
  --jobs 8
)

# Quote the array expansion so each element reaches build.py verbatim,
# without IFS word-splitting or glob expansion (the unquoted form breaks
# as soon as any element contains whitespace or glob characters).
build.py --toolchain "${TOOLCHAIN}" "${ARGS[@]}" --reconfig --install --open --test
|
const db = require('./../../database');
exports.addFeedback = (data, callback) => {
const { user_id, tutor_id, rating, feedback, date, time } = data;
let queryStr = `INSERT INTO feedback (user_id, tutor_id, rating, content, date, time) VALUES (?, ?, ?, ?, ?, ?)`;
let params = [
Number(user_id),
Number(tutor_id),
Number(rating),
String(feedback),
String(date),
String(time)
];
db.query(queryStr, params, (err, result) => {
if (err) {
console.error('There was an error adding feedback: ', err);
} else {
let average = `select AVG(rating) AS Average from feedback where tutor_id = ${tutor_id}`;
db.query(average, (err, results) => {
if (err) {
console.error('error inserting feedback : ', err);
} else {
let newQueryStr = `Update tutors Set rating=${
results[0].Average
} where id = ${tutor_id}`;
db.query(newQueryStr, (err, results) => {
if (err) {
callback(err);
} else {
callback(null, results);
}
});
}
});
}
});
};
// NOTE(review): despite the name, this INSERTs a new (rating, content) row
// rather than UPDATEing an existing one, and no feedback id or tutor_id is
// supplied — presumably unfinished or a bug; confirm intended behavior
// against the callers before changing.
exports.updateFeedback = ({ rating, content }, callback) => {
  let queryStr = 'INSERT INTO feedback (rating, content) VALUES (?, ?)';
  let params = [rating, content];
  db.query(queryStr, params, callback);
};
exports.getFeedback = ({ id }, callback) => {
let queryStr = `SELECT * FROM feedback WHERE tutor_id = ${id}`;
db.query(queryStr, callback);
};
|
<filename>src/main/scala/gov/nasa/jpl/imce/oti/magicdraw/dynamicScripts/ui/ClassifierInspectorWidget.scala<gh_stars>0
/*
* Copyright 2016 California Institute of Technology ("Caltech").
* U.S. Government sponsorship acknowledged.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* License Terms
*/
package gov.nasa.jpl.imce.oti.magicdraw.dynamicScripts.ui
import java.awt.event.ActionEvent
import com.nomagic.magicdraw.core.Project
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Element
import gov.nasa.jpl.dynamicScripts.DynamicScriptsTypes
import gov.nasa.jpl.dynamicScripts.magicdraw.designations.MagicDrawElementKindDesignation
import gov.nasa.jpl.dynamicScripts.magicdraw.utils._
import gov.nasa.jpl.imce.oti.magicdraw.dynamicScripts.utils.OTIHelper
import org.omg.oti.magicdraw.uml.canonicalXMI.MagicDrawIDGenerator
import org.omg.oti.magicdraw.uml.canonicalXMI.helper._
import org.omg.oti.magicdraw.uml.read.MagicDrawUML
import org.omg.oti.uml.read.api._
import scala.collection.immutable._
import scala.util.Try
// Dynamic-script widget callbacks for inspecting UML classifiers inside
// MagicDraw. Each callback resolves the project's OTI adapter, then renders
// a table widget listing one derived property of the selected classifier.
object ClassifierInspectorWidget {

  import ComputedDerivedWidgetHelper._

  // Widget for the classifier's `general` derived property.
  // `e` is the MagicDraw element the dynamic script was invoked on; the
  // result pairs the Swing component with any validation annotations.
  def general
  ( project: Project, ev: ActionEvent, derived: DynamicScriptsTypes.ComputedDerivedWidget,
    ek: MagicDrawElementKindDesignation, e: Element )
  : Try[(java.awt.Component, Seq[ValidationAnnotation])]
  = OTIHelper.toTry(
    MagicDrawOTIHelper.getOTIMagicDrawInfoForDataCharacteristics(project),
    (ordsa: MagicDrawOTIResolvedDocumentSetAdapterForDataProvider) => {
      // ID generator needed implicitly by the widget-rendering helper.
      implicit val idg = MagicDrawIDGenerator()(ordsa.rds.ds)
      elementOperationWidget[UMLClassifier[MagicDrawUML], UMLClassifier[MagicDrawUML]](
        derived, e,
        _.general,
        ordsa.otiAdapter.umlOps)
    })

  // Same as `general`, but renders the `general_classifier` derived property.
  def generalClassifier
  ( project: Project, ev: ActionEvent, derived: DynamicScriptsTypes.ComputedDerivedWidget,
    ek: MagicDrawElementKindDesignation, e: Element )
  : Try[(java.awt.Component, Seq[ValidationAnnotation])]
  = OTIHelper.toTry(
    MagicDrawOTIHelper.getOTIMagicDrawInfoForDataCharacteristics(project),
    (ordsa: MagicDrawOTIResolvedDocumentSetAdapterForDataProvider) => {
      implicit val idg = MagicDrawIDGenerator()(ordsa.rds.ds)
      elementOperationWidget[UMLClassifier[MagicDrawUML], UMLClassifier[MagicDrawUML]](
        derived, e,
        _.general_classifier,
        ordsa.otiAdapter.umlOps)
    })
}
<filename>src/app/app/app.controller.js
angular
  .module('memory')
  .controller('AppCtrl', AppCtrl);

/**
 * Top-level application controller for the memory game.
 * @param {Object} $scope - Angular scope bound to the app template.
 * @param {Object} gameService - service owning the game state.
 */
function AppCtrl($scope, gameService) {
  'use strict';

  // Invoked from the view to discard the current game and start a new one.
  $scope.createNewGame = function () {
    gameService.restartGame();
  };
}
|
<reponame>dominiek/bulkhead<filename>services/web/src/screens/Settings/Security.js
import React from 'react';
import { Link } from 'react-router-dom';
import { Segment, Button, Divider, Header, Label } from 'semantic';
import screen from 'helpers/screen';
import { request } from 'utils/api';
import { withSession } from 'stores';
import { Layout } from 'components';
import LoadButton from 'components/LoadButton';
import ErrorMessage from 'components/ErrorMessage';
import Menu from './Menu';
@screen
@withSession
export default class Security extends React.Component {
state = {
error: null,
};
componentDidMount() {
if (
Date.parse(this.context.user.accessConfirmedAt) <
Date.now() - 20 * 60 * 1000
) {
this.props.history.push(
`/confirm-access?to=${this.props.location.pathname}`
);
}
}
disableMFa = async () => {
this.setState({ error: null });
try {
await request({
method: 'DELETE',
path: '/1/mfa/disable',
});
await this.context.bootstrap();
} catch (e) {
if (e.status === 403) {
this.props.history.push(
`/confirm-access?to=${this.props.location.pathname}`
);
return;
} else {
this.setState({ error: e });
}
}
};
render() {
const { error } = this.state;
const { mfaMethod } = this.context.user;
return (
<React.Fragment>
<Menu />
<Divider hidden />
<Header>Two-factor authentication</Header>
<p>
After logging, you will be required to enter a code using one of the
methods below. You can always set up more than one method later.
</p>
<Divider hidden></Divider>
<Segment.Group>
<Segment>
<Layout horizontal spread>
<div>
<Header
size="tiny"
style={{ marginTop: 0, marginBottom: '0.5em' }}>
App authentication{' '}
{mfaMethod === 'otp' && <Label color="green">Enabled</Label>}
</Header>
<p>
Security codes will be generated by your preferred
authenticator app.
</p>
</div>
<div>
<Button
basic
size="small"
to="/settings/mfa-authenticator"
as={Link}>
{mfaMethod === 'otp' ? 'Config' : 'Enable'}
</Button>
</div>
</Layout>
</Segment>
<Segment>
<ErrorMessage error={error} />
<Layout horizontal spread>
<div>
<Header
size="tiny"
style={{ marginTop: 0, marginBottom: '0.5em' }}>
SMS authentication
{mfaMethod == 'sms' && <Label color="green">Enabled</Label>}
</Header>
<p>
Security codes will be sent via SMS to your mobile device.
</p>
</div>
<div>
<Button basic size="small" to="/settings/mfa-sms" as={Link}>
{mfaMethod === 'sms' ? 'Config' : 'Enable'}
</Button>
</div>
</Layout>
</Segment>
</Segment.Group>
{mfaMethod && (
<>
<Divider hidden></Divider>
<Segment color="red">
<Layout horizontal spread>
<div>
<Header
size="tiny"
style={{ marginTop: 0, marginBottom: '0.5em' }}>
Turn off two-factor authentication
</Header>
<p>
Turning off two-factor authentication will remove an extra
layer of security on your account.
</p>
</div>
<div>
<LoadButton
onClick={this.disableMFa}
basic
size="small"
color="red">
Turn off
</LoadButton>
</div>
</Layout>
</Segment>
</>
)}
</React.Fragment>
);
}
}
|
import { Component, OnInit } from '@angular/core';
import { Observable, of } from 'rxjs';
import { ZFormInputBase, ZFormInputText, ZFormProvider } from 'zmaterial';
@Component({
  selector: 'app-user',
  templateUrl: './user.component.html',
  styleUrls: ['./user.component.scss']
})
// Demo form built on ZFormProvider: declares one text input per supported
// input type and stores whatever the form submits.
export class UserComponent extends ZFormProvider implements OnInit {

  // Last value submitted through the form (consumed by the template).
  public currentValue: any = {};

  constructor() { super(); }

  ngOnInit(): void {
  }

  /**
   * Declares the form fields: one ZFormInputText per supported input type
   * (text, password, email, ip, CPF, CNPJ, CPF/CNPJ, vehicle plate), each
   * required and occupying 50% of the row.
   */
  getInputs(): Observable<ZFormInputBase<any>[]> {
    return of([
      new ZFormInputText({
        label: 'Campo de Texto',
        key: 'text',
        type: 'text',
        required: true,
        layout: {
          cols: 50
        }
      }),
      new ZFormInputText({
        label: 'Campo de Senha',
        key: 'password',
        type: 'password',
        required: true,
        layout: {
          cols: 50
        }
      }),
      new ZFormInputText({
        label: 'Campo de E-mail',
        key: 'email',
        type: 'email',
        required: true,
        layout: {
          cols: 50
        }
      }),
      new ZFormInputText({
        label: 'Campo de IP',
        key: 'ip',
        type: 'ip',
        required: true,
        layout: {
          cols: 50
        }
      }),
      new ZFormInputText({
        label: 'Campo de CPF',
        key: 'CPF',
        type: 'CPF',
        required: true,
        layout: {
          cols: 50
        }
      }),
      new ZFormInputText({
        label: 'Campo de CNPJ',
        key: 'CNPJ',
        type: 'CNPJ',
        required: true,
        layout: {
          cols: 50
        }
      }),
      new ZFormInputText({
        label: 'Campo de CPF/CNPJ',
        // Fixed: this key had been replaced by a '<KEY>' placeholder, which
        // produced an unusable/duplicate form-control name.
        key: 'cpfCnpj',
        type: 'CPF/CNPJ',
        required: true,
        layout: {
          cols: 50
        }
      }),
      new ZFormInputText({
        label: 'Campo de Placa',
        key: 'vehiclePlate',
        type: 'vehiclePlate',
        required: true,
        layout: {
          cols: 50
        }
      }),
    ]);
  }

  // Receives the submitted form value, logs it and stores it for display.
  public saveData(value: any): void {
    console.log('Valores: ', value);
    this.currentValue = value;
  }
}
|
function generateRandomPassword() {
const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()';
let password = '';
for (let i = 0; i < 12; i++) {
let index = Math.floor(Math.random() * characters.length);
let character = characters.charAt(index);
password += character;
}
return password;
} |
<reponame>MihaiAnca13/cvpage<gh_stars>0
import React from "react"
import { graphql } from "gatsby"
import Layout from "../components/layout"
import { Link } from "@reach/router"
import Grid from "@material-ui/core/Grid"
import Card from "@material-ui/core/Card"
import CardActionArea from "@material-ui/core/CardActionArea"
import { makeStyles } from "@material-ui/core"
import Paper from "@material-ui/core/Paper"
import Breadcrumbs from "@material-ui/core/Breadcrumbs"
import Typography from "@material-ui/core/Typography"
// JSS styles for the project listing page: teal title cards with an
// attached summary panel underneath, plus breadcrumb link styling.
const useStyles = makeStyles(theme => ({
  // Clickable post-title card.
  card: {
    backgroundColor: "#1C768F",
    padding: "0.1em",
    fontSize: "1.5em",
    color: "#FBF3F2",
  },
  // Inactive breadcrumb link.
  link: {
    color: "#032539",
    marginLeft: "0 !important"
  },
  // Current (non-clickable) breadcrumb entry.
  activeLink: {
    color: "#1C768F"
  },
  // Breadcrumb container strip.
  paper: {
    paddingTop: "0.5em",
    marginLeft: "0.3em",
    backgroundColor: "#FBF3F2"
  },
  // Summary panel attached below each card; negative top offset and
  // missing top border make it read as an extension of the card.
  expansionPanel: {
    height: "auto",
    backgroundColor: "#FBF3F2",
    width: "96%",
    color: "#032539",
    position: "relative",
    left: "2%",
    top: "-0.1em",
    borderBottomLeftRadius: "0.3em",
    borderBottomRightRadius: "0.3em",
    paddingLeft: "1em",
    border: "1px solid #1C768F",
    borderTop: "0px"
  },
  // Spacing between consecutive card+summary pairs.
  item: {
    paddingBottom: "0.5em"
  }
}))
// Gatsby page template listing every markdown post inside one project
// folder. The project name is derived from the URL's last path segment;
// each post renders as a linked title card with its summary below.
export default ({ data, location }) => {
  const classes = useStyles()
  // Derive the project name from the URL: drop a trailing slash, take the
  // last segment, and turn underscores back into spaces for display.
  let pathname = location.pathname;
  pathname = pathname.split("/");
  if (pathname[pathname.length - 1] === "") {
    pathname.pop();
  }
  let project_name = pathname[pathname.length - 1];
  project_name = project_name.replace(/_/g, " ");
  let nodes = data.allMarkdownRemark.edges;
  nodes = nodes.map((item, key) => {
    // Rebuild each post's URL from its file path:
    // /portfolio/<project-dir>/<file-name-without-".md">
    let aPath = item.node.fileAbsolutePath;
    aPath = aPath.split("/");
    let aux = aPath[aPath.length - 1];
    aux = aux.substr(0, aux.length - 3);
    const p = "/portfolio/" + aPath[aPath.length - 2] + "/" + aux;
    // NOTE(review): `key` is the array index — fine while the query order
    // is stable (sorted by date), but post identity would be a safer key.
    return (
      <div key={key} className={classes.item}>
        <Link to={p}>
          <Card className={classes.card}>
            <CardActionArea>
              <Grid
                container
                direction="row"
                justify="space-between"
                alignItems="baseline"
              >
                <Grid item>
                  <span>{item.node.frontmatter.title}</span>
                </Grid>
                <Grid item>
                  <span>{item.node.frontmatter.date}</span>
                </Grid>
              </Grid>
            </CardActionArea>
          </Card>
        </Link>
        <div className={classes.expansionPanel}>{item.node.frontmatter.summary}</div>
      </div>
    )
  })
  return (
    <Layout>
      <Paper elevation={0} className={classes.paper}>
        <Breadcrumbs aria-label="breadcrumb">
          <Link to={"/portfolio"} className={classes.link}>
            Portfolio
          </Link>
          <Typography className={classes.activeLink}>{project_name}</Typography>
        </Breadcrumbs>
      </Paper>
      <div className="container">
        <h1>{project_name}</h1>
        {nodes.length < 1 ?
          <h3>This project is empty</h3> :
          nodes}
      </div>
    </Layout>
  )
}
// Page query: all markdown nodes whose absolute file path matches the
// project's $relativePath (supplied by gatsby-node page context), sorted
// by frontmatter date.
export const query = graphql`
  query($relativePath: String!) {
    allMarkdownRemark(sort: {fields: frontmatter___date}, filter: {fileAbsolutePath: {regex: $relativePath}}){
      edges {
        node {
          frontmatter {
            title,
            date,
            summary
          }
          fileAbsolutePath
        }
      }
    }
  }
`
# Termux package recipe for xorg mkfontscale (creates an index of scalable
# font files for X). Variables below follow the standard Termux build API.
TERMUX_PKG_HOMEPAGE=https://xorg.freedesktop.org/
TERMUX_PKG_DESCRIPTION="Create an index of scalable font files for X"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=1.2.1
TERMUX_PKG_REVISION=22
TERMUX_PKG_SRCURL=https://xorg.freedesktop.org/archive/individual/app/mkfontscale-${TERMUX_PKG_VERSION}.tar.bz2
TERMUX_PKG_SHA256=ca0495eb974a179dd742bfa6199d561bda1c8da4a0c5a667f21fd82aaab6bac7
TERMUX_PKG_DEPENDS="findutils, freetype, libfontenc, zlib"
TERMUX_PKG_BUILD_DEPENDS="xorg-util-macros, xorgproto"
# This package supersedes the old xorg-mkfontdir package.
TERMUX_PKG_CONFLICTS="xorg-mkfontdir"
TERMUX_PKG_REPLACES="xorg-mkfontdir"

# Copy the maintainer scripts shipped next to this recipe (postinst,
# postrm, triggers) into the working directory used for packaging.
termux_step_create_debscripts() {
  cp -f "${TERMUX_PKG_BUILDER_DIR}/postinst" ./
  cp -f "${TERMUX_PKG_BUILDER_DIR}/postrm" ./
  cp -f "${TERMUX_PKG_BUILDER_DIR}/triggers" ./
}
|
#!/bin/bash
# configs/stm32f103-minimum/nrf24/setenv.sh
#
# Copyright (C) 2017 Gregory Nutt. All rights reserved.
# Author: Gregory Nutt <gnutt@nuttx.org>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. Neither the name NuttX nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. Neither the name NuttX nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Refuse to run as a child process: the whole point of this script is to
# mutate the *current* shell's environment, so it must be sourced.
if [ "$_" = "$0" ] ; then
  echo "You must source this script, not run it!" 1>&2
  exit 1
fi

WD=$(pwd)
if [ ! -x "setenv.sh" ]; then
  echo "This script must be executed from the top-level NuttX build directory"
  # We are guaranteed to be sourced at this point (the check above already
  # rejected direct execution), so `exit` would terminate the user's
  # interactive shell; `return` only aborts the sourcing.
  return 1
fi

# Remember the pristine PATH so re-sourcing does not stack toolchain dirs.
if [ -z "${PATH_ORIG}" ]; then
  export PATH_ORIG="${PATH}"
fi

# This is the Cygwin path to the location where I installed the CodeSourcery
# toolchain under windows.  You will also have to edit this if you install
# the CodeSourcery toolchain in any other location
#export TOOLCHAIN_BIN="/cygdrive/c/Program Files (x86)/CodeSourcery/Sourcery G++ Lite/bin"
#export TOOLCHAIN_BIN="/cygdrive/c/Program Files (x86)/CodeSourcery/Sourcery_CodeBench_Lite_for_ARM_EABI/bin"
# export TOOLCHAIN_BIN="/cygdrive/c/Users/MyName/MentorGraphics/Sourcery_CodeBench_Lite_for_ARM_EABI/bin"

# This is the location where I installed the ARM "GNU Tools for ARM Embedded Processors"
# You can this free toolchain here https://launchpad.net/gcc-arm-embedded
export TOOLCHAIN_BIN="/cygdrive/c/Program Files (x86)/GNU Tools ARM Embedded/4.9 2015q2/bin"

# This is the path to the location where I installed the devkitARM toolchain
# You can get this free toolchain from http://devkitpro.org/ or http://sourceforge.net/projects/devkitpro/
#export TOOLCHAIN_BIN="/cygdrive/c/Program Files (x86)/devkitARM/bin"

# This is the Cygwin path to the location where I build the buildroot
# toolchain.
# export TOOLCHAIN_BIN="${WD}/../buildroot/build_arm_nofpu/staging_dir/bin"

# Add the path to the toolchain to the PATH variable
export PATH="${TOOLCHAIN_BIN}:/sbin:/usr/sbin:${PATH_ORIG}"
echo "PATH : ${PATH}"
|
// Approximate pi with partial sums of the Leibniz series:
//   pi = 4/1 - 4/3 + 4/5 - 4/7 + ...
// The original added every term with a plus sign, so the sum grew without
// bound instead of converging; the sign must alternate each term (the
// sample output in the old comment shows alternation was intended).
let piArray = [];
let pi = 0;
for (let i = 0; i < 10; i++) {
  // Terms alternate: +4/1, -4/3, +4/5, -4/7, ...
  pi = pi + ((i % 2 === 0 ? 4 : -4) / (2 * i + 1));
  piArray.push(pi);
}
console.log(piArray); // [4, 2.666..., 3.466..., 2.895..., ..., 3.0418396189294032]
/**
* @module Experiences/Experience0
*/
import React, { Profiler, useRef } from 'react'
import { interval } from 'rxjs'
import { mapTo, raceWith } from 'rxjs/operators'
// React Profiler callback: logs each commit's id, phase and render time.
const onRender = (id, phase, actualDuration) => {
  console.log(id, phase, actualDuration)
}

// Three interval streams with different periods; raceWith makes source$
// mirror only whichever of the three emits first (here the 1s stream, so
// the observer will log "fast one" every second once subscribed).
const obs1 = interval(1000).pipe(mapTo('fast one'))
const obs2 = interval(3000).pipe(mapTo('medium one'))
const obs3 = interval(5000).pipe(mapTo('slow one'))
const source$ = obs2.pipe(raceWith(obs1, obs3))

const observer = {
  next: (v) => console.log(v),
  complete: () => console.log('Completed')
}
/**
* @function Experience
* @return {Object} Return the dom of the Experience
*/
const Experience = () => {
const subscription = useRef(null)
const handleClick = () => {
subscription.current = source$.subscribe(observer)
}
const handleStop = () => {
subscription.current.unsubscribe()
}
return (
<Profiler id="Experience" onRender={onRender}>
<button onClick={handleClick}>
Call the race and then look at the console
</button>
<button onClick={handleStop}>Stop everything</button>
</Profiler>
)
}
export default Experience
|
<reponame>aricooperman/jLean
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.quantconnect.lean.data;
import java.math.BigDecimal;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDate;
import java.time.LocalDateTime;
import com.quantconnect.lean.DataFeedEndpoint;
import com.quantconnect.lean.MarketDataType;
import com.quantconnect.lean.SubscriptionTransportMedium;
import com.quantconnect.lean.Symbol;
/**
 * Abstract base data class of QuantConnect. It is intended to be extended to define
 * generic user customizable data types while at the same time implementing the basics
 * of data where possible.
 *
 * Implements {@link Cloneable}: {@link #clone()} delegates to Object.clone(), which
 * throws CloneNotSupportedException at runtime for any object whose class does not
 * implement the Cloneable marker interface — without it every clone() call failed.
 */
public abstract class BaseData implements IBaseData, Cloneable {

    private MarketDataType dataType = MarketDataType.Base;
    private LocalDateTime time;
    private Symbol symbol = Symbol.EMPTY;
    private BigDecimal value;
    private boolean isFillForward;

    /// Market Data Class of this data - does it come in individual price packets or is it grouped into OHLC.
    /// <remarks>Data is classed into two categories - streams of instantaneous prices and groups of OHLC data.</remarks>
    public MarketDataType getDataType() {
        return dataType;
    }

    public void setDataType( MarketDataType value ) {
        dataType = value;
    }

    /// True if this is a fill forward piece of data
    public boolean isFillForward() {
        return isFillForward;
    }

    /// Current time marker of this data packet.
    /// <remarks>All data is timeseries based.</remarks>
    public LocalDateTime getTime() {
        return time;
    }

    public void setTime( LocalDateTime value ) {
        time = value;
    }

    /// The end time of this data. Some data covers spans (trade bars) and as such we want
    /// to know the entire time span covered
    public LocalDateTime getEndTime() {
        return time;
    }

    public void setEndTime( LocalDateTime value ) {
        time = value;
    }

    /// Symbol representation for underlying Security
    public Symbol getSymbol() {
        return symbol;
    }

    public void setSymbol( Symbol value ) {
        symbol = value;
    }

    /// Value representation of this data packet. All data requires a representative value for this moment in time.
    /// For streams of data this is the price now, for OHLC packets this is the closing price.
    public BigDecimal getValue() {
        return value;
    }

    public void setValue( BigDecimal value ) {
        this.value = value;
    }

    /// As this is a backtesting platform we'll provide an alias of value as price.
    public BigDecimal getPrice() {
        return value;
    }

    /// Constructor for initialising the base data class
    public BaseData() {
        //Empty constructor required for fast-reflection initialization
    }

    /**
     * Reader converts each line of the data source into BaseData objects. Each data type
     * creates its own factory method, and returns a new instance of the object each time
     * it is called. The returned object is assumed to be time stamped in the
     * config.ExchangeTimeZone.
     *
     * @param config Subscription data config setup object
     * @param line Line of the source document
     * @param date Date of the requested data
     * @param isLiveMode true if we're in live mode, false for backtesting mode
     * @return Instance of the BaseData object generated by this line of the CSV
     */
    public BaseData reader( SubscriptionDataConfig config, String line, LocalDate date, boolean isLiveMode ) {
        // stub implementation to prevent compile errors in user algorithms
        final DataFeedEndpoint dataFeed = isLiveMode ? DataFeedEndpoint.LiveTrading : DataFeedEndpoint.Backtesting;
        return reader( config, line, date, dataFeed );
    }

    /**
     * Reader converts each line of the data source into BaseData objects. Each data type
     * creates its own factory method, and returns a new instance of the object each time
     * it is called.
     *
     * OBSOLETE: kept only for backward/forward compatibility; this overload is no longer
     * called by the LEAN engine. (The original @Deprecated annotation was accidentally
     * placed inside the comment block, so it had no effect.)
     *
     * @param config Subscription data config setup object
     * @param line Line of the source document
     * @param date Date of the requested data
     * @param datafeed Type of datafeed we're requesting - a live or backtest feed
     * @return Instance of the BaseData object generated by this line of the CSV
     * @deprecated use {@link #reader(SubscriptionDataConfig, String, LocalDate, boolean)} instead
     */
    @Deprecated
    public BaseData reader( SubscriptionDataConfig config, String line, LocalDate date, DataFeedEndpoint datafeed ) {
        throw new UnsupportedOperationException( "Please implement Reader(SubscriptionDataConfig, string, DateTime, bool) on your custom data type: " + getClass().getName() );
    }

    /**
     * Return the URL string source of the file. This will be converted to a stream.
     *
     * @param config Configuration object
     * @param date Date of this source file
     * @param isLiveMode true if we're in live mode, false for backtesting mode
     * @return String URL of source file
     */
    public SubscriptionDataSource getSource(SubscriptionDataConfig config, LocalDate date, boolean isLiveMode ) {
        // stub implementation to prevent compile errors in user algorithms
        final DataFeedEndpoint dataFeed = isLiveMode ? DataFeedEndpoint.LiveTrading : DataFeedEndpoint.Backtesting;
        final String source = getSource( config, date, dataFeed );

        if( isLiveMode ) {
            // live trading by default always gets a rest endpoint
            return new SubscriptionDataSource( Paths.get( source ), SubscriptionTransportMedium.Rest );
        }

        // construct a uri to determine if we have a local or remote file
        final URI uri = URI.create( source );
        try {
            if( uri.isAbsolute() && !InetAddress.getByName( uri.getHost() ).isLoopbackAddress() )
                return new SubscriptionDataSource( Paths.get( source ), SubscriptionTransportMedium.RemoteFile );
        }
        catch( UnknownHostException e ) { }

        return new SubscriptionDataSource( Paths.get( source ), SubscriptionTransportMedium.LocalFile );
    }

    /**
     * Return the URL string source of the file. This will be converted to a stream.
     *
     * OBSOLETE: kept only for backward/forward compatibility; this overload is no longer
     * called by the LEAN engine.
     *
     * @param config Configuration object
     * @param date Date of this source file
     * @param datafeed Type of datafeed we're requesting - backtest or live
     * @return String URL of source file
     * @deprecated use {@link #getSource(SubscriptionDataConfig, LocalDate, boolean)} instead
     */
    @Deprecated
    public String getSource( SubscriptionDataConfig config, LocalDate date, DataFeedEndpoint datafeed ) {
        throw new UnsupportedOperationException( "Please implement GetSource(SubscriptionDataConfig, DateTime, bool) on your custom data type: " + getClass().getName() );
    }

    /// Return the URL String source of the file. This will be converted to a stream
    /// <param name="source Path of this source file
    /// <param name="isLiveMode true if we're in live mode, false for backtesting mode
    /// <returns>SubscriptionDataSource describing where/how to read the file.</returns>
    protected SubscriptionDataSource createSubscriptionDataSource( Path source, boolean isLiveMode ) {
        if( isLiveMode )
            // live trading by default always gets a rest endpoint
            return new SubscriptionDataSource( source, SubscriptionTransportMedium.Rest );

        // construct a uri to determine if we have a local or remote file
        try {
            final URI uri = source.toUri(); //, UriKind.RelativeOrAbsolute);
            if( uri.isAbsolute() && !InetAddress.getByName( uri.getHost() ).isLoopbackAddress() )
                return new SubscriptionDataSource( source, SubscriptionTransportMedium.RemoteFile );
        }
        catch( Exception e ) { }

        return new SubscriptionDataSource( source, SubscriptionTransportMedium.LocalFile );
    }

    /// Updates this base data with a new trade
    /// <param name="lastTrade The price of the last trade
    /// <param name="tradeSize The quantity traded
    public void updateTrade( BigDecimal lastTrade, long tradeSize ) {
        update( lastTrade, BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.valueOf( tradeSize ), BigDecimal.ZERO, BigDecimal.ZERO );
    }

    /// Updates this base data with new quote information
    /// <param name="bidPrice The current bid price
    /// <param name="bidSize The current bid size
    /// <param name="askPrice The current ask price
    /// <param name="askSize The current ask size
    public void updateQuote( BigDecimal bidPrice, long bidSize, BigDecimal askPrice, long askSize ) {
        update( BigDecimal.ZERO, bidPrice, askPrice, BigDecimal.ZERO, BigDecimal.valueOf( bidSize ), BigDecimal.valueOf( askSize ) );
    }

    /// Updates this base data with the new quote bid information
    /// <param name="bidPrice The current bid price
    /// <param name="bidSize The current bid size
    public void updateBid( BigDecimal bidPrice, long bidSize ) {
        update( BigDecimal.ZERO, bidPrice, BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.valueOf( bidSize ), BigDecimal.ZERO );
    }

    /// Updates this base data with the new quote ask information
    /// <param name="askPrice The current ask price
    /// <param name="askSize The current ask size
    public void updateAsk( BigDecimal askPrice, long askSize ) {
        update( BigDecimal.ZERO, BigDecimal.ZERO, askPrice, BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.valueOf( askSize ) );
    }

    /// Update routine to build a bar/tick from a data update.
    /// The base implementation records only the last trade price; subclasses
    /// override to use the other fields.
    /// <param name="lastTrade The last trade price
    /// <param name="bidPrice Current bid price
    /// <param name="askPrice Current asking price
    /// <param name="volume Volume of this trade
    /// <param name="bidSize The size of the current bid, if available
    /// <param name="askSize The size of the current ask, if available
    public void update( BigDecimal lastTrade, BigDecimal bidPrice, BigDecimal askPrice, BigDecimal volume, BigDecimal bidSize, BigDecimal askSize ) {
        value = lastTrade;
    }

    /// Return a new instance clone of this object, used in fill forward
    /// <param name="fillForward True if this is a fill forward clone
    /// <returns>A clone of the current object</returns>
    public BaseData clone( boolean fillForward ) {
        final BaseData clone = clone();
        clone.isFillForward = fillForward;
        return clone;
    }

    /// Return a new instance clone of this object, used in fill forward.
    /// Relies on Object.clone(); the class implements Cloneable so this no
    /// longer throws CloneNotSupportedException wrapped in RuntimeException.
    /// <returns>A clone of the current object</returns>
    @Override
    public BaseData clone() {
        try {
            return (BaseData) super.clone();
        }
        catch( CloneNotSupportedException e ) {
            throw new RuntimeException( e );
        }
    }

    /// Formats a String with the symbol and value.
    /// <returns>string - a String formatted as SPY: 167.753</returns>
    @Override
    public String toString() {
        return String.format( "%s: %s", symbol, value );
    }
}
|
#!/bin/bash
#
# Copyright 2019 Gianluca Frison, Dimitris Kouzoupis, Robin Verschueren,
# Andrea Zanelli, Niels van Duijkeren, Jonathan Frey, Tommaso Sartor,
# Branimir Novoselnik, Rien Quirynen, Rezart Qelibari, Dang Doan,
# Jonas Koenemann, Yutao Chen, Tobias Schöls, Jonas Schlagenhauf, Moritz Diehl
#
# This file is part of acados.
#
# The 2-Clause BSD License
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Dispatch on the Travis build phase stored in $SECTION.
if [ "${SECTION}" = 'before_install' ]; then
    # export GENERIC target for osx
    export BLASFEO_TARGET=GENERIC;
    export HPIPM_TARGET=GENERIC;
    export ACADOS_INSTALL_DIR="$(pwd)";
    export ACADOS_SOURCE_DIR="$(pwd)";
elif [ "${SECTION}" = 'install' ]; then
    source "${SCRIPT_DIR}/install_ccache.sh";
    source "${SHARED_SCRIPT_DIR}/install_eigen.sh";
    # CasADi is needed for every interface flavour.
    # Bug fix: the DEV_MATLAB comparison was written bare (outside [[ ]]),
    # which made the shell try to *execute* the value of $DEV_MATLAB as a
    # command instead of testing it.
    if [[ "${SWIG_MATLAB}" = 'ON' || "${SWIG_PYTHON}" = 'ON' ]] ||
       [[ "${TEMPLATE_PYTHON}" = 'ON' || "${TEMPLATE_MATLAB}" = 'ON' ]] ||
       [[ "${ACADOS_MATLAB}" = 'ON' || "${ACADOS_OCTAVE}" = 'ON' ]] ||
       [[ "${DEV_MATLAB}" = 'ON' ]];
    then
        source "${SCRIPT_DIR}/install_casadi.sh";
    fi
    if [[ "${SWIG_PYTHON}" = 'ON' || "${TEMPLATE_PYTHON}" = 'ON' ]] ;
    then
        source "${SCRIPT_DIR}/install_python_dependencies.sh";
    fi
    if [[ "${SWIG_MATLAB}" = 'ON' || "${TEMPLATE_MATLAB}" = 'ON' ]] ||
       [[ "${DEV_MATLAB}" = 'ON' || "${ACADOS_MATLAB}" = 'ON' ]];
    then
        source "${SHARED_SCRIPT_DIR}/install_matlab.sh";
    fi
    if [[ "${SWIG_MATLAB}" = 'ON' || "${SWIG_PYTHON}" = 'ON' ]];
    then
        source "${SHARED_SCRIPT_DIR}/install_swig.sh";
    fi
elif [ "${SECTION}" = 'script' ]; then
    source "${SHARED_SCRIPT_DIR}/script_acados_release.sh";
elif [ "${SECTION}" = 'after_success' ]; then
    source "${SHARED_SCRIPT_DIR}/after_success_package_release.sh";
fi
|
def filter_tweets(tweets: dict, max_chars: int) -> dict:
    """Return only the tweets whose content fits within ``max_chars``.

    Args:
        tweets: Mapping of tweet name -> tweet content string.
        max_chars: Maximum allowed content length (inclusive).

    Returns:
        A new dict containing the name -> content pairs whose content
        length is <= ``max_chars``.
    """
    return {
        name: content
        for name, content in tweets.items()
        if len(content) <= max_chars
    }
# MetaNovo release to build.
version=v1.9.4

# Keep Singularity's cache and temporary files on the scratch volume.
export SINGULARITY_CACHEDIR=/cbio/users/ptgmat003/singularity # comment this out if unsure
export SINGULARITY_TMPDIR=/cbio/users/ptgmat003/singularity/tmp

# Build a local image from the published Docker image of the same version.
singularity build "metanovo_${version}.img" "docker://thyscbio/metanovo:${version}"
# Thin / Rails script shortcuts.
alias ss='thin --stats "/thin/stats" start'
alias sg='ruby script/generate'
alias sp='ruby script/plugin'
alias sr='ruby script/runner'
alias ssp='ruby script/spec'
# Database tasks.
alias rdbm='rake db:migrate'
alias rdbtp='rake db:test:prepare'
alias migrate='rake db:migrate && rake db:test:prepare'
alias sc='ruby script/console'
# Bug fix: 'sd' was defined twice (script/destroy, then the debugger server);
# the later definition always won, so only it is kept. 'script/destroy' is
# now reachable via the new 'sds' alias.
alias sd='ruby script/server --debugger'
alias sds='ruby script/destroy'
alias devlog='tail -f log/development.log'
# Global aliases: expand anywhere on the command line to set RAILS_ENV.
alias -g RET='RAILS_ENV=test'
alias -g REP='RAILS_ENV=production'
alias -g RED='RAILS_ENV=development'
# Open a production Rails console on a remote host.
# Usage: remote_console <ssh-host> <app-root-dir>
function remote_console() {
  /usr/bin/env ssh $1 "( cd $2 && ruby script/console production )"
}
|
package main
import(
"github.com/bwmarrin/discordgo"
)
// Packs command arguments into one struct to avoid unused argument warnings.
type cmdArguments struct {
	s *discordgo.Session
	m *discordgo.MessageCreate
	args []string
}

// All command handlers will use this signature for consistency.
type cmdHandler func(args cmdArguments)

// Represents a command - each command must have its own unique Command object.
// dev marks developer-only commands: command() silently ignores them unless
// the sender is on DeveloperList.
type Command struct {
	name string
	aliases []string
	requiredArgs int
	usage string
	handler cmdHandler
	dev bool
}

// Stores the list of command names to Command objects.
var commandMap map[string]Command
// Command dispatcher: resolves cmd to a registered Command (by primary name
// first, then by alias) and forwards the arguments to its handler.
func command(s *discordgo.Session, m *discordgo.MessageCreate, args []string, cmd string) {
	resolved, found := commandMap[cmd]
	if !found {
		// Not a primary name; scan every registered command's aliases.
		for _, candidate := range commandMap {
			if searchAliases(cmd, candidate.aliases) {
				resolved = candidate
				found = true
				break
			}
		}
	}
	if !found {
		return
	}
	// Developer-only commands are silently ignored for non-developers.
	if resolved.dev && !DeveloperList.contains(m.Author.ID) {
		return
	}
	// Too few arguments: reply with the usage string instead of dispatching.
	if len(args) < resolved.requiredArgs {
		_, _ = s.ChannelMessageSend(m.ChannelID, "Usage: !"+resolved.name+" "+resolved.usage)
		return
	}
	// All good, call handler.
	resolved.handler(cmdArguments{s, m, args})
}
// searchAliases reports whether query matches any entry of aliases.
func searchAliases(query string, aliases []string) bool {
	for i := 0; i < len(aliases); i++ {
		if aliases[i] == query {
			return true
		}
	}
	return false
}
// Build the command list
func buildCommandMap() {
commandMap = make(map[string]Command)
// Generic Commands (can be used by anyone)
addCommand("assemble",
[]string{"asm", "a"},
3,
"[architecture] {instructions ...}",
cmdAssemble,
false)
addCommand("disassemble",
[]string{"disasm", "disas", "d"},
3,
"[architecture] {opcodes ...}",
cmdDisassemble,
false)
addCommand("cve",
[]string{},
2,
"[CVE Identifier]",
cmdCve,
false)
addCommand("info",
[]string{},
2,
"[term]",
cmdInfo,
false)
addCommand("manual",
[]string{},
2,
"[architecture]",
cmdManual,
false)
addCommand("retrick",
[]string{},
0,
"",
cmdReTrick,
false)
addCommand("exploittrick",
[]string{"expltrick"},
0,
"",
cmdExploitTrick,
false)
addCommand("commands",
[]string{"cmds"},
0,
"",
cmdCommands,
false)
addCommand("motivation",
[]string{"motivateme"},
0,
"",
cmdMotivation,
false)
/*addCommand("readelf",
[]string{"elf"},
2,
"[link] {options ...}",
cmdReadelf,
false)*/
// Developer Only Commands
addCommand("devmode",
[]string{"developermode"},
0,
"",
cmdDevMode,
true)
addCommand("mem",
[]string{"memory"},
0,
"",
cmdMem,
true)
addCommand("devmode",
[]string{"developermode"},
0,
"",
cmdDevMode,
true)
addCommand("test",
[]string{},
0,
"",
cmdTest,
true)
addCommand("ping",
[]string{},
0,
"",
cmdPing,
true)
addCommand("say",
[]string{},
2,
"",
cmdSay,
true)
addCommand("uptime",
[]string{"up"},
0,
"",
cmdUptime,
true)
addCommand("restart",
[]string{"refresh"},
0,
"",
cmdRestart,
true)
addCommand("die",
[]string{"kill"},
0,
"",
cmdDie,
true)
}
// addCommand registers a command in the global command map under its
// primary name.
func addCommand(name string, aliases []string, requiredArgs int, usage string, handler cmdHandler, devOnly bool) {
	commandMap[name] = Command{
		name:         name,
		aliases:      aliases,
		requiredArgs: requiredArgs,
		usage:        usage,
		handler:      handler,
		dev:          devOnly,
	}
}
// cmdCommands replies with the list of user-facing commands, wrapped in a
// Discord code block.
func cmdCommands(params cmdArguments) {
	s := params.s
	m := params.m
	commands := "```"
	commands += "!assemble/asm [architecture] {instructions ...} - Assembles given instructions into opcodes. Instructions are separated by a ';'.\n"
	commands += "!disassemble/disasm [architecture] {opcodes ...} - Disassembles given opcodes into instructions. Give in 'bb' format separated by a space.\n"
	commands += "!cve [cve identifier] - Displays information on a given CVE from NVD.\n"
	commands += "!info [identifier] - Gives information on the given word (like a dictionary).\n"
	commands += "!retrick - Gives you a random RE trick.\n"
	// Bug fix: this entry previously read "!expltrick = Gives ..."; it now
	// uses the same "-" separator as every other entry.
	commands += "!expltrick - Gives you a random exploit dev trick.\n"
	commands += "!manual [architecture] - Links a PDF manual for the given architecture.\n"
	commands += "!motivation - you can do it!\n"
	//commands += "!readelf [link] {options ...} - Reads and gives information about the ELF given by the link.\n"
	commands += "!commands/cmds - You are here.\n"
	commands += "```"
	_, _ = s.ChannelMessageSend(m.ChannelID, "Here's a list of my commands: "+commands)
}
// Motivation!
func cmdMotivation(params cmdArguments) {
s := params.s
m := params.m
motivationalJapaneseFisherman := "https://www.youtube.com/watch?v=0Lq0d-cPpS4"
_, _ = s.ChannelMessageSend(m.ChannelID, motivationalJapaneseFisherman)
} |
#include <string.h>
/* Reverse the NUL-terminated string in place.
 * Uses two converging indices instead of the midpoint-counting loop;
 * safe for empty and single-character strings (the loop never runs). */
void reverse_string(char *str)
{
    size_t left = 0;
    size_t right = strlen(str);

    while (right > left + 1)
    {
        right--;
        char tmp = str[left];
        str[left] = str[right];
        str[right] = tmp;
        left++;
    }
}
#!/bin/bash
set -e
## To run:
## Suppose:
# - gcloud creds are in ~/.gcloud_creds/<CREDS_FILENAME>.json
# - kaggle creds are in ~/.kaggle/<CREDS_FILENAME>.json
# - git ssh creds are in ~/.ssh/id_rsa_<...>
# - git known hosts are in ~/.ssh/known_hosts
# - existing Jenkins pvc is called "restored-jenkins"
## Then run:
# ./setup.sh ~/.gcloud_creds/<CREDS_FILENAME>.json ~/.kaggle/<CREDS_FILENAME>.json ~/.ssh/id_rsa_<...> ~/.ssh/known_hosts restored-jenkins
GCLOUD_SVC_ACCOUNT_FILE=$1
KAGGLE_CREDS_FILE=$2
GIT_PRIVATE_CREDS_FILE=$3
GIT_KNOWN_HOSTS_FILE=$4
EXISTING_JENKINS_PVC=$5

HELM_NAMESPACE=helm

# Ask the user to confirm the step just announced.
# Returns 0 on y/Y, non-zero otherwise. (Replaces three copies of the same
# read/REPLY/regex boilerplate.)
confirm() {
    read -p "Is this ok (y/n)? " -n 1 -r
    echo # (optional) move to a new line
    [[ $REPLY =~ ^[Yy]$ ]]
}

if [[ -z "$JENKINS_BACKUP_BUCKET" ]]; then
    echo "Jenkins backup bucket env var not set! Cannot proceed."
    exit 1
else
    echo "Using Jenkins backup bucket: $JENKINS_BACKUP_BUCKET"
fi

#KFAPP=kf-canela-cocoa
KUBEFLOW_SRC=~/.kubeflow

if [[ -z "$GKE_CLUSTER_NAME" ]]; then
    echo "GKE Cluster name is empty! Cannot proceed."
    exit 1
else
    echo "Using GKE Cluster name: $GKE_CLUSTER_NAME"
fi

# GKE setup is mandatory: declining aborts the whole script.
echo "Creating gcloud setup"
if confirm; then
    echo "Proceeding..."
else
    echo "Ok. Exiting"
    exit 1
fi
./setup_gke.sh $GKE_CLUSTER_NAME

# kfctl download is optional: declining just skips it.
echo "Downloading kfctl"
if confirm; then
    echo "Proceeding..."
    ./setup_kfctl.sh $KUBEFLOW_SRC
else
    echo "Ok. Skipping the download and proceeding."
fi

# Kubeflow setup is optional as well.
echo "Setting up kubeflow."
if confirm; then
    echo "Proceeding..."
    ./setup_kubeflow.sh
else
    echo "Ok. Skipping the download and proceeding."
fi

./setup_secrets.sh $GCLOUD_SVC_ACCOUNT_FILE $KAGGLE_CREDS_FILE $GIT_PRIVATE_CREDS_FILE $GIT_KNOWN_HOSTS_FILE
./setup_experiment_roles.sh
./setup_helm.sh $HELM_NAMESPACE
./setup_dashboard.sh

if [[ -z "$EXISTING_JENKINS_PVC" ]]; then
    echo "No Existing jenkins pvc arg provided. Creating a new one"
else
    echo "Creating PVC ${EXISTING_JENKINS_PVC}"
    ./setup_existing_pvc.sh ${EXISTING_JENKINS_PVC} ${JENKINS_BACKUP_BUCKET}
fi

./setup_jenkins.sh "${EXISTING_JENKINS_PVC}" $HELM_NAMESPACE
./backup_jenkins.sh ${JENKINS_BACKUP_BUCKET} ${EXISTING_JENKINS_PVC:-canela-jenkins}
./setup_monitoring.sh
|
<reponame>williamcheong/origen
require "spec_helper"
# Specs for Origen.mode -- the application-wide debug/production/simulation
# switch. The mode is global state, so each example sets it explicitly
# before asserting.
describe "Origen.mode" do
  it "returns an instance of Origen::Mode" do
    Origen.mode.class.should == Origen::Mode
  end

  it "the mode can be set by assigning a symbol" do
    Origen.mode = :debug
    Origen.mode.class.should == Origen::Mode
    Origen.mode.debug?.should == true
    Origen.mode = :production
    Origen.mode.class.should == Origen::Mode
    Origen.mode.debug?.should == false
  end

  # freeze pins the current mode: assignments are ignored until unfreeze.
  it "can be frozen" do
    Origen.mode = :debug
    Origen.mode.debug?.should == true
    Origen.mode.freeze
    Origen.mode = :production
    Origen.mode.debug?.should == true
    Origen.mode.unfreeze
    Origen.mode = :production
    Origen.mode.debug?.should == false
  end

  it "it can be compared to a symbol" do
    Origen.mode = :debug
    Origen.mode.class.should == Origen::Mode
    (Origen.mode == :debug).should == true
    (Origen.mode == :production).should == false
    Origen.mode = :production
    Origen.mode.class.should == Origen::Mode
    (Origen.mode == :debug).should == false
    (Origen.mode == :production).should == true
  end

  it "simulation is considered a debug mode" do
    Origen.mode = :simulation
    Origen.mode.simulation?.should == true
    Origen.mode.debug?.should == true
  end

  it "is production by default" do
    Origen::Mode.new.production?.should == true
  end

  # Assignments accept unambiguous prefixes of the mode names.
  it "can be set by an abbreviation" do
    Origen.mode = :debug
    (Origen.mode == :debug).should == true
    Origen.mode = "prod"
    (Origen.mode == :production).should == true
    Origen.mode = "de"
    (Origen.mode == :debug).should == true
    Origen.mode = "sim"
    (Origen.mode == :simulation).should == true
  end

  # Exercises the CLI: the session default can be changed by `origen mode`,
  # and a target may still override it.
  it "can be set by the mode command" do
    cmd("origen mode production")
    Origen.app.session(true) # Reload the session
    load_target "empty"
    (Origen.mode == :production).should == true
    cmd("origen mode debug")
    Origen.app.session(true) # Reload the session
    load_target "empty"
    (Origen.mode == :debug).should == true
    # Verify that the target can override the session default
    cmd("origen mode production")
    Origen.app.session(true) # Reload the session
    load_target "debug"
    (Origen.mode == :debug).should == true
    # Back to debug for future tests
    cmd("origen mode debug")
    Origen.app.session(true) # Reload the session
  end
end
|
import React, { Component } from "react";
import {
View,
TouchableOpacity,
Text,
TextInput,
StyleSheet
} from "react-native";
import { connect } from "react-redux";
import CardField from "./CardField";
import { addDeck } from "../actions";
import { NavigationActions } from "react-navigation";
import { white } from "../utils/colors";
import { saveDeck } from "../utils/api";
import GenericButton from "./GenericButton";
import styles from "./Styles.js";
// FIXME: When creating a deck the system does not get refreshed correctly...
class CreateDeck extends Component {
state = {
title: "",
allFilled: false
};
submit = (() => {
const { decks } = this.props;
const { title } = this.state;
if (title) {
const newDeck = {
title: title,
cards: []
};
// Update Redux.
this.props.dispatch(
addDeck({
[title]: newDeck
})
);
// Save to 'DB' (AsyncStorage).
saveDeck(newDeck);
// Clean the title so if create deck is clicked again it will be empty.
this.setState({
title: '',
allFilled: false
});
// Redirect the user to Deck Details.
this.gotoDeckDetails(title);
} else {
this.setState({
allFilled: true
});
}
});
// Navigate to the DeckDetail View, passing a title property.
gotoDeckDetails = title => {
this.props.navigation.navigate("DeckDetails", { title: title });
};
// Display the warning message.
showAlertMessage = () => {
if (this.state.allFilled) {
return <Text style={styles.message}>The title is required!</Text>;
}
};
render() {
return (
<View style={stylesCustom.container}>
<CardField fieldText={"Deck Title?"} />
{this.showAlertMessage()}
<TextInput
onChangeText={title => this.setState({ title })}
placeholder="Deck Title"
style={styles.input}
value={this.state.title}
/>
<GenericButton
btnStyle={"greenBtn"}
btnText={"Submit"}
onPress={this.submit}
textStyle={"greenBtnText"}
/>
</View>
);
}
}
// Local styles for this screen: a full-height white container.
const stylesCustom = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: white
  }
});
// Expose the entire Redux state to the component under the `decks` prop.
function mapStateToProps(state) {
  return { decks: state };
}
export default connect(mapStateToProps)(CreateDeck);
|
package com.uber.nullaway;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class DummyOptionsConfigTest {

    DummyOptionsConfig dummyOptionsConfig;

    @Before
    public void setup() {
        dummyOptionsConfig = new DummyOptionsConfig();
    }

    /**
     * DummyOptionsConfig is expected to throw a runtime exception if ever
     * used (see documentation on that class). This test guarantees that all
     * methods declared in the class throw that exception.
     */
    @Test
    public void allDeclaredMethodsThrowIllegalStateException() {
        Class<? extends DummyOptionsConfig> klass = dummyOptionsConfig.getClass();
        for (Method method : klass.getDeclaredMethods()) {
            if (method.getName().contains("jacocoInit")) {
                // Declared method added by jacoco coverage reporting (via
                // reflection?). Plots within plots...
                continue;
            }
            Class<?>[] parameterTypes = method.getParameterTypes();
            Object[] nullParams = Arrays.stream(parameterTypes).map((t) -> null).toArray();
            Exception reflectionException =
                assertThrows(
                    InvocationTargetException.class,
                    () -> {
                        method.invoke(dummyOptionsConfig, nullParams);
                    },
                    String.format(
                        "Expected method DummyOptionsConfig.%s to fail with IllegalStateException.",
                        method.getName()));
            // The real exception, not wrapped by reflection exceptions.
            Throwable cause = reflectionException.getCause();
            assertThat(cause, instanceOf(IllegalStateException.class));
            IllegalStateException exception = (IllegalStateException) cause;
            // Bug fix: assertEquals takes (expected, actual); the arguments
            // were reversed, which yields a misleading failure message.
            assertEquals(DummyOptionsConfig.ERROR_MESSAGE, exception.getMessage());
        }
    }
}
|
#! /bin/sh
# Remove build artifacts: object files, symbol files, editor backup files,
# shared libraries, and the built executables (heap, zheap, xzheap).
rm -f *.o *.sym *~ *.so heap zheap xzheap
<reponame>extratone/neocities
window.YTD.ip_audit.part0 = [
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-12-16T00:41:07.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-12-15T23:38:26.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-12-15T13:04:48.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-12-07T19:58:03.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-12-06T22:28:51.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-12-05T12:41:10.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-22T02:56:47.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-20T12:00:03.000Z",
"loginIp" : "192.168.3.11"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-20T04:53:57.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-18T04:32:52.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-17T21:30:51.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-14T09:48:35.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-13T04:38:37.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-12T23:58:04.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-02T07:01:26.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-11-01T04:54:06.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-31T12:24:07.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-30T01:32:01.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-29T21:07:15.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-28T20:01:05.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-27T01:56:12.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-26T23:50:00.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-25T21:36:15.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-22T20:07:22.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-21T17:26:38.000Z",
"loginIp" : "172.16.31.10"
}
},
{
"ipAudit" : {
"accountId" : "1625103836",
"createdAt" : "2021-10-19T21:37:41.000Z",
"loginIp" : "172.16.31.10"
}
}
] |
<gh_stars>0
package pt.up.fe.els2021.functions;
import com.fasterxml.jackson.annotation.JsonProperty;
import pt.up.fe.els2021.Table;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
 * Table transformation that projects a table onto a configured list of
 * columns, in the configured order. When a column name occurs more than once
 * in the source table, the last occurrence wins (map insertion overwrites),
 * exactly as before.
 */
public record SelectFunction(@JsonProperty("columns") List<String> columnNames) implements TableFunction {

    @Override
    public Table apply(Table table) {
        // Map each source column name to its (last) index.
        var indexByName = new HashMap<String, Integer>();
        var sourceNames = table.columnNames();
        for (int idx = 0, n = sourceNames.size(); idx < n; idx++) {
            indexByName.put(sourceNames.get(idx), idx);
        }
        // Pick the requested columns out of the source table, in order.
        var projected = columnNames.stream()
                .map(name -> table.columns().get(indexByName.get(name)))
                .toList();
        return new Table(columnNames, projected);
    }

    /** Incrementally assembles the column list for a SelectFunction. */
    public static final class Builder extends TableFunction.Builder {
        List<String> columns = new ArrayList<>();

        @Override
        public TableFunction build() {
            return new SelectFunction(columns);
        }

        /** Appends a column name to the selection; returns this builder. */
        public Builder column(String column) {
            columns.add(column);
            return this;
        }
    }
}
|
#!/bin/bash
#
# Copyright IBM Corp All Rights Reserved
#
# SPDX-License-Identifier: Apache-2.0
#
# Exit on first error
set -e

# don't rewrite paths for Windows Git Bash users
export MSYS_NO_PATHCONV=1
starttime=$(date +%s)

# Normalise the requested chaincode language (default: go) to lower case.
CC_SRC_LANGUAGE=${1:-"go"}
CC_SRC_LANGUAGE=`echo "$CC_SRC_LANGUAGE" | tr [:upper:] [:lower:]`

# Map the language onto the peer's runtime name and the chaincode source path.
case "$CC_SRC_LANGUAGE" in
go|golang)
    CC_RUNTIME_LANGUAGE=golang
    CC_SRC_PATH=github.com/chaincode/fabcar/go
    ;;
java)
    CC_RUNTIME_LANGUAGE=java
    CC_SRC_PATH=/opt/gopath/src/github.com/chaincode/fabcar/java
    ;;
javascript)
    CC_RUNTIME_LANGUAGE=node # chaincode runtime language is node.js
    CC_SRC_PATH=/opt/gopath/src/github.com/chaincode/fabcar/javascript
    ;;
typescript)
    CC_RUNTIME_LANGUAGE=node # chaincode runtime language is node.js
    CC_SRC_PATH=/opt/gopath/src/github.com/chaincode/fabcar/typescript
    echo Compiling TypeScript code into JavaScript ...
    pushd ../chaincode/fabcar/typescript
    npm install
    npm run build
    popd
    echo Finished compiling TypeScript code into JavaScript
    ;;
*)
    echo The chaincode language ${CC_SRC_LANGUAGE} is not supported by this script
    echo Supported chaincode languages are: go, javascript, and typescript
    exit 1
    ;;
esac
# clean the keystore
rm -rf ./hfc-key-store
# launch network; create channel and join peer to channel
cd ../first-network
# Tear down any previous network, then bring it back up.
# NOTE(review): the -a/-n/-s couchdb flags are passed straight to byfn.sh --
# confirm their meaning against that script's usage text.
echo y | ./byfn.sh down
echo y | ./byfn.sh up -a -n -s couchdb
# Paths (inside the cli container) to each org's admin MSP and the TLS root
# certificates of one peer per org, plus the orderer's TLS CA cert.
CONFIG_ROOT=/opt/gopath/src/github.com/hyperledger/fabric/peer
ORG1_MSPCONFIGPATH=${CONFIG_ROOT}/crypto/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp
ORG1_TLS_ROOTCERT_FILE=${CONFIG_ROOT}/crypto/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/ca.crt
ORG2_MSPCONFIGPATH=${CONFIG_ROOT}/crypto/peerOrganizations/org2.example.com/users/Admin@org2.example.com/msp
ORG2_TLS_ROOTCERT_FILE=${CONFIG_ROOT}/crypto/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/ca.crt
ORDERER_TLS_ROOTCERT_FILE=${CONFIG_ROOT}/crypto/ordererOrganizations/example.com/orderers/orderer.example.com/msp/tlscacerts/tlsca.example.com-cert.pem
set -x

# Install the SimpleToken chaincode on one peer via the cli container.
#   $1 = MSP ID            (e.g. Org1MSP)
#   $2 = peer address      (host:port, e.g. peer0.org1.example.com:7051)
#   $3 = MSP config path inside the container
#   $4 = TLS root cert file inside the container
# Replaces four copy-pasted docker exec blocks that differed only in these
# four values; behavior is unchanged.
install_chaincode() {
    local msp_id=$1
    local peer_address=$2
    local msp_path=$3
    local tls_cert=$4
    echo "Installing smart contract on ${peer_address%%:*}"
    docker exec \
        -e CORE_PEER_LOCALMSPID=${msp_id} \
        -e CORE_PEER_ADDRESS=${peer_address} \
        -e CORE_PEER_MSPCONFIGPATH=${msp_path} \
        -e CORE_PEER_TLS_ROOTCERT_FILE=${tls_cert} \
        cli \
        peer chaincode install \
        -n SimpleToken \
        -v 1.0 \
        -p "$CC_SRC_PATH" \
        -l "$CC_RUNTIME_LANGUAGE"
}

install_chaincode Org1MSP peer0.org1.example.com:7051  ${ORG1_MSPCONFIGPATH} ${ORG1_TLS_ROOTCERT_FILE}
install_chaincode Org1MSP peer1.org1.example.com:8051  ${ORG1_MSPCONFIGPATH} ${ORG1_TLS_ROOTCERT_FILE}
install_chaincode Org2MSP peer0.org2.example.com:9051  ${ORG2_MSPCONFIGPATH} ${ORG2_TLS_ROOTCERT_FILE}
install_chaincode Org2MSP peer1.org2.example.com:10051 ${ORG2_MSPCONFIGPATH} ${ORG2_TLS_ROOTCERT_FILE}
# Instantiate the chaincode on mychannel. The endorsement policy requires a
# member of BOTH orgs to endorse.
echo "Instantiating smart contract on mychannel"
docker exec \
  -e CORE_PEER_LOCALMSPID=Org1MSP \
  -e CORE_PEER_MSPCONFIGPATH=${ORG1_MSPCONFIGPATH} \
  cli \
  peer chaincode instantiate \
    -o orderer.example.com:7050 \
    -C mychannel \
    -n SimpleToken \
    -l "$CC_RUNTIME_LANGUAGE" \
    -v 1.0 \
    -c '{"Args": []}' \
    -P "AND('Org1MSP.member','Org2MSP.member')" \
    --tls \
    --cafile ${ORDERER_TLS_ROOTCERT_FILE} \
    --peerAddresses peer0.org1.example.com:7051 \
    --tlsRootCertFiles ${ORG1_TLS_ROOTCERT_FILE}
echo "Waiting for instantiation request to be committed ..."
sleep 10
# Invoke initLedger against all four peers; repeated --peerAddresses /
# --tlsRootCertFiles flags pair up positionally.
echo "Submitting initLedger transaction to smart contract on mychannel"
echo "The transaction is sent to all of the peers so that chaincode is built before receiving the following requests"
docker exec \
  -e CORE_PEER_LOCALMSPID=Org1MSP \
  -e CORE_PEER_MSPCONFIGPATH=${ORG1_MSPCONFIGPATH} \
  cli \
  peer chaincode invoke \
    -o orderer.example.com:7050 \
    -C mychannel \
    -n SimpleToken \
    -c '{"function":"initLedger","Args": ["{\"symbol\":\"VIR\",\"name\":\"Simple Token\", \"supply\":\"100\"}"]}' \
    --waitForEvent \
    --tls \
    --cafile ${ORDERER_TLS_ROOTCERT_FILE} \
    --peerAddresses peer0.org1.example.com:7051 \
    --peerAddresses peer1.org1.example.com:8051 \
    --peerAddresses peer0.org2.example.com:9051 \
    --peerAddresses peer1.org2.example.com:10051 \
    --tlsRootCertFiles ${ORG1_TLS_ROOTCERT_FILE} \
    --tlsRootCertFiles ${ORG1_TLS_ROOTCERT_FILE} \
    --tlsRootCertFiles ${ORG2_TLS_ROOTCERT_FILE} \
    --tlsRootCertFiles ${ORG2_TLS_ROOTCERT_FILE}
set +x
# Print follow-up instructions for the sample client applications (the
# heredoc body is runtime output and must stay byte-identical).
cat <<EOF
Total setup execution time : $(($(date +%s) - starttime)) secs ...
Next, use the FabCar applications to interact with the deployed FabCar contract.
The FabCar applications are available in multiple programming languages.
Follow the instructions for the programming language of your choice:
JavaScript:
Start by changing into the "javascript" directory:
cd javascript
Next, install all required packages:
npm install
Then run the following applications to enroll the admin user, and register a new user
called user1 which will be used by the other applications to interact with the deployed
FabCar contract:
node enrollAdmin
node registerUser
You can run the invoke application as follows. By default, the invoke application will
create a new car, but you can update the application to submit other transactions:
node invoke
You can run the query application as follows. By default, the query application will
return all cars, but you can update the application to evaluate other transactions:
node query
TypeScript:
Start by changing into the "typescript" directory:
cd typescript
Next, install all required packages:
npm install
Next, compile the TypeScript code into JavaScript:
npm run build
Then run the following applications to enroll the admin user, and register a new user
called user1 which will be used by the other applications to interact with the deployed
FabCar contract:
node dist/enrollAdmin
node dist/registerUser
You can run the invoke application as follows. By default, the invoke application will
create a new car, but you can update the application to submit other transactions:
node dist/invoke
You can run the query application as follows. By default, the query application will
return all cars, but you can update the application to evaluate other transactions:
node dist/query
Java:
Start by changing into the "java" directory:
cd java
Then, install dependencies and run the test using:
mvn test
The test will invoke the sample client app which perform the following:
- Enroll admin and user1 and import them into the wallet (if they don't already exist there)
- Submit a transaction to create a new car
- Evaluate a transaction (query) to return details of this car
- Submit a transaction to change the owner of this car
- Evaluate a transaction (query) to return the updated details of this car
EOF
|
def remove_duplicates(nums):
    """Return a new list with duplicates removed, preserving first-seen order.

    Args:
        nums: Iterable of hashable values (e.g. numbers, strings).

    Returns:
        A list containing each distinct value of ``nums`` once, in the
        order it first appeared.
    """
    seen = set()
    result = []
    for num in nums:
        # O(1) set membership replaces the original's O(n) list scan,
        # turning the whole pass from O(n^2) into O(n).
        if num not in seen:
            seen.add(num)
            result.append(num)
    return result
# NOTE(review): `nums` is not defined anywhere in this snippet -- it must be
# supplied earlier (e.g. nums = [10, 20, 10, 5, 30, 5]) or this raises
# NameError. Confirm against the full file.
uniques = remove_duplicates(nums)
print(uniques) # prints [10, 20, 5, 30]
#include "catch.hpp"
#include "boson/boson.h"
#include "boson/syscalls.h"
#include "boson/net/socket.h"
#include <unistd.h>
#include <iostream>
#include "boson/logger.h"
#include "boson/semaphore.h"
#include "boson/select.h"
#ifdef BOSON_USE_VALGRIND
#include "valgrind/valgrind.h"
#endif
using namespace boson;
using namespace std::literals;
namespace {
// Scale factor for timing-sensitive tests: 10x when actually running under
// Valgrind (and built with BOSON_USE_VALGRIND), 1x otherwise.
inline int time_factor() {
#ifdef BOSON_USE_VALGRIND
  if (RUNNING_ON_VALGRIND) {
    return 10;
  }
  return 1;
#else
  return 1;
#endif
}
}
// Integration tests for boson's socket syscall wrappers: accept/connect,
// send/recv, and reconnecting to the same port.
TEST_CASE("Sockets - Simple accept/connect", "[syscalls][sockets][accept][connect]") {
  boson::debug::logger_instance(&std::cout);
  SECTION("Simple connection") {
    boson::run(1, [&]() {
      // Both fibers signal completion through this 2-slot channel.
      boson::channel<std::nullptr_t,2> tickets;
      start(
          [](auto tickets) -> void {
            // Listen on a port
            int listening_socket = boson::net::create_listening_socket(10101);
            struct sockaddr_in cli_addr;
            socklen_t clilen = sizeof(cli_addr);
            // NOTE(review): the accepted fd is neither checked nor closed
            // here -- looks like an fd leak; confirm whether boson reclaims
            // it at shutdown.
            int new_connection = boson::accept(listening_socket, (struct sockaddr*)&cli_addr, &clilen);
            tickets << nullptr;
            boson::close(listening_socket);
          }, tickets);
      start(
          [](auto tickets) -> void {
            // Connect to the listener on localhost:10101.
            struct sockaddr_in cli_addr;
            cli_addr.sin_addr.s_addr = ::inet_addr("127.0.0.1");
            cli_addr.sin_family = AF_INET;
            cli_addr.sin_port = htons(10101);
            socklen_t clilen = sizeof(cli_addr);
            int sockfd = boson::socket(AF_INET, SOCK_STREAM, 0);
            //::fcntl(sockfd, F_SETFL, O_NONBLOCK);
            int new_connection = boson::connect(sockfd, (struct sockaddr*)&cli_addr, clilen);
            CHECK((0 == new_connection));
            tickets << nullptr;
            ::shutdown(sockfd, SHUT_WR);
            boson::close(sockfd);
          },tickets);
      // Wait for both fibers to finish before leaving boson::run.
      std::nullptr_t dummy;
      CHECK(tickets >> dummy);
      CHECK(tickets >> dummy);
    });
  }
  SECTION("Simple connection with data") {
    boson::run(1, [&]() {
      boson::channel<std::nullptr_t,2> tickets;
      start(
          [](auto tickets) -> void {
            // Listen on a port
            int listening_socket = boson::net::create_listening_socket(10101);
            struct sockaddr_in cli_addr;
            socklen_t clilen = sizeof(cli_addr);
            int new_connection = boson::accept(listening_socket, (struct sockaddr*)&cli_addr, &clilen);
            // Expect exactly one size_t with value 1 from the client.
            size_t buffer = 0;
            ssize_t rc = boson::recv(new_connection, &buffer, sizeof(size_t), 0);
            CHECK(rc == sizeof(size_t));
            CHECK(buffer == 1);
            tickets << nullptr;
            boson::close(listening_socket);
          }, tickets);
      start(
          [](auto tickets) -> void {
            struct sockaddr_in cli_addr;
            cli_addr.sin_addr.s_addr = ::inet_addr("127.0.0.1");
            cli_addr.sin_family = AF_INET;
            cli_addr.sin_port = htons(10101);
            socklen_t clilen = sizeof(cli_addr);
            int sockfd = boson::socket(AF_INET, SOCK_STREAM, 0);
            //::fcntl(sockfd, F_SETFL, O_NONBLOCK);
            int new_connection = boson::connect(sockfd, (struct sockaddr*)&cli_addr, clilen);
            CHECK((0 == new_connection));
            // Send one size_t with value 1.
            size_t data = 1;
            ssize_t rc = boson::send(sockfd, &data, sizeof(size_t), 0);
            CHECK(rc == sizeof(size_t));
            tickets << nullptr;
            ::shutdown(sockfd, SHUT_WR);
            boson::close(sockfd);
          },tickets);
      std::nullptr_t dummy;
      CHECK(tickets >> dummy);
      CHECK(tickets >> dummy);
    });
  }
  SECTION("Reconnect") {
    boson::run(1, [&]() {
      // NOTE(review): these channels (and `sink` below) are captured but
      // never used -- likely leftovers from a previous synchronization
      // scheme; confirm before removing.
      boson::channel<std::nullptr_t,1> tickets_for_accept, tickets_for_connect;
      start(
          [tickets_for_accept, tickets_for_connect]() mutable {
            // Listen on a port
            int listening_socket = boson::net::create_listening_socket(10101);
            struct sockaddr_in cli_addr;
            socklen_t clilen = sizeof(cli_addr);
            // Accept first
            int new_connection = boson::accept(listening_socket, (struct sockaddr*)&cli_addr, &clilen);
            CHECK(new_connection > 0);
            // Accept 2nd
            //new_connection = boson::accept(listening_socket, (struct sockaddr*)&cli_addr, &clilen);
            //CHECK(new_connection > 0);
            boson::close(new_connection);
          });
      start(
          [tickets_for_accept, tickets_for_connect]() mutable {
            std::nullptr_t sink;
            // Prepare connection
            struct sockaddr_in cli_addr;
            cli_addr.sin_addr.s_addr = ::inet_addr("127.0.0.1");
            cli_addr.sin_family = AF_INET;
            cli_addr.sin_port = htons(10101);
            socklen_t clilen = sizeof(cli_addr);
            // start First
            int sockfd = boson::socket(AF_INET, SOCK_STREAM, 0);
            int sockfd2 = boson::socket(AF_INET, SOCK_STREAM, 0);
            int rc = boson::connect(sockfd, (struct sockaddr*)&cli_addr, clilen);
            CHECK(rc == 0);
            ::shutdown(sockfd, SHUT_WR);
            rc = boson::close(sockfd);
            CHECK(rc == 0);
            //// start second
            cli_addr.sin_addr.s_addr = ::inet_addr("127.0.0.1");
            cli_addr.sin_family = AF_INET;
            cli_addr.sin_port = htons(10101);
            rc = boson::connect(sockfd2, (struct sockaddr*)&cli_addr, clilen);
            //boson::debug::log(strerror(errno));
            CHECK(rc == 0);
            ::shutdown(sockfd2, SHUT_WR);
            boson::close(sockfd2);
          });
    });
  }
}
|
//
// PageViewController.h
// GraphicsTest
//
// Created by yiyaowang on 15/10/20.
// Copyright © 2015年 zengyao. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "RootViewController.h"

// Paged-scrolling demo controller: owns a UIScrollView, a UIPageControl
// indicator and a button, and acts as the scroll view's delegate so it can
// react to scrolling.
@interface PageViewController : UIViewController<UIScrollViewDelegate>
{
    UIScrollView* pageScrollview;   // paging content view
    UIPageControl* pageControl;     // page-dot indicator
    UIButton* button;               // NOTE(review): purpose not visible in this header
}
@end
|
#!/bin/sh
# Trigger a certificate renewal by running the command stored in $RENEW.
# $RENEW is expected to be provided by the invoking environment.
set -e

# Fail fast with a clear message instead of silently doing nothing when the
# renewal command has not been configured.
: "${RENEW:?RENEW is not set - export the renewal command before running}"

echo "Asking for certificate renewal"
# Intentionally unquoted so a value such as "certbot renew -q" is split into
# a command and its arguments.
$RENEW
#! /bin/bash
# Build the AirSim libraries (debug). Pass --gcc to build with gcc instead
# of the default clang/libc++ toolchain.

# get path of current script: https://stackoverflow.com/a/39340259/207661
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
pushd "$SCRIPT_DIR" >/dev/null

set -e  # abort on the first failing command
set -x  # trace commands for easier build debugging

MIN_GCC_VERSION=6.0.0
gccBuild=false

# Succeeds (exit 0) when $1 <= $2 in version-sort order.
function version_less_than_equal_to() { test "$(printf '%s\n' "$@" | sort -V | head -n 1)" = "$1"; }

# Parse command line arguments
while [[ $# -gt 0 ]]
do
key="$1"

case $key in
    --gcc)
    gccBuild=true
    shift # past argument
    ;;
    *)
    # BUGFIX: unrecognized arguments previously left $# unchanged, so the
    # loop spun forever; consume (and ignore) them instead.
    shift
    ;;
esac
done
# check for rpclib
if [ ! -d "./external/rpclib/rpclib-2.2.1" ]; then
    echo "ERROR: new version of AirSim requires newer rpclib."
    echo "please run setup.sh first and then run build.sh again."
    exit 1
fi

# check for local cmake build created by setup.sh
if [ -d "./cmake_build" ]; then
    if [ "$(uname)" == "Darwin" ]; then
        # macOS has no `readlink -f`; use GNU coreutils' greadlink instead
        CMAKE="$(greadlink -f cmake_build/bin/cmake)"
    else
        CMAKE="$(readlink -f cmake_build/bin/cmake)"
    fi
else
    # fall back to whatever cmake is on PATH
    CMAKE=$(which cmake)
fi
# set up paths of cc and cxx compiler
if $gccBuild; then
    # variable for build output
    build_dir=build_gcc_debug
    # gcc tools
    gcc_ver=$(gcc -dumpfullversion)
    # BUGFIX: this previously looked up `which cmake`, so the "is gcc
    # installed" guard below never actually checked for gcc.
    gcc_path=$(which gcc)
    if [[ "$gcc_path" == "" ]] ; then
        echo "ERROR: run setup.sh to install a good version of gcc."
        exit 1
    fi
    # Too-old system gcc: fall back to the explicitly versioned gcc-6.
    if version_less_than_equal_to $gcc_ver $MIN_GCC_VERSION; then
        export CC="gcc-6"
        export CXX="g++-6"
    else
        export CC="gcc"
        export CXX="g++"
    fi
else
    # check for correct version of llvm
    if [[ ! -d "llvm-source-50" ]]; then
        if [[ -d "llvm-source-39" ]]; then
            echo "Hello there! We just upgraded AirSim to Unreal Engine 4.18."
            echo "Here are few easy steps for upgrade so everything is new and shiny :)"
            echo "https://github.com/Microsoft/AirSim/blob/master/docs/unreal_upgrade.md"
            exit 1
        else
            # NOTE(review): unlike the branches above, this path does not
            # exit; the libc++ check below will stop the build instead.
            echo "The llvm-source-50 folder was not found! Mystery indeed."
        fi
    fi

    # check for libc++
    if [[ !(-d "./llvm-build/output/lib") ]]; then
        echo "ERROR: clang++ and libc++ is necessary to compile AirSim and run it in Unreal engine"
        echo "Please run setup.sh first."
        exit 1
    fi

    # variable for build output
    build_dir=build_debug
    if [ "$(uname)" == "Darwin" ]; then
        export CC=/usr/local/opt/llvm-5.0/bin/clang-5.0
        export CXX=/usr/local/opt/llvm-5.0/bin/clang++-5.0
    else
        export CC="clang-5.0"
        export CXX="clang++-5.0"
    fi
fi
#install EIGEN library
if [[ !(-d "./AirLib/deps/eigen3/Eigen") ]]; then
    echo "### Eigen is not installed. Please run setup.sh first."
    exit 1
fi

echo "putting build in $build_dir folder, to clean, just delete the directory..."

# this ensures the cmake files will be built in our $build_dir instead.
if [[ -f "./cmake/CMakeCache.txt" ]]; then
    rm "./cmake/CMakeCache.txt"
fi
if [[ -d "./cmake/CMakeFiles" ]]; then
    rm -rf "./cmake/CMakeFiles"
fi

# Configure only on the first run; delete $build_dir to force a reconfigure.
if [[ ! -d $build_dir ]]; then
    mkdir -p $build_dir
    pushd $build_dir >/dev/null
    "$CMAKE" ../cmake -DCMAKE_BUILD_TYPE=Debug \
        || (popd && rm -r $build_dir && exit 1)
    popd >/dev/null
fi

pushd $build_dir >/dev/null
# final linking of the binaries can fail due to a missing libc++abi library
# (happens on Fedora, see https://bugzilla.redhat.com/show_bug.cgi?id=1332306).
# So we only build the libraries here for now
# NOTE(review): `nproc` is not available on stock macOS — confirm coreutils
# is installed when building there.
make -j`nproc`
popd >/dev/null

mkdir -p AirLib/lib/x64/Debug
mkdir -p AirLib/deps/rpclib/lib
mkdir -p AirLib/deps/MavLinkCom/lib
cp $build_dir/output/lib/libAirLib.a AirLib/lib
cp $build_dir/output/lib/libMavLinkCom.a AirLib/deps/MavLinkCom/lib
cp $build_dir/output/lib/librpc.a AirLib/deps/rpclib/lib/librpc.a

# Update AirLib/lib, AirLib/deps, Plugins folders with new binaries
rsync -a --delete $build_dir/output/lib/ AirLib/lib/x64/Debug
rsync -a --delete external/rpclib/rpclib-2.2.1/include AirLib/deps/rpclib
rsync -a --delete MavLinkCom/include AirLib/deps/MavLinkCom
rsync -a --delete AirLib Unreal/Plugins/AirSim/Source

# Update Blocks project
Unreal/Environments/Blocks/clean.sh
mkdir -p Unreal/Environments/Blocks/Plugins
rsync -a --delete Unreal/Plugins/AirSim Unreal/Environments/Blocks/Plugins
# Update Neigh project
rsync -a --delete Unreal/Plugins/AirSim Unreal/Environments/Neigh/Plugins

set +x  # stop tracing before printing the human-readable summary

echo ""
echo ""
echo "=================================================================="
echo " AirSim plugin is built! Here's how to build Unreal project."
echo "=================================================================="
echo "If you are using Blocks environment, its already updated."
echo "If you are using your own environment, update plugin using,"
echo "rsync -a --delete Unreal/Plugins path/to/MyUnrealProject"
echo ""
echo "For help see:"
echo "https://github.com/Microsoft/AirSim/blob/master/docs/build_linux.md"
echo "=================================================================="

popd >/dev/null
|
#include <iostream>
#include <vector>
using namespace std;
// Recursive worker: arr and subset are passed by reference so the two
// vectors are not copied at every level of the recursion (the original
// copied both per call, O(n) extra work at each node of the call tree).
static void generateSubsetsImpl(const std::vector<int>& arr, int k, int n,
                                std::vector<int>& subset, int index) {
    // A complete k-subset: print it (one trailing space per element, as in
    // the original output format) and backtrack.
    if (static_cast<int>(subset.size()) == k) {
        for (int x : subset) {
            std::cout << x << " ";
        }
        std::cout << "\n";
        return;
    }
    if (index == n) {
        return;  // ran out of elements without reaching size k
    }
    // Branch 1: exclude arr[index]; Branch 2: include it, then undo.
    generateSubsetsImpl(arr, k, n, subset, index + 1);
    subset.push_back(arr[index]);
    generateSubsetsImpl(arr, k, n, subset, index + 1);
    subset.pop_back();
}

// Prints every size-k subset of the first n elements of arr to stdout, one
// subset per line, in "exclude-first" recursion order. The by-value
// signature is kept identical to the original for caller compatibility.
void generateSubsets(std::vector<int> arr, int k, int n, std::vector<int> subset, int index) {
    generateSubsetsImpl(arr, k, n, subset, index);
}
// Demo driver: print every 2-element subset of {1, 2, 3}.
int main() {
    const std::vector<int> values{1, 2, 3};
    generateSubsets(values, 2, static_cast<int>(values.size()),
                    std::vector<int>{}, 0);
    return 0;
}
#!/bin/bash -e
scriptdir=$(realpath $(dirname "$0"))
source ${scriptdir}/../common/common.sh
# Run a command, prefixed with sudo everywhere except on macOS where docker
# does not need elevated privileges.
function mysudo() {
  if [[ $DISTRO == "macosx" ]]; then
    "$@"
  else
    # BUGFIX: quote "$@" — the original's unquoted $@ re-split arguments
    # containing whitespace into multiple words.
    sudo "$@"
  fi
}
# Distro and behaviour knobs, overridable from the environment.
LINUX_DISTR=${LINUX_DISTR:-'centos'}
LINUX_DISTR_VER=${LINUX_DISTR_VER:-}
CONTRAIL_KEEP_LOG_FILES=${CONTRAIL_KEEP_LOG_FILES:-'false'}

mkdir -p ${WORKSPACE}/output/logs
logfile="${WORKSPACE}/output/logs/build-tf-dev-env.log"
echo "Building tf-dev-env image: ${DEVENV_IMAGE}" | tee $logfile

build_opts="--build-arg LC_ALL=en_US.UTF-8 --build-arg LANG=en_US.UTF-8 --build-arg LANGUAGE=en_US.UTF-8"
build_opts+=" --network host --no-cache --tag ${DEVENV_IMAGE} --tag ${CONTAINER_REGISTRY}/${DEVENV_IMAGE} -f Dockerfile.${LINUX_DISTR} ."

if [[ "$ENABLE_RHSM_REPOS" == 'true' ]] ; then
  build_opts+=" --build-arg ENABLE_RHSM_REPOS=$ENABLE_RHSM_REPOS"
fi

# ${var,,} (lowercasing) needs bash 4+, which stock macOS lacks; skip there.
if [[ $DISTRO != 'macosx' ]] ; then
  CONTRAIL_KEEP_LOG_FILES=${CONTRAIL_KEEP_LOG_FILES,,}
fi

if [[ "${CONTRAIL_KEEP_LOG_FILES}" != 'true' ]] ; then
  # Stream output to the terminal while logging; PIPESTATUS[0] is docker's
  # exit code (not tee's), and the log is removed on success.
  mysudo docker build $build_opts 2>&1 | tee -a $logfile
  result=${PIPESTATUS[0]}
  if [ $result -eq 0 ]; then
    rm -f $logfile
  fi
else
  # skip output into terminal
  mysudo docker build $build_opts >> $logfile 2>&1
  # No pipe here, so PIPESTATUS[0] is equivalent to $?.
  result=${PIPESTATUS[0]}
fi

exit $result
|
<reponame>lananh265/social-network
"use strict";

// Auto-generated icon definition: a serialized SVG tree (viewBox plus
// nested g/path nodes) for a "monitor" icon, consumed by an icon-rendering
// component. Do not hand-edit the path data; regenerate from the SVG.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.monitor = void 0;
var monitor = {
  "viewBox": "0 0 512 512",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "path",
      "attribs": {
        "d": "M468.7,64H43.3c-6,0-11.3,5-11.3,11.1v265.7c0,6.2,5.2,11.1,11.3,11.1h425.4c6,0,11.3-5,11.3-11.1V75.1\r\n\t\tC480,69,474.8,64,468.7,64z M448,320H64V96h384V320z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M468.7,64H43.3c-6,0-11.3,5-11.3,11.1v265.7c0,6.2,5.2,11.1,11.3,11.1h425.4c6,0,11.3-5,11.3-11.1V75.1\r\n\t\tC480,69,474.8,64,468.7,64z M448,320H64V96h384V320z"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M302.5,448c28-0.5,41.5-3.9,29-12.5c-12.5-8.7-28.5-15.3-29-22.5c-0.3-3.7-1.7-45-1.7-45H256h-44.8c0,0-1.5,41.3-1.7,45\r\n\t\tc-0.5,7.1-16.5,13.8-29,22.5c-12.5,8.7,1,12,29,12.5H302.5z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M302.5,448c28-0.5,41.5-3.9,29-12.5c-12.5-8.7-28.5-15.3-29-22.5c-0.3-3.7-1.7-45-1.7-45H256h-44.8c0,0-1.5,41.3-1.7,45\r\n\t\tc-0.5,7.1-16.5,13.8-29,22.5c-12.5,8.7,1,12,29,12.5H302.5z"
        },
        "children": []
      }]
    }]
  }]
};
exports.monitor = monitor;
<filename>vnpy/api/gateio/vngateio_ws.py
# encoding: utf-8
import json
from time import time, sleep
from threading import Thread
from datetime import datetime
import base64
import hmac
import hashlib
import json
import gzip, binascii, os
import urllib , requests
import websocket
import time
from vnpy.trader.vtFunction import systemSymbolToVnSymbol , VnSymbolToSystemSymbol
import json
GATEIO_SOCKET_URL = "wss://ws.gate.io/v3"
'''
'''
class Gate_WSDataApi(object):
    """Websocket-based market-data API object for the gate.io exchange (v3).

    Wraps a ``websocket.WebSocketApp`` running in a background thread and
    exposes query/subscribe helpers speaking gate.io's JSON-RPC-style
    websocket protocol (``ticker.query``, ``depth.subscribe``, ...).
    """

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.host = ''        # server address (websocket URL)
        self.apiKey = ''      # API key
        self.secretKey = ''   # API secret

        self.ws = None        # websocket application object
        self.thread = None    # worker thread running the websocket loop

        # NOTE(review): not referenced by any method in this class body —
        # presumably managed by code elsewhere; confirm before removing.
        self.subscribeStrList = set([])

    #----------------------------------------------------------------------
    def reconnect(self):
        """Re-establish the websocket connection."""
        # First close the previous connection
        #self.close()

        # Then perform the reconnect
        self.ws = websocket.WebSocketApp(self.host,
                                         on_message=self.onMessage,
                                         on_error=self.onError,
                                         on_close=self.onClose,
                                         on_open=self.onOpen)

        # args presumably map to run_forever(sockopt, sslopt, ping_interval,
        # ping_timeout) — TODO confirm against the websocket-client version.
        self.thread = Thread(target=self.ws.run_forever , args = (None , None , 60, 30))
        self.thread.start()

    #----------------------------------------------------------------------
    def connect_Subpot(self, apiKey , secretKey , trace = False):
        """Connect to the gate.io spot websocket endpoint.

        NOTE(review): "Subpot" looks like a typo for "Spot"; the name is
        kept as-is because it is part of the public interface.
        """
        self.host = GATEIO_SOCKET_URL
        self.apiKey = apiKey
        self.secretKey = secretKey

        self.trace = trace
        websocket.enableTrace(trace)

        self.ws = websocket.WebSocketApp(self.host,
                                         on_message=self.onMessage,
                                         on_error=self.onError,
                                         on_close=self.onClose,
                                         on_open=self.onOpen)

        self.thread = Thread(target = self.ws.run_forever , args = (None , None , 60, 30))
        # self.thread_heart = Thread(target = self.run_forever_heart)

        self.thread.start()
        # self.thread_heart.start()

    #----------------------------------------------------------------------
    def onMessage(self, ws, evt):
        """
        Message push event.
        :param ws: websocket interface
        :param evt: event payload (JSON string from the server)
        :return:
        """
        print(u'vngate_nwe.onMessage:{}'.format(evt))

    #----------------------------------------------------------------------
    def onError(self, ws, evt):
        """
        Interface error push event.
        :param ws:
        :param evt:
        :return:
        """
        print(u'vngate_nwe.onApiError:{}'.format(evt))

    #----------------------------------------------------------------------
    def onClose(self, ws):
        """
        Connection closed event.
        :param ws:
        :return:
        """
        print(u'vngate_nwe.onClose')

    #----------------------------------------------------------------------
    def onOpen(self, ws):
        """
        Connection opened event.
        :param ws:
        :return:
        """
        print(u'vngate_nwe.onOpen')

    #----------------------------------------------------------------------
    def sendSocketCmd( self, client_id , method , json_params = []):
        """Serialize and send one JSON-RPC-style command over the socket.

        NOTE(review): mutable default argument; harmless as long as no
        caller mutates it — confirm.
        """
        send_json = {
            "id": client_id,
            "method": method,
            "params": json_params
        }
        self.ws.send( json.dumps(send_json))

    '''
    vngate_nwe.onMessage:{"error": null, "result": {"period": 86400, "open": "18.1604", "close": "17.03"
    , "high": "18.53", "low": "16.54", "last": "17.03", "change": "-6.22", "quoteVolume": "1015826.99811
    89865", "baseVolume": "17910280.42194529534205249261"}, "id": 1}
    '''
    #----------------------------------------------------------------------
    def querySpotTicker(self , u_id , symbol_pair = "EOS_USDT" , time_period = 86400):
        """One-shot ticker query (``ticker.query``) for symbol_pair."""
        symbol_pair = systemSymbolToVnSymbol(symbol_pair)
        self.sendSocketCmd( u_id , "ticker.query" , [ symbol_pair , time_period])

    '''
    vngate_nwe.onMessage:{
        "error": null,
        "result": {
            "asks": [
                [
                    "16.9507",
                    "293.6242489299"
                ],
                [
                    "16.9586",
                    "1591.5376104592"
                ],
            ],
            "bids": [
                [
                    "16.95",
                    "0.1094"
                ],
                [
                    "16.9471",
                    "163.602"
                ],
                [
                    "16.9431",
                    "1.4607"
                ],
            ]
        },
        "id": 1
    }
    '''
    #----------------------------------------------------------------------
    def querySpotDepth(self , u_id , symbol_pair = "EOS_USDT" , limit = 5 , interval = "0.00000001"):
        """One-shot order-book query (``depth.query``) for symbol_pair."""
        symbol_pair = systemSymbolToVnSymbol(symbol_pair)
        self.sendSocketCmd( u_id , "depth.query" , [ symbol_pair , limit , interval])

    '''
    {
        "error": null,
        "result": [
            {
                "id": 7177814,
                "time": 1523887673.562782,
                "price": "6.05",
                "amount": "20",
                "type": "buy"
            },
            {
                "id": 7177813,
                "time": 1523887354.256974,
                "price": "6.05",
                "amount": "15",
                "type": "buy"
            },
        ],
        "id": 12309
    }
    '''
    #----------------------------------------------------------------------
    def querySpotTrades(self, u_id , symbol_pair = "EOS_USDT" , limit = 2 , last_id = 7177813):
        """One-shot recent-trades query (``trades.query``) for symbol_pair."""
        symbol_pair = systemSymbolToVnSymbol(symbol_pair)
        self.sendSocketCmd( u_id , "trades.query" , [ symbol_pair , limit , last_id])

    '''
    {
        "error": null,
        "result": [
            [
                1492358400, time
                "7000.00",  open
                "8000.0",   close
                "8100.00",  highest
                "6800.00",  lowest
                "1000.00"   volume
                "123456.00" amount
                "BTC_USDT"  market name
            ]
            ...
        ]
        "id": 12312
    }
    '''
    #----------------------------------------------------------------------
    def querySpotKline(self, u_id , symbol_pair = "BTC_USDT", start = 1516951219 , end_time = 1516951219 , interval = 1800):
        """One-shot candlestick query (``kline.query``) for symbol_pair."""
        symbol_pair = systemSymbolToVnSymbol(symbol_pair)
        self.sendSocketCmd( u_id , "kline.query" , [ symbol_pair , start , end_time, interval])

    '''
    vngate_nwe.onMessage:{"error": null, "result": {"status": "success"}, "id": 2}
    vngate_nwe.onMessage:{"method": "ticker.update", "params": ["BOT_USDT", {"period": 86400, "open": "0
    .7383", "close": "0.9048", "high": "1.015", "low": "0.715", "last": "0.9048", "change": "22.55", "qu
    oteVolume": "4565863.1552367147", "baseVolume": "4071168.7349472209511"}], "id": null}
    vngate_nwe.onMessage:{"method": "ticker.update", "params": ["BOT_USDT", {"period": 86400, "open": "0
    .7383", "close": "0.9049", "high": "1.015", "low": "0.715", "last": "0.9049", "change": "22.56", "qu
    oteVolume": "4571805.6819467147", "baseVolume": "4076546.0501166889511"}], "id": null}
    '''
    #----------------------------------------------------------------------
    def subscribeSpotTicker(self, u_id , symbol_pair = "BOT_USDT"):
        """Subscribe to ``ticker.update`` pushes for symbol_pair."""
        symbol_pair = systemSymbolToVnSymbol(symbol_pair)
        self.sendSocketCmd( u_id , "ticker.subscribe" , [ symbol_pair ])

    '''
    vngate_nwe.onMessage:{"method": "depth.update", "params": [true, {"asks": [["0.893", "813.385"], ["0
    .8931", "102.65936009"], ["0.8932", "288.8898"], ["0.9058", "2028"], ["0.9067", "10"], ["0.9076", "4
    87.11"], ["0.9084", "1000"], ["0.9085", "17.49966971"], ["0.9086", "49.468551235"], ["0.9087", "1950
    .59"]], "bids": [["0.8929", "88.76"], ["0.8921", "198.01888"], ["0.892", "256.09"], ["0.8919", "3280
    .5348"], ["0.8803", "1382"], ["0.8802", "2257.925"], ["0.8801", "16.58862017"], ["0.88", "300"], ["0
    .8779", "822.56"], ["0.8669", "774.0223"]]}, "BOT_USDT"], "id": null}
    vngate_nwe.onMessage:{"method": "depth.update", "params": [false, {"bids": [["0.8929", "110.0925"]]}
    , "BOT_USDT"], "id": null}
    Can only subscribe one market at the same time, market list is not supported currently. For multiple subscriptions, only the last one takes effect.
    '''
    #----------------------------------------------------------------------
    def subscribeSpotDepth(self, u_id , symbol_pair = "BOT_USDT" , limit = 30, interval = "0.00000001"):
        """Subscribe to ``depth.update`` pushes for symbol_pair."""
        symbol_pair = systemSymbolToVnSymbol(symbol_pair)
        self.sendSocketCmd( u_id , "depth.subscribe" , [ symbol_pair , limit , interval])

    '''
    vngate_nwe.onMessage:{"error": null, "result": {"status": "success"}, "id": 3}
    vngate_nwe.onMessage:{"method": "trades.update", "params": ["BOT_USDT", [{"id": 56675623, "time": 15
    25592829.2169299, "price": "0.9096", "amount": "310.3478", "type": "sell"}, {"id": 56675622, "time":
    1525592829.2167261, "price": "0.9096", "amount": "461", "type": "sell"}, {"id": 56667395
    , "time": 1525591676.7347641, "price": "0.9085", "amount": "847.41", "type": "sell"}]], "id": null}
    '''
    #----------------------------------------------------------------------
    def subscribeSpotTrades(self, u_id , symbol_pair = "BOT_USDT" ):
        """Subscribe to ``trades.update`` pushes for symbol_pair."""
        symbol_pair = systemSymbolToVnSymbol(symbol_pair)
        self.sendSocketCmd( u_id , "trades.subscribe" , [ symbol_pair ])

    '''
    vngate_nwe.onMessage:{"error": null, "result": {"status": "success"}, "id": 3}
    vngate_nwe.onMessage:{"method": "kline.update", "params": [[1525591800, "0.9085", "0.9174", "0.9217"
    , "0.9049", "78364.430712655", "71495.1179278982815", "BOT_USDT"]], "id": null}
    '''
    #----------------------------------------------------------------------
    def subscribeSpotKline(self, u_id , symbol_pair = "BOT_USDT" , interval = 1800):
        """Subscribe to ``kline.update`` pushes for symbol_pair at `interval` seconds."""
        symbol_pair = systemSymbolToVnSymbol(symbol_pair)
        self.sendSocketCmd( u_id , "kline.subscribe" , [ symbol_pair , interval])
import React from 'react';
import axios from 'axios';
class MyForm extends React.Component {
state = {
name: '',
email: ''
};
handleChange = (e) => {
this.setState({
[e.target.name]: e.target.value
});
};
handleSubmit = (e) => {
e.preventDefault();
const userData = {
name: this.state.name,
email: this.state.email
};
axios
.post('/api/endpoint', userData)
.then(res => console.log(res.data))
.catch(err => console.log(err));
};
render() {
return (
<div>
<form onSubmit={this.handleSubmit}>
<input
type="text"
name="name"
placeholder="Name"
value={this.state.name}
onChange={this.handleChange}
/>
<input
type="email"
name="email"
placeholder="Email"
value={this.state.email}
onChange={this.handleChange}
/>
<button type="submit">Submit</button>
</form>
</div>
);
}
}
export default MyForm; |
#!/usr/bin/env bash
# Print a docassemble log file from inside the container for a domain.
# Usage: $0 <domain> <log-filename>
DOMAIN=$1
FILENAME=$2
# Quote the expansions so values containing spaces survive word splitting;
# the remote cat command is passed as a single argument.
./exec_in_container.sh "$DOMAIN" "cat /usr/share/docassemble/log/$FILENAME"
|
<filename>pgadmin/dlg/dlgMainConfig.cpp
//////////////////////////////////////////////////////////////////////////
//
// pgAdmin III - PostgreSQL Tools
//
// Copyright (C) 2002 - 2016, The pgAdmin Development Team
// This software is released under the PostgreSQL Licence
//
// dlgMainConfig.cpp - Configure setting
//
//////////////////////////////////////////////////////////////////////////
// App headers
#include "pgAdmin3.h"
#include "dlg/dlgMainConfig.h"
// Icons
#include "images/property.pngc"
// Event table: route button clicks and edit-control changes to handlers.
BEGIN_EVENT_TABLE(dlgMainConfig, DialogWithHelp)
	EVT_BUTTON (wxID_OK, dlgMainConfig::OnOK)
	EVT_BUTTON (wxID_CANCEL, dlgMainConfig::OnCancel)
	EVT_TEXT(XRCID("txtValue"), dlgMainConfig::OnChange)
	EVT_TEXT(XRCID("cbValue"), dlgMainConfig::OnChange)
	EVT_CHECKBOX(XRCID("chkValue"), dlgMainConfig::OnChange)
END_EVENT_TABLE()

// Shorthand accessors for the XRC-defined controls of this dialog.
#define chkEnabled CTRL_CHECKBOX("chkEnabled")
#define cbValue CTRL_COMBOBOX("cbValue")
#define txtValue CTRL_TEXT("txtValue")
#define chkValue CTRL_CHECKBOX("chkValue")
#define txtComment CTRL_TEXT("txtComment")
#define stName CTRL_STATIC("stName")
#define stDescription CTRL_STATIC("stDescription")

// Human-readable descriptions indexed by pgSettingItem's context value.
// NOTE(review): order must stay in sync with that enum — confirm before editing.
static const wxChar *contextStrings[] =
{
	__("Internal - not externally settable"),
	__("Postmaster - set on server start"),
	__("SIGHUP - reloaded on SIGHUP signal"),
	__("Backend - overridable in individual backend"),
	__("Suset - may be overridden by superuser"),
	__("Userlimit - may be set by user"),
	__("Userset - may be set by user"),
	__("Unknown")
};

// Human-readable descriptions of where a setting's current value came from,
// indexed by pgSettingItem's source value.
// NOTE(review): order must stay in sync with that enum — confirm before editing.
static const wxChar *sourceStrings[] =
{
	__("Variable has still its initial default value"),
	__("Set via environment variable"),
	__("Set in configuration file"),
	__("Set on command line"),
	__("Set by unprivileged command"),
	__("Set in database variables"),
	__("Set in user variables"),
	__("Set in client parameters"),
	__("set by Override"),
	__("Set interactively"),
	__("Set by test"),
	__("Set by session parameters")
};
// Build the "edit one configuration setting" dialog for *_item: loads the
// XRC layout, populates the controls from the setting's pending line, and
// composes the descriptive text shown below the setting name.
dlgMainConfig::dlgMainConfig(pgFrame *parent, pgSettingItem *_item) :
	DialogWithHelp((frmMain *)parent)
{
	SetFont(settings->GetSystemFont());
	LoadResource((wxWindow *)parent, wxT("dlgMainConfig"));

	// Icon
	SetIcon(*property_png_ico);
	RestorePosition();
	item = _item;
	SetTitle(wxString::Format(_("Configuration setting \"%s\""), item->name.c_str()));

	// Setup the default values
	cbValue->Hide();
	chkValue->Hide();

	// Lazily create the editable copy of the config line: clone the
	// original line when one exists, otherwise start from an empty line
	// bound to this item.
	if (!item->newLine)
	{
		if (item->orgLine)
			item->newLine = new pgConfigLine(item->orgLine);
		else
		{
			item->newLine = new pgConfigLine();
			item->newLine->item = item;
		}
	}
	chkEnabled->SetValue(!item->newLine->isComment);
	txtValue->SetValue(item->newLine->value);
	txtComment->SetValue(item->newLine->comment);

	// Render the setting name in bold.
	wxFont fntLabel = stName->GetFont();
	//ABDUL: 4 Sep 2021:BEGIN
#if wxCHECK_VERSION(3, 1, 0)
	fntLabel.SetWeight(wxFONTWEIGHT_BOLD);
#else
	fntLabel.SetWeight(wxBOLD);
#endif
	stName->SetFont(fntLabel);
	stName->SetLabel(item->name);

	// Compose the description: category, context, and (when known) the
	// currently active value together with its source.
	wxString str;
	str += _("Category") + wxString(wxT(": ")) + item->category + END_OF_LINE;
	str += _("Context") + wxString(wxT(": "));
	str += wxGetTranslation(contextStrings[item->context]);
	str += END_OF_LINE;
	if (item->source != pgSettingItem::PGC_UNKNOWNSOURCE)
	{
		str += _("Current value") + wxString(wxT(": "));
		if (item->value == wxT("unset") && item->source == pgSettingItem::PGC_DEFAULT)
			str += _("unset");
		else
			str += item->value + END_OF_LINE wxT(" ") + wxGetTranslation(sourceStrings[item->source]);
		str += END_OF_LINE;
	}
	stDescription->SetLabel(str + END_OF_LINE + item->short_desc + END_OF_LINE + item->extra_desc);
	btnOK->Enable();
}
// Persist the dialog's screen position for the next time it is opened.
dlgMainConfig::~dlgMainConfig()
{
	SavePosition();
}
// Current contents of the value text box.
wxString dlgMainConfig::GetValue()
{
	return txtValue->GetValue();
}
// Help page opened by the dialog's Help button.
wxString dlgMainConfig::GetHelpPage() const
{
	return wxT("pg/runtime-config");
}
// Change events are wired up in the event table but currently need no
// handling; the empty body keeps the bindings valid.
void dlgMainConfig::OnChange(wxCommandEvent &ev)
{
}
// Copy the edited value/comment/enabled state back into the setting's
// pending line and close the dialog with wxID_OK.
void dlgMainConfig::OnOK(wxCommandEvent &ev)
{
#ifdef __WXGTK__
	// On GTK a disabled default button can still receive the click event.
	if (!btnOK->IsEnabled())
		return;
#endif
	item->newLine->value = GetValue();
	item->newLine->comment = txtComment->GetValue();
	// An unchecked "enabled" box means the line is written as a comment.
	item->newLine->isComment = !chkEnabled->GetValue();

	EndModal(wxID_OK);
}
// Discard any edits and close the dialog with wxID_CANCEL.
void dlgMainConfig::OnCancel(wxCommandEvent &ev)
{
	EndModal(wxID_CANCEL);
}
// Show the dialog modally; returns the wxID_* code chosen by the user.
int dlgMainConfig::Go()
{
	// Set focus on the Password textbox and show modal
	return ShowModal();
}
|
import torch
def move_to_gpu_if_available(x: torch.Tensor) -> torch.Tensor:
    """Return ``x`` moved to the GPU when CUDA is usable, otherwise ``x`` unchanged."""
    return x.cuda() if torch.cuda.is_available() else x
#!/bin/bash
##########################################################################
#Aqueduct - Compliance Remediation Content
#Copyright (C) 2011,2012
# Vincent C. Passaro (vincent.passaro@gmail.com)
# Shannon Mitchell (shannon.mitchell@fusiontechnology-llc.com)
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor,
#Boston, MA 02110-1301, USA.
##########################################################################
###################### Fotis Networks LLC ###############################
# By Vincent C. Passaro #
# Fotis Networks LLC #
# Vincent[.]Passaro[@]fotisnetworks[.]com #
# www.fotisnetworks.com #
###################### Fotis Networks LLC ###############################
#
# _____________________________________________________________________
# | Version | Change Information | Author | Date |
# |__________|_______________________|____________________|____________|
# | 1.0 | Initial Script | Vincent C. Passaro | 1-Aug-2012 |
# | | Creation | | |
# |__________|_______________________|____________________|____________|
#
#######################DISA INFORMATION##################################
# Group ID (Vulid): V-24357
# Group Title: GEN002870
# Rule ID: SV-37948r1_rule
# Severity: low
# Rule Version (STIG-ID): GEN002870
# Rule Title: The system must be configured to send audit records to a
# remote audit server.
#
# Vulnerability Discussion: Audit records contain evidence that can be
# used in the investigation of compromised systems. To prevent this
# evidence from compromise, it must be sent to a separate system
# continuously. Methods for sending audit records include, but are not
# limited to, system audit tools used to send logs directly to another host
# or through the system's syslog service to another host.
#
# Responsibility: System Administrator
# IAControls: ECTB-1
#
# Check Content:
#
# Verify the system is configured to forward all audit records to a
# remote server. If the system is not configured to provide this function,
# this is a finding.
# Procedure:
# Ensure the audit option for the kernel is enabled.
# grep "audit" /boot/grub/grub.conf
# If the kernel does not have the "audit=1" option specified, this is a
# finding.
# Ensure the kernel auditing is active.
# grep "active" /etc/audisp/plugins.d/syslog.conf
# If the "active" setting is either missing or not set to "yes", this is a
# finding.
# Ensure all audit records are forwarded to a remote server.
# grep "\*.\*" /etc/syslog.conf |grep "@" (for syslog)
# or:
# grep "\*.\*" /etc/rsyslog.conf | grep "@" (for rsyslog)
# If neither of these lines exist, it is a finding.
#
# Fix Text:
#
# Configure the system to send audit records to a remote server.
# Procedure:
# These instructions assume a known remote audit server is available to
# this system.
# Modify /etc/syslog.conf to contain a line sending all audit records to a
# remote audit server. The server is specified by placing an "@" before the
# DNS name or IP address in the line.
# *.* @<remote audit server>
# Edit the "active" line in /etc/audisp/plugins.d/syslog.conf so it shows
# "active = yes".
# Restart audit and syslog:
# service auditd restart
# service syslog restart
#######################DISA INFORMATION##################################
# Global Variables
# STIG rule identifier this remediation script addresses (GEN002870).
PDI=GEN002870

# Start-Lockdown
|
package ru.job4j.lspparkingtask.vehicles;
/**
 * A truck: a marker subclass that currently inherits all behavior from
 * {@link Vehicle} unchanged.
 */
public class Truck extends Vehicle{
}
|
<filename>src/com/twu/biblioteca/resources/Movie.java
package com.twu.biblioteca.resources;
import com.twu.biblioteca.resources.Resource;
import java.util.Objects;
/**
* Created by fmorais on 8/4/15.
*/
/**
 * A movie available in the library.
 * <p>
 * Equality is delegated to {@link #showDetails()}: two movies are equal
 * when their rendered detail strings match. {@link #hashCode()} is now
 * overridden to stay consistent with that definition; the original code
 * omitted it, breaking the equals/hashCode contract for hash-based
 * collections.
 */
public class Movie extends Resource {
    private String director;
    int rating; // NOTE(review): scale not visible here — confirm with callers

    public Movie(String name, String director, int year, int rating) {
        super(name, year);
        this.director = director;
        this.rating = rating;
    }

    @Override
    public boolean equals(Object other) {
        if (!(other instanceof Movie)) {
            return false;
        }
        return this.showDetails().equals(((Movie) other).showDetails());
    }

    @Override
    public int hashCode() {
        // Must agree with equals(), which compares showDetails() strings.
        return showDetails().hashCode();
    }

    @Override
    public String showDetails() {
        return String.format("%s - %s - %d - %d", name, director, year, rating);
    }
}
|
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_ASMJS_ASM_TYPES_H_
#define V8_ASMJS_ASM_TYPES_H_
#include <string>
#include "src/base/compiler-specific.h"
#include "src/base/macros.h"
#include "src/globals.h"
#include "src/zone/zone-containers.h"
#include "src/zone/zone.h"
namespace v8 {
namespace internal {
namespace wasm {
class AsmType;
class AsmFFIType;
class AsmFunctionType;
class AsmOverloadedFunctionType;
class AsmFunctionTableType;
// List of V(CamelName, string_name, number, parent_types)
//
// Each entry describes one asm.js value type as a 32-bit bitset: bit
// `number` is the type's own bit, and `parent_types` ORs in the bitsets of
// the types it implicitly converts to (see DEFINE_TAG in AsmValueType).
#define FOR_EACH_ASM_VALUE_TYPE_LIST(V)                                        \
  /* These tags are not types that are expressable in the asm source. They */ \
  /* are used to express semantic information about the types they tag. */    \
  V(Heap, "[]", 1, 0)                                                          \
  V(FloatishDoubleQ, "floatish|double?", 2, 0)                                 \
  V(FloatQDoubleQ, "float?|double?", 3, 0)                                     \
  /* The following are actual types that appear in the asm source. */         \
  V(Void, "void", 4, 0)                                                        \
  V(Extern, "extern", 5, 0)                                                    \
  V(DoubleQ, "double?", 6, kAsmFloatishDoubleQ | kAsmFloatQDoubleQ)            \
  V(Double, "double", 7, kAsmDoubleQ | kAsmExtern)                             \
  V(Intish, "intish", 8, 0)                                                    \
  V(Int, "int", 9, kAsmIntish)                                                 \
  V(Signed, "signed", 10, kAsmInt | kAsmExtern)                                \
  V(Unsigned, "unsigned", 11, kAsmInt)                                         \
  V(FixNum, "fixnum", 12, kAsmSigned | kAsmUnsigned)                           \
  V(Floatish, "floatish", 13, kAsmFloatishDoubleQ)                             \
  V(FloatQ, "float?", 14, kAsmFloatQDoubleQ | kAsmFloatish)                    \
  V(Float, "float", 15, kAsmFloatQ)                                            \
  /* Types used for expressing the Heap accesses. */                          \
  V(Uint8Array, "Uint8Array", 16, kAsmHeap)                                    \
  V(Int8Array, "Int8Array", 17, kAsmHeap)                                      \
  V(Uint16Array, "Uint16Array", 18, kAsmHeap)                                  \
  V(Int16Array, "Int16Array", 19, kAsmHeap)                                    \
  V(Uint32Array, "Uint32Array", 20, kAsmHeap)                                  \
  V(Int32Array, "Int32Array", 21, kAsmHeap)                                    \
  V(Float32Array, "Float32Array", 22, kAsmHeap)                                \
  V(Float64Array, "Float64Array", 23, kAsmHeap)                                \
  /* None is used to represent errors in the type checker. */                 \
  V(None, "<none>", 31, 0)

// List of V(CamelName)
// The heap-allocated callable (non-value) types; used below to stamp out
// the As<CamelName>() down-cast declarations.
#define FOR_EACH_ASM_CALLABLE_TYPE_LIST(V) \
  V(FunctionType)                          \
  V(FFIType)                               \
  V(OverloadedFunctionType)                \
  V(FunctionTableType)
// Value types are encoded directly inside the AsmType* pointer itself: the
// low bit is kAsmValueTypeTag and the remaining bits hold the type's
// bitset, so no heap allocation is needed for them.
class AsmValueType {
 public:
  typedef uint32_t bitset_t;

  enum : uint32_t {
#define DEFINE_TAG(CamelName, string_name, number, parent_types) \
  kAsm##CamelName = ((1u << (number)) | (parent_types)),
    FOR_EACH_ASM_VALUE_TYPE_LIST(DEFINE_TAG)
#undef DEFINE_TAG
    kAsmUnknown = 0,
    kAsmValueTypeTag = 1u
  };

 private:
  friend class AsmType;

  // Reinterprets type as an AsmValueType when its tag bit is set; returns
  // nullptr for heap-allocated (callable) types.
  static AsmValueType* AsValueType(AsmType* type) {
    if ((reinterpret_cast<uintptr_t>(type) & kAsmValueTypeTag) ==
        kAsmValueTypeTag) {
      return reinterpret_cast<AsmValueType*>(type);
    }
    return nullptr;
  }

  // Recovers the type's bitset by stripping the tag bit from `this`.
  bitset_t Bitset() const {
    DCHECK((reinterpret_cast<uintptr_t>(this) & kAsmValueTypeTag) ==
           kAsmValueTypeTag);
    return static_cast<bitset_t>(reinterpret_cast<uintptr_t>(this) &
                                 ~kAsmValueTypeTag);
  }

  // Fabricates the tagged-pointer representation for the given bitset.
  static AsmType* New(bitset_t bits) {
    DCHECK_EQ((bits & kAsmValueTypeTag), 0u);
    return reinterpret_cast<AsmType*>(
        static_cast<uintptr_t>(bits | kAsmValueTypeTag));
  }

  // AsmValueTypes can't be created except through AsmValueType::New.
  DISALLOW_IMPLICIT_CONSTRUCTORS(AsmValueType);
};
// Base class for all heap-allocated (non-value) asm.js types: functions,
// FFI signatures, overload sets and function tables. Zone-allocated.
class V8_EXPORT_PRIVATE AsmCallableType : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  // Human-readable type name, used in diagnostics.
  virtual std::string Name() = 0;

  // True when this callable accepts `args` and produces `return_type`.
  virtual bool CanBeInvokedWith(AsmType* return_type,
                                const ZoneVector<AsmType*>& args) = 0;

// Safe down-casts: each subclass overrides its own As<CamelName>() to
// return `this`; the default returns nullptr.
#define DECLARE_CAST(CamelName) \
  virtual Asm##CamelName* As##CamelName() { return nullptr; }
  FOR_EACH_ASM_CALLABLE_TYPE_LIST(DECLARE_CAST)
#undef DECLARE_CAST

 protected:
  AsmCallableType() = default;
  virtual ~AsmCallableType() = default;
  virtual bool IsA(AsmType* other);

 private:
  friend class AsmType;

  DISALLOW_COPY_AND_ASSIGN(AsmCallableType);
};
// The type of a single asm.js function: a return type plus an argument
// list that is filled in incrementally via AddArgument().
class V8_EXPORT_PRIVATE AsmFunctionType final : public AsmCallableType {
 public:
  AsmFunctionType* AsFunctionType() final { return this; }

  void AddArgument(AsmType* type) { args_.push_back(type); }
  // NOTE(review): returns a copy of the vector despite the const qualifier
  // on the return type; callers that only iterate may prefer a reference.
  const ZoneVector<AsmType*> Arguments() const { return args_; }
  AsmType* ReturnType() const { return return_type_; }

  bool CanBeInvokedWith(AsmType* return_type,
                        const ZoneVector<AsmType*>& args) override;

 protected:
  AsmFunctionType(Zone* zone, AsmType* return_type)
      : return_type_(return_type), args_(zone) {}

 private:
  friend AsmType;

  std::string Name() override;
  bool IsA(AsmType* other) override;

  AsmType* return_type_;
  ZoneVector<AsmType*> args_;
  DISALLOW_COPY_AND_ASSIGN(AsmFunctionType);
};
// A set of function overloads.  Not expressible in asm.js source itself,
// but used to model the overloaded stdlib functions (see
// AsmType::OverloadedFunction below).
class V8_EXPORT_PRIVATE AsmOverloadedFunctionType final
    : public AsmCallableType {
 public:
  AsmOverloadedFunctionType* AsOverloadedFunctionType() override {
    return this;
  }

  // Adds one concrete signature to the overload set.
  void AddOverload(AsmType* overload);

 private:
  friend AsmType;

  explicit AsmOverloadedFunctionType(Zone* zone) : overloads_(zone) {}

  std::string Name() override;
  bool CanBeInvokedWith(AsmType* return_type,
                        const ZoneVector<AsmType*>& args) override;

  ZoneVector<AsmType*> overloads_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(AsmOverloadedFunctionType);
};
// The type of foreign (FFI) functions imported from JavaScript; see
// AsmType::FFIType below.
class V8_EXPORT_PRIVATE AsmFFIType final : public AsmCallableType {
 public:
  AsmFFIType* AsFFIType() override { return this; }

  // FFI callables are simply reported as "Function".
  std::string Name() override { return "Function"; }
  bool CanBeInvokedWith(AsmType* return_type,
                        const ZoneVector<AsmType*>& args) override;

 private:
  friend AsmType;

  AsmFFIType() = default;

  DISALLOW_COPY_AND_ASSIGN(AsmFFIType);
};
// The type of an asm.js function table: a fixed-length table whose entries
// share a single function signature.
class V8_EXPORT_PRIVATE AsmFunctionTableType : public AsmCallableType {
 public:
  AsmFunctionTableType* AsFunctionTableType() override { return this; }

  std::string Name() override;
  bool CanBeInvokedWith(AsmType* return_type,
                        const ZoneVector<AsmType*>& args) override;

  // Number of entries in the table.
  size_t length() const { return length_; }
  // The signature shared by the table's entries.
  AsmType* signature() { return signature_; }

 private:
  friend class AsmType;

  AsmFunctionTableType(size_t length, AsmType* signature);

  size_t length_;
  AsmType* signature_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(AsmFunctionTableType);
};
// The public handle for all asm.js types.  An AsmType* is either a tagged
// value type (see AsmValueType) or a pointer to a zone-allocated callable
// type; it is never dereferenced as an actual AsmType object.
class V8_EXPORT_PRIVATE AsmType {
 public:
  // One allocation-free factory per value type, e.g. AsmType::Double().
#define DEFINE_CONSTRUCTOR(CamelName, string_name, number, parent_types) \
  static AsmType* CamelName() {                                          \
    return AsmValueType::New(AsmValueType::kAsm##CamelName);             \
  }
  FOR_EACH_ASM_VALUE_TYPE_LIST(DEFINE_CONSTRUCTOR)
#undef DEFINE_CONSTRUCTOR

  // Casts to the callable subtypes.  Value types are never callable, so
  // they always yield nullptr here.
#define DEFINE_CAST(CamelCase)                                        \
  Asm##CamelCase* As##CamelCase() {                                   \
    if (AsValueType() != nullptr) {                                   \
      return nullptr;                                                 \
    }                                                                 \
    return reinterpret_cast<AsmCallableType*>(this)->As##CamelCase(); \
  }
  FOR_EACH_ASM_CALLABLE_TYPE_LIST(DEFINE_CAST)
#undef DEFINE_CAST

  AsmValueType* AsValueType() { return AsmValueType::AsValueType(this); }
  AsmCallableType* AsCallableType();

  // A function returning ret. Callers still need to invoke AddArgument with the
  // returned type to fully create this type.
  static AsmType* Function(Zone* zone, AsmType* ret) {
    AsmFunctionType* f = new (zone) AsmFunctionType(zone, ret);
    return reinterpret_cast<AsmType*>(f);
  }

  // Overloaded function types. Not creatable by asm source, but useful to
  // represent the overloaded stdlib functions.
  static AsmType* OverloadedFunction(Zone* zone) {
    auto* f = new (zone) AsmOverloadedFunctionType(zone);
    return reinterpret_cast<AsmType*>(f);
  }

  // The type for fround(src).
  static AsmType* FroundType(Zone* zone);

  // The (variadic) type for min and max.
  static AsmType* MinMaxType(Zone* zone, AsmType* dest, AsmType* src);

  // The type for foreign functions.
  static AsmType* FFIType(Zone* zone) {
    auto* f = new (zone) AsmFFIType();
    return reinterpret_cast<AsmType*>(f);
  }

  // The type for function tables.
  static AsmType* FunctionTableType(Zone* zone, size_t length,
                                    AsmType* signature) {
    auto* f = new (zone) AsmFunctionTableType(length, signature);
    return reinterpret_cast<AsmType*>(f);
  }

  std::string Name();
  // IsExactly returns true if this is the exact same type as that. For
  // non-value types (e.g., callables), this returns this == that.
  bool IsExactly(AsmType* that);
  // IsA is used to query whether this is an instance of that (i.e., if this is
  // a type derived from that.) For non-value types (e.g., callables), this
  // returns this == that.
  bool IsA(AsmType* that);

  // Types allowed in return statements. void is the type for returns without
  // an expression.
  bool IsReturnType() {
    return this == AsmType::Void() || this == AsmType::Double() ||
           this == AsmType::Signed() || this == AsmType::Float();
  }

  // Converts this to the corresponding valid return type.  (Previously
  // commented as "argument type", which was a copy/paste slip -- compare
  // ToParameterType below.)
  AsmType* ToReturnType() {
    if (this->IsA(AsmType::Signed())) {
      return AsmType::Signed();
    }
    if (this->IsA(AsmType::Double())) {
      return AsmType::Double();
    }
    if (this->IsA(AsmType::Float())) {
      return AsmType::Float();
    }
    if (this->IsA(AsmType::Void())) {
      return AsmType::Void();
    }
    // None signals a type error (see the comment on the None list entry).
    return AsmType::None();
  }

  // Types allowed to be parameters in asm functions.
  bool IsParameterType() {
    return this == AsmType::Double() || this == AsmType::Int() ||
           this == AsmType::Float();
  }

  // Converts this to the corresponding valid argument type.
  AsmType* ToParameterType() {
    if (this->IsA(AsmType::Int())) {
      return AsmType::Int();
    }
    if (this->IsA(AsmType::Double())) {
      return AsmType::Double();
    }
    if (this->IsA(AsmType::Float())) {
      return AsmType::Float();
    }
    // None signals a type error (see the comment on the None list entry).
    return AsmType::None();
  }

  // Types allowed to be compared using the comparison operators.
  bool IsComparableType() {
    return this == AsmType::Double() || this == AsmType::Signed() ||
           this == AsmType::Unsigned() || this == AsmType::Float();
  }

  // The following methods are meant to be used for inspecting the traits of
  // element types for the heap view types.
  enum : int32_t { kNotHeapType = -1 };

  // Returns the element size if this is a heap type. Otherwise returns
  // kNotHeapType.
  int32_t ElementSizeInBytes();
  // Returns the load type if this is a heap type. AsmType::None is returned if
  // this is not a heap type.
  AsmType* LoadType();
  // Returns the store type if this is a heap type. AsmType::None is returned if
  // this is not a heap type.
  AsmType* StoreType();
};
} // namespace wasm
} // namespace internal
} // namespace v8
#endif // V8_ASMJS_ASM_TYPES_H_
|
#!/bin/bash
# Fail on any error.
# NOTE(review): 'set -e' is commented out, so most command failures below
# will NOT abort the build; only the 'conan config install' call checks its
# exit status explicitly.  The closing comment of this file assumes 'set -e'
# is active -- confirm which behavior is intended.
# set -e
# Display commands being run.
# WARNING: please only enable 'set -x' if necessary for debugging, and be very
# careful if you handle credentials (e.g. from Keystore) with 'set -x':
# statements like "export VAR=$(cat /tmp/keystore/credentials)" will result in
# the credentials being printed in build logs.
# Additionally, recursive invocation with credentials as command-line
# parameters, will print the full command, with credentials, in the build logs.
# set -x
# Code under repo is checked out to ${KOKORO_ARTIFACTS_DIR}/github.
# The final directory name in this path is determined by the scm name specified
# in the job configuration.
# Resolve the repository root: two levels above this script's own directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../../" >/dev/null 2>&1 && pwd )"
echo "Installing conan configuration (profiles, settings, etc.)..."
conan config install $DIR/contrib/conan/configs/linux || exit $?
# We replace the default remotes by our internal artifactory server
# which acts as a secure source for prebuilt dependencies.
cp -v $DIR/kokoro/conan/config/remotes.json ~/.conan/remotes.json
export CONAN_REVISIONS_ENABLED=1
profile=ggp_relwithdebinfo
# This variable is picked up by the `conan package` call.
# The current public key from keystore will be embedded into
# the install_signed_package.sh script.
export ORBIT_SIGNING_PUBLIC_KEY_FILE="/tmpfs/src/keystore/74938_SigningPublicGpg"
# Install dependencies, build, and package (producing .deb packages).
cd ${KOKORO_ARTIFACTS_DIR}/github/orbitprofiler
conan install -u -pr $profile -if build_$profile/ --build outdated -o debian_packaging=True $DIR
conan build -bf build_$profile/ $DIR
conan package -bf $DIR/build_$profile/ $DIR
# Import the signing key into a throw-away keyring and detach-sign every
# produced Debian package.
rm -rf ~/.gnupg/
rm -rf /dev/shm/signing.gpg
mkdir -p ~/.gnupg
chmod 700 ~/.gnupg
echo "allow-loopback-pinentry" > ~/.gnupg/gpg-agent.conf
GPG_OPTIONS="--pinentry-mode loopback --batch --no-tty --yes --no-default-keyring --keyring /dev/shm/signing.gpg --passphrase-file /tmpfs/src/keystore/74938_SigningPrivateGpgKeyPassword"
gpg $GPG_OPTIONS --import /tmpfs/src/keystore/74938_SigningPrivateGpg
for deb in $DIR/build_$profile/package/*.deb; do
gpg $GPG_OPTIONS --output "$deb.asc" --detach-sign "$deb"
done
# Uncomment the three lines below to print the external ip into the log and
# keep the vm alive for two hours. This is useful to debug build failures that
# can not be resolved by looking into sponge alone. Also comment out the
# "set -e" at the top of this file (otherwise a failed build will exit this
# script immediately).
# external_ip=$(curl -s -H "Metadata-Flavor: Google" http://metadata/computeMetadata/v1/instance/network-interfaces/0/access-configs/0/external-ip)
# echo "INSTANCE_EXTERNAL_IP=${external_ip}"
# sleep 7200;
|
#!/bin/sh
# Test of transliteration in gettext functions.
# Copyright (C) 2000-2017 Free Software Foundation, Inc.
# This file is part of the GNU C Library.
# The GNU C Library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# The GNU C Library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with the GNU C Library; if not, see
# <http://www.gnu.org/licenses/>.
set -e

common_objpfx=$1
test_program_prefix=$2
objpfx=$3

# Create the message-catalog directory tree.
# Fix: the catalog is compiled into (and the test program is pointed at)
# "domaindir", so that is the tree that must exist.  Previously this
# created "localedir", and the msgfmt call below failed under 'set -e'
# because its output directory was missing.
mkdir -p ${objpfx}domaindir/existing-locale/LC_MESSAGES

# Compile the transliteration catalog for the test program to load.
msgfmt -o ${objpfx}domaindir/existing-locale/LC_MESSAGES/translit.mo \
translit.po

# Run the test binary, capturing its output; the trailing argument is the
# message-domain directory it should bind.
${test_program_prefix} \
${objpfx}tst-translit > ${objpfx}tst-translit.out ${objpfx}domaindir

exit $?
|
<html>
<head>
<title>Contact Form</title>
<script>
// Validates the contact form before submission.
// Returns false (blocking submission) when any field is empty or contains
// only whitespace; otherwise returns true.
function validateForm() {
    // trim() rejects whitespace-only entries, which previously slipped
    // through the empty-string check.
    var name = document.getElementById('name').value.trim();
    var email = document.getElementById('email').value.trim();
    var message = document.getElementById('message').value.trim();

    // Strict equality avoids the coercion pitfalls of `==`.
    if (name === "" || email === "" || message === "") {
        alert('All fields are required!');
        return false;
    }
    return true;
}
</script>
</head>
<body>
    <!-- Fix: each control needs a `name` attribute to be included in the
         form submission; with only `id` set, submit.php received no data.
         The `for` attributes also associate each label with its control. -->
    <form name="contact" action="submit.php" onsubmit="return validateForm();">
        <label for="name">Name</label>
        <input type="text" id="name" name="name" />
        <label for="email">Email</label>
        <input type="email" id="email" name="email" />
        <label for="message">Message</label>
        <textarea rows="4" id="message" name="message"></textarea>
        <input type="submit" value="Submit" />
    </form>
</body>
</html>
<gh_stars>0
package flux
import "fmt"
// Provides simple status printers that append pass/fail marks to messages.

// succeedMark is the Unicode code point for a check mark (✓).
const succeedMark = "\u2713"

// failedMark is the Unicode code point for a ballot X (✗).
const failedMark = "\u2717"
// Passed renders msg (a fmt-style format string for v) followed by a
// check mark.
func Passed(msg string, v ...interface{}) string {
	marked := render(succeedMark, msg, v...)
	return marked
}
// Failed renders msg (a fmt-style format string for v) followed by an
// X mark.
func Failed(msg string, v ...interface{}) string {
	marked := render(failedMark, msg, v...)
	return marked
}
// FatalPrinter wraps a Fatal method that prints its arguments and is
// expected to halt further execution afterwards (as log.Fatal and
// testing.T.Fatal do).
type FatalPrinter interface {
	Fatal(v ...interface{})
}
// FatalFailed renders the failure-marked message and sends it to
// fr.Fatal, which is expected to stop execution.
func FatalFailed(fr FatalPrinter, msg string, v ...interface{}) {
	message := Failed(msg, v...)
	fr.Fatal(message)
}
// FatalPassed renders the success-marked message and sends it to
// fr.Fatal, which is expected to stop execution.
func FatalPassed(fr FatalPrinter, msg string, v ...interface{}) {
	message := Passed(msg, v...)
	fr.Fatal(message)
}
// LogPrinter wraps a plain Log method for non-fatal output (as
// testing.T.Log does).
type LogPrinter interface {
	Log(v ...interface{})
}
// LogFailed writes the failure-marked message via pr.Log.
func LogFailed(pr LogPrinter, msg string, v ...interface{}) {
	message := Failed(msg, v...)
	pr.Log(message)
}
// LogPassed writes the success-marked message via pr.Log.
func LogPassed(pr LogPrinter, msg string, v ...interface{}) {
	message := Passed(msg, v...)
	pr.Log(message)
}
// SimplePrinter wraps a basic Print method (as fmt.Print or log.Print do).
type SimplePrinter interface {
	Print(v ...interface{})
}
// PrintFailed writes the failure-marked message via pr.Print.
//
// Bug fix: this previously rendered the message with Passed(), so failure
// output carried a check mark (✓) instead of the X mark (✗) — unlike its
// siblings FatalFailed and LogFailed, which correctly call Failed().
func PrintFailed(pr SimplePrinter, msg string, v ...interface{}) {
	pr.Print(Failed(msg, v...))
}
// PrintPassed writes the success-marked message via pr.Print.
func PrintPassed(pr SimplePrinter, msg string, v ...interface{}) {
	message := Passed(msg, v...)
	pr.Print(message)
}
// render builds "\t<msg>: <mark>" and then applies the fmt arguments v to
// that string, so verbs inside msg are expanded in the final output.
func render(mark, msg string, v ...interface{}) string {
	layout := "\t" + msg + ": " + mark
	return fmt.Sprintf(layout, v...)
}
|
<filename>external/geoext2/tests/headless/casperjs-1.1-beta.1/tests/suites/casper/keys.js
/*jshint strict:false*/
/*global CasperError, casper, console, phantom, require*/
var utils = require('utils');
// Exercises Casper.sendKeys() against the form fixture: a plain text
// input, an autocomplete field (using keepFocus), a textarea, and an
// input lacking a `type` attribute.
casper.test.begin('sendKeys() tests', 4, function(test) {
    casper.start('tests/site/form.html', function() {
        this.sendKeys('input[name="email"]', '<EMAIL>');
        // keepFocus leaves the field focused so the autocomplete dropdown
        // stays open for the click on its first suggestion below.
        this.sendKeys('input[name="language"]', 'fr', {keepFocus: true});
        this.click('#autocomplete li:first-child');
        this.sendKeys('textarea', "Damn, I’m looking good.");
        // Read every field of the form back and check the typed values.
        var values = this.getFormValues('form[action="result.html"]');
        test.assertEquals(values.email, '<EMAIL>',
            'Casper.sendKeys() sends keys to given input');
        test.assertEquals(values.language, 'french',
            'Casper.sendKeys() sends keys to given input and keeps focus afterweards');
        test.assertEquals(values.content, "Damn, I’m looking good.",
            'Casper.sendKeys() sends keys to given textarea');
        this.sendKeys('input[name="notype"]', "I have no type.");
        values = this.getFormValues('form#no-type-test-form');
        test.assertEquals(values.notype, "I have no type.",
            'Casper.sendKeys() sends keys to given input without type attribute');
    }).run(function() {
        test.done();
    });
});
// Key-modifier support is gated on the PhantomJS version: the suite runs
// only on engines >= 1.9.0.
if (utils.gteVersion(phantom.version, '1.9.0')) {
    casper.test.begin('sendKeys() key modifiers tests', 1, function(test) {
        casper.start().then(function() {
            // Inject a page that records every keypress together with its
            // alt/ctrl modifier flags into a global array read back below.
            this.setContent([
                '<input>',
                '<script>var keys = []; window.addEventListener("keypress", function(e) {',
                ' keys.push({code: e.which, alt: e.altKey, ctrl: e.ctrlKey});',
                '})</script>'
            ].join(''));
            // Send the same key with no modifier, ctrl, and ctrl+alt.
            this.sendKeys('input', 'k');
            this.sendKeys('input', 'k', {modifiers: "ctrl"});
            this.sendKeys('input', 'k', {modifiers: "ctrl+alt"});
            test.assertEquals(this.getGlobal('keys'),
                [
                    {code: 107, alt: false, ctrl: false},
                    {code: 107, alt: false, ctrl: true},
                    {code: 107, alt: true, ctrl: true}
                ], 'sendKeys() uses key modifiers');
        }).run(function() {
            test.done();
        });
    });
}
|
#!/bin/bash
set -eo pipefail

# Download a pinned oauth2-proxy release into ./bin, verifying the
# published SHA-256 checksum *before* unpacking the archive.
VERSION="v7.0.1"
ARCHIVE="oauth2-proxy-${VERSION}.linux-amd64"

mkdir -p bin
cd bin

wget "https://github.com/oauth2-proxy/oauth2-proxy/releases/download/${VERSION}/${ARCHIVE}.tar.gz"
wget "https://github.com/oauth2-proxy/oauth2-proxy/releases/download/${VERSION}/${ARCHIVE}-sha256sum.txt"

# Fix: verify the download before extracting anything (previously the
# tarball was unpacked first, so a tampered archive would already have
# been extracted by the time the checksum ran).  With 'set -e' a
# mismatch aborts the script here.
sha256sum -c "${ARCHIVE}-sha256sum.txt"

tar xf "${ARCHIVE}.tar.gz"
mv "${ARCHIVE}/oauth2-proxy" oauth2-proxy

# Clean up the archive, checksum file, and unpacked directory.
rm -rf *.tar.gz *.txt "${ARCHIVE}"
package com.alipay.api.response;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: alipay.offline.market.reporterror.create response.
 *
 * <p>This response declares no fields of its own; result data is exposed
 * through the members inherited from {@link AlipayResponse}.</p>
 *
 * @author auto create
 * @since 1.0, 2021-12-08 23:30:42
 */
public class AlipayOfflineMarketReporterrorCreateResponse extends AlipayResponse {

    // Serialization version for this auto-generated response class.
    private static final long serialVersionUID = 5426364269323863185L;

}
|
package com.ice.restring;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.res.Resources;
/**
 * A {@link ContextWrapper} that serves a caller-supplied {@link Resources}
 * object in place of the wrapped base context's own resources.
 */
class CustomResourcesContextWrapper extends ContextWrapper {

    /** The replacement resources returned from {@link #getResources()}. */
    private final Resources overrideResources;

    public CustomResourcesContextWrapper(Context base, Resources resources) {
        super(base);
        this.overrideResources = resources;
    }

    @Override
    public Resources getResources() {
        return overrideResources;
    }
}
|
-- Sequences in schema gcdefault: ids for map notes and their categories.
CREATE SEQUENCE IF NOT EXISTS gcdefault.mapnote_mapnoteid_seq;
CREATE SEQUENCE IF NOT EXISTS gcdefault.mapnotecategory_mapnotecategoryid_seq;

-- Sequences in schema gcverbase00001: 'gid' values for the base layers.
CREATE SEQUENCE IF NOT EXISTS gcverbase00001.county_gid_seq;
CREATE SEQUENCE IF NOT EXISTS gcverbase00001.places_gid_seq;
CREATE SEQUENCE IF NOT EXISTS gcverbase00001.railroads_gid_seq;
CREATE SEQUENCE IF NOT EXISTS gcverbase00001.streets_gid_seq;
CREATE SEQUENCE IF NOT EXISTS gcverbase00001.surfacewater_gid_seq;
CREATE SEQUENCE IF NOT EXISTS gcverbase00001.water_gid_seq;

-- Sequences in schema gcversa00001: 'gid' values for service areas.
CREATE SEQUENCE IF NOT EXISTS gcversa00001.servicearea_gid_seq;
CREATE SEQUENCE IF NOT EXISTS gcversa00001.serviceareatype_gid_seq;
#!/usr/bin/env python
import rospy
from std_msgs.msg import String
def talker():
    """Publish a simulated e-stop message on the 'e_stop' topic each time
    the operator presses enter.

    Blocks on keyboard input between publications, so no publishing rate
    limiter is needed.
    """
    pub = rospy.Publisher('e_stop', String, queue_size=10)
    rospy.init_node('estop_sim', anonymous=True)
    while not rospy.is_shutdown():
        # raw_input is Python 2 only (rospy / ROS 1 era); under Python 3
        # this would need to become input() — TODO confirm target runtime.
        raw_input("press enter to send estop...")
        pub.publish("Estop by simulation node");

if __name__ == '__main__':
    try:
        talker()
    except rospy.ROSInterruptException:
        # Raised on node shutdown (e.g. Ctrl-C); exit quietly.
        pass
<reponame>umaru724/TencentOS_E53_IA1<filename>platform/vendor_bsp/nordic/nRF5_SDK_15.3.0/examples/ble_peripheral/ble_app_beacon/main.c<gh_stars>1-10
/**
* Copyright (c) 2014 - 2019, Nordic Semiconductor ASA
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form, except as embedded into a Nordic
* Semiconductor ASA integrated circuit in a product or a software update for
* such product, must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* 3. Neither the name of Nordic Semiconductor ASA nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* 4. This software, with or without modification, must only be used with a
* Nordic Semiconductor ASA integrated circuit.
*
* 5. Any software provided in binary form under this license must not be reverse
* engineered, decompiled, modified and/or disassembled.
*
* THIS SOFTWARE IS PROVIDED BY NORDIC SEMICONDUCTOR ASA "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL NORDIC SEMICONDUCTOR ASA OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
/** @file
*
* @defgroup ble_sdk_app_beacon_main main.c
* @{
* @ingroup ble_sdk_app_beacon
* @brief Beacon Transmitter Sample Application main file.
*
* This file contains the source code for an Beacon transmitter sample application.
*/
#include <stdbool.h>
#include <stdint.h>
#include "nordic_common.h"
#include "bsp.h"
#include "nrf_soc.h"
#include "nrf_sdh.h"
#include "nrf_sdh_ble.h"
#include "ble_advdata.h"
#include "app_timer.h"
#include "nrf_pwr_mgmt.h"
#include "nrf_log.h"
#include "nrf_log_ctrl.h"
#include "nrf_log_default_backends.h"
// --- Advertising configuration and module state ---------------------------

#define APP_BLE_CONN_CFG_TAG            1                                  /**< A tag identifying the SoftDevice BLE configuration. */

#define NON_CONNECTABLE_ADV_INTERVAL    MSEC_TO_UNITS(100, UNIT_0_625_MS)  /**< The advertising interval for non-connectable advertisement (100 ms). This value can vary between 100ms to 10.24s). */

#define APP_BEACON_INFO_LENGTH          0x17                               /**< Total length of information advertised by the Beacon. */
#define APP_ADV_DATA_LENGTH             0x15                               /**< Length of manufacturer specific data in the advertisement. */
#define APP_DEVICE_TYPE                 0x02                               /**< 0x02 refers to Beacon. */
#define APP_MEASURED_RSSI               0xC3                               /**< The Beacon's measured RSSI at 1 meter distance in dBm. */
#define APP_COMPANY_IDENTIFIER          0x0059                             /**< Company identifier for Nordic Semiconductor ASA. as per www.bluetooth.org. */
#define APP_MAJOR_VALUE                 0x01, 0x02                         /**< Major value used to identify Beacons. */
#define APP_MINOR_VALUE                 0x03, 0x04                         /**< Minor value used to identify Beacons. */
#define APP_BEACON_UUID                 0x01, 0x12, 0x23, 0x34, \
                                        0x45, 0x56, 0x67, 0x78, \
                                        0x89, 0x9a, 0xab, 0xbc, \
                                        0xcd, 0xde, 0xef, 0xf0             /**< Proprietary UUID for Beacon. */

#define DEAD_BEEF                       0xDEADBEEF                         /**< Value used as error code on stack dump, can be used to identify stack location on stack unwind. */

#if defined(USE_UICR_FOR_MAJ_MIN_VALUES)
#define MAJ_VAL_OFFSET_IN_BEACON_INFO   18                                 /**< Position of the MSB of the Major Value in m_beacon_info array. */
#define UICR_ADDRESS                    0x10001080                         /**< Address of the UICR register used by this example. The major and minor versions to be encoded into the advertising data will be picked up from this location. */
#endif

static ble_gap_adv_params_t m_adv_params;                                  /**< Parameters to be passed to the stack when starting advertising. */
static uint8_t m_adv_handle = BLE_GAP_ADV_SET_HANDLE_NOT_SET;              /**< Advertising handle used to identify an advertising set. */
static uint8_t m_enc_advdata[BLE_GAP_ADV_SET_DATA_SIZE_MAX];               /**< Buffer for storing an encoded advertising set. */

/**@brief Struct that contains pointers to the encoded advertising data. */
static ble_gap_adv_data_t m_adv_data =
{
    .adv_data =
    {
        .p_data = m_enc_advdata,
        .len    = BLE_GAP_ADV_SET_DATA_SIZE_MAX
    },
    .scan_rsp_data =
    {
        // No scan response: the advertisement is non-scannable.
        .p_data = NULL,
        .len    = 0
    }
};

static uint8_t m_beacon_info[APP_BEACON_INFO_LENGTH] =                     /**< Information advertised by the Beacon. */
{
    APP_DEVICE_TYPE,     // Manufacturer specific information. Specifies the device type in this
                         // implementation.
    APP_ADV_DATA_LENGTH, // Manufacturer specific information. Specifies the length of the
                         // manufacturer specific data in this implementation.
    APP_BEACON_UUID,     // 128 bit UUID value.
    APP_MAJOR_VALUE,     // Major arbitrary value that can be used to distinguish between Beacons.
    APP_MINOR_VALUE,     // Minor arbitrary value that can be used to distinguish between Beacons.
    APP_MEASURED_RSSI    // Manufacturer specific information. The Beacon's measured TX power in
                         // this implementation.
};
/**@brief Callback function for asserts in the SoftDevice.
 *
 * @details This function will be called in case of an assert in the SoftDevice.
 *
 * @warning This handler is an example only and does not fit a final product. You need to analyze
 *          how your product is supposed to react in case of Assert.
 * @warning On assert from the SoftDevice, the system can only recover on reset.
 *
 * @param[in] line_num   Line number of the failing ASSERT call.
 * @param[in] file_name  File name of the failing ASSERT call.
 */
void assert_nrf_callback(uint16_t line_num, const uint8_t * p_file_name)
{
    // DEAD_BEEF marks the error as a SoftDevice assert in the stack dump.
    app_error_handler(DEAD_BEEF, line_num, p_file_name);
}
/**@brief Function for initializing the Advertising functionality.
 *
 * @details Encodes the required advertising data and passes it to the stack.
 *          Also builds a structure to be passed to the stack when starting advertising.
 */
static void advertising_init(void)
{
    uint32_t err_code;
    ble_advdata_t advdata;
    // BR/EDR-not-supported is the only flag: the beacon is BLE-only and
    // advertises as non-connectable (see the advertising type below).
    uint8_t flags = BLE_GAP_ADV_FLAG_BR_EDR_NOT_SUPPORTED;

    ble_advdata_manuf_data_t manuf_specific_data;

    manuf_specific_data.company_identifier = APP_COMPANY_IDENTIFIER;

#if defined(USE_UICR_FOR_MAJ_MIN_VALUES)
    // If USE_UICR_FOR_MAJ_MIN_VALUES is defined, the major and minor values will be read from the
    // UICR instead of using the default values. The major and minor values obtained from the UICR
    // are encoded into advertising data in big endian order (MSB First).
    // To set the UICR used by this example to a desired value, write to the address 0x10001080
    // using the nrfjprog tool. The command to be used is as follows.
    // nrfjprog --snr <Segger-chip-Serial-Number> --memwr 0x10001080 --val <your major/minor value>
    // For example, for a major value and minor value of 0xabcd and 0x0102 respectively, the
    // the following command should be used.
    // nrfjprog --snr <Segger-chip-Serial-Number> --memwr 0x10001080 --val 0xabcd0102
    uint16_t major_value = ((*(uint32_t *)UICR_ADDRESS) & 0xFFFF0000) >> 16;
    uint16_t minor_value = ((*(uint32_t *)UICR_ADDRESS) & 0x0000FFFF);

    uint8_t index = MAJ_VAL_OFFSET_IN_BEACON_INFO;

    // Patch the major/minor bytes into the beacon payload, MSB first.
    m_beacon_info[index++] = MSB_16(major_value);
    m_beacon_info[index++] = LSB_16(major_value);
    m_beacon_info[index++] = MSB_16(minor_value);
    m_beacon_info[index++] = LSB_16(minor_value);
#endif

    manuf_specific_data.data.p_data = (uint8_t *) m_beacon_info;
    manuf_specific_data.data.size = APP_BEACON_INFO_LENGTH;

    // Build and set advertising data.
    memset(&advdata, 0, sizeof(advdata));

    advdata.name_type = BLE_ADVDATA_NO_NAME;
    advdata.flags = flags;
    advdata.p_manuf_specific_data = &manuf_specific_data;

    // Initialize advertising parameters (used when starting advertising).
    memset(&m_adv_params, 0, sizeof(m_adv_params));

    m_adv_params.properties.type = BLE_GAP_ADV_TYPE_NONCONNECTABLE_NONSCANNABLE_UNDIRECTED;
    m_adv_params.p_peer_addr = NULL; // Undirected advertisement.
    m_adv_params.filter_policy = BLE_GAP_ADV_FP_ANY;
    m_adv_params.interval = NON_CONNECTABLE_ADV_INTERVAL;
    m_adv_params.duration = 0; // Never time out.

    // Encode the advertising data into m_enc_advdata (m_adv_data.adv_data.len
    // is updated with the encoded length), then hand the set to the stack.
    err_code = ble_advdata_encode(&advdata, m_adv_data.adv_data.p_data, &m_adv_data.adv_data.len);
    APP_ERROR_CHECK(err_code);

    err_code = sd_ble_gap_adv_set_configure(&m_adv_handle, &m_adv_data, &m_adv_params);
    APP_ERROR_CHECK(err_code);
}
/**@brief Starts advertising the configured set and shows the advertising
 *        indication on the board LEDs.
 */
static void advertising_start(void)
{
    ret_code_t err_code = sd_ble_gap_adv_start(m_adv_handle, APP_BLE_CONN_CFG_TAG);
    APP_ERROR_CHECK(err_code);

    // Reflect the advertising state on the board indicators.
    err_code = bsp_indication_set(BSP_INDICATE_ADVERTISING);
    APP_ERROR_CHECK(err_code);
}
/**@brief Initializes the SoftDevice and enables the BLE stack.
 *
 * @details Requests SoftDevice enabling, applies the default BLE
 *          configuration for this connection tag, then enables BLE using
 *          the fetched application RAM start address.
 */
static void ble_stack_init(void)
{
    ret_code_t err_code = nrf_sdh_enable_request();
    APP_ERROR_CHECK(err_code);

    // Configure the BLE stack using the default settings; ram_start
    // receives the start address of the application RAM.
    uint32_t ram_start = 0;
    err_code = nrf_sdh_ble_default_cfg_set(APP_BLE_CONN_CFG_TAG, &ram_start);
    APP_ERROR_CHECK(err_code);

    // Enable BLE stack.
    err_code = nrf_sdh_ble_enable(&ram_start);
    APP_ERROR_CHECK(err_code);
}
/**@brief Initializes the logger module and its default backends. */
static void log_init(void)
{
    ret_code_t err_code;

    err_code = NRF_LOG_INIT(NULL);
    APP_ERROR_CHECK(err_code);

    NRF_LOG_DEFAULT_BACKENDS_INIT();
}
/**@brief Initializes the board support package with LED support only. */
static void leds_init(void)
{
    ret_code_t err_code;

    err_code = bsp_init(BSP_INIT_LEDS, NULL);
    APP_ERROR_CHECK(err_code);
}
/**@brief Initializes the application timer module. */
static void timers_init(void)
{
    ret_code_t err_code;

    err_code = app_timer_init();
    APP_ERROR_CHECK(err_code);
}
/**@brief Initializes the power management module. */
static void power_management_init(void)
{
    ret_code_t err_code = nrf_pwr_mgmt_init();
    APP_ERROR_CHECK(err_code);
}
/**@brief Handles the idle state (main loop body).
 *
 * @details Sleeps until the next event, but only once the logger has no
 *          pending data left to process.
 */
static void idle_state_handle(void)
{
    if (!NRF_LOG_PROCESS())
    {
        nrf_pwr_mgmt_run();
    }
}
/**
 * @brief Function for application main entry.
 */
int main(void)
{
    // Initialize board services before the BLE stack so logging and LEDs
    // are available during stack bring-up; advertising data is prepared
    // last, once the stack is enabled.
    log_init();
    timers_init();
    leds_init();
    power_management_init();
    ble_stack_init();
    advertising_init();

    // Start execution.
    NRF_LOG_INFO("Beacon example started.");
    advertising_start();

    // Enter main loop: sleep between events, flushing deferred log entries.
    for (;; )
    {
        idle_state_handle();
    }
}
/**
* @}
*/
|
def diff_characters(firstString, secondString):
    """Count the positions at which two equal-length strings differ.

    Args:
        firstString: First string to compare.
        secondString: Second string to compare.

    Returns:
        The number of differing positions, or -1 when the strings have
        different lengths (the comparison is undefined in that case).
    """
    if len(firstString) != len(secondString):
        return -1
    # zip pairs the characters position by position; the original version
    # used an index loop and a local variable that shadowed this function's
    # own name.
    return sum(1 for a, b in zip(firstString, secondString) if a != b)
def gitDiff(workingDirectory, stagingArea):
    """Return the characters of ``stagingArea`` that do not occur anywhere
    in ``workingDirectory``, preserving their order and duplicates.
    """
    missing = [char for char in stagingArea if char not in workingDirectory]
    return "".join(missing)
<gh_stars>1000+
/*
* Tencent is pleased to support the open source community by making 蓝鲸 available.
* Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package rest
import (
"net/http"
"configcenter/src/apimachinery/flowctrl"
"configcenter/src/framework/clientset/discovery"
chttp "configcenter/src/framework/common/http"
)
// VerbType is the HTTP request method (verb) used for an outgoing request.
type VerbType string

// Supported verbs, aliased from the net/http method constants.
const (
	PUT VerbType = http.MethodPut
	POST VerbType = http.MethodPost
	GET VerbType = http.MethodGet
	DELETE VerbType = http.MethodDelete
	PATCH VerbType = http.MethodPatch
)
// Capability bundles the collaborators used when building a REST client:
// the HTTP transport, service discovery, a rate limiter, and mock settings.
type Capability struct {
	Client chttp.HttpClient
	Discover discovery.Interface
	Throttle flowctrl.RateLimiter
	Mock MockInfo
}
// MockInfo configures request mocking (e.g. for tests).
type MockInfo struct {
	// Mocked reports whether mocking is enabled.
	Mocked bool
	// SetMockData reports whether MockData has been populated.
	// NOTE(review): semantics inferred from the field names — confirm
	// against the call sites before relying on this.
	SetMockData bool
	// MockData is the canned payload used when mocking is enabled.
	MockData interface{}
}
|
#!/bin/sh
# Package the Electron app as PopHub.app for macOS x64 into dist/.
# --prune drops devDependencies, --asar bundles sources into an archive,
# --overwrite replaces any previous build, and --ignore keeps build inputs
# (sources, scripts, icon, screenshot) out of the packaged app.
electron-packager . PopHub --platform=darwin --arch=x64 --version=0.32.3 --app-bundle-id=jp.questbeat.pophub-electron --app-version=0.0.1 --icon=pophub.icns --ignore="src|dist|scripts|gulpfile\\.js|pophub\\.icns|screenshot\\.png" --out=dist/ --prune --overwrite --asar
|
<reponame>smagill/opensphere-desktop
package io.opensphere.xyztile.mantle;
import static org.junit.Assert.assertEquals;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.xyztile.model.Projection;
import io.opensphere.xyztile.model.XYZDataTypeInfo;
import io.opensphere.xyztile.model.XYZServerInfo;
import io.opensphere.xyztile.model.XYZSettings;
import io.opensphere.xyztile.model.XYZTileLayerInfo;
/**
 * Unit test for {@link XYZDataGroupInfoAssistant}.
 */
public class XYZDataGroupInfoAssistantTestDisplay
{
    /** Layer id shared by the test fixtures. */
    private static final String ourLayerId = "iamlayerid";

    /**
     * Verifies that the assistant builds a settings UI bound to the XYZ
     * layer contained in the data group.
     */
    @Test
    public void testGetSettingsUIComponent()
    {
        EasyMockSupport mocks = new EasyMockSupport();

        XYZTileLayerInfo tileLayer = new XYZTileLayerInfo(ourLayerId, "A Name", Projection.EPSG_4326, 2, true, 4,
                new XYZServerInfo("serverName", "http://somehost"));
        SettingsBroker settingsBroker = createBroker(mocks, tileLayer);
        DataGroupInfo group = createDataGroup(mocks, tileLayer);

        mocks.replayAll();

        XYZDataGroupInfoAssistant assistant = new XYZDataGroupInfoAssistant(settingsBroker);
        XYZSettingsUI ui = (XYZSettingsUI)assistant.getSettingsUIComponent(null, group);
        assertEquals(tileLayer, ui.getLayer());

        mocks.verifyAll();
    }

    /**
     * Builds a mocked {@link SettingsBroker} that hands back fresh settings
     * for the given layer.
     *
     * @param support Used to create the mock.
     * @param layer The layer.
     * @return The mocked settings broker.
     */
    private SettingsBroker createBroker(EasyMockSupport support, XYZTileLayerInfo layer)
    {
        SettingsBroker broker = support.createMock(SettingsBroker.class);
        EasyMock.expect(broker.getSettings(EasyMock.eq(layer))).andReturn(new XYZSettings());

        return broker;
    }

    /**
     * Builds a mocked {@link DataGroupInfo} whose members include one XYZ
     * data type wrapping the given layer info alongside a plain mocked type.
     *
     * @param support Used to create the mock.
     * @param layerInfo The layer info.
     * @return The mocked {@link DataGroupInfo}.
     */
    private DataGroupInfo createDataGroup(EasyMockSupport support, XYZTileLayerInfo layerInfo)
    {
        XYZDataTypeInfo xyzType = new XYZDataTypeInfo(null, layerInfo);
        DataTypeInfo plainType = support.createMock(DataTypeInfo.class);

        DataGroupInfo group = support.createMock(DataGroupInfo.class);
        EasyMock.expect(group.getMembers(EasyMock.eq(false))).andReturn(New.set(plainType, xyzType));

        return group;
    }
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# CI driver: codegen, format-check, build, test and run examples for the
# Rust implementation of Arrow.
#
# Abort on any command failure, on use of unset variables, and on failures
# anywhere in a pipeline (bare `set -e` ignores non-final pipeline failures).
set -euo pipefail

cd /arrow/rust

# show activated toolchain
rustup show

# clean first
cargo clean

# flatbuffer codegen
echo "Generating IPC"
flatc --rust -o arrow/src/ipc/gen/ ../format/*.fbs

# work around some bugs in flatbuffers: generated identifiers contain a
# doubled underscore that does not match the expected names
find arrow/src/ipc/gen/ -name "*_generated.rs" -exec sed -i 's/type__type/type_type/g' {} \;

# raises on any formatting errors
echo "Running formatting checks ..."
cargo +stable fmt --all -- --check
echo "Formatting checks completed"

# build entire project, treating compiler warnings as errors
RUSTFLAGS="-D warnings" cargo build --all-targets

# run tests
cargo test

# make sure we can build Arrow sub-crate without default features
pushd arrow
cargo build --no-default-features
popd

# run Arrow examples
pushd arrow
cargo run --example builders
cargo run --example dynamic_types
cargo run --example read_csv
cargo run --example read_csv_infer_schema
popd

# run DataFusion examples
pushd datafusion
cargo run --example csv_sql
cargo run --example parquet_sql
popd
|
<filename>docs/html/structCatch_1_1Matchers_1_1StdString_1_1StartsWithMatcher.js
// Doxygen-generated navigation data for the StartsWithMatcher struct page:
// each entry is [member name, documentation URL with member anchor, children].
// Do not edit by hand — this file is regenerated by Doxygen.
var structCatch_1_1Matchers_1_1StdString_1_1StartsWithMatcher =
[
    [ "StartsWithMatcher", "structCatch_1_1Matchers_1_1StdString_1_1StartsWithMatcher.html#a7b86f258bdbd131a6e7bcd94a8977325", null ],
    [ "match", "structCatch_1_1Matchers_1_1StdString_1_1StartsWithMatcher.html#a7da4747aed0c48989d8be59a89e2b7fb", null ]
];
package fastreflection
import (
"github.com/cosmos/cosmos-proto/generator"
"google.golang.org/protobuf/compiler/protogen"
"google.golang.org/protobuf/reflect/protoreflect"
)
// newFieldGen generates the fast-reflection NewField method for a single
// protobuf message type.
type newFieldGen struct {
	// GeneratedFile receives the emitted Go source.
	*generator.GeneratedFile
	// typeName is the Go type name the method is generated on.
	typeName string
	// message is the protobuf message being processed.
	message *protogen.Message
}
// generate emits the NewField method for the message: a switch over the
// field's full name that returns a freshly constructed default value for the
// matched field, and panics for extensions or unknown fields.
func (g *newFieldGen) generate() {
	g.genComment()
	g.P("func (x *", g.typeName, ") NewField(fd ", protoreflectPkg.Ident("FieldDescriptor"), ") ", protoreflectPkg.Ident("Value"), " {")
	g.P("switch fd.FullName() {")
	for _, field := range g.message.Fields {
		g.P("case \"", field.Desc.FullName(), "\":")
		g.genField(field)
	}
	g.P("default: ")
	g.P("if fd.IsExtension() {")
	g.P("panic(", fmtPkg.Ident("Errorf"), "(\"proto3 declared messages do not support extensions: ", g.message.Desc.FullName(), "\"))")
	g.P("}")
	// Use fmtPkg.Ident rather than the literal string "fmt.Errorf" so the
	// generator records the fmt import for the emitted file; previously this
	// branch emitted a bare "fmt.Errorf", relying on the extension branch
	// above to have registered the import.
	g.P("panic(", fmtPkg.Ident("Errorf"), "(\"message ", g.message.Desc.FullName(), " does not contain field %s\", fd.FullName()))")
	g.P("}")
	g.P("}")
}
// genComment writes the doc comment that precedes the generated NewField
// method. The wording mirrors the protoreflect.Message.NewField contract.
func (g *newFieldGen) genComment() {
	g.P("// NewField returns a new value that is assignable to the field")
	g.P("// for the given descriptor. For scalars, this returns the default value.")
	g.P("// For lists, maps, and messages, this returns a new, empty, mutable value.")
}
// genField emits the case body for one field: mutable kinds (maps, lists,
// message-typed fields) are delegated to genMutable, while scalar kinds
// return their zero value wrapped in the matching protoreflect constructor.
func (g *newFieldGen) genField(field *protogen.Field) {
	desc := field.Desc
	if desc.IsMap() || desc.IsList() || desc.Kind() == protoreflect.MessageKind {
		g.genMutable(field)
		return
	}
	g.P("return ", kindToValueConstructor(desc.Kind()), "(", zeroValueForField(g.GeneratedFile, field), ")")
}
// genMutable emits the NewField body for field kinds whose "new" value is a
// fresh mutable container rather than a scalar zero value: oneof members,
// maps, lists and message fields.
func (g *newFieldGen) genMutable(field *protogen.Field) {
	switch {
	case field.Oneof != nil:
		// Oneof members have their own construction path.
		g.genOneof(field)
	case field.Desc.IsMap():
		// Emit a fresh empty Go map wrapped in the generated fast-reflection
		// map type. Fields[0]/Fields[1] are the map entry's key and value
		// fields — presumably per the protobuf synthetic map-entry layout;
		// TODO(review) confirm against getGoType's expectations.
		g.P("m := make(map[", getGoType(g.GeneratedFile, field.Message.Fields[0]), "]", getGoType(g.GeneratedFile, field.Message.Fields[1]), ")")
		g.P("return ", protoreflectPkg.Ident("ValueOfMap"), "(&", mapTypeName(field), "{m: &m})")
	case field.Desc.IsList():
		// Emit a fresh empty slice wrapped in the generated fast-reflection
		// list type.
		g.P("list := []", getGoType(g.GeneratedFile, field), "{}")
		g.P("return ", protoreflectPkg.Ident("ValueOfList"), "(&", listTypeName(field), "{list: &list})")
	case field.Desc.Kind() == protoreflect.MessageKind:
		// Emit a freshly allocated message value.
		g.P("m := new(", g.QualifiedGoIdent(field.Message.GoIdent), ")")
		g.P("return ", protoreflectPkg.Ident("ValueOfMessage"), "(m.ProtoReflect())")
	default:
		// genField only routes the kinds handled above into genMutable.
		panic("unreachable")
	}
}
// genOneof emits the NewField body for a field that belongs to a oneof.
// Only message-typed oneof members are supported; anything else indicates a
// bug in the generator and panics.
func (g *newFieldGen) genOneof(field *protogen.Field) {
	if field.Desc.Kind() == protoreflect.MessageKind {
		g.P("value := &", g.QualifiedGoIdent(field.Message.GoIdent), "{}")
		g.P("return ", protoreflectPkg.Ident("ValueOfMessage"), "(value.ProtoReflect())")
		return
	}
	panic("newfield oneof fastGenerator should be applied only to mutable message types")
}
|
#!/usr/bin/env bash
# Launches a local Weaviate server in one of several development
# configurations. Usage: ./<script> [CONFIG]; defaults to local-development.

CONFIG=${1:-local-development}

# Jump to root directory
cd "$( dirname "${BASH_SOURCE[0]}" )"/../..

export GO111MODULE=on
export DEVELOPMENT_UI=on
export LOG_LEVEL=debug
export LOG_FORMAT=text

case $CONFIG in
  # Run under the delve debugger instead of `go run`.
  debug)
    CONTEXTIONARY_URL=localhost:9999 \
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true \
    DEFAULT_VECTORIZER_MODULE=text2vec-contextionary \
    PERSISTENCE_DATA_PATH="./data" \
    ENABLE_MODULES="text2vec-contextionary" \
    dlv debug ./cmd/weaviate-server -- \
      --scheme http \
      --host "127.0.0.1" \
      --port 8080 \
      --read-timeout=600s \
      --write-timeout=600s
    ;;

  # Single node with the contextionary vectorizer (default).
  local-development)
    CONTEXTIONARY_URL=localhost:9999 \
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true \
    DEFAULT_VECTORIZER_MODULE=text2vec-contextionary \
    PERSISTENCE_DATA_PATH="./data" \
    ENABLE_MODULES="text2vec-contextionary" \
    CLUSTER_HOSTNAME="node1" \
    CLUSTER_GOSSIP_BIND_PORT="7000" \
    CLUSTER_DATA_BIND_PORT="7001" \
    go run ./cmd/weaviate-server \
      --scheme http \
      --host "127.0.0.1" \
      --port 8080 \
      --read-timeout=600s \
      --write-timeout=600s
    ;;

  # Second cluster node (port 8081) joining the local-development node.
  second-node)
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true \
    PERSISTENCE_DATA_PATH="./data-node2" \
    CLUSTER_HOSTNAME="node2" \
    CLUSTER_GOSSIP_BIND_PORT="7002" \
    CLUSTER_DATA_BIND_PORT="7003" \
    CLUSTER_JOIN="localhost:7000" \
    CONTEXTIONARY_URL=localhost:9999 \
    DEFAULT_VECTORIZER_MODULE=text2vec-contextionary \
    ENABLE_MODULES="text2vec-contextionary" \
    go run ./cmd/weaviate-server \
      --scheme http \
      --host "127.0.0.1" \
      --port 8081 \
      --read-timeout=600s \
      --write-timeout=600s
    ;;

  # Transformers vectorizer module served on :8000.
  local-transformers)
    CONTEXTIONARY_URL=localhost:9999 \
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true \
    DEFAULT_VECTORIZER_MODULE=text2vec-transformers \
    PERSISTENCE_DATA_PATH="./data" \
    TRANSFORMERS_INFERENCE_API="http://localhost:8000" \
    ENABLE_MODULES="text2vec-transformers" \
    go run ./cmd/weaviate-server \
      --scheme http \
      --host "127.0.0.1" \
      --port 8080 \
      --read-timeout=600s \
      --write-timeout=600s
    ;;

  # Question-answering module served on :8001.
  local-qna)
    CONTEXTIONARY_URL=localhost:9999 \
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true \
    DEFAULT_VECTORIZER_MODULE=text2vec-contextionary \
    PERSISTENCE_DATA_PATH="./data" \
    QNA_INFERENCE_API="http://localhost:8001" \
    ENABLE_MODULES="text2vec-contextionary,qna-transformers" \
    go run ./cmd/weaviate-server \
      --scheme http \
      --host "127.0.0.1" \
      --port 8080 \
      --read-timeout=600s \
      --write-timeout=600s
    ;;

  # Image vectorizer module served on :8002.
  local-image)
    CONTEXTIONARY_URL=localhost:9999 \
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true \
    DEFAULT_VECTORIZER_MODULE=text2vec-contextionary \
    PERSISTENCE_DATA_PATH="./data" \
    IMAGE_INFERENCE_API="http://localhost:8002" \
    ENABLE_MODULES="text2vec-contextionary,img2vec-neural" \
    go run ./cmd/weaviate-server \
      --scheme http \
      --host "127.0.0.1" \
      --port 8080 \
      --read-timeout=600s \
      --write-timeout=600s
    ;;

  # Named-entity-recognition module served on :8003.
  local-ner)
    CONTEXTIONARY_URL=localhost:9999 \
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true \
    DEFAULT_VECTORIZER_MODULE=text2vec-contextionary \
    PERSISTENCE_DATA_PATH="./data" \
    NER_INFERENCE_API="http://localhost:8003" \
    ENABLE_MODULES="text2vec-contextionary,ner-transformers" \
    go run ./cmd/weaviate-server \
      --scheme http \
      --host "127.0.0.1" \
      --port 8080 \
      --read-timeout=600s \
      --write-timeout=600s
    ;;

  # Spellcheck module served on :8004.
  local-spellcheck)
    CONTEXTIONARY_URL=localhost:9999 \
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true \
    DEFAULT_VECTORIZER_MODULE=text2vec-contextionary \
    PERSISTENCE_DATA_PATH="./data" \
    SPELLCHECK_INFERENCE_API="http://localhost:8004" \
    ENABLE_MODULES="text2vec-contextionary,text-spellcheck" \
    go run ./cmd/weaviate-server \
      --scheme http \
      --host "127.0.0.1" \
      --port 8080 \
      --read-timeout=600s \
      --write-timeout=600s
    ;;

  # OIDC authentication against a local keycloak; anonymous access disabled.
  local-oidc)
    CONTEXTIONARY_URL=localhost:9999 \
    QUERY_DEFAULTS_LIMIT=20 \
    ORIGIN=http://localhost:8080 \
    PERSISTENCE_DATA_PATH="./data" \
    AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=false \
    AUTHENTICATION_OIDC_ENABLED=true \
    AUTHENTICATION_OIDC_ISSUER=http://localhost:9090/auth/realms/weaviate \
    AUTHENTICATION_OIDC_USERNAME_CLAIM=email \
    AUTHENTICATION_OIDC_GROUPS_CLAIM=groups \
    AUTHENTICATION_OIDC_CLIENT_ID=demo \
    AUTHORIZATION_ADMINLIST_ENABLED=true \
    AUTHORIZATION_ADMINLIST_USERS=john@doe.com \
    DEFAULT_VECTORIZER_MODULE=text2vec-contextionary \
    go run ./cmd/weaviate-server \
      --scheme http \
      --host "127.0.0.1" \
      --port 8080
    ;;

  *)
    # Fixed redirect: the original used `2>&1` (stderr to stdout); the intent
    # is to print the error on stderr.
    echo "Invalid config" >&2
    exit 1
    ;;
esac
|
<!DOCTYPE html>
<html>
<head>
  <title>Chart Demonstration</title>
</head>
<body>
  <div id="chart" style="width: 400px; height: 400px;"></div>
  <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
  <script>
    // Values used for both the x and y axes of the bar chart.
    var data = [2, 4, 8, 10, 15];

    var layout = {
      xaxis: {
        title: 'data'
      },
      yaxis: {
        title: 'values'
      },
      title: 'Chart of data'
    };

    // `marker` is a per-trace attribute, not a layout attribute; in the
    // original it sat inside `layout` and was silently ignored, so the bar
    // color never applied. It now lives on the trace itself.
    Plotly.newPlot('chart', [{
      x: data,
      y: data,
      type: 'bar',
      marker: {
        color: '#000000'
      }
    }], layout);
  </script>
</body>
</html>
<reponame>nkitku/drag2symlink
import * as React from 'react';
export function DragBox() {
return (
<div className="holder">
<div className="holder__call">Drag Your Folders Here</div>
</div>
);
}
|
import { CommandBase } from "@guild-utils/command-base";
import { Message } from "discord.js";
import { ConfigurateUsecase } from "protocol_configurate-usecase";
import { getLangType } from "../../util/get-lang";
import {
buildTargetAndExecutor,
ConfigCommandCommonOption,
updateConfig,
UpdateResultResponses,
} from "./util";
/**
 * Guild configuration "set" command: stores the given value(s) under a
 * configuration key for the target resolved from the invoking message.
 */
export class CommandSet implements CommandBase {
  constructor(
    private readonly usecase: ConfigurateUsecase,
    private readonly responses: (lang: string) => UpdateResultResponses,
    private readonly getLang: getLangType
  ) {}
  /**
   * Runs the command.
   *
   * @param message - the invoking Discord message; also used for replies.
   * @param key - configuration key to set.
   * @param values - parsed argument values; a single element is stored as a
   *   scalar, multiple elements as an array.
   * @param option - common options used to resolve the config target.
   */
  async run(
    message: Message,
    [key, values]: [string, string[]],
    option: ConfigCommandCommonOption
  ): Promise<void> {
    // Resolve which entity the change applies to and who is performing it.
    const { target, executor } = buildTargetAndExecutor(message, option);
    // updateConfig drives the usecase call and replies with a localized
    // result message built from responses(lang).
    await updateConfig(message, this.responses(await this.getLang()), key, () =>
      this.usecase.set(
        target,
        key,
        values.length === 1 ? values[0] : values,
        executor
      )
    );
  }
}
|
<reponame>bvodola/vodola.com.br
import styled from "styled-components"
// Responsive flex row: children stack vertically on narrow screens and lay
// out horizontally from 900px up.
//
// Props:
//   padded / paddedSm - adds 36px / 18px of top margin.
//   stacked           - wraps children into a responsive grid: 2 columns at
//                       >=700px, 3 at >=900px, 4 at >=1200px.
const Row = styled.div`
  display: flex;
  flex-direction: column;
  justify-content: start;
  margin-top: ${props =>
    props.padded ? "36px" : props.paddedSm ? "18px" : "0"};
  @media (min-width: 900px) {
    flex-direction: row;
  }
  ${props =>
    props.stacked &&
    `
    flex-wrap: wrap;
    > * {
      @media (min-width: 700px) {
        flex-basis: calc(50% - 40px);
        margin: 20px;
      }
      @media (min-width: 900px) {
        flex-basis: calc(33% - 40px);
      }
      @media (min-width: 1200px) {
        flex-basis: calc(25% - 40px);
      }
    }
  `}
`
const Col = styled.div`
display: ${props => props.display || "flex"};
align-items: ${props => props.alignItemsSm || "center"};
@media (min-width: 900px) {
align-items: ${props => props.alignItemsLg || "center"};
}
flex: ${props => (props.size ? props.size : 1)};
flex-direction: column;
padding-left: 10px;
padding-right: 10px;
:first-child {
padding-left: 0;
}
:last-child {
padding-right: 0;
}
`
export { Row, Col }
|
/*
* Copyright 2015 Samsung Electronics All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package oic.simulator.serviceprovider.view.dialogs;
import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import oic.simulator.serviceprovider.utils.Constants;
/**
 * Wizard page that lets the user edit the basic properties (name, URI and
 * type) of a simulated resource. The page is complete only when all three
 * values are non-blank.
 */
public class UpdatePropertiesPage extends WizardPage {

    /** Text field for the resource name; null until createControl runs. */
    private Text resNameTxt;

    /** Text field for the resource URI; null until createControl runs. */
    private Text resUriTxt;

    /** Text field for the resource type; null until createControl runs. */
    private Text resTypeTxt;

    /** Current resource name (typed by the user or preset via setter). */
    private String resName;

    /** Current resource URI. */
    private String resURI;

    /** Current resource type. */
    private String resType;

    protected UpdatePropertiesPage() {
        super("Update Properties");
    }

    @Override
    public void createControl(Composite parent) {
        setPageComplete(true);
        setTitle(Constants.UPDATE_PROP_PAGE_TITLE);
        setMessage(Constants.UPDATE_PROP_PAGE_MESSAGE);

        Composite comp = new Composite(parent, SWT.NONE);
        GridLayout gridLayout = new GridLayout();
        comp.setLayout(gridLayout);
        GridData gd = new GridData(SWT.FILL, SWT.FILL, true, true);
        comp.setLayoutData(gd);

        // Two-column group: label / text field pairs.
        Group grp = new Group(comp, SWT.NONE);
        gridLayout = new GridLayout(2, false);
        grp.setLayout(gridLayout);
        gd = new GridData(SWT.FILL, SWT.FILL, true, true);
        grp.setLayoutData(gd);

        Label resNameLbl = new Label(grp, SWT.NULL);
        resNameLbl.setText("Resource Name");
        gd = new GridData();
        gd.verticalIndent = 20;
        resNameLbl.setLayoutData(gd);

        resNameTxt = new Text(grp, SWT.BORDER);
        resNameTxt.setFocus();
        gd = new GridData();
        gd.widthHint = 300;
        gd.verticalIndent = 20;
        resNameTxt.setLayoutData(gd);

        Label resUriLbl = new Label(grp, SWT.NULL);
        resUriLbl.setText("Resource URI");
        gd = new GridData();
        gd.verticalIndent = 10;
        resUriLbl.setLayoutData(gd);

        resUriTxt = new Text(grp, SWT.BORDER);
        gd = new GridData();
        gd.widthHint = 300;
        gd.verticalIndent = 10;
        resUriTxt.setLayoutData(gd);

        Label resTypeLbl = new Label(grp, SWT.NULL);
        resTypeLbl.setText("Resource Type");
        gd = new GridData();
        gd.verticalIndent = 10;
        resTypeLbl.setLayoutData(gd);

        resTypeTxt = new Text(grp, SWT.BORDER);
        gd = new GridData();
        gd.widthHint = 300;
        gd.verticalIndent = 10;
        resTypeTxt.setLayoutData(gd);

        Label descLbl = new Label(comp, SWT.NONE);
        descLbl.setText("Description:");
        gd = new GridData();
        descLbl.setLayoutData(gd);

        final Text text = new Text(comp, SWT.MULTI | SWT.READ_ONLY | SWT.BORDER
                | SWT.WRAP | SWT.V_SCROLL);
        text.setText("These properties can be changed later from properties view.");
        gd = new GridData(SWT.FILL, SWT.FILL, true, true);
        text.setLayoutData(gd);

        addUIListeners();

        // Initialize data: push any values preset via the setters into the
        // freshly created widgets.
        if (resUriTxt.getText().length() < 1 && null != resURI) {
            resUriTxt.setText(resURI);
        }
        if (resNameTxt.getText().length() < 1 && null != resName) {
            resNameTxt.setText(resName);
        }
        if (resTypeTxt.getText().length() < 1 && null != resType) {
            resTypeTxt.setText(resType);
        }

        setControl(comp);
    }

    /**
     * Mirrors every text-field edit into the corresponding field and
     * re-evaluates page completeness.
     */
    private void addUIListeners() {
        resNameTxt.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                resName = resNameTxt.getText();
                setPageComplete(isSelectionDone());
            }
        });

        resUriTxt.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                resURI = resUriTxt.getText();
                if (null == resURI) {
                    return;
                }
                setPageComplete(isSelectionDone());
            }
        });

        resTypeTxt.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                resType = resTypeTxt.getText();
                if (null == resType) {
                    return;
                }
                setPageComplete(isSelectionDone());
            }
        });
    }

    @Override
    public boolean canFlipToNextPage() {
        // This is a terminal page; there is never a next page.
        return false;
    }

    /**
     * @return {@code true} when name, URI and type are all non-blank.
     */
    public boolean isSelectionDone() {
        return null != resName && resName.trim().length() > 0
                && null != resURI && resURI.trim().length() > 0
                && null != resType && resType.trim().length() > 0;
    }

    @Override
    public IWizardPage getNextPage() {
        return null;
    }

    /**
     * Sets the resource name, updating the text field if the UI exists.
     *
     * @param resName the new name; {@code null} is treated as empty.
     */
    public void setResName(String resName) {
        if (null == resName) {
            resName = "";
        }
        this.resName = resName;
        // Fixed: guard on the widget, not the (never-null here) string.
        // Previously this checked "null != resName", so calling the setter
        // before createControl() threw an NPE on resNameTxt.isDisposed().
        if (null != resNameTxt && !resNameTxt.isDisposed())
            resNameTxt.setText(resName);
    }

    /**
     * Sets the resource URI, updating the text field if the UI exists.
     *
     * @param resURI the new URI; {@code null} is treated as empty.
     */
    public void setResURI(String resURI) {
        if (null == resURI) {
            resURI = "";
        }
        this.resURI = resURI;
        if (null != resUriTxt && !resUriTxt.isDisposed())
            resUriTxt.setText(resURI);
    }

    /**
     * Sets the resource type, updating the text field if the UI exists.
     *
     * @param resType the new type; {@code null} is treated as empty.
     */
    public void setResType(String resType) {
        if (null == resType) {
            resType = "";
        }
        this.resType = resType;
        // Fixed: guard on the widget, not the string (same NPE risk as
        // setResName; setResURI already used the correct pattern).
        if (null != resTypeTxt && !resTypeTxt.isDisposed())
            resTypeTxt.setText(resType);
    }

    public String getResName() {
        return resName;
    }

    public String getResURI() {
        return resURI;
    }

    public String getResType() {
        return resType;
    }
}
|
#!/usr/bin/env bash
#
# Starts a beacon node based upon a genesis state created by
# `./setup.sh`.
#
# Usage: ./beacon_node.sh <DATADIR> <NETWORK-PORT> <HTTP-PORT> <OPTIONAL-DEBUG-LEVEL>

source ./vars.env

# Require the three mandatory positional arguments; previously a missing
# argument was silently passed through as an empty flag value.
if [ "$#" -lt 3 ]; then
    echo "Usage: $0 <DATADIR> <NETWORK-PORT> <HTTP-PORT> <OPTIONAL-DEBUG-LEVEL>" >&2
    exit 1
fi

DEBUG_LEVEL=${4:-info}

# Quote all expansions so paths containing spaces survive word splitting.
exec lighthouse \
    --debug-level "$DEBUG_LEVEL" \
    bn \
    --datadir "$1" \
    --testnet-dir "$TESTNET_DIR" \
    --staking \
    --enr-address 127.0.0.1 \
    --enr-udp-port "$2" \
    --enr-tcp-port "$2" \
    --port "$2" \
    --http-port "$3"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.