text stringlengths 1 1.05M |
|---|
# Poll until every /home/m/.pid_* marker file has disappeared.
# Bug fix: the original `while [ $gate ]` word-splits the unquoted variable,
# so two or more matching pid files made `[` fail with "too many arguments".
gate=1
while [ -n "$gate" ]; do
    sleep 1s
    # Empty (stderr silenced) once no pid files remain, ending the loop.
    gate=$(ls /home/m/.pid_* 2>/dev/null)
    echo "$gate"
done
echo "done!"
<reponame>ksenia-kh/fairseq-factored<filename>preprocessing/stanford/feature_tagger_gender_bias/hf_translate_mt.py<gh_stars>0
import os
import sys
import torch
from transformers import MarianTokenizer, MarianMTModel
model_name = 'Helsinki-NLP/opus-mt-en-ROMANCE'
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
PATH = '/home/usuaris/veu/ksenia.kharitonova/tfm/data/mt/'
LANG = '>>es<<'
def main():
    """Translate English sentences to Spanish with a Marian MT model and
    write 'source ||| translation' pairs to en-es.txt.

    Relies on module-level globals: PATH, LANG, tokenizer, model, torch.
    """
    # Input file is tab-separated; the English sentence is column index 2.
    with open(os.path.join(PATH, 'en.txt'), 'r', encoding="utf8") as file:
        text = file.readlines()
    en_original = [line.split('\t')[2] for line in text]
    n = len(en_original)
    # Multilingual Marian models choose the target language from a '>>xx<<'
    # prefix on each source sentence.
    en_original_to_translate = [LANG+' '+line for line in en_original]
    # Translating in two halves — presumably to bound peak memory; TODO confirm.
    print('Generating translations 1')
    translated1 = model.generate(**tokenizer.prepare_seq2seq_batch(en_original_to_translate[:n//2]))
    print('Generating translations 2')
    translated2 = model.generate(**tokenizer.prepare_seq2seq_batch(en_original_to_translate[n//2:]))
    # NOTE(review): torch.cat assumes both generate() calls padded to the same
    # sequence length — confirm, otherwise this raises a size-mismatch error.
    translated = torch.cat((translated1, translated2), dim=0)
    print('Decoding output')
    tgt_text = [tokenizer.decode(t, skip_special_tokens=True) for t in translated]
    result = [en_original[i]+' ||| '+tgt_text[i]+'\n' for i in range(len(en_original))]
    with open(os.path.join(PATH, 'en-es.txt'),'w', encoding="utf8") as f:
        for line in result:
            f.write(line)

if __name__ == "__main__":
    #sys.settrace(main())
    main()
|
package com.scand.realmbrowser;
import android.app.Notification;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.os.IBinder;
import android.support.annotation.Nullable;
import android.support.v4.app.NotificationCompat;
import android.text.TextUtils;
/**
* Created by Slabodeniuk on 2/18/16.
*/
public class RealmBrowserService extends Service {

    private static final int NOTIFICATION_ID = 9696;

    /** Convenience launcher: starts this service in the given context. */
    public static void startService(Context context) {
        context.startService(new Intent(context, RealmBrowserService.class));
    }

    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        // Started service only — binding is not supported.
        return null;
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        // Use the host app's label as the notification title when available,
        // falling back to the library's own title resource.
        String appName = getApplicationInfo().loadLabel(getPackageManager()).toString();
        String title = TextUtils.isEmpty(appName)
                ? getString(R.string.realm_browser_notification_title)
                : appName;

        // Tapping the notification opens the Realm browser UI.
        // NOTE(review): Android 12+ requires FLAG_IMMUTABLE or FLAG_MUTABLE on
        // PendingIntents — confirm the target SDK before relying on this flag set.
        Intent browserIntent = new Intent(this, BrowserActivity.class);
        PendingIntent contentIntent =
                PendingIntent.getActivity(this, 0, browserIntent, PendingIntent.FLAG_UPDATE_CURRENT);

        Notification notification = new NotificationCompat.Builder(this)
                .setSmallIcon(R.drawable.realm_browser_notification_icon)
                .setColor(getResources().getColor(R.color.realm_browser_notification_color))
                .setContentTitle(title)
                .setContentText(getString(R.string.realm_browser_notification_text))
                .setAutoCancel(false)
                .setLocalOnly(true)
                .setContentIntent(contentIntent)
                .build();

        // Foreground service keeps the browser notification persistently visible.
        startForeground(NOTIFICATION_ID, notification);
        return START_STICKY;
    }
}
|
package com.lnoah.portfolio.springboot.controller.dto;
import com.lnoah.portfolio.springboot.model.posts.Posts;
import lombok.Getter;
import java.time.LocalDateTime;
@Getter
public class PostsListResponseDto {

    // Lombok @Getter generates accessors for each field below.
    private Long id;
    private String title;
    private String author;
    private LocalDateTime modifiedDate;

    /**
     * Builds a list-row DTO from a {@code Posts} entity.
     * Nanoseconds are dropped from the modified date for cleaner display.
     */
    public PostsListResponseDto(Posts posts) {
        id = posts.getId();
        title = posts.getTitle();
        author = posts.getAuthor();
        modifiedDate = posts.getModifiedDate().withNano(0);
    }
}
|
#!/usr/bin/env bash
# Vagrant provisioning: nginx + PHP 7.3 + MySQL + Redis + Java/ElasticSearch.
#
# Setup
#
echo "Setting up"
cd /vagrant
USER=vagrant
sudo locale-gen en_GB.UTF-8
sudo apt-get update -y
sudo apt-get upgrade -y
sudo apt-get install -y htop unzip curl git dos2unix
#
# NGINX
#
echo "Installing: NGINX"
sudo add-apt-repository ppa:nginx/stable -y
sudo apt-get update
sudo apt-get install -y nginx
# Bug fix: the default site file is root-owned, so removal needs sudo.
sudo rm /etc/nginx/sites-available/default
sudo cp /vagrant/vm/VagrantfileNginxCommon /etc/nginx/sites-available/common
sudo cp /vagrant/vm/VagrantfileNginxDefault /etc/nginx/sites-available/default
sudo cp /vagrant/vm/VagrantfileNginx.conf /etc/nginx/nginx.conf
#
# PHP + Composer + Imagick
#
echo "Installing: PHP + Composer"
sudo add-apt-repository ppa:ondrej/php -y
sudo apt-get update -y
sudo apt-get install -y php7.3-fpm php-apcu php-imagick php7.3-dev php7.3-cli php7.3-tidy php7.3-json
sudo apt-get install -y php7.3-fpm php7.3-intl php7.3-mysql php7.3-sqlite php7.3-curl php7.3-gd
sudo apt-get install -y php7.3-mbstring php7.3-dom php7.3-xml php7.3-zip php7.3-tidy php7.3-bcmath
# Dev-friendly php.ini tweaks; run php-fpm as the vagrant user.
sudo sed -i 's|display_errors = Off|display_errors = On|' /etc/php/7.3/fpm/php.ini
sudo sed -i 's|memory_limit = 128M|memory_limit = -1|' /etc/php/7.3/fpm/php.ini
sudo sed -i "s|www-data|$USER|" /etc/php/7.3/fpm/pool.d/www.conf
#
# MySQL
#
echo "Installing: MySQL"
echo "mysql-server mysql-server/root_password password dalamud" | debconf-set-selections
echo "mysql-server mysql-server/root_password_again password dalamud" | debconf-set-selections
sudo apt install mysql-server -y
mysql -uroot -pdalamud < /vagrant/vm/Database.sql
#
# Redis
#
echo "Installing: Redis"
sudo apt-get install redis-server -y
# Build the phpredis extension from source.
git clone https://github.com/phpredis/phpredis.git
cd phpredis && phpize && ./configure && make && sudo make install
cd /vagrant
rm -rf /vagrant/phpredis
# Bug fix: 'sudo echo ... > file' performs the redirection as the unprivileged
# user and fails on root-owned paths — write via 'sudo tee' instead.
echo "extension=redis.so" | sudo tee /etc/php/7.3/mods-available/redis.ini > /dev/null
sudo ln -sf /etc/php/7.3/mods-available/redis.ini /etc/php/7.3/fpm/conf.d/20-redis.ini
sudo ln -sf /etc/php/7.3/mods-available/redis.ini /etc/php/7.3/cli/conf.d/20-redis.ini
sudo service php7.3-fpm restart
#
# Install JAVA + ElasticSearch
#
echo "Installing: Java + ElasticSearch"
export _JAVA_OPTIONS="-Xmx4g -Xms4g"
sudo apt install -y openjdk-8-jre apt-transport-https
sudo wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
# The inner 'sudo echo > file' is fine here: the whole shell runs as root.
sudo bash -c 'sudo echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" > /etc/apt/sources.list.d/elastic.list'
sudo apt update
sudo apt install -y elasticsearch
# Listen on all interfaces so the host machine can reach ElasticSearch.
sudo sed -i 's|#network.host: 192.168.0.1|network.host: 0.0.0.0|g' /etc/elasticsearch/elasticsearch.yml
sudo systemctl start elasticsearch
#
# Finish
#
echo "Finishing up ..."
sudo mkdir -p /vagrant_xivapi /vagrant_mogboard
sudo chown vagrant:vagrant /vagrant_xivapi /vagrant_mogboard
sudo chmod -R 777 /vagrant_xivapi /vagrant_mogboard
sudo service nginx restart
sudo service php7.3-fpm restart
sudo apt-get autoremove -y
sudo apt-get update -y
sudo apt-get upgrade -y
bash /vagrant/bin/version
echo "- Testing ElasticSearch in 10 seconds ..."
sleep 10
curl -X GET 'http://localhost:9200'
|
def fmt_bundle_ctx_id(type, id, version):
    """Build the canonical bundle-context identifier "<type>_<id>_<version>"."""
    return "{}_{}_{}".format(type, id, version)
<filename>src/components/ContinentCard.js<gh_stars>1-10
import PropTypes from 'prop-types';
import { Link } from 'react-router-dom';
const ContinentCard = (props) => {
const {
continent, area, population, id, data,
} = props;
let background = 'bg-sky-900';
if (id) background = [2, 3, 6, 7, 10, 11].includes(id) ? 'bg-sky-700' : 'bg-sky-800';
const span = id === 0 ? 'col-span-2 md:col-span-3 bg-sky-900' : '';
return (
<Link to={`continent/${continent}`} className={`${background} ${span} flex flex-col text-white py-4 px-2 flex justify-center`} id={id} state={data}>
<h2 className="text-3xl lg:text-4xl uppercase font-bold text-white">{continent}</h2>
<p className="text-sm md:text-base pt-2">
{' '}
Area:
{new Intl.NumberFormat('de-DE').format(area)}
m²
</p>
<p className="text-sm md:text-base">
Population:
{new Intl.NumberFormat('de-DE').format(population)}
</p>
</Link>
);
};
// Runtime prop validation for ContinentCard.
ContinentCard.propTypes = {
  continent: PropTypes.string.isRequired,
  area: PropTypes.number.isRequired,
  population: PropTypes.number.isRequired,
  id: PropTypes.number.isRequired,
  // Router state forwarded to the continent detail page; optional.
  data: PropTypes.arrayOf(PropTypes.object),
};
ContinentCard.defaultProps = {
  data: [{}],
};
export default ContinentCard;
|
import boto3
def launch_ec2_instance(user_data_file, instance_class, ami_id):
    """Launch a single EC2 instance of the given class from an AMI.

    Args:
        user_data_file: Path to a file whose contents are passed to the
            instance as EC2 user data (e.g. a cloud-init script).
        instance_class: Logical instance class; only 'notebook' is recognized.
        ami_id: The AMI to launch from.

    Returns:
        The list of created Instance resources on success, otherwise None.
    """
    try:
        with open(user_data_file, 'r') as f:
            user_data = f.read()
    except FileNotFoundError:
        print("Error: User data file not found")
        return None

    # Guard clause keeps the happy path unindented.
    if instance_class != 'notebook':
        print("Error: Unrecognized instance class")
        return None

    ec2 = boto3.resource('ec2')
    try:
        # Bug fix: the original read user_data but never passed it to AWS.
        instances = ec2.create_instances(ImageId=ami_id, MinCount=1, MaxCount=1,
                                         UserData=user_data,
                                         BlockDeviceMappings=[
                                             # Add block device mappings as required
                                         ],
                                         # Add other instance parameters as required
                                         )
        print("EC2 instance launched successfully")
        return instances
    except Exception as e:
        # Best-effort reporting preserved from the original; callers get None.
        print("Error launching EC2 instance:", e)
        return None
# Example usage — guarded so importing this module does not launch an instance
# with the placeholder arguments below.
if __name__ == "__main__":
    launch_ec2_instance('path/to/user_data.txt', 'notebook', 'ami-12345678')
#!/bin/bash
# Installs node.js/npm per-OS and builds the chia-blockchain-gui submodule.
set -e

# Must run inside the chia python virtualenv (provides the expected toolchain).
if [ -z "$VIRTUAL_ENV" ]; then
  echo "This requires the chia python virtual environment."
  echo "Execute '. ./activate' before running."
  exit 1
fi

# Allows overriding the branch or commit to build in chia-blockchain-gui
SUBMODULE_BRANCH=$1

UBUNTU=false
# Manage npm and other install requirements on an OS specific basis
if [ "$(uname)" = "Linux" ]; then
  #LINUX=1
  if type apt-get; then
    # Debian/Ubuntu
    UBUNTU=true
    sudo apt-get install -y npm nodejs libxss1
  elif type yum && [ ! -f "/etc/redhat-release" ] && [ ! -f "/etc/centos-release" ]; then
    # AMZN 2
    echo "Installing on Amazon Linux 2"
    curl -sL https://rpm.nodesource.com/setup_10.x | sudo bash -
    sudo yum install -y nodejs
  elif type yum && [ -f /etc/redhat-release ] || [ -f /etc/centos-release ]; then
    # CentOS or Redhat
    echo "Installing on CentOS/Redhat"
    curl -sL https://rpm.nodesource.com/setup_10.x | sudo bash -
    sudo yum install -y nodejs
  fi
elif [ "$(uname)" = "Darwin" ] && type brew && ! npm version >/dev/null 2>&1; then
  # Install npm if not installed
  brew install npm
elif [ "$(uname)" = "OpenBSD" ]; then
  pkg_add node
elif [ "$(uname)" = "FreeBSD" ]; then
  pkg install node
fi

# Ubuntu before 20.04LTS has an ancient node.js
echo ""
UBUNTU_PRE_2004=false
if $UBUNTU; then
  # Python prints the bool, so this holds the string "True"/"False".
  UBUNTU_PRE_2004=$(python -c 'import subprocess; process = subprocess.run(["lsb_release", "-rs"], stdout=subprocess.PIPE); print(float(process.stdout) < float(20.04))')
fi

if [ "$UBUNTU_PRE_2004" = "True" ]; then
  # Typo fix: "Ugrading" -> "Upgrading".
  echo "Installing on Ubuntu older than 20.04 LTS: Upgrading node.js to stable"
  UBUNTU_PRE_2004=true # Unfortunately Python returns True when shell expects true
  sudo npm install -g n
  sudo n stable
  export PATH="$PATH"
fi

if [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = "False" ]; then
  echo "Installing on Ubuntu 20.04 LTS or newer: Using installed node.js version"
fi

# We will set up node.js on GitHub Actions and Azure Pipelines directly
# for Mac and Windows so skip unless completing a source/developer install
# Ubuntu special cases above
if [ ! "$CI" ]; then
  echo "Running git submodule update --init --recursive."
  echo ""
  git submodule update --init --recursive
  echo "Running git submodule update."
  echo ""
  git submodule update
  cd chia-blockchain-gui

  if [ "$SUBMODULE_BRANCH" ];
  then
    git fetch
    git checkout "$SUBMODULE_BRANCH"
    git pull
    echo ""
    echo "Building the GUI with branch $SUBMODULE_BRANCH"
    echo ""
  fi

  npm install
  npm audit fix || true
  npm run locale:extract
  npm run locale:compile
  npm run build
else
  echo "Skipping node.js in install.sh on MacOS ci"
fi

echo ""
echo "Chia blockchain install-gui.sh complete."
echo ""
echo "Type 'cd chia-blockchain-gui' and then 'npm run electron &' to start the GUI"
|
#!/usr/bin/env bash
# Trains a transformer_wmt_en_de model with an ELECTRA-based knowledge
# distillation criterion on WMT14 en-de (fairseq fork).
src=en
tgt=de
# bedropout is defined but not referenced below — presumably consumed by an
# outer launcher or left over from a previous variant; verify before removing.
bedropout=0.5
ARCH=transformer_wmt_en_de
ROOT=/apdcephfs/share_47076/elliottyan/co-work-projects/fairseq-bert
#### MODIFY ######
KD_ALPHA=3
DATA_SIG=wmt14_en_de-bert-or-bart-or-electra
MODEL_SIG=d512_electra_feature_task_generator_alpha_${KD_ALPHA}
#### MODIFY ######
# Data and checkpoint locations are derived from the signatures above.
DATAPATH=$ROOT/data-bin/$DATA_SIG
SAVEDIR=$ROOT/checkpoints/$DATA_SIG/$MODEL_SIG
mkdir -p $SAVEDIR
export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
#export CUDA_VISIBLE_DEVICES=0
# LC_ALL forced to UTF-8 so fairseq's text processing is locale-independent.
# Do not insert comments between the continuation lines below.
LC_ALL=en_US.UTF-8 python $ROOT/fairseq_cli/train.py $DATAPATH \
-a $ARCH --optimizer adam --lr 0.0007 -s $src -t $tgt \
--no-epoch-checkpoints --save-interval-updates 5000 \
--dropout 0.1 --max-tokens 4000 --lr-scheduler inverse_sqrt --weight-decay 0.0001 \
--adam-betas '(0.9,0.98)' --save-dir $SAVEDIR --label-smoothing 0.1 \
--log-interval 100 --disable-validation \
--fp16 --update-freq 1 --ddp-backend=no_c10d \
--max-update 100000 --warmup-updates 4000 --warmup-init-lr '1e-07' \
--criterion new_electra_task_distillation_loss \
--left-pad-source \
--use-electrainput \
--kd-alpha $KD_ALPHA --electra-pretrain --electra-pretrain-task --electra-pretrain-task-generator \
--bert-model-name $ROOT/pretrain_models/bert-base-cased-new \
--electra-model-name $ROOT/pretrain_models/electra-base-discriminator \
--electra-generator $ROOT/pretrain_models/electra-base-generator
# Optional flags kept for reference; append to the command above to enable.
# --use-bertinput
# --share-all-embeddings
# --input-mapping
# --text-filling
# --bart-model-name $ROOT/pretrain_models/bart-base
# --denoising
# Copyright 2020-2021 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid, datetime, pytz, os, requests
import configparser, difflib, hashlib
import DataCatalogUtils as dc
import BigQueryUtils as bq
from google.cloud import bigquery
from google.cloud import firestore
import constants
class TagEngineUtils:
def __init__(self):
    # Firestore client used by every settings/config/log operation below.
    self.db = firestore.Client()
    config = configparser.ConfigParser()
    config.read("tagengine.ini")
    # ZETA_URL endpoint read from the [DEFAULT] section of tagengine.ini.
    self.zeta = config['DEFAULT']['ZETA_URL']
def read_default_settings(self):
    """Fetch the 'default' settings document from Firestore.

    Returns:
        (exists, settings): True plus the document dict when present,
        otherwise (False, {}).
    """
    snapshot = self.db.collection('settings').document('default').get()
    if snapshot.exists:
        return True, snapshot.to_dict()
    return False, {}
def read_export_settings(self):
    """Fetch the 'export' settings document from Firestore.

    Returns:
        (exists, settings): True plus the document dict when present,
        otherwise (False, {}).
    """
    snapshot = self.db.collection('settings').document('export').get()
    if snapshot.exists:
        return True, snapshot.to_dict()
    return False, {}
def read_coverage_settings(self):
    """Fetch the 'coverage' settings document from Firestore.

    Returns:
        (exists, settings): True plus the document dict when present,
        otherwise (False, {}).
    """
    snapshot = self.db.collection('settings').document('coverage').get()
    if snapshot.exists:
        return True, snapshot.to_dict()
    return False, {}
def read_propagation_settings(self):
    """Fetch the 'propagation' settings document from Firestore.

    Returns:
        (exists, settings): True plus the document dict when present,
        otherwise (False, {}).
    """
    snapshot = self.db.collection('settings').document('propagation').get()
    if snapshot.exists:
        return True, snapshot.to_dict()
    return False, {}
def write_propagation_settings(self, source_project_ids, dest_project_ids, excluded_datasets, job_frequency):
    """Overwrite the tag-propagation job settings in Firestore.

    All arguments are stored verbatim in settings/propagation.
    """
    self.db.collection('settings').document('propagation').set({
        'source_project_ids': source_project_ids,
        'dest_project_ids': dest_project_ids,
        'excluded_datasets': excluded_datasets,
        'job_frequency': job_frequency
    })
    print('Saved tag propagation settings.')
def write_default_settings(self, template_id, project_id, region):
    """Overwrite the default tag-template settings in Firestore.

    Stores the template id, its project and region in settings/default.
    """
    self.db.collection('settings').document('default').set({
        'template_id': template_id,
        'project_id': project_id,
        'region': region
    })
    print('Saved default settings.')
def write_export_settings(self, project_id, region, dataset):
    """Persist the tag-export settings and ensure the destination
    BigQuery dataset exists.

    Args:
        project_id: Project hosting the export dataset.
        region: BigQuery region for the dataset.
        dataset: Name of the dataset tags are exported into.
    """
    export_settings = self.db.collection('settings')
    doc_ref = export_settings.document('export')
    doc_ref.set({
        'project_id': project_id,
        'region': region,
        'dataset': dataset
    })
    # Bug fix: previously printed 'Saved default settings.' (copy-paste error).
    print('Saved export settings.')
    # Create the destination dataset up front so later exports cannot fail
    # on a missing dataset.
    bqu = bq.BigQueryUtils()
    bqu.create_dataset(project_id, region, dataset)
def write_coverage_settings(self, project_ids, datasets, tables):
    """Overwrite the coverage-report settings in Firestore.

    Note: `datasets` and `tables` are stored under the keys
    'excluded_datasets' and 'excluded_tables' respectively.
    """
    self.db.collection('settings').document('coverage').set({
        'project_ids': project_ids,
        'excluded_datasets': datasets,
        'excluded_tables': tables
    })
    print('Saved coverage settings.')
def generate_coverage_report(self):
    """Build tagging-coverage reports for the configured projects.

    Returns:
        summary_report: list of (qualified_dataset, total_tag_count) tuples.
        detailed_report: list of {qualified_dataset: [(table_name, tag_count), ...]}.
    """
    summary_report = []
    detailed_report = []
    exists, settings = self.read_coverage_settings()
    # Settings values are comma-separated strings, not lists.
    project_ids = settings['project_ids']
    excluded_datasets = settings['excluded_datasets']
    excluded_tables = settings['excluded_tables']
    print('project_ids: ' + project_ids)
    print('excluded_datasets: ' + excluded_datasets)
    print('excluded_tables: ' + excluded_tables)
    log_ref = self.db.collection('logs')
    # list datasets and tables for chosen projects
    for project in project_ids.split(','):
        project_id = project.strip()
        bq_client = bigquery.Client(project=project_id)
        datasets = list(bq_client.list_datasets())
        total_tags = 0
        for dataset in datasets:
            print("dataset: " + dataset.dataset_id)
            # NOTE(review): 'in' on a comma-separated string is a substring
            # test, so a dataset name that is a prefix of an excluded one
            # would also be skipped — confirm this is intended.
            if project_id + "." + dataset.dataset_id in excluded_datasets:
                print('skipping ' + project_id + "." + dataset.dataset_id)
                continue
            qualified_dataset = project_id + "." + dataset.dataset_id
            # Tag totals are per-dataset; reset for each one.
            total_tags = 0
            table_list = []
            tables = list(bq_client.list_tables(dataset.dataset_id))
            for table in tables:
                print("full_table_id: " + str(table.full_table_id))
                # full_table_id is "project:dataset.table"; rewrite it into the
                # resource path used by the logs collection.
                table_path_full = table.full_table_id.replace(':', '/datasets/').replace('.', '/tables/')
                table_path_short = table.full_table_id.replace(':', '.')
                table_name = table_path_full.split('/')[4]
                print('table_path_full: ' + table_path_full)
                print('table_path_short: ' + table_path_short)
                print('table_name: ' + table_name)
                # Same substring-membership caveat as for datasets above.
                if table_path_short in project_id + '.' + excluded_tables:
                    print('skipping ' + table_path_short)
                    continue
                # Count TAG_CREATED log entries recorded for this table.
                query = log_ref.where('res', '==', table_path_full).where('dc_op', '==', 'TAG_CREATED')
                results = query.stream()
                tag_count = len(list(results))
                total_tags = total_tags + tag_count
                print("tag_count = " + str(tag_count))
                print("total_tags = " + str(total_tags))
                # add the table name and tag count to a list
                table_tag = (table_name, tag_count)
                table_list.append(table_tag)
            # add record to summary report
            summary_record = (qualified_dataset, total_tags)
            summary_report.append(summary_record)
            detailed_record = {qualified_dataset: table_list}
            detailed_report.append(detailed_record)
    return summary_report, detailed_report
def run_propagation_job(self, source_project_ids, dest_project_ids, excluded_datasets):
    """Propagate tags from source tables to the BigQuery views built on them.

    For every view in the destination projects (minus excluded datasets),
    parses the view definition, collects the tag configs on each source
    table, reconciles overlaps/conflicts, and creates or updates the
    propagated tags in Data Catalog.

    Args are comma-separated strings of project ids / dataset names.
    """
    #print('*** enter run_propagation_job ***')
    #print("source_project_ids: " + source_project_ids)
    #print("dest_project_ids: " + dest_project_ids)
    #print("excluded_datasets: " + excluded_datasets)
    for dest_project in dest_project_ids.split(','):
        dest_project_id = dest_project.strip()
        print("dest_project_id: " + dest_project_id)
        bq_client = bigquery.Client(project=dest_project_id)
        datasets = list(bq_client.list_datasets())
        for dataset in datasets:
            print("dataset_id: " + dataset.dataset_id)
            # filter out excluded datasets
            # NOTE(review): substring membership on a comma-separated string —
            # dataset names that prefix an excluded entry also match.
            if dest_project_id + "." + dataset.dataset_id in excluded_datasets:
                print("excluding " + dest_project_id + "." + dataset.dataset_id + " from propagation")
                continue
            # Enumerate the views of this dataset via INFORMATION_SCHEMA.
            query_str = """
            select table_name as view_name, view_definition
            from `""" + dest_project_id + "." + dataset.dataset_id + "." + """INFORMATION_SCHEMA.VIEWS`
            """
            query_job = bq_client.query(query_str)
            for view in query_job:
                view_name = view["view_name"]
                view_def = view["view_definition"]
                print('view_name: ' + view_name)
                print('view_def: ' + view_def)
                view_res = dest_project_id + '/datasets/' + dataset.dataset_id + '/views/' + view_name
                print('view_res: ' + view_res)
                # Source tables referenced by the view's SQL.
                source_tables = self.source_tables_from_view(view_def)
                print('source_tables: ' + str(source_tables))
                view_source_tables_configs = [] # contains list of all source tag configs per view
                for source in source_tables:
                    # Source references may be "project.dataset.table" or
                    # "dataset.table" (implicitly the destination project).
                    source_split = source.split(".")
                    if len(source_split) == 3:
                        source_project = source_split[0]
                        source_dataset = source_split[1]
                        source_table = source_split[2]
                    elif len(source_split) == 2:
                        source_project = dest_project_id
                        source_dataset = source_split[0]
                        source_table = source_split[1]
                    else:
                        print("Opps, something went wrong, couldn't parse view definition")
                        continue
                    source_res = source_project + '/datasets/' + source_dataset + '/tables/' + source_table
                    print('source_res: ' + source_res)
                    tag_configs = self.read_tag_configs_on_res(source_res)
                    if len(tag_configs) == 0:
                        # Untagged source: log it and move on.
                        self.write_unpropagated_log_entry(source_res, view_res, 'PROPAGATED', 'NONE', '')
                    else:
                        self.add_source_to_configs(tag_configs, source_res)
                        view_source_tables_configs.append(tag_configs)
                # end for source in source tables
                if len(view_source_tables_configs) == 0:
                    # source tables have zero tags, and we have already logged these events, move on to next view
                    continue
                else:
                    # go through all tags attached to this view's source tables and triage them
                    # returns list of configs which are tagged as CONFLICT or AGREE
                    reconciled_configs = self.triage_tag_configs(view_res, view_source_tables_configs)
                    for config in reconciled_configs:
                        config_status = config['config_status']
                        source_tag_uuid = config['tag_uuid']
                        source_res = config['source_res']
                        # Normalize to a list: conflicting configs carry several sources.
                        if isinstance(source_res, list) == False:
                            source_res = [source_res]
                        tag_type = config['tag_type']
                        fields = config['fields']
                        included_uris = config['included_uris']
                        template_uuid = config['template_uuid']
                        print('source_res: ' + str(source_res))
                        print('fields: ' + str(fields))
                        # check if tag has been forked, we don't want to override it, if it has
                        if self.is_forked_tag(template_uuid, source_res, view_res):
                            continue
                        template_config = self.read_template_config(template_uuid)
                        template_id = template_config['template_id']
                        project_id = template_config['project_id']
                        region = template_config['region']
                        dcu = dc.DataCatalogUtils(template_id, project_id, region)
                        # parse the included_uris field, matching the source_res against it.
                        # extract table-level and column-level tags from the included_uris field
                        columns = self.extract_tagged_columns(source_res, view_res, included_uris)
                        #print('columns: ' + str(columns))
                        # create or update propagated_config
                        view_tag_uuid = self.create_or_update_propagated_config(source_tag_uuid, source_res, view_res, config_status, columns, view_def, \
                            tag_type, fields, template_uuid)
                        if config_status == 'CONFLICT':
                            # Conflicting values: record, but do not write a tag.
                            self.write_unpropagated_log_entry(source_res, view_res, 'PROPAGATED', config_status, template_uuid)
                        elif config_status == 'PROPAGATED':
                            if tag_type == "STATIC":
                                dcu.create_update_static_propagated_tag('PROPAGATED', source_res, view_res, columns, fields, source_tag_uuid, view_tag_uuid,\
                                    template_uuid)
                            if tag_type == "DYNAMIC":
                                dcu.create_update_dynamic_propagated_tag('PROPAGATED', source_res, view_res, columns, fields, source_tag_uuid, view_tag_uuid,\
                                    template_uuid)
def add_source_to_configs(self, tag_configs, source_res):
    """Stamp each tag config with the source resource it came from (in place)."""
    for config in tag_configs:
        config['source_res'] = source_res
def triage_tag_configs(self, view_res, source_tables_tag_configs):
    """Reconcile the tag configs of all of a view's source tables.

    Configs sharing a template across multiple source tables are diffed:
    matching values become a single 'PROPAGATED' config, differing values a
    'CONFLICT' config. Unique-template configs pass through as 'PROPAGATED'.

    Returns:
        List of configs annotated with a 'config_status' key.
    """
    #print('enter triage_tag_configs')
    #print('view_res: ' + view_res)
    #print('source_tables_tag_configs: ' + str(source_tables_tag_configs))
    reconciled_tags = [] # tracks configs which conflict or/and agree
    overlapping_tags = [] # tracks configs which overlap and still need to be reconciled
    template_tag_mapping = {} # key == tag_template_uuid, val == [tag_uuid]
    for source_table_tag_configs in source_tables_tag_configs:
        for tag_config in source_table_tag_configs:
            print('tag_config: ' + str(tag_config))
            template_uuid = tag_config['template_uuid']
            tag_uuid = tag_config['tag_uuid']
            if template_uuid in template_tag_mapping:
                # Second (or later) config for the same template: move any
                # previously reconciled configs for this template into the
                # overlap set, then add this one.
                tag_uuid_list = template_tag_mapping[template_uuid]
                tag_uuid_list.append(tag_uuid)
                reconciled_tags, overlapping_tags = self.swap_elements(template_uuid, reconciled_tags, overlapping_tags)
                tag_config['config_status'] = 'OVERLAP'
                overlapping_tags.append(tag_config)
            else:
                # First config seen for this template: tentatively propagated.
                tag_uuid_list = []
                tag_uuid_list.append(tag_uuid)
                template_tag_mapping[template_uuid] = tag_uuid_list
                tag_config['config_status'] = 'PROPAGATED'
                reconciled_tags.append(tag_config)
    if len(overlapping_tags) == 0:
        print('we have no overlapping tags')
        print('reconciled_tags: ' + str(reconciled_tags))
        return reconciled_tags
    # we have some overlapping tags
    tag_uuid_lists = template_tag_mapping.values()
    for tag_uuid_list in tag_uuid_lists:
        if len(tag_uuid_list) > 1:
            # Diff the overlapping configs field-by-field.
            agreeing_tag, conflicting_tag = self.run_diff(tag_uuid_list, overlapping_tags)
            if len(conflicting_tag) > 0:
                print('we have a conflicting tag')
                print('conflictings_tag: ' + str(conflicting_tag))
                conflicting_tag['config_status'] = 'CONFLICT'
                conflicting_tag['tag_uuid'] = tag_uuid_list
                reconciled_tags.append(conflicting_tag)
            if len(agreeing_tag) > 0:
                print('we have an agreeing tag')
                print('agreeing_tag: ' + str(agreeing_tag))
                agreeing_tag['config_status'] = 'PROPAGATED'
                agreeing_tag['tag_uuid'] = tag_uuid_list
                reconciled_tags.append(agreeing_tag)
    print('reconciled_tags: ' + str(reconciled_tags))
    return reconciled_tags
def extract_source_res_list(self, tag_configs):
    """Collect the 'source_res' value of every tag config into a list."""
    return [tag_config['source_res'] for tag_config in tag_configs]
def swap_elements(self, template_uuid, reconciled_tags, overlapping_tags):
    """Move configs for `template_uuid` from the reconciled list into the
    overlapping list (both lists are mutated and returned).

    NOTE(review): the final loop marks remaining matches as 'OVERLAP', but
    every match was just removed above, so it looks like a no-op — confirm.
    """
    purge_tag_configs = []
    # reconciled_tags and overlapping_tags may both contain more than one config
    for tag_config in reconciled_tags:
        # 'in' here is a substring check on the uuid string.
        if template_uuid in tag_config['template_uuid']:
            overlapping_tags.append(tag_config)
            purge_tag_configs.append(tag_config)
    # Remove after iteration to avoid mutating the list while looping over it.
    for purge_config in purge_tag_configs:
        reconciled_tags.remove(purge_config)
    for tag_config in reconciled_tags:
        if template_uuid in tag_config['template_uuid']:
            tag_config['config_status'] = 'OVERLAP'
    return reconciled_tags, overlapping_tags
def run_diff(self, tag_uuid_list, overlapping_tags):
    """Compare overlapping tag configs field-by-field against their template.

    All configs in `overlapping_tags` are assumed to share one template.
    Builds a merged field list: fields whose values match across all configs
    are marked AGREE; differing values are joined and marked CONFLICT.

    Returns:
        (agreeing_tag, conflicting_tag): exactly one is the merged config
        (based on overlapping_tags[0], mutated in place); the other is [].
    """
    #print('enter run_diff')
    #print('tag_uuid_list: ' + str(tag_uuid_list))
    #print('overlapping_tags: ' + str(overlapping_tags))
    # get template fields
    template_uuid = overlapping_tags[0]['template_uuid']
    template_config = self.read_template_config(template_uuid)
    dcu = dc.DataCatalogUtils(template_config['template_id'], template_config['project_id'], template_config['region'])
    template_fields = dcu.get_template()
    status = constants.TAGS_AGREE
    tag_type = ""
    output_fields = []
    # for each template field
    for field in template_fields:
        field_id = field['field_id']
        field_type = field['field_type']
        field_values = []
        # Gather this field's value from every overlapping config.
        for tag in overlapping_tags:
            for tagged_field in tag['fields']:
                if field_id in tagged_field['field_id']:
                    # Dynamic tags carry a query expression, static tags a value.
                    if tag['tag_type'] in 'DYNAMIC':
                        tag_type = constants.DYNAMIC_TAG
                        field_values.append(tagged_field['query_expression'])
                    if tag['tag_type'] in 'STATIC':
                        field_values.append(tagged_field['field_value'])
                        tag_type = constants.STATIC_TAG
                    #print('field_values: ' + str(field_values))
                    continue
        # we've collected all the values for a given field and added them to field_values
        # values assigned to a field are all equal if the set has one element
        if len(set(field_values)) == 1:
            # field values all match
            if tag_type == constants.DYNAMIC_TAG:
                matching_field = {'field_id': field_id, 'field_type': field_type, 'status': 'AGREE', 'query_expression': field_values[0]}
            if tag_type == constants.STATIC_TAG:
                matching_field = {'field_id': field_id, 'field_type': field_type, 'status': 'AGREE', 'field_value': field_values[0]}
            output_fields.append(matching_field)
        else:
            if len(field_values) > 0:
                # Differing values: join them so the conflict is visible.
                if tag_type == constants.DYNAMIC_TAG:
                    conflicting_field = {'field_id': field_id, 'field_type': field_type, 'status': 'CONFLICT', 'query_expression': ', '.join(field_values)}
                if tag_type == constants.STATIC_TAG:
                    conflicting_field = {'field_id': field_id, 'field_type': field_type, 'status': 'CONFLICT', 'field_value': ', '.join(field_values)}
                output_fields.append(conflicting_field)
                status = constants.TAGS_CONFLICT
    #print('output_fields: ' + str(output_fields))
    agreeing_tag = [] # output
    conflicting_tag = [] # output
    # Fold every source resource and the merged fields into the first config.
    source_res_list = self.extract_source_res_list(overlapping_tags)
    overlapping_tags[0]['source_res'] = source_res_list
    overlapping_tags[0]['fields'] = output_fields
    if status == constants.TAGS_CONFLICT:
        conflicting_tag = overlapping_tags[0]
    if status == constants.TAGS_AGREE:
        agreeing_tag = overlapping_tags[0]
    return agreeing_tag, conflicting_tag
def extract_tagged_columns(self, source_res_list, view_res, included_uris):
    """Find which columns of the view are covered by the tag's included_uris.

    For each source resource, any included URI longer than the source's
    full path is assumed to end in a column name; the column is kept when
    it actually exists on the view.

    Returns:
        List of column names present on the view.
    """
    #print('enter extract_tagged_columns')
    #print('source_res_list: ' + str(source_res_list))
    #print('view_res: ' + view_res)
    #print('included: ' + included_uris)
    # view_res is "project/datasets/<dataset>/views/<view>".
    view_res_split = view_res.split("/")
    project = view_res_split[0]
    dataset = view_res_split[2]
    view = view_res.split("/")[4]
    tagged_columns = []
    for source_res in source_res_list:
        print('source_res: ' + source_res)
        # Rewrite the internal resource path into the Data Catalog URI form.
        source_res_full = "bigquery/project/" + source_res.replace('datasets', 'dataset').replace('tables/', '')
        print('source_res_full: ' + source_res_full)
        included_uri_split = included_uris.split(",")
        for included_uri in included_uri_split:
            uri = included_uri.strip()
            print('uri: ' + uri)
            # we may have a column
            # NOTE(review): a length comparison alone assumes the URI refers
            # to this source; URIs for other tables could slip through.
            if len(uri) > len(source_res_full):
                start_index = uri.rfind("/")
                column = uri[start_index+1:]
                exists = self.column_exists(project, dataset, view, column)
                if exists:
                    tagged_columns.append(column)
    return tagged_columns
def column_exists(self, project, dataset, view, column):
    """Return True iff `column` exists on `view` in the given dataset.

    Security/robustness fix: the view and column values are now bound as
    BigQuery query parameters instead of spliced into the SQL string
    (avoids quoting bugs and SQL injection). Project/dataset remain in the
    FROM clause because identifiers cannot be parameterized.

    Args:
        project: Project containing the dataset.
        dataset: Dataset containing the view.
        view: View (table) name to check.
        column: Column name to look up.
    """
    query_str = """
        select count(*) as count
        from `{}.{}.INFORMATION_SCHEMA.COLUMN_FIELD_PATHS`
        where table_name = @view_name
        and column_name = @column_name
        """.format(project, dataset)
    job_config = bigquery.QueryJobConfig(query_parameters=[
        bigquery.ScalarQueryParameter('view_name', 'STRING', view),
        bigquery.ScalarQueryParameter('column_name', 'STRING', column),
    ])
    #print("query_str: " + query_str)
    bq_client = bigquery.Client(project=project)
    query_job = bq_client.query(query_str, job_config=job_config)
    # Original semantics preserved: only an exact count of 1 counts as present.
    column_exists = False
    for row in query_job:
        count = row["count"]
        if count == 1:
            column_exists = True
    return column_exists
def is_forked_tag(self, template_uuid, source_res, view_res):
    """Return True if a forked propagated config already exists for this
    template/source/view triple (forked tags must not be overwritten)."""
    query = (self.db.collection('propagated_config')
             .where('template_uuid', '==', template_uuid)
             .where('source_res', '==', source_res)
             .where('view_res', '==', view_res)
             .where('config_status', 'in', ['PROPAGATED AND FORKED', 'CONFLICT AND FORKED']))
    for doc in query.stream():
        if doc.exists:
            return True
    return False
def create_or_update_propagated_config(self, source_tag_uuid, source_res, view_res, config_status, columns, view_def, tag_type, fields,\
    template_uuid):
    """Create or refresh the propagated_config document for a view tag.

    Looks for an existing config matching template/view/source (and, for
    column-level tags, the same column set). Updates it when found
    (unless FORKED), otherwise creates a new document.

    Returns:
        The view tag uuid of the matched or newly created config.
    """
    #print('enter create_or_update_propagated_config')
    # check to see if we have an active config
    tag_ref = self.db.collection('propagated_config')
    query = tag_ref.where('template_uuid', '==', template_uuid).where('view_res', '==', view_res).where('source_res', 'array_contains_any', source_res)
    results = query.stream()
    doc_exists = False
    for doc in results:
        if doc.exists:
            prop_config = doc.to_dict()
            # Column-level configs only match when the column sets are equal.
            if len(columns) > 0:
                if prop_config['cols'] == columns:
                    doc_exists = True
            else:
                doc_exists = True
        # NOTE(review): this break abandons the scan after the first
        # non-matching document — later matching docs are never reached.
        if doc_exists == False:
            break
        view_tag_uuid = doc.id
        print('Config already exists. Tag_uuid: ' + str(view_tag_uuid))
        # Forked configs are user-modified; never overwrite them.
        if prop_config['config_status'] == 'FORKED':
            return view_tag_uuid
        if prop_config['fields'] != fields:
            self.db.collection('propagated_config').document(view_tag_uuid).update({
                'config_status' : config_status,
                'fields' : fields,
                'last_modified_time' : datetime.datetime.utcnow()
            })
            print('Updated propagated_config.')
        else:
            self.db.collection('propagated_config').document(view_tag_uuid).update({
                'last_modified_time' : datetime.datetime.utcnow()
            })
            print('Propagated config fields are equal, updated last_modified_time only.')
    if doc_exists == False:
        # No matching config: mint a new uuid and create the document.
        # NOTE(review): if the query matched docs but none satisfied the
        # column check, view_tag_uuid may be unbound at the return below.
        view_tag_uuid = uuid.uuid1().hex
        prop_config = self.db.collection('propagated_config')
        doc_ref = prop_config.document(view_tag_uuid)
        doc = {
            'view_tag_uuid': view_tag_uuid,
            'source_tag_uuid': source_tag_uuid,
            'tag_type': tag_type,
            'config_status': config_status,
            'creation_time': datetime.datetime.utcnow(),
            'fields': fields,
            'source_res': source_res,
            'view_res': view_res,
            'view_def': view_def,
            'template_uuid': template_uuid}
        # Column list is only stored for column-level tags.
        if len(columns) > 0:
            doc['cols'] = columns
        doc_ref.set(doc)
        print('Created new propagated tag config.')
    return view_tag_uuid
def write_propagated_log_entry(self, config_status, dc_op, res_type, source_res, view_res, column, tag_type, source_tag_uuid, view_tag_uuid, tag_id, template_uuid):
    """Append an audit record for a propagated-tag operation to 'logs'."""
    entry = {
        'ts': datetime.datetime.utcnow(),
        'dc_op': dc_op,
        'res_type': res_type,
        'config_type': 'PROPAGATED',
        'config_status': config_status,
        'tag_type': tag_type,
        'source_res': source_res,
        'view_res': view_res,
    }
    # The column is optional; only record it when one was supplied.
    if len(column) > 0:
        entry['col'] = column
    entry['source_tag_uuid'] = source_tag_uuid
    entry['view_tag_uuid'] = view_tag_uuid
    entry['dc_tag_id'] = tag_id
    entry['template_uuid'] = template_uuid
    self.db.collection('logs').add(entry)
    print('Wrote log entry.')
def write_unpropagated_log_entry(self, source_res_list, view_res, config_type, config_status, template_uuid):
    """Log a source/view pair whose tags could not be propagated."""
    entry = {
        'source_res': source_res_list,
        'view_res': view_res,
        'config_type': config_type,
        'config_status': config_status,
    }
    # template_uuid is optional; omit the field when blank.
    if template_uuid != "":
        entry['template_uuid'] = template_uuid
    entry['ts'] = datetime.datetime.utcnow()
    self.db.collection('logs').add(entry)
    print('Wrote log entry.')
def generate_propagation_report(self):
    """Build the rows for the propagation report.

    Returns (report, last_run): 'report' holds every propagated_config doc
    (with prettified resource paths and the template_id resolved), plus
    de-duplicated source/view pairs from the last hour of unpropagated
    ('NONE') log entries; 'last_run' is the first activity timestamp seen,
    or None when nothing was found.
    """
    #print("*** enter generate_propagation_report ***")
    report = []
    last_run = None
    source_view_set = set()
    prop_configs = self.db.collection('propagated_config').stream()
    for config in prop_configs:
        prop_entry = config.to_dict()
        print("prop_entry: " + str(prop_entry))
        # Strip collection markers so the report shows compact paths.
        view_res_pretty = prop_entry['view_res'].replace('/datasets', '').replace('/views', '')
        prop_entry['view_res'] = view_res_pretty
        source_res_list = prop_entry['source_res']
        source_res_pretty = source_res_list[0]
        # Multiple source tables: show the first followed by an ellipsis.
        if len(source_res_list) > 1:
            source_res_pretty = source_res_pretty + '...'
        source_res_pretty = source_res_pretty.replace('/datasets', '').replace('/tables', '')
        prop_entry['source_res'] = source_res_pretty
        template_config = self.read_template_config(prop_entry['template_uuid'])
        prop_entry['template_id'] = template_config['template_id']
        # First doc seen supplies last_run (modification time preferred).
        if last_run is None:
            if 'last_modified_time' in prop_entry:
                last_run = prop_entry['last_modified_time']
            else:
                last_run = prop_entry['creation_time']
        #print('last_run: ' + str(last_run))
        report.append(prop_entry)
    # Also surface pairs that failed to propagate within the last hour.
    last_hour_ts = datetime.datetime.utcnow() - datetime.timedelta(hours = 1)
    logs = self.db.collection('logs').where('config_type', '==', 'PROPAGATED').where('config_status', '==', 'NONE').where('ts', '>=',\
        last_hour_ts).order_by('ts', direction=firestore.Query.DESCENDING).stream()
    for log in logs:
        # NOTE(review): 'has_missing' is never read — dead local.
        has_missing = True
        log_entry = log.to_dict()
        view_res_pretty = log_entry['view_res'].replace('/datasets', '').replace('/tables', '')
        source_res_pretty = log_entry['source_res'].replace('/datasets', '').replace('/tables', '')
        source_view_pair = source_res_pretty + '&' + view_res_pretty
        # De-duplicate the source/view pair across log entries.
        if source_view_pair not in source_view_set:
            source_view_set.add(source_view_pair)
            report_entry = {}
            report_entry['config_status'] = log_entry['config_status']
            report_entry['source_res'] = source_res_pretty
            report_entry['view_res'] = view_res_pretty
            report.append(report_entry)
            if last_run is None:
                last_run = log_entry['ts']
            #print('last_run: ' + str(last_run))
    return report, last_run
def read_propagated_configs_on_res(self, source_res, view_res, template_id):
    """Look up the latest propagated config linking source_res -> view_res.

    NOTE(review): this method appears broken as written — see the inline
    notes; it cannot run to completion in its current form.
    """
    tag_config_results = []
    view_res_full = view_res.replace('.', '/datasets/', 1).replace('.', '/tables/', 1)
    print('view_res_full: ' + view_res_full)
    prop_config_ref = self.db.collection('propagated_config')
    tag_config_ref = self.db.collection('tag_config')
    log_ref = self.db.collection('logs')
    # NOTE(review): 'template_uuid' and 'source_res_full' are never defined in
    # this method (the parameters are 'template_id' and 'source_res'), so this
    # query raises NameError at runtime. It likely needs a template-id->uuid
    # lookup plus a source_res conversion like view_res_full above — confirm.
    query1 = log_ref.where('template_uuid', '==', template_uuid).where('source_res', '==', source_res_full).where('view_res', '==',\
        view_res_full).order_by('ts', direction=firestore.Query.DESCENDING).limit(1)
    prop_results = query1.stream()
    for prop_record in prop_results:
        print('found prop log id ' + prop_record.id)
        record = prop_record.to_dict()
        view_tag_uuid = record['tag_uuid']
        # NOTE(review): document() returns a DocumentReference, which has no
        # '.exists' attribute — a .get() call is presumably missing here.
        view_config = prop_config_ref.document(view_tag_uuid)
        if view_config.exists:
            view_record = view_config.to_dict()
            # get tag_config for parent
            source_config = self.read_tag_configs_on_res(source_res)
    # NOTE(review): if the query returns nothing, 'view_config' and
    # 'source_config' are unbound here.
    return view_config, source_config, template_id
def source_tables_from_view(self, view_def):
    """Ask the zeta SQL-parsing service which tables a view definition reads.

    Posts {"sql": view_def} to self.zeta and joins each returned path tuple
    with dots into a dotted table name.
    """
    response = requests.post(self.zeta, json={"sql": view_def})
    print('zeta response: ' + str(response))
    return ['.'.join(parts) for parts in response.json()]
def read_propagated_config(self, tag_uuid):
    """Fetch a propagated_config document by uuid; returns {} when absent."""
    doc = self.db.collection('propagated_config').document(tag_uuid).get()
    if not doc.exists:
        return {}
    propagated_config = doc.to_dict()
    print("propagated_config: " + str(propagated_config))
    return propagated_config
@firestore.transactional
def update_in_transaction(transaction, config_ref, config_status, fields, refresh_frequency):
    """Transactionally update a propagated_config doc's fields/status.

    Defined without 'self' — presumably relying on the @firestore.transactional
    wrapper not binding as an instance method, so 'transaction' really is the
    first argument when called as self.update_in_transaction(...); confirm
    against the Firestore client's decorator semantics.
    """
    # Reading inside the transaction registers the doc so the commit can
    # detect concurrent modification.
    snapshot = config_ref.get(transaction=transaction)
    # Build the update once; refresh_frequency is only written when supplied
    # (idiom fix: 'is not None' instead of '!= None').
    updates = {
        'fields': fields,
        'config_status': config_status
    }
    if refresh_frequency is not None:
        updates['refresh_frequency'] = refresh_frequency
    transaction.update(config_ref, updates)
def fork_propagated_tag(self, tag_uuid, config_status, fields, refresh_frequency):
    """Apply a user's fork edits to a propagated config and return the doc."""
    config_ref = self.db.collection('propagated_config').document(tag_uuid)
    # Run the update inside a Firestore transaction, then re-read the doc.
    self.update_in_transaction(self.db.transaction(), config_ref, config_status, fields, refresh_frequency)
    return config_ref.get().to_dict()
def read_tag_configs_on_res(self, res):
    """Return the manual tag configs attached to a table resource.

    Scans MANUAL log entries for the resource (newest first) and resolves
    each distinct template's most recent tag config, annotated with its
    template_id.
    """
    table_path_full = res.replace('.', '/datasets/', 1).replace('.', '/tables/', 1)
    log_ref = self.db.collection('logs')
    query = log_ref.where('res', '==', table_path_full).where('config_type', '==', 'MANUAL').order_by('ts', direction=firestore.Query.DESCENDING)
    seen_templates = set()
    configs = []
    for log_doc in query.stream():
        entry = log_doc.to_dict()
        template_uuid = entry['template_uuid']
        # Newest-first ordering means the first hit per template wins.
        if template_uuid in seen_templates:
            continue
        seen_templates.add(template_uuid)
        tag_config = self.read_tag_config(entry['tag_uuid'])
        template_config = self.read_template_config(template_uuid)
        tag_config['template_id'] = template_config['template_id']
        configs.append(tag_config)
    return configs
def read_template_config(self, template_uuid):
    """Fetch a tag_template document by uuid; returns {} when it is missing.

    BUG FIX: the original initialized a stray 'tag_config' variable and then
    returned 'template_config', which raised NameError whenever the document
    did not exist.
    """
    template_config = {}
    template_ref = self.db.collection('tag_template').document(template_uuid)
    doc = template_ref.get()
    if doc.exists:
        template_config = doc.to_dict()
    return template_config
def read_tag_template(self, template_id, project_id, region):
    """Return (exists, uuid) for the template identified by id/project/region."""
    template_query = self.db.collection('tag_template')\
        .where('template_id', '==', template_id)\
        .where('project_id', '==', project_id)\
        .where('region', '==', region)
    matches = template_query.get()
    # There should be either a single matching template or none at all.
    if len(matches) == 1 and matches[0].exists:
        print('Tag Template exists. Template uuid: ' + str(matches[0].id))
        return (True, matches[0].id)
    return (False, "")
def write_tag_template(self, template_id, project_id, region):
    """Return the uuid of the template, creating its doc when missing."""
    exists, template_uuid = self.read_tag_template(template_id, project_id, region)
    if not exists:
        print('Tag Template doesn\'t exist. Creating new template')
        template_uuid = uuid.uuid1().hex
        self.db.collection('tag_template').document(template_uuid).set({
            'template_uuid': template_uuid,
            'template_id': template_id,
            'project_id': project_id,
            'region': region
        })
    return template_uuid
def write_static_tag(self, config_status, fields, included_uris, excluded_uris, template_uuid, tag_export):
    """Write a STATIC tag config doc, deactivating any identical existing one.

    Returns the uuid of the newly created config document.
    """
    # included_uris is matched by md5 hash rather than by raw value.
    included_uris_hash = hashlib.md5(included_uris.encode()).hexdigest()
    collection = self.db.collection('tag_config')
    duplicates = collection.where('template_uuid', '==', template_uuid)\
        .where('included_uris_hash', '==', included_uris_hash)\
        .where('tag_type', '==', 'STATIC')\
        .where('config_status', '==', config_status).get()
    # Any pre-existing config with the same key is demoted to INACTIVE.
    for duplicate in duplicates:
        if not duplicate.exists:
            continue
        print('Tag config already exists. Tag_uuid: ' + str(duplicate.id))
        collection.document(duplicate.id).update({
            'config_status' : "INACTIVE"
        })
        print('Updated status to INACTIVE.')
    tag_uuid = uuid.uuid1().hex
    collection.document(tag_uuid).set({
        'tag_uuid': tag_uuid,
        'tag_type': 'STATIC',
        'config_status': config_status,
        'creation_time': datetime.datetime.utcnow(),
        'fields': fields,
        'included_uris': included_uris,
        'included_uris_hash': included_uris_hash,
        'excluded_uris': excluded_uris,
        'template_uuid': template_uuid,
        'tag_export': tag_export
    })
    print('Created new static tag config.')
    return tag_uuid
def write_dynamic_tag(self, config_status, fields, included_uris, excluded_uris, template_uuid, refresh_mode, refresh_frequency, refresh_unit, tag_export):
    """Write a DYNAMIC tag config doc, deactivating any identical existing one.

    Returns (tag_uuid, included_uris_hash). AUTO mode schedules the next run;
    any other refresh_mode is treated as on-demand (refresh_frequency 0).
    """
    included_uris_hash = hashlib.md5(included_uris.encode()).hexdigest()
    # Demote any pre-existing config with the same key to INACTIVE.
    tag_ref = self.db.collection('tag_config')
    query = tag_ref.where('template_uuid', '==', template_uuid).where('included_uris_hash', '==', included_uris_hash).where('tag_type', '==', 'DYNAMIC').where('config_status', '==', config_status)
    matches = query.get()
    for match in matches:
        if match.exists:
            tag_uuid_match = match.id
            print('Tag config already exists. Tag_uuid: ' + str(tag_uuid_match))
            self.db.collection('tag_config').document(tag_uuid_match).update({
                'config_status' : "INACTIVE"
            })
            print('Updated status to INACTIVE.')
    tag_uuid = uuid.uuid1().hex
    doc_ref = self.db.collection('tag_config').document(tag_uuid)
    # Fields shared by both refresh modes.
    doc = {
        'tag_uuid': tag_uuid,
        'tag_type': 'DYNAMIC',
        'config_status': config_status,
        'creation_time': datetime.datetime.utcnow(),
        'fields': fields,
        'included_uris': included_uris,
        'included_uris_hash': included_uris_hash,
        'excluded_uris': excluded_uris,
        'template_uuid': template_uuid,
        'refresh_mode': refresh_mode,
        'tag_export': tag_export,
        'version': 1
    }
    if refresh_mode == 'AUTO':
        # Non-numeric frequency falls back to a 24-unit interval.
        delta = int(refresh_frequency) if refresh_frequency.isdigit() else 24
        if refresh_unit == 'days':
            next_run = datetime.datetime.utcnow() + datetime.timedelta(days=delta)
        else:
            # BUG FIX: the original left next_run unbound (NameError) for any
            # refresh_unit other than 'hours'/'days'; default to hours.
            next_run = datetime.datetime.utcnow() + datetime.timedelta(hours=delta)
        doc['refresh_frequency'] = delta
        doc['refresh_unit'] = refresh_unit
        doc['scheduling_status'] = 'READY'
        doc['next_run'] = next_run
    else:
        # ON_DEMAND: no scheduling fields, frequency recorded as 0.
        doc['refresh_frequency'] = 0
    doc_ref.set(doc)
    print('Created new dynamic tag config.')
    return tag_uuid, included_uris_hash
def write_log_entry(self, dc_op, resource_type, resource, column, tag_type, tag_uuid, tag_id, template_uuid):
    """Append an audit record for a manual tag operation to 'logs'."""
    entry = {
        'ts': datetime.datetime.utcnow(),
        'dc_op': dc_op,
        'res_type': resource_type,
        'config_type': 'MANUAL',
        'res': resource,
    }
    # The column is optional; only record it when one was supplied.
    if len(column) > 0:
        entry['col'] = column
    entry['tag_type'] = tag_type
    entry['tag_uuid'] = tag_uuid
    entry['dc_tag_id'] = tag_id
    entry['template_uuid'] = template_uuid
    self.db.collection('logs').add(entry)
    print('Wrote log entry.')
def write_error_entry(self, msg):
    """Record an error message with a UTC timestamp in 'errors'."""
    self.db.collection('errors').add({
        'ts': datetime.datetime.utcnow(),
        'msg': msg,
    })
    print('Wrote error entry.')
def read_tag_configs(self, template_id, project_id, region):
    """Return every ACTIVE tag config belonging to the given template."""
    _exists, template_uuid = self.read_tag_template(template_id, project_id, region)
    query = self.db.collection('tag_config')\
        .where('template_uuid', '==', template_uuid)\
        .where('config_status', '==', 'ACTIVE')
    return [doc.to_dict() for doc in query.stream()]
def read_tag_config(self, tag_uuid):
    """Fetch a tag_config document by uuid; returns {} when it does not exist."""
    doc = self.db.collection('tag_config').document(tag_uuid).get()
    return doc.to_dict() if doc.exists else {}
def read_propagated_tag_config(self, tag_uuid):
    """Fetch a propagated_config document by uuid; returns {} when absent."""
    doc = self.db.collection('propagated_config').document(tag_uuid).get()
    return doc.to_dict() if doc.exists else {}
def lookup_tag_config_by_uris(self, template_uuid, included_uris, included_uris_hash):
    """Find an ON-DEMAND tag config by its included_uris or their hash.

    Returns (success, tag_config) — the first matching doc, or (False, {}).
    """
    success = False
    tag_config = {}
    # BUG FIX: the original raised NameError on 'docs' when both lookup keys
    # were None; default to an empty result set.
    docs = []
    tag_ref = self.db.collection('tag_config')
    # NOTE(review): this queries refresh_mode == 'ON-DEMAND' (hyphen) while a
    # comment elsewhere in this file spells it ON_DEMAND — confirm the stored
    # value.
    if included_uris is not None:
        docs = tag_ref.where('template_uuid', '==', template_uuid).where('refresh_mode', '==', 'ON-DEMAND').where('included_uris', '==', included_uris).stream()
    if included_uris_hash is not None:
        docs = tag_ref.where('template_uuid', '==', template_uuid).where('refresh_mode', '==', 'ON-DEMAND').where('included_uris_hash', '==', included_uris_hash).stream()
    # Only the first match is used.
    for doc in docs:
        tag_config = doc.to_dict()
        break
    print('tag_config: ' + str(tag_config))
    if tag_config:
        success = True
    return success, tag_config
def increment_tag_config_version(self, tag_uuid, version):
    # Bump the stored version to the caller-supplied version + 1.
    # NOTE(review): read-modify-write based on a caller-supplied value is racy
    # under concurrent writers — firestore.Increment(1) would be atomic.
    self.db.collection('tag_config').document(tag_uuid).update({
        'version' : version + 1
    })
def update_tag_config(self, old_tag_uuid, tag_type, config_status, fields, included_uris, excluded_uris, template_uuid, \
    refresh_mode, refresh_frequency, refresh_unit, tag_export):
    """Deactivate an existing config and write its replacement.

    Returns the uuid of the new config document.

    Raises:
        ValueError: when tag_type is neither 'STATIC' nor 'DYNAMIC'.
    """
    self.db.collection('tag_config').document(old_tag_uuid).update({
        'config_status' : "INACTIVE"
    })
    if tag_type == 'STATIC':
        new_tag_uuid = self.write_static_tag(config_status, fields, included_uris, excluded_uris, template_uuid, tag_export)
    elif tag_type == 'DYNAMIC':
        # note: we do not need to return the included_uris_hash
        new_tag_uuid, _included_uris_hash = self.write_dynamic_tag(config_status, fields, included_uris, excluded_uris, \
            template_uuid, refresh_mode, refresh_frequency, refresh_unit, tag_export)
    else:
        # BUG FIX: an unknown tag_type previously fell through to a NameError
        # on new_tag_uuid; fail with an explicit error instead.
        raise ValueError('unknown tag_type: ' + str(tag_type))
    return new_tag_uuid
if __name__ == '__main__':
    # Smoke test: create the utils object and register a template.
    # NOTE(review): 'write_template' is not defined in the visible portion of
    # the class — only 'write_tag_template', which takes 3 arguments, is.
    # Confirm this call matches a real method (name and arity).
    te = TagEngineUtils();
    te.write_template('quality_template', 'tag-engine-283315', 'us', 'ACTIVE')
<reponame>maciej-plonka/tournament-app<gh_stars>0
import React from 'react';
import styles from '../styles/PageCard.module.css';
/** Props for PageCard: the content rendered inside the card container. */
interface PageCardProps {
    children: React.ReactNode
}
export function PageCard(props: PageCardProps) {
return (
<div className={styles.pageCard}>
{props.children}
</div>
)
}
|
<reponame>zulhilmizainuddin/partial-download
import events = require('events');
import request = require('request');
/**
 * A partial-download operation. Presumably `start` kicks off downloading
 * `url` across `numOfConnections` parallel connections and reports progress
 * through the returned EventEmitter — confirm the emitted event names with
 * the implementing class.
 */
export interface Operation {
    start(url: string, contentLength: number, numOfConnections: number, headers?: request.Headers): events.EventEmitter;
}
|
import crypto from 'crypto';
import { Router } from 'express';
import { PASSWORD_SALT } from '../constants';
import { generateAccessToken } from '../utils';
/**
 * Builds the session router (signin / refresh / signout) bound to the given
 * sequelize instance.
 */
export const createSessionRoute = sequelize => {
  const router = Router();

  // TODO: implement token refresh.
  router.put('/refresh', (req, res, next) => {
    // poyo
  });

  router.post('/signin', (req, res, next) => {
    const SessionDB = sequelize.models.Session;
    const UserDB = sequelize.models.User;

    // Hash the submitted password with the shared salt (sha512, hex digest).
    const hash = crypto.createHash('sha512');
    const email = req.body.email;
    const password = req.body.password;
    hash.update(`${password}${PASSWORD_SALT}`, 'utf8');
    const hashedPassword = hash.digest('hex');

    UserDB.findOne({ where: { email } })
      .then(user => {
        if (!user) {
          throw new Error('USER_NOT_FOUND');
        }
        if (user.hashedPassword !== hashedPassword) {
          throw new Error('USER_PASSWORD_INCORRECT');
        }
        // Single-session policy: drop any existing sessions for this user.
        return SessionDB.destroy({ where: { userId: user.id } })
          .then(() => {
            // BUG FIX: the original referenced `Session` and `moment`, neither
            // of which is imported or defined — a ReferenceError at runtime.
            // Build a plain attributes object and compute the expiry with a
            // native Date (assumes expiredAt is a DATE column — confirm).
            const session = {
              userId: user.id,
              accessToken: generateAccessToken(),
              expiredAt: new Date(Date.now() + 14 * 24 * 60 * 60 * 1000), // 14 days
            };
            return SessionDB.create(session);
          });
      })
      .then(session => {
        res.results = {
          session,
        };
        next();
      })
      .catch(err => {
        next(err);
      });
  });

  // TODO: implement sign-out.
  router.delete('/signout', (req, res, next) => {
    // poyo
  });

  return router;
};
|
import { localeConfig } from '@/config/locale'
import { ConfigProvider } from 'antd'
import enUS from 'antd/es/locale/en_US'
import zhCN from 'antd/es/locale/zh_CN'
import { createBrowserHistory } from 'history'
import moment from 'moment'
import 'moment/locale/zh-cn'
import React, { useEffect } from 'react'
import { IntlProvider } from 'react-intl'
import { BrowserRouter } from 'react-router-dom'
import { useRecoilState } from 'recoil'
import { useGetCurrentUser } from './api'
import './App.less'
import RenderRouter from './routes'
import { userState } from './stores/user'
// NOTE(review): created but never passed to BrowserRouter below — confirm it
// is actually needed (a custom history would require <Router history={...}>).
const history = createBrowserHistory()

/**
 * Application root: syncs the user's locale (recoil state) into moment,
 * antd's ConfigProvider and react-intl, then renders the router.
 */
const App: React.FC = () => {
  const [user, setUser] = useRecoilState(userState)
  const { locale } = user
  // const { data: currentUser, error } = useGetCurrentUser()
  // useEffect(() => {
  //   console.log('currentUser: ', currentUser)
  //   setUser({ ...user, username: currentUser?.username || '', logged: true })
  // }, [currentUser])

  // Keep moment's global locale in step with the user's locale.
  useEffect(() => {
    if (locale.toLowerCase() === 'en-us') {
      moment.locale('en')
    } else if (locale.toLowerCase() === 'zh-cn') {
      // NOTE(review): the imported bundle is 'moment/locale/zh-cn'; confirm
      // that the key 'zh' resolves to it (moment's usual key is 'zh-cn').
      moment.locale('zh')
    }
  }, [locale])

  // Map our locale key to antd's locale object; undefined for other keys,
  // letting ConfigProvider fall back to its default.
  const getAntdLocale = () => {
    if (locale.toLowerCase() === 'en-us') {
      return enUS
    } else if (locale.toLowerCase() === 'zh-cn') {
      return zhCN
    }
  }

  // Look up the react-intl message catalog for the current locale.
  const getLocale = () => {
    const lang = localeConfig.find((item) => {
      return item.key === locale.toLowerCase()
    })
    return lang?.messages
  }

  return (
    <ConfigProvider locale={getAntdLocale()} componentSize='middle'>
      <IntlProvider locale={locale.split('-')[0]} messages={getLocale()}>
        <BrowserRouter>
          <RenderRouter/>
        </BrowserRouter>
      </IntlProvider>
    </ConfigProvider>
  )
}

export default App
|
// 2456. 나는 학급회장이다
// 2021.05.17
// 구현
#include <algorithm>
#include <iostream>
using namespace std;
// counts[i][j]: how many voters gave candidate i a score of j
int counts[4][4];

int main()
{
    int n;  // number of voters
    cin >> n;

    // Tally each voter's scores for candidates 1, 2 and 3.
    int a, b, c;
    for (int i = 0; i < n; i++)
    {
        cin >> a >> b >> c;
        counts[1][a]++;
        counts[2][b]++;
        counts[3][c]++;
    }

    // Total score per candidate: sum of (score value * times received).
    int scores[4] = { 0,0,0,0 };
    int maxScore = 0;
    for (int i = 1; i <= 3; i++)
    {
        for (int j = 1; j <= 3; j++)
        {
            scores[i] += j * counts[i][j];
        }
    }
    for (int i = 1; i <= 3; i++)
    {
        maxScore = max(maxScore, scores[i]);
    }

    // Tie-break among max scorers: more 3-point votes wins, then more
    // 2-point votes; an exact tie on both counts means no winner (dup).
    bool dup = false;
    int ansCount[4] = { 0,0,0,0 };
    int ans = 0;
    for (int i = 1; i <= 3; i++)
    {
        if (maxScore == scores[i])
        {
            if (ansCount[3] < counts[i][3])
            {
                // Strictly more 3s than the current best: take over.
                ansCount[1] = counts[i][1];
                ansCount[2] = counts[i][2];
                ansCount[3] = counts[i][3];
                ans = i;
                dup = false;
            }
            else if ((ansCount[3] == counts[i][3]) && (ansCount[2] < counts[i][2]))
            {
                // Same 3s, strictly more 2s: take over.
                ansCount[1] = counts[i][1];
                ansCount[2] = counts[i][2];
                ansCount[3] = counts[i][3];
                ans = i;
                dup = false;
            }
            else if ((ansCount[3] == counts[i][3]) && (ansCount[2] == counts[i][2]))
            {
                // Identical 3-count and 2-count: unresolved tie.
                dup = true;
            }
        }
    }

    // A tie prints candidate 0; otherwise print the winner. Both include the
    // winning score.
    if (dup)
    {
        cout << 0 << " " << maxScore << endl;
    }
    else
    {
        cout << ans << " " << maxScore << endl;
    }
    return 0;
}
|
def parse_readme(app, text):
    """Extract an app-specific section from README text.

    AppA: everything from "INSTALLATION" to the end of the text.
    AppB: the line starting at "FEATURES" (up to the next newline).
    AppC: space-joined email addresses found after "CONTACT".
    Any other app, or a missing section marker, yields "".
    """
    # BUG FIX: 're' was used below without being imported anywhere,
    # so the AppC path raised NameError at runtime.
    import re

    if app == "AppA":
        # Extract installation instructions
        start_index = text.find("INSTALLATION")
        if start_index != -1:
            return text[start_index:]
    elif app == "AppB":
        # Extract list of features
        # NOTE(review): this returns only the "FEATURES" header line, not the
        # list that follows it — and truncates the final character when no
        # newline follows. Confirm the intended extent of the section.
        start_index = text.find("FEATURES")
        if start_index != -1:
            end_index = text.find("\n", start_index)
            return text[start_index:end_index]
    elif app == "AppC":
        # Extract contact information (email addresses)
        # Assuming email addresses are mentioned in a "CONTACT" section
        start_index = text.find("CONTACT")
        if start_index != -1:
            return " ".join(re.findall(r'[\w\.-]+@[\w\.-]+', text[start_index:]))
    return ""
<filename>seminar/threads4.cpp
#include <thread>
#include <mutex>
#include <iostream>
#include <atomic>
#define ANZAHL 100000
// Thread-safe counter: every access to the count is serialized by a mutex.
class Zaehler
{
public:
    Zaehler() : n(0) {}

    // Returns the current count.
    long wert()
    {
        std::lock_guard<std::mutex> lock( mn );
        return n;
    }

    // Increments the count by one.
    void inc()
    {
        std::lock_guard<std::mutex> lock( mn );
        n++;
    }

    // Resets the count to zero.
    void reset()
    {
        std::lock_guard<std::mutex> lock( mn );
        n = 0;
    }

private:
    // FIX: dropped 'volatile' — in C++ it gives no inter-thread
    // synchronization or atomicity; the mutex alone provides the required
    // visibility and exclusion.
    long n = 0;
    std::mutex mn;
};
// Shared counter incremented by both worker threads.
Zaehler z;

// Thread body: increment the shared counter ANZAHL times.
void threadFunction()
{
    int done = 0;
    while (done < ANZAHL)
    {
        z.inc();
        ++done;
    }
}
int main()
{
    // Run two incrementing threads concurrently; the mutex inside Zaehler
    // makes the final value exactly 2 * ANZAHL.
    std::thread t1( threadFunction );
    std::thread t2( threadFunction );
    t1.join();
    t2.join();
    std::cout << z.wert() << std::endl;
}
|
<gh_stars>0
package com.blackti.oauth.resources;
import com.blackti.oauth.dto.UsuarioDTO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.blackti.oauth.services.UsuarioService;
@RestController
@RequestMapping(value = "/usuario-oauth")
// NOTE(review): class name looks like a typo for "UsuarioResources"; renaming
// would touch other references, so it is left as-is.
public class UsuarioResouces {

    /** Service that looks up OAuth users by e-mail. */
    @Autowired
    private UsuarioService service;

    /**
     * GET /usuario-oauth/search?email=...
     * Returns 200 with the matching user, or 404 when the service throws.
     */
    @GetMapping(value = "/search")
    public ResponseEntity<UsuarioDTO> findByEmail(@RequestParam String email){
        try {
            return ResponseEntity.ok(service.findByEmail(email));
        }catch (IllegalAccessException e){
            // NOTE(review): IllegalAccessException is an unusual "not found"
            // signal — confirm against UsuarioService's declared contract.
            return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
        }
    }
}
|
<filename>chest/gui/swing/src/main/java/net/community/chest/swing/component/table/DefaultTableScroll.java
/*
*
*/
package net.community.chest.swing.component.table;
import javax.swing.JTable;
import net.community.chest.swing.component.scroll.HorizontalPolicy;
import net.community.chest.swing.component.scroll.ScrolledComponent;
import net.community.chest.swing.component.scroll.VerticalPolicy;
/**
* <P>Copyright GPLv2</P>
*
* @author <NAME>.
* @since Apr 1, 2009 9:31:27 AM
*/
public class DefaultTableScroll extends ScrolledComponent<JTable> {
    /**
     *
     */
    private static final long serialVersionUID = -4708794478571367861L;

    /**
     * Wraps the given table in a scroll pane with explicit scrollbar policies.
     *
     * @param view the table to scroll (may be null)
     * @param vp vertical scrollbar policy
     * @param hp horizontal scrollbar policy
     */
    public DefaultTableScroll (JTable view, VerticalPolicy vp, HorizontalPolicy hp)
    {
        super(JTable.class, view, vp, hp);
    }

    /** Creates a scroll pane with no table yet, using the given policies. */
    public DefaultTableScroll (VerticalPolicy vp, HorizontalPolicy hp)
    {
        this(null, vp, hp);
    }

    /** Wraps the given table using as-needed scrollbars in both directions. */
    public DefaultTableScroll (JTable view)
    {
        this(view, VerticalPolicy.BYNEED, HorizontalPolicy.BYNEED);
    }

    /** Creates an empty scroll pane with as-needed scrollbars. */
    public DefaultTableScroll ()
    {
        this(null);
    }
}
|
#!/bin/bash
# Demonstrates the difference between the script name, its invocation path,
# the directory the script lives in, and the caller's working directory.
arg_1=$0
# FIX: quote expansions so paths containing spaces survive word splitting,
# and use $() instead of backticks.
filename=$(basename "$arg_1")
filepath=$(cd "$(dirname "$0")" && pwd)
cmdpath=$(pwd)
echo "$filename"
echo "$arg_1"
echo "$filepath"
echo "$cmdpath"
|
package fr.unice.polytech.si3.qgl.soyouz.classes.objectives.sailor.helper;
import fr.unice.polytech.si3.qgl.soyouz.classes.marineland.entities.onboard.Gouvernail;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
class RudderConfigHelperTest
{
    // Helpers built around rotations below, inside and above the rudder's
    // allowed range.
    RudderConfigHelper inferiorRotation;
    RudderConfigHelper inRangeRotation;
    RudderConfigHelper superiorRotation;

    @BeforeEach
    void setUp()
    {
        inferiorRotation = new RudderConfigHelper(-Math.PI);
        inRangeRotation = new RudderConfigHelper(Math.PI / 5);
        superiorRotation = new RudderConfigHelper(Math.PI);
    }

    /**
     * Out-of-range requests must clamp to +/-Gouvernail.ALLOWED_ROTATION,
     * while an in-range request is returned unchanged.
     */
    @Test
    void findOptRudderRotation()
    {
        assertEquals(-Gouvernail.ALLOWED_ROTATION, inferiorRotation.findOptRudderRotation());
        assertEquals(Math.PI / 5, inRangeRotation.findOptRudderRotation());
        assertEquals(Gouvernail.ALLOWED_ROTATION, superiorRotation.findOptRudderRotation());
    }
}
<gh_stars>0
package com.example.country.services;
import at.favre.lib.crypto.bcrypt.BCrypt;
import com.example.common.errors.WSException;
import com.example.country.dto.CountryDTO;
import com.example.country.models.Country;
import com.example.users.dto.UserDTO;
import com.example.users.models.User;
import javax.enterprise.context.ApplicationScoped;
import javax.transaction.Transactional;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.List;
@Transactional
@ApplicationScoped
public class CountryService {

    /** Returns all countries as DTOs (empty list when none exist). */
    public List<CountryDTO> getCountry() {
        List<Country> countries = Country.listAll();
        return toCountryDTOList(countries);
    }

    /**
     * Maps Country entities to DTOs.
     *
     * FIX: the original returned null for an empty input list; an empty list
     * is returned instead so callers never need a null check.
     */
    public List<CountryDTO> toCountryDTOList (List<Country> list){
        List<CountryDTO> countryDTOList = new ArrayList<CountryDTO>();
        for (Country element: list) {
            countryDTOList.add(new CountryDTO(
                    element.getId(),
                    element.getName()
            ));
        }
        return countryDTOList;
    }
}
|
// Organization Endpoint: list organizations, optionally filtered by name.
app.get('/organizations', (req, res) => {
  const { organization_name } = req.query;
  // Only constrain the query when the parameter was supplied.
  const filter = organization_name ? { organization_name } : {};
  Organization.find(filter)
    .then(results => res.json(results))
    .catch(error => {
      console.error(error);
      res.status(500).json({ message: 'Internal server error' });
    });
});
// Person Endpoint: list people, optionally filtered by name and/or email.
app.get('/people', (req, res) => {
  const { first_name, last_name, email } = req.query;
  // Include only the query params that were actually supplied.
  const filter = {
    ...(first_name && { first_name }),
    ...(last_name && { last_name }),
    ...(email && { email }),
  };
  Person.find(filter)
    .then(results => res.json(results))
    .catch(error => {
      console.error(error);
      res.status(500).json({ message: 'Internal server error' });
    });
});
// Jobs Endpoint: list jobs, optionally filtered by position and/or company.
app.get('/jobs', (req, res) => {
  const { position, company } = req.query;
  // Include only the query params that were actually supplied.
  const filter = {
    ...(position && { position }),
    ...(company && { company }),
  };
  Job.find(filter)
    .then(results => res.json(results))
    .catch(error => {
      console.error(error);
      res.status(500).json({ message: 'Internal server error' });
    });
});
#!/bin/bash
################################################################################
# Copyright 2020 The Magma Authors.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
set -e # exit on any error

# FIX: second option line previously read "<input>" — it documents the output.
USAGE="generateAPIFromSwagger.sh — generates NMS API bindings for swagger spec
Usage:
generateAPIFromSwagger.sh <input> <output>
Options:
<input> Input swagger.yml file to read.
<output> Output file for js bindings.
-h Show this message.
"

# Print the usage text.
help() {
echo "$USAGE"
}

# Only -h is recognized; any other flag is an error.
while getopts ':hs:' option; do
case "$option" in
h) echo "$USAGE"
exit
;;
\?) printf "illegal option: -%s\n" "$OPTARG" >&2
echo "$USAGE" >&2
exit 1
;;
esac
done

# Positional arguments with sensible defaults.
INPUT=${1:-swagger.yml}
OUTPUT=${2:-generated/MagmaAPIBindings.js}
echo "Input Swagger file: $INPUT";
echo "Output file: $OUTPUT";

# Generate the bindings into a temp file, then prepend the license header.
TEMP_FILE=$(mktemp)
yarn --silent swagger2js gen "$INPUT" -t flow -c MagmaAPIBindings -b > "$TEMP_FILE"
HEADER='/**
 * Copyright 2020 The Magma Authors.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @flow
 * @generated
 * This file is generated by generateAPIFromSwagger.sh .
 */
'
# FIX: quote "$OUTPUT" so paths containing spaces work.
(echo "$HEADER"; cat "$TEMP_FILE") > "$OUTPUT"
#!/bin/bash
#= sudo_unmount_force.sh
# (c)2021 John@de-Graaff.net
#
# FIX: $() over backticks, and quoted expansion so paths with spaces work.
BASENAME=$(basename "$0")
usage() {
echo "# usage: $BASENAME [ -h hostname ] " 1>&2
exit 1
}
# NOTE(review): usage() is defined but nothing ever parses -h or calls it —
# confirm whether option handling was meant to be added.
#
# Re-exec under sudo when not already root, forwarding all arguments.
MYID=$( id -u )
if [ "$MYID" != 0 ]; then
#echo "# provide your password for 'sudo':" ; sudo "$0" "$*" ; exit 1 ;
echo "# provide your password for 'sudo':" ; sudo "$0" "$@" ; exit 1 ;
fi
#
HOST=$1
#
echo "# > /usr/bin/sudo diskutil unmount force ${HOST} "
# FIX: quote the volume argument so names containing spaces are not split.
/usr/bin/sudo diskutil unmount force "${HOST}"
#
exit 0
#
#-EOF
|
<gh_stars>0
package org.bf2.cos.fleetshard.sync;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import org.bf2.cos.fleetshard.sync.client.FleetShardClient;
import org.bf2.cos.fleetshard.sync.connector.ConnectorDeploymentSync;
import org.bf2.cos.fleetshard.sync.connector.ConnectorStatusSync;
@ApplicationScoped
public class FleetShardSync {
@Inject
FleetShardClient fleetShardClient;
@Inject
ConnectorDeploymentSync deploymentSync;
@Inject
ConnectorStatusSync statusSync;
public void start() {
try {
fleetShardClient.getOrCreateManagedConnectorCluster();
fleetShardClient.start();
deploymentSync.start();
statusSync.start();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public void stop() {
try {
deploymentSync.stop();
statusSync.stop();
fleetShardClient.stop();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
|
#!/bin/bash
# Initialize the ES data directory with a default aptible user/passphrase.
initialize_elasticsearch() {
  USERNAME=aptible PASSPHRASE=password run-database.sh --initialize
}
# Start run-database.sh in the background and wait up to 60s for its log to
# report "started". Sets the global ES_PID for later shutdown/wait.
wait_for_elasticsearch() {
  # We pass the ES_PID via a global variable because we can't rely on
  # $(wait_for_elasticsearch) as it would result in orphaning the ES process
  # (which makes us unable to `wait` it).
  run-database.sh "$@" >> "$ES_LOG" 2>&1 &
  ES_PID="$!"

  # Poll the log once per second for the startup marker.
  for _ in $(seq 1 60); do
    if grep -q "started" "$ES_LOG" 2>/dev/null; then
      return 0
    fi
    sleep 1
  done

  echo "Database timed out"
  return 1
}
# Initialize the data directory, then start ES and wait for readiness.
start_elasticsearch() {
  initialize_elasticsearch
  wait_for_elasticsearch
}
# Per-test setup: save the real DATA/SSL dirs, point them at throwaway tmp
# paths, and direct the ES log to the test directory.
setup() {
  export OLD_DATA_DIRECTORY="$DATA_DIRECTORY"
  export OLD_SSL_DIRECTORY="$SSL_DIRECTORY"
  export DATA_DIRECTORY=/tmp/datadir
  export SSL_DIRECTORY=/tmp/ssldir
  export ES_LOG="$BATS_TEST_DIRNAME/elasticsearch.log"
  rm -rf "$DATA_DIRECTORY"
  rm -rf "$SSL_DIRECTORY"
  mkdir -p "$DATA_DIRECTORY"
  mkdir -p "$SSL_DIRECTORY"
}
# Kill any running JVM (Elasticsearch) and block until the process is gone.
shutdown_elasticsearch() {
  JAVA_PID=$(pgrep java) || return 0  # nothing to do when no JVM is running
  run pkill java
  # Busy-wait for the /proc entry to disappear.
  while [ -n "$JAVA_PID" ] && [ -e "/proc/${JAVA_PID}" ]; do sleep 0.1; done
}
# Per-test teardown: stop ES, restore the original directories, dump the ES
# log for debugging, then remove it.
teardown() {
  shutdown_elasticsearch
  export DATA_DIRECTORY="$OLD_DATA_DIRECTORY"
  export SSL_DIRECTORY="$OLD_SSL_DIRECTORY"
  unset OLD_DATA_DIRECTORY
  unset OLD_SSL_DIRECTORY
  echo "---- BEGIN LOGS ----"
  cat "$ES_LOG" || true
  echo "---- END LOGS ----"
  rm -f "$ES_LOG"
}
|
#!/bin/bash
set -e
#
# read common settings
source ./config.sh
echo
echo "==== $0: Feature Flags Information:"
echo "SLACK_ENABLE: ${SLACK_ENABLE} - controls if Slack is used for alerting. Slack account is required."
echo "GRAFANA_CLOUD_ENABLE: ${GRAFANA_CLOUD_ENABLE} - controls if grafana-cloud is deployed. Grafana Cloud account is required."
echo "GOLDILOCKS_ENABLE: ${GOLDILOCKS_ENABLE} - controls if goldilocks is deployed."
echo "KUBERNETES_DASHBOARD_ENABLE: ${KUBERNETES_DASHBOARD_ENABLE} - controls if Kubernetes Dashboard is deployed."
echo "KEDA_ENABLE: ${KEDA_ENABLE} - controls if keda is deployed."
echo "RESOURCEPATCH: ${RESOURCEPATCH} - controls if resource settings are patched everywhere."
echo "configuration is defined in ./config.sh"
#
# remove cluster if it exists
if [[ ! -z $(k3d cluster list | grep "^${CLUSTER}") ]]; then
echo
echo "==== $0: remove existing cluster"
read -p "K3D cluster \"${CLUSTER}\" exists. Ok to delete it and restart? (y/n) " -n 1 -r
echo
if [[ ! ${REPLY} =~ ^[Yy]$ ]]; then
echo "bailing out..."
exit 1
fi
k3d cluster delete ${CLUSTER}
fi
echo
echo "==== $0: Create new cluster ${CLUSTER} for app ${APP}:${VERSION}"
# FIX: quote the flag — an unset/empty SLACK_ENABLE made '[ == "yes" ]' a syntax error
if [ "${SLACK_ENABLE}" == "yes" ]; then
echo -n "sending Slack message to announce the setup..."
./slack.sh "Cluster ${CLUSTER} setup in progress...."
fi
# render the k3d config from its template (envsubst reads the template directly;
# the 'cat | envsubst' pipe was a useless use of cat)
envsubst "${ENVSUBSTVAR}" < k3d-config.yaml.template > /tmp/k3d-config.yaml
k3d cluster create --config /tmp/k3d-config.yaml
rm /tmp/k3d-config.yaml
export KUBECONFIG="$(k3d kubeconfig write "${CLUSTER}")"
echo "export KUBECONFIG=${KUBECONFIG}"
#
# Patch resources when requested for metrics server and local path provider
if [ "${RESOURCEPATCH}" == "yes" ]; then
echo
echo "==== $0: Patching coredns resource settings into the Deployment"
kubectl rollout status deployment.apps coredns -n kube-system --request-timeout 5m
kubectl patch deployment coredns -n kube-system -p '{"spec":{"template":{"spec":{"containers":[{"name":"coredns","resources":{"limits":{"cpu":"300m","memory":"200M"},"requests":{"cpu":"100m","memory":"70M"}}}]}}}}'
echo
echo "==== $0: Patching metrics server resource settings into the Deployment"
kubectl rollout status deployment.apps metrics-server -n kube-system --request-timeout 5m
kubectl patch deployment metrics-server -n kube-system -p '{"spec":{"template":{"spec":{"containers":[{"name":"metrics-server","resources":{"limits":{"cpu":"250m","memory":"300M"},"requests":{"cpu":"15m","memory":"100M"}}}]}}}}'
echo
echo "==== $0: Patching local-path-provisioner resource settings into the Deployment"
kubectl rollout status deployment.apps local-path-provisioner -n kube-system --request-timeout 5m
kubectl patch deployment local-path-provisioner -n kube-system -p '{"spec":{"template":{"spec":{"containers":[{"name":"local-path-provisioner","resources":{"limits":{"cpu":"50m","memory":"160M"},"requests":{"cpu":"10m","memory":"80M"}}}]}}}}'
echo
echo "==== $0: Wait for metrics server and local path provisioner to be ready."
kubectl rollout status deployment.apps coredns -n kube-system --request-timeout 5m
kubectl rollout status deployment.apps local-path-provisioner -n kube-system --request-timeout 5m
kubectl rollout status deployment.apps metrics-server -n kube-system --request-timeout 5m
fi
echo
echo "==== $0: Loading helm repositories"
helm repo add prometheus-community https://prometheus-community.github.io/helm-charts # prometheus, etc
helm repo add fluent https://fluent.github.io/helm-charts # fluentbit
helm repo add influxdata https://helm.influxdata.com/ # influxdb v1
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx # ingress-nginx
helm repo add fairwinds-stable https://charts.fairwinds.com/stable # Golidlocks
helm repo add kedacore https://kedacore.github.io/charts # Keda
helm repo add kubernetes-dashboard https://kubernetes.github.io/dashboard # kubernetes-dashboard
helm repo update
echo
echo "==== $0: Installing Prometheus CRDs version ${PROMOPERATOR} before installing Prometheus itself"
echo "Note that the Promoperator CRDs must fit to the Kube-Prometheus-Stack helm chart, currently version ${KUBEPROMETHEUSSTACKCHART}."
echo "If you change the versions in ./config.sh, make sure that there is compatibility by looking at the helm chart release notes."
# CRD manifests are fetched straight from the prometheus-operator GitHub tag
# matching PROMOPERATOR; 'kubectl create' (not apply) is used, so under
# 'set -e' the script aborts if a CRD already exists.
BASE="https://raw.githubusercontent.com/prometheus-operator/prometheus-operator/v${PROMOPERATOR}/example/prometheus-operator-crd"
CRDS="alertmanagerconfigs alertmanagers podmonitors probes prometheuses prometheusrules servicemonitors thanosrulers"
for crd in ${CRDS}
do
kubectl create -f ${BASE}/monitoring.coreos.com_${crd}.yaml
done
#
# deploy ingress-nginx
./ingress-nginx-deploy.sh
#
# deploy kubernetes dashboard
if [ "${KUBERNETES_DASHBOARD_ENABLE}" == "yes" ]; then
#
# deploy Kubernetes Dashboard
./kubernetes-dashboard-deploy.sh
#
# get the token for display at the end
# NOTE(review): the fixed 'cut' field positions depend on kubectl's current
# output layout — verify after kubectl upgrades.
SECRET=$(kubectl get secret -n kubernetes-dashboard | grep kubernetes-dashboard-token- | cut -d " " -f 1)
TOKEN=$(kubectl -n kubernetes-dashboard describe secret ${SECRET} | grep ^token | cut -d " " -f 7)
fi
#
# undeploy and deploy influxdb
./influxdb-deploy.sh
#
# undeploy and deploy fluentbit
./fluentbit-deploy.sh
#
# build and deploy the application
./app-deploy.sh
#
# deploy prometheus/alertmanager/grafana
./prom-deploy.sh
#
# deploy grafana cloud agent
# ('[ flag ] && cmd' is safe mid-script under 'set -e': failures inside an
# '&&' list do not abort the shell)
[ "${GRAFANA_CLOUD_ENABLE}" == "yes" ] && ./grafana-cloud-deploy.sh
#
# deploy golidlock
[ "${GOLDILOCKS_ENABLE}" == "yes" ] && ./goldilocks-deploy.sh
#
# deploy keda
[ "${KEDA_ENABLE}" == "yes" ] && ./keda-deploy.sh
#
# generate a little random traffic. The app should be ready in the meantime to receive calls.
./app-traffic.sh 4 1 1 # four calls with delay between 1 and 2 seconds between calls
echo
echo "==== $0: Various information"
# FIX: quote the flag — an unset/empty SLACK_ENABLE made '[ == "yes" ]' a syntax error
if [ "${SLACK_ENABLE}" == "yes" ]; then
echo -n "Sending Slack message to announce deployment. "
./slack.sh "Cluster ${CLUSTER} running."
fi
echo "export KUBECONFIG=${KUBECONFIG}"
if [ "${KUBERNETES_DASHBOARD_ENABLE}" == "yes" ]; then
echo "kubernetes dashboard:"
echo " visit http://localhost:${HTTPPORT}/dashboard/#/workloads?namespace=_all"
echo " use token: ${TOKEN}"
fi
echo "Lens metrics setting: monitoring/prom-kube-prometheus-stack-prometheus:9090/prom"
echo "${APP} info API: http://localhost:${HTTPPORT}/service/info"
echo "${APP} random API: http://localhost:${HTTPPORT}/service/random"
echo "${APP} metrics API: http://localhost:${HTTPPORT}/service/metrics"
echo "influxdb ui: http://localhost:${INFLUXUIPORT} (configure influx server at http://localhost:${INFLUXPORT})"
[ "${GOLDILOCKS_ENABLE}" == "yes" ] && echo "goldilocks: http://localhost:${GOLDILOCKSPORT}"
echo "prometheus: http://localhost:${HTTPPORT}/prom/targets"
echo "grafana: http://localhost:${HTTPPORT}/?orgId=1 (use admin/${GRAFANA_LOCAL_ADMIN_PASS} to login)"
echo "alertmanager: http://localhost:${HTTPPORT}/alert"
if [ "${GRAFANA_CLOUD_ENABLE}" == "yes" ]; then
echo "grafanacloud portal: https://grafana.com/orgs/${GRAFANA_CLOUD_ORG}"
echo "grafana cloud instance: https://${GRAFANA_CLOUD_ORG}.grafana.net"
fi
# FIX: the old '[ -x ${AMTOOL} ] && ...' was the script's final command, so a
# missing amtool made the whole (otherwise successful) run exit non-zero.
# An explicit 'if' keeps the exit status clean; quoting AMTOOL avoids
# word-splitting of the path.
if [ -x "${AMTOOL}" ]; then
sleep 4
echo -n "Alertmanager "
"${AMTOOL}" config routes
fi
|
#include<iostream>
using namespace std;
// Generates the primes in [2, n] by trial division.
// Returns a heap-allocated, value-initialized array of max(n-1, 0) ints: the
// primes occupy the leading entries and every unused slot is 0, so callers can
// stop at the first zero. The caller owns the memory (delete[]).
// FIX: the array was previously left uninitialized past the last prime
// (readers saw garbage), and n < 1 produced a negative-size 'new' (UB).
int* primeNumberGenerator(int n){
    // There are at most n-1 candidates in [2, n]; the trailing () zero-fills.
    int size = (n > 1) ? n - 1 : 0;
    int *primeNumberArray = new int[size]();
    int index = 0;
    for (int i = 2; i <= n; i++) {
        // Trial division up to sqrt(i)
        bool isPrime = true;
        for (int j = 2; j * j <= i; j++) {
            if (i % j == 0) {
                isPrime = false;
                break;
            }
        }
        if (isPrime) {
            primeNumberArray[index] = i;
            index++;
        }
    }
    return primeNumberArray;
}
// Driver code: reads the inclusive upper bound n and prints the primes in [2, n].
// FIX: the old loop printed n-1 array entries even though only the first
// 'count' slots were filled, emitting uninitialized garbage; it also leaked
// the array returned by primeNumberGenerator.
int main(){
    int n;
    cin >> n;
    int* primeArray = primeNumberGenerator(n);
    // Count the primes independently so we never read past the filled prefix.
    int count = 0;
    for (int i = 2; i <= n; i++) {
        bool isPrime = true;
        for (int j = 2; j * j <= i; j++) {
            if (i % j == 0) { isPrime = false; break; }
        }
        if (isPrime) count++;
    }
    for (int i = 0; i < count; i++)
        cout << primeArray[i] << " ";
    delete[] primeArray;  // release the generator's allocation
    return 0;
}
#!/bin/bash
# Run hackbench under cyclictest while sampling the sched_profiler module,
# collecting all outputs under output.cyc/hackbench.
KERNEL=$(uname -r)
BENCH=hackbench
OUTPUT=output.cyc/$BENCH
mkdir -p "$OUTPUT"
hackbench -s 512 -l 1024 -P > "$OUTPUT/$BENCH.out" &
cyclictest -mnq -p 90 -h 1000 -i 1000 -l 10000 > "$OUTPUT/$BENCH.cyc.out" &
ps > "$OUTPUT/$BENCH.ps"
sleep 1
insmod "/lib/modules/$KERNEL/extra/sched_profiler.ko"
sleep 3
cat /proc/sched_profiler > "$OUTPUT/$BENCH.prof"
# FIX: 'fg' requires job control, which is disabled in non-interactive shells,
# so the two trailing 'fg' calls always failed. 'wait' blocks until both
# background jobs (hackbench and cyclictest) have finished.
wait
#!/bin/bash
# ========== Experiment Seq. Idx. 1156 / 20.0.4.0 / N. 0 - _S=20.0.4.0 D1_N=4 a=-1 b=-1 c=1 d=-1 e=-1 f=1 D3_N=7 g=1 h=1 i=1 D4_N=2 j=2 D5_N=0 ==========
# Generated experiment runner: computes metrics for one hyper-parameter
# combination and appends one CSV row to the shared ANOVA results file.
# Exit codes 160-165 are a protocol with the external scheduler (see below).
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 1156 / 20.0.4.0 / N. 0 - _S=20.0.4.0 D1_N=4 a=-1 b=-1 c=1 d=-1 e=-1 f=1 D3_N=7 g=1 h=1 i=1 D4_N=2 j=2 D5_N=0 ==========\n\n'
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
RESULTS_DIR="$JBHI_DIR/results"
# The "Yes" literal below is baked in by the experiment generator; this
# instance therefore always takes the SVM/isbi branch.
if [[ "Yes" == "Yes" ]]; then
SVM_SUFFIX="svm"
PREDICTIONS_FORMAT="isbi"
else
SVM_SUFFIX="nosvm"
PREDICTIONS_FORMAT="titans"
fi
RESULTS_PREFIX="$RESULTS_DIR/deep.4.layer.7.test.2.index.1156.$SVM_SUFFIX"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$RESULTS_PREFIX.finish.txt"
# ...this experiment is a little different --- only one master procedure should run, so there's only a master lock file
METRICS_TEMP_PATH="$RESULTS_DIR/this_results.anova.txt"
METRICS_PATH="$RESULTS_DIR/all_results.anova.txt"
START_PATH="$METRICS_PATH.start.txt"
# "-" means "no finish marker" for this master procedure (checked below)
FINISH_PATH="-"
LOCK_PATH="$METRICS_PATH.running.lock"
LAST_OUTPUT="$METRICS_PATH"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
# (for reproducibility, only committed code may run; exit 162 = dirty tree)
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
echo 'FATAL: there are uncommitted changes in your git sources file' >&2
echo ' for reproducibility, experiments only run on committed changes' >&2
echo >&2
echo ' Git status returned:'>&2
echo "$GIT_STATUS" >&2
exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
if [[ -e "$FINISH_PATH" ]]; then
echo 'INFO: this experiment has already finished' >&2
exit 163
fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
# (exit 164 = missing input)
if [[ "$LIST_OF_INPUTS" != "" ]]; then
# BUG FIX: 'IFS=":" arr=( $var )' does NOT apply the temporary IFS to the
# expansion on the same line, so the list was never split on ':'.
# 'read -a' splits correctly; quoting the expansion avoids re-splitting.
IFS=':' read -r -a tokens_of_input <<< "$LIST_OF_INPUTS"
input_missing=No
for input_to_check in "${tokens_of_input[@]}"; do
# tolerate empty fields produced by '::' or stray separators
[[ -z "$input_to_check" ]] && continue
if [[ ! -e "$input_to_check" ]]; then
echo "ERROR: input $input_to_check missing for this experiment" >&2
input_missing=Yes
fi
done
if [[ "$input_missing" != No ]]; then
exit 164
fi
fi
# Sets trap to return error code if script is interrupted before successful finish
# Exit-code protocol: 160 = success, 161 = generic failure (default),
# 165 = another process holds the lock.
LOCK_SUCCESS=No
FINISH_STATUS=161
function finish_trap {
# release the lock only if this process actually acquired it
if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
rmdir "$LOCK_PATH" &> /dev/null
fi
if [[ "$FINISH_STATUS" == "165" ]]; then
echo 'WARNING: experiment discontinued because other process holds its lock' >&2
else
if [[ "$FINISH_STATUS" == "160" ]]; then
echo 'INFO: experiment finished successfully' >&2
else
# on failure, remove any finish marker so the scheduler will retry
[[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
echo 'ERROR: an error occurred while executing the experiment' >&2
fi
fi
exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# ('mkdir' is atomic on POSIX filesystems, so it doubles as a mutex)
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
LOCK_SUCCESS=Yes
else
echo 'WARNING: this experiment is already being executed elsewhere' >&2
FINISH_STATUS="165"
exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
if [[ -e "$START_PATH" ]]; then
echo 'WARNING: this experiment is being restarted' >&2
STARTED_BEFORE=Yes
fi
#...marks start
date -u >> "$START_PATH"
echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
if [[ "$STARTED_BEFORE" == "Yes" ]]; then
# If the experiment was started before, do any cleanup necessary
echo -n
else
# first run: write the CSV header of the shared ANOVA results file
echo "D1_N;D3_N;D4_N;a;b;c;d;e;f;g;h;i;j;m_ap;m_auc;m_tn;m_fp;m_fn;m_tp;m_tpr;m_fpr;k_ap;k_auc;k_tn;k_fp;k_fn;k_tp;k_tpr;k_fpr;isbi_auc" > "$METRICS_PATH"
fi
# compute the metrics for this experiment's predictions (no comments may be
# inserted between the backslash-continued lines below)
python \
"$SOURCES_GIT_DIR/etc/compute_metrics.py" \
--metadata_file "$SOURCES_GIT_DIR/data/all-metadata.csv" \
--predictions_format "$PREDICTIONS_FORMAT" \
--metrics_file "$METRICS_TEMP_PATH" \
--predictions_file "$RESULTS_PATH"
EXPERIMENT_STATUS="$?"
# append this experiment's factor levels plus the computed metrics as one CSV row
echo -n "4;7;2;" >> "$METRICS_PATH"
echo -n "-1;-1;1;-1;-1;1;1;1;1;2;" >> "$METRICS_PATH"
tail "$METRICS_TEMP_PATH" -n 1 >> "$METRICS_PATH"
#
#...starts training
# mark success (160) only if metrics computation succeeded and the expected
# output exists; otherwise the trap reports the default failure status
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
if [[ "$FINISH_PATH" != "-" ]]; then
date -u >> "$FINISH_PATH"
echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
fi
FINISH_STATUS="160"
fi
fi
|
<filename>lang/py/cookbook/v2/19/mytee.py
#! /usr/bin/env python
# -*- coding:UTF-8 -*-
import collections
import itertools
def tee1(iterable):
    """Return two independent iterators over *iterable* (dict-cache variant).

    An item pulled from the source by one iterator is stashed in a shared
    dict, keyed by position, until the other iterator catches up and pops it.

    Fixes over the original: ``cachep`` NameError typo; Python-2-only
    ``it.next`` replaced by ``next(it)``; StopIteration no longer leaks out
    of the generator (PEP 479 would turn that into a RuntimeError).
    """
    def yield_with_cache(it, cache):
        pop = cache.pop
        for i in itertools.count():
            try:
                # The peer already produced item i; hand it over (and drop it).
                yield pop(i)
            except KeyError:
                # We are ahead: pull from the source and cache it for the peer.
                try:
                    cache[i] = next(it)
                except StopIteration:
                    return  # source exhausted
                yield cache[i]
    it = iter(iterable)
    cache = {}
    return yield_with_cache(it, cache), yield_with_cache(it, cache)
def tee2(iterable):
    """Return two independent iterators over *iterable* (deque variant).

    Each consumer owns a deque holding the items the *other* consumer has
    already pulled from the source.

    Fixes over the original: ``cache=collections.deque`` was missing the
    call parentheses (so ``popleft``/``append`` failed on the class), and a
    single shared deque made each iterator replay its own cached items
    instead of the peer's; two per-consumer queues restore tee semantics.
    ``it.next`` (Python 2 only) is replaced by ``next(it)`` with the
    StopIteration handled explicitly (PEP 479).
    """
    it = iter(iterable)
    q1, q2 = collections.deque(), collections.deque()

    def yield_with_cache(my_cache, peer_cache):
        while True:
            if my_cache:
                # Replay an item the peer already fetched.
                yield my_cache.popleft()
            else:
                try:
                    result = next(it)
                except StopIteration:
                    return  # source exhausted
                peer_cache.append(result)
                yield result

    return yield_with_cache(q1, q2), yield_with_cache(q2, q1)
|
#!/bin/bash
# Container init script for RPi-Monitor: expose the VideoCore tools, brand the
# web UI with the Docker host's hostname, then start the monitor daemon.
# Load shared libraries from /opt/vc/lib
echo /opt/vc/lib > /etc/ld.so.conf.d/00-vmcs.conf
ldconfig
# Link /opt/vc/bin binaries into /usr/bin. FIX: use 'ln -sf' so the script is
# idempotent — plain 'ln -s' fails when the link already exists (e.g. on
# container restart).
for tool in raspividyuv dtmerge raspistill vcgencmd vcdbg dtoverlay-pre \
            raspiyuv vchiq_test tvservice edidparser raspivid dtoverlay-post \
            dtoverlay dtparam; do
    ln -sf "/opt/vc/bin/$tool" "/usr/bin/$tool"
done
# Insert Docker Host hostname into raspbian.conf
RASPBIAN=/etc/rpimonitor/template/raspbian.conf
DOCKERHOST=$(cat /dockerhost/etc/hostname)
# FIX: grep -q — we only need the exit status, not the matching line on stdout
if grep -q "web.page.menutitle" "$RASPBIAN"; then
    sed -i "s/'+data.hostname+'/$DOCKERHOST/g" "$RASPBIAN"
else
    echo "web.page.menutitle='RPi-Monitor <sub>($DOCKERHOST)</sub>'" >> "$RASPBIAN"
    echo "web.page.pagetitle='RPi-Monitor ($DOCKERHOST)'" >> "$RASPBIAN"
fi
# Update RPI Monitor Package Status
/etc/init.d/rpimonitor install_auto_package_status_update
/usr/share/rpimonitor/scripts/updatePackagesStatus.pl
# Start RPI Monitor
/usr/bin/rpimonitord -v
|
# Habitat plan for GNU R 3.5.0: headless build (no X11, no Java) against the
# core origin's toolchain and graphics/text-rendering libraries.
pkg_name=R
pkg_origin=core
pkg_version="3.5.0"
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_license=('GPL-2.0+')
pkg_source="https://cran.r-project.org/src/base/R-3/${pkg_name}-${pkg_version}.tar.gz"
pkg_shasum="fd1725535e21797d3d9fea8963d99be0ba4c3aecadcf081b43e261458b416870"
pkg_upstream_url="https://www.r-project.org"
pkg_description="R is a free software environment for statistical computing and graphics."
# build-time-only dependencies
pkg_build_deps=(
core/coreutils
core/diffutils
core/file
core/make
core/perl
core/pkg-config
core/texinfo
)
# runtime dependencies (graphics stack, compression, fonts, readline)
pkg_deps=(
core/bzip2
core/cairo
core/curl
core/gcc
core/harfbuzz
core/icu
core/expat
core/fontconfig
core/freetype
core/glib
core/libjpeg-turbo
core/liberation-fonts-ttf
core/libpng
core/libtiff
core/pango
core/pcre
core/pixman
core/readline
core/xz
core/zlib
)
# R installs itself under lib64/R; expose its bin/include/lib to dependents
pkg_bin_dirs=(lib64/R/bin)
pkg_include_dirs=(lib64/R/include)
pkg_lib_dirs=(lib64/R/lib)
do_build() {
# drop the cairo-xlib.h probe from configure — presumably required because we
# build with --with-x=no and no X headers exist (NOTE(review): confirm this
# is still needed when bumping the R version)
sed -i '/#include.*<cairo-xlib.h>/d' ./configure
./configure --prefix="${pkg_prefix}" \
--with-x=no \
--disable-java \
--enable-memory-profiling
make
}
do_check() {
# runs R's own test suite
make test
}
|
<reponame>superhawk610/site
import React from 'react';
import { Link } from 'gatsby';
import styled from 'styled-components';
import owl from '../images/owl.png';
import ThemeToggle from '../components/theme-toggle';
import Spacer from './spacer';
import { breakpoints } from '../constants';
const Sidebar = () => (
<Container>
<Link to="/" className="logo">
<img src={owl} aria-hidden="true" />
</Link>
<Link to="/">home</Link>
<a href="https://github.com/superhawk610">github</a>
<a href="https://twitter.com/superhawk610">twitter</a>
<a href="mailto:<EMAIL>">inquiries</a>
{/* <Link to="/resume">resume</Link> */}
{/* <Link to="/portfolio">portfolio</Link> */}
{/* TODO: add hamburger menu on mobile */}
<Spacer />
<ThemeToggle />
</Container>
);
// Sticky vertical sidebar column on desktop; collapses to a horizontal bar
// (nav links hidden) on tablet/mobile breakpoints.
const Container = styled.section`
position: sticky;
top: 2rem;
display: flex;
flex-direction: column;
align-items: center;
justify-content: flex-start;
flex: 0 0 250px;
/* thin vertical divider on the sidebar's right edge */
&::after {
content: '';
display: block;
position: absolute;
top: 1.5rem;
right: 0;
width: 2px;
height: 400px;
background: ${props => props.theme.divider};
}
.logo {
margin: 0;
padding: 0;
border: 0;
> img {
margin: 2rem 0;
height: 72px;
}
}
> a {
font-weight: 700;
margin-bottom: 5px;
}
/* tablet: switch to a horizontal header bar; hide text links and divider */
@media screen and (max-width: ${breakpoints.tablet}) {
position: relative;
top: auto;
padding: 0 2rem;
flex: 1;
flex-direction: row;
align-items: center;
justify-content: space-between;
.logo > img {
margin: 0;
height: auto;
width: 60px;
}
&::after {
display: none;
}
> a:not(.logo) {
display: none;
}
}
/* mobile: tighten horizontal padding */
@media screen and (max-width: ${breakpoints.mobile}) {
padding: 0;
padding-bottom: 2rem;
}
`;
export default Sidebar;
|
// NOTE(review): generated webpack bundle output (chunk 40) — do not hand-edit;
// regenerate from the @react-page/ui sources instead.
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[40],{
/***/ "./node_modules/@react-page/ui/lib-es/ThemeProvider/index.js":
/*!*******************************************************************!*\
!*** ./node_modules/@react-page/ui/lib-es/ThemeProvider/index.js ***!
\*******************************************************************/
/*! exports provided: darkTheme, default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! react */ "./node_modules/react/index.js");
/* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _material_ui_core_styles__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @material-ui/core/styles */ "./node_modules/@material-ui/core/esm/styles/index.js");
/* harmony import */ var _material_ui_styles__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @material-ui/styles */ "./node_modules/@material-ui/styles/esm/index.js");
/* harmony import */ var _DarkTheme_index__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./DarkTheme/index */ "./node_modules/@react-page/ui/lib-es/ThemeProvider/DarkTheme/index.js");
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "darkTheme", function() { return _DarkTheme_index__WEBPACK_IMPORTED_MODULE_3__["default"]; });
/* harmony import */ var _themeOptions__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./themeOptions */ "./node_modules/@react-page/ui/lib-es/ThemeProvider/themeOptions.js");
/* TypeScript's downleveled class-inheritance helper, inlined by the compiler */
var __extends = (undefined && undefined.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
/*
* This file is part of ORY Editor.
*
* ORY Editor is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORY Editor is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with ORY Editor. If not, see <http://www.gnu.org/licenses/>.
*
* @license LGPL-3.0
* @copyright 2016-2018 <NAME>
* @author <NAME> <<EMAIL>>
*
*/
var generateClassName = Object(_material_ui_styles__WEBPACK_IMPORTED_MODULE_2__["createGenerateClassName"])({
disableGlobal: true,
productionPrefix: 'ory',
});
var theme = Object(_material_ui_core_styles__WEBPACK_IMPORTED_MODULE_1__["createMuiTheme"])(_themeOptions__WEBPACK_IMPORTED_MODULE_4__["themeOptions"]);
/* React class component wrapping MUI's StylesProvider/ThemeProvider pair */
var ThemeProvider = /** @class */ (function (_super) {
__extends(ThemeProvider, _super);
function ThemeProvider() {
return _super !== null && _super.apply(this, arguments) || this;
}
ThemeProvider.prototype.render = function () {
return (react__WEBPACK_IMPORTED_MODULE_0__["createElement"](_material_ui_styles__WEBPACK_IMPORTED_MODULE_2__["StylesProvider"], { injectFirst: true, generateClassName: generateClassName },
react__WEBPACK_IMPORTED_MODULE_0__["createElement"](_material_ui_styles__WEBPACK_IMPORTED_MODULE_2__["ThemeProvider"], { theme: this.props.theme || theme }, this.props.children)));
};
return ThemeProvider;
}(react__WEBPACK_IMPORTED_MODULE_0__["Component"]));
/* harmony default export */ __webpack_exports__["default"] = (ThemeProvider);
//# sourceMappingURL=index.js.map
/***/ }),
/***/ "./node_modules/@react-page/ui/lib-es/ThemeProvider/themeOptions.js":
/*!**************************************************************************!*\
!*** ./node_modules/@react-page/ui/lib-es/ThemeProvider/themeOptions.js ***!
\**************************************************************************/
/*! exports provided: themeOptions */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "themeOptions", function() { return themeOptions; });
/* empty theme overrides — MUI defaults are used as-is */
var themeOptions = {};
//# sourceMappingURL=themeOptions.js.map
/***/ })
}]);
# Runs the OMWKCS multi-source-fusion CSQA task in prediction mode on GPU 5,
# fusing three pre-trained encoders (Origin, WKDT, OMCS) listed under
# --encoder_dir_list. Argument groups: run control / model & input limits /
# data, checkpoint and vocab paths.
# NOTE: no comments may be inserted between the backslash-continued lines.
python CODE/run_csqa_task.py\
 --task_name OMWKCS_MultiSourceFusion\
 --mission predict\
 --fp16 0\
 --seed 42\
 --gpu_ids 5\
 --evltest_batch_size 12\
 --processor_batch_size 24\
 --eval_after_tacc 0.79\
 \
 --without_PTM\
 --model_list Origin WKDT OMCS\
 --max_seq_len 140\
 --max_qa_len 58\
 --WKDT_version 5.0\
 --max_desc_len 45\
 --OMCS_version 3.0\
 --cs_num 3\
 \
 --dataset_dir /home/zhifli/DATA\
 --result_dir /data/zhifli/model_save\
 --saved_model_dir /data/zhifli/model_save/albert-base-v2/OMWKCS_MultiSourceFusion/30May-2146_seed42_Origin+WKDT+OMCS_62.00%/\
 --encoder_dir_list\
 /data/zhifli/model_save/albert-base-v2/Origin_Albert_Baseline/2030-May17_seed5017_58.31/\
 /data/zhifli/model_save/albert-base-v2/WKDT_Albert_Baseline/0027-May18_seed5017_wkdtv4.0_59.05/\
 /data/zhifli/model_save/albert-base-v2/OMCS_Albert_Baseline/2152-May19_seed42_cs3_omcsv3.0/\
 --PTM_model_vocab_dir /home/zhifli/DATA/transformers-models/albert-base-v2
import React from 'react'
import Screenshots from './screenshots'
import { Th, Td, Table } from '~components/common/table'
import { FormatNumber, FormatDate } from '~components/utils/format'
import Timezone from '~components/common/timezone'
import stateHistoryStyle from './state-history.module.scss'
/**
 * Renders a state's COVID tracking history as a table: one row per date with
 * screenshot links and the day's counts.
 *
 * @param {Array} history - daily records (date, totals, increases), newest first
 *   as supplied by the caller; rendered in the order given.
 * @param {Array} screenshots - screenshot metadata passed through to <Screenshots>.
 */
export default ({ history, screenshots }) => (
<Table>
<thead>
<tr>
<Th alignLeft>Date</Th>
<Th alignLeft>
Screenshots (<Timezone />)
</Th>
<Th>New Tests</Th>
<Th>Cases</Th>
<Th>Negative</Th>
<Th>Pending</Th>
<Th>Hospitalized</Th>
<Th>Deaths</Th>
<Th>Total</Th>
</tr>
</thead>
<tbody className={`state-history-table ${stateHistoryStyle.history}`}>
{history.map(node => (
<tr key={`history-${node.date}`}>
<Td alignLeft>
<FormatDate
date={node.date}
format="ccc LLL d yyyy"
timezone={false}
/>
</Td>
<Td alignLeft>
<Screenshots date={node.date} screenshots={screenshots} />
</Td>
<Td>
<FormatNumber number={node.totalTestResultsIncrease} />
</Td>
<Td>
<FormatNumber number={node.positive} />
</Td>
<Td>
<FormatNumber number={node.negative} />
</Td>
<Td>
<FormatNumber number={node.pending} />
</Td>
<Td>
<FormatNumber number={node.hospitalized} />
</Td>
<Td>
<FormatNumber number={node.death} />
</Td>
<Td>
<FormatNumber number={node.totalTestResults} />
</Td>
</tr>
))}
</tbody>
</Table>
)
|
-- phpMyAdmin SQL Dump
-- version 5.0.4
-- https://www.phpmyadmin.net/
--
-- Host: localhost:3306
-- Generation Time: Dec 30, 2020 at 12:35 PM
-- Server version: 10.5.8-MariaDB-log
-- PHP Version: 7.4.13
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `crud_pdo`
--
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
-- Users table: user_password is varchar(255), sized for a password hash
-- (e.g. bcrypt via PHP password_hash()).
CREATE TABLE `users` (
`user_id` int(11) NOT NULL,
`user_name` varchar(50) NOT NULL,
`user_email` varchar(100) NOT NULL,
`user_password` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `users`
--
-- NOTE(review): '<EMAIL>'/'<PASSWORD>' are redaction placeholders from the
-- dump, and the remaining rows store literal plaintext ('SECRET') — seed data
-- should store password hashes, never plaintext passwords.
INSERT INTO `users` (`user_id`, `user_name`, `user_email`, `user_password`) VALUES
(1, 'Mark', '<EMAIL>', '<PASSWORD>'),
(2, 'Wolpe', '<EMAIL>', '<PASSWORD>'),
(5, 'John', '<EMAIL>', 'SECRET'),
(6, 'Mayer', '<EMAIL>', 'SECRET'),
(7, 'Micky', '<EMAIL>', 'SECRET');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`user_id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `users`
--
-- next id continues after the highest dumped row (7)
ALTER TABLE `users`
MODIFY `user_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
set -eo nounset

# Build and install texinfo-6.5 into the /tools prefix from the LFS sources
# tree, cleaning the extracted tree before and after the build.
PKG=texinfo-6.5

cd /mnt/lfs/sources
rm -rf "$PKG"
tar xf "$PKG.tar.xz"
pushd "$PKG"
./configure --prefix=/tools
make
#make check
make install
popd
rm -rf "$PKG"
<reponame>geopm/geopm.github.io<filename>dox/search/variables_4.js
// Doxygen-generated search index for variables starting with 't'
// (maps 't' to its anchor in the geopm_time_s struct docs).
// NOTE(review): generated file — regenerate with Doxygen instead of editing.
var searchData=
[
['t',['t',['../structgeopm__time__s.html#acb1eaa1bbc844a3c6853b5849fb1c8a6',1,'geopm_time_s']]]
];
|
#!/bin/sh
# Type-check the peekingduck package with mypy and translate the result into
# the pass/fail contract the pipeline expects (exit 123 on type errors).
if mypy --show-error-codes \
        --ignore-missing-imports \
        --disallow-untyped-defs \
        --disable-error-code override \
        --disable-error-code attr-defined peekingduck; then
    echo "TYPE CHECK SUCCESS!!"
else
    echo "TYPE CHECK FAIL"
    exit 123
fi
<filename>src/scene/pick.js
import { ADDRESS_CLAMP_TO_EDGE, CLEARFLAG_COLOR, CLEARFLAG_DEPTH, FILTER_NEAREST, PIXELFORMAT_R8_G8_B8_A8 } from '../graphics/graphics.js';
import { GraphicsDevice } from '../graphics/device.js';
import { RenderTarget } from '../graphics/render-target.js';
import { Texture } from '../graphics/texture.js';
import { ASPECT_MANUAL, SHADER_PICK, SORTMODE_NONE } from './constants.js';
import { Camera } from './camera.js';
import { Command } from './mesh-instance.js';
import { Layer } from './layer.js';
import { LayerComposition } from './layer-composition.js';
import { Application } from '../framework/application.js';
var _deviceDeprecationWarning = false;
var _getSelectionDeprecationWarning = false;
var _prepareDeprecationWarning = false;
/**
* @class
* @name pc.Picker
* @classdesc Picker object used to select mesh instances from screen coordinates.
* @description Create a new instance of a Picker object.
* @param {pc.Application} app - The application managing this picker instance.
* @param {number} width - The width of the pick buffer in pixels.
* @param {number} height - The height of the pick buffer in pixels.
* @property {number} width Width of the pick buffer in pixels (read-only).
* @property {number} height Height of the pick buffer in pixels (read-only).
* @property {pc.RenderTarget} renderTarget The render target used by the picker internally (read-only).
*/
function Picker(app, width, height) {
// Back-compat shim: old callers passed a GraphicsDevice; recover the
// Application singleton and warn once.
if (app instanceof GraphicsDevice) {
app = Application.getApplication();
if (!_deviceDeprecationWarning) {
_deviceDeprecationWarning = true;
// #ifdef DEBUG
console.warn("pc.Picker now takes pc.Application as first argument. Passing pc.GraphicsDevice is deprecated.");
// #endif
}
}
this.app = app;
this.device = app.graphicsDevice;
var device = this.device;
this.library = device.getProgramLibrary();
// RGBA color uploaded per draw call to encode the mesh-instance index;
// alpha is fixed at 1 (see onDrawCall in prepare()).
this.pickColor = new Float32Array(4);
this.pickColor[3] = 1;
this.scene = null;
this.drawCalls = [];
// Layer/composition are created lazily on first prepare()
this.layer = null;
this.layerComp = null;
// Clear to white: 0xffffff means 'no selection' in getSelection()
this.clearOptions = {
color: [1, 1, 1, 1],
depth: 1,
flags: CLEARFLAG_COLOR | CLEARFLAG_DEPTH
};
var self = this;
this._clearDepthOptions = {
depth: 1.0,
flags: CLEARFLAG_DEPTH
};
// Command injected between layers that requested a depth clear
this.clearDepthCommand = new Command(0, 0, function (){
device.clear(self._clearDepthOptions);
});
this.resize(width, height);
this._ignoreOpacityFor = null; // meshInstance
}
/**
* @function
* @name pc.Picker#getSelection
* @description Return the list of mesh instances selected by the specified rectangle in the
* previously prepared pick buffer.The rectangle using top-left coordinate system.
* @param {number} x - The left edge of the rectangle.
* @param {number} y - The top edge of the rectangle.
* @param {number} [width] - The width of the rectangle.
* @param {number} [height] - The height of the rectangle.
* @returns {pc.MeshInstance[]} An array of mesh instances that are in the selection.
* @example
* // Get the selection at the point (10,20)
* var selection = picker.getSelection(10, 20);
* @example
* // Get all models in rectangle with corners at (10,20) and (20,40)
* var selection = picker.getSelection(10, 20, 10, 20);
*/
Picker.prototype.getSelection = function (x, y, width, height) {
// NOTE(review): assumes prepare() has been called first (this.layer and its
// renderTarget exist) — calling getSelection before prepare throws.
var device = this.device;
// Legacy signature: getSelection(rect) — unpack and warn once.
if (typeof x === 'object') {
// #ifdef DEBUG
if (!_prepareDeprecationWarning) {
_prepareDeprecationWarning = true;
console.warn("Picker.getSelection:param 'rect' is deprecated, use 'x, y, width, height' instead.");
}
// #endif
var rect = x;
x = rect.x;
y = rect.y;
width = rect.width;
height = rect.height;
} else {
// Convert top-left y to the bottom-left origin readPixels expects
y = this.layer.renderTarget.height - (y + (height || 1));
}
width = width || 1;
height = height || 1;
// Cache active render target
var prevRenderTarget = device.renderTarget;
// Ready the device for rendering to the pick buffer
device.setRenderTarget(this.layer.renderTarget);
device.updateBegin();
// 4 bytes per pixel (RGBA8)
var pixels = new Uint8Array(4 * width * height);
device.readPixels(x, y, width, height, pixels);
device.updateEnd();
// Restore render target
device.setRenderTarget(prevRenderTarget);
var selection = [];
var drawCalls = this.layer.instances.visibleOpaque[0].list;
var r, g, b, index;
// Decode each pixel's RGB back into the 24-bit draw-call index written by
// prepare()'s onDrawCall (alpha is ignored).
for (var i = 0; i < width * height; i++) {
r = pixels[4 * i + 0];
g = pixels[4 * i + 1];
b = pixels[4 * i + 2];
index = r << 16 | g << 8 | b;
// White is 'no selection'
if (index !== 0xffffff) {
var selectedMeshInstance = drawCalls[index];
// de-duplicate: a mesh usually covers many pixels
if (selection.indexOf(selectedMeshInstance) === -1) {
selection.push(selectedMeshInstance);
}
}
}
return selection;
};
/**
* @function
* @name pc.Picker#prepare
* @description Primes the pick buffer with a rendering of the specified models from the point of view
* of the supplied camera. Once the pick buffer has been prepared, pc.Picker#getSelection can be
* called multiple times on the same picker object. Therefore, if the models or camera do not change
* in any way, pc.Picker#prepare does not need to be called again.
* @param {pc.CameraComponent} camera - The camera component used to render the scene.
* @param {pc.Scene} scene - The scene containing the pickable mesh instances.
* @param {pc.Layer|pc.RenderTarget} [arg] - Layer or RenderTarget from which objects will be picked. If not supplied, all layers rendering to backbuffer before this layer will be used.
*/
Picker.prototype.prepare = function (camera, scene, arg) {
    var device = this.device;
    var i, j;
    var self = this;
    // Accept a legacy pc.Camera argument and unwrap it to its CameraComponent.
    if (camera instanceof Camera) {
        // #ifdef DEBUG
        if (!_getSelectionDeprecationWarning) {
            _getSelectionDeprecationWarning = true;
            console.warn("pc.Picker#prepare now takes pc.CameraComponent as first argument. Passing pc.Camera is deprecated.");
        }
        // #endif
        // Get the camera component
        camera = camera.node.camera;
    }
    this.scene = scene;
    // 'arg' may be a Layer (pick only from it) or a RenderTarget (pick from all
    // layers rendering into it; null/undefined means the backbuffer).
    var sourceLayer = null;
    var sourceRt = null;
    if (arg instanceof Layer) {
        sourceLayer = arg;
    } else {
        sourceRt = arg;
    }
    // Setup picker rendering once
    if (!this.layer) {
        var pickColorId = device.scope.resolve('uColor');
        // Dedicated layer that renders each mesh instance with a unique ID color.
        this.layer = new Layer({
            name: "Picker",
            shaderPass: SHADER_PICK,
            opaqueSortMode: SORTMODE_NONE,
            // Lazily (re)creates the pick render target at the picker's resolution.
            onEnable: function () {
                if (this.renderTarget) return;
                var colorBuffer = new Texture(device, {
                    format: PIXELFORMAT_R8_G8_B8_A8,
                    width: self.width,
                    height: self.height
                });
                colorBuffer.name = 'pick';
                colorBuffer.minFilter = FILTER_NEAREST;
                colorBuffer.magFilter = FILTER_NEAREST;
                colorBuffer.addressU = ADDRESS_CLAMP_TO_EDGE;
                colorBuffer.addressV = ADDRESS_CLAMP_TO_EDGE;
                this.renderTarget = new RenderTarget(device, colorBuffer, {
                    depth: true
                });
            },
            onDisable: function () {
                if (!this.renderTarget) return;
                this.renderTarget._colorBuffer.destroy();
                this.renderTarget.destroy();
                this.renderTarget = null;
            },
            // Encodes the draw-call index into the RGB channels as a 24-bit ID;
            // getSelection decodes the same packing from the read-back pixels.
            onDrawCall: function (meshInstance, index) {
                self.pickColor[0] = ((index >> 16) & 0xff) / 255;
                self.pickColor[1] = ((index >> 8) & 0xff) / 255;
                self.pickColor[2] = (index & 0xff) / 255;
                pickColorId.setValue(self.pickColor);
                device.setBlending(false);
            }
        });
        this.layerComp = new LayerComposition();
        this.layerComp.pushOpaque(this.layer);
        this.meshInstances = this.layer.opaqueMeshInstances;
        // Forces a rebuild of the instance list on the first single-layer prepare.
        this._instancesVersion = -1;
    }
    // Collect pickable mesh instances
    var instanceList, instanceListLength, drawCall;
    if (!sourceLayer) {
        // No source layer given: gather from every enabled layer that renders
        // into sourceRt and contains the supplied camera.
        this.layer.clearMeshInstances();
        var layers = scene.layers.layerList;
        var subLayerEnabled = scene.layers.subLayerEnabled;
        var isTransparent = scene.layers.subLayerList;
        var layer;
        var layerCamId, transparent;
        for (i = 0; i < layers.length; i++) {
            if (layers[i].overrideClear && layers[i]._clearDepthBuffer) layers[i]._pickerCleared = false;
        }
        for (i = 0; i < layers.length; i++) {
            layer = layers[i];
            if (layer.renderTarget !== sourceRt || !layer.enabled || !subLayerEnabled[i]) continue;
            layerCamId = layer.cameras.indexOf(camera);
            if (layerCamId < 0) continue;
            // Replicate the layer's depth clear (once per layer) so picking
            // matches the on-screen occlusion.
            if (layer.overrideClear && layer._clearDepthBuffer && !layer._pickerCleared) {
                this.meshInstances.push(this.clearDepthCommand);
                layer._pickerCleared = true;
            }
            transparent = isTransparent[i];
            instanceList = transparent ? layer.instances.transparentMeshInstances : layer.instances.opaqueMeshInstances;
            instanceListLength = instanceList.length;
            for (j = 0; j < instanceListLength; j++) {
                drawCall = instanceList[j];
                if (drawCall.pick) {
                    this.meshInstances.push(drawCall);
                }
            }
        }
    } else {
        // Single-layer picking: rebuild only when the layer's contents changed.
        if (this._instancesVersion !== sourceLayer._version) {
            this.layer.clearMeshInstances();
            instanceList = sourceLayer.instances.opaqueMeshInstances;
            instanceListLength = instanceList.length;
            for (j = 0; j < instanceListLength; j++) {
                drawCall = instanceList[j];
                if (drawCall.pick) {
                    this.meshInstances.push(drawCall);
                }
            }
            instanceList = sourceLayer.instances.transparentMeshInstances;
            instanceListLength = instanceList.length;
            for (j = 0; j < instanceListLength; j++) {
                drawCall = instanceList[j];
                if (drawCall.pick) {
                    this.meshInstances.push(drawCall);
                }
            }
            this._instancesVersion = sourceLayer._version;
        }
    }
    // Setup picker camera if changed
    if (this.layer.cameras[0] !== camera) {
        this.layer.clearCameras();
        this.layer.addCamera(camera);
    }
    // save old camera state
    this.onLayerPreRender(this.layer, sourceLayer, sourceRt);
    // Render
    this.app.renderer.renderComposition(this.layerComp);
    // restore old camera state
    this.onLayerPostRender(this.layer);
};
// Saves the picking camera's state, then overrides it with the picker's clear
// options and a manual aspect ratio derived from the source render target.
Picker.prototype.onLayerPreRender = function (layer, sourceLayer, sourceRt) {
    // Recreate the pick render target if the requested resolution changed.
    if (this.width !== layer.renderTarget.width || this.height !== layer.renderTarget.height) {
        layer.onDisable();
        layer.onEnable();
    }
    var cameraComponent = layer.cameras[0];
    // Stash the state we are about to clobber; onLayerPostRender restores it.
    layer.oldClear = cameraComponent.camera._clearOptions;
    layer.oldAspectMode = cameraComponent.aspectRatioMode;
    layer.oldAspect = cameraComponent.aspectRatio;
    cameraComponent.camera._clearOptions = this.clearOptions;
    cameraComponent.aspectRatioMode = ASPECT_MANUAL;
    var target = sourceRt ? sourceRt : (sourceLayer ? sourceLayer.renderTarget : null);
    cameraComponent.aspectRatio = cameraComponent.calculateAspectRatio(target);
    this.app.renderer.updateCameraFrustum(cameraComponent.camera);
};
// Restores the camera state that onLayerPreRender saved on the layer.
Picker.prototype.onLayerPostRender = function (layer) {
    var cameraComponent = layer.cameras[0];
    cameraComponent.camera._clearOptions = layer.oldClear;
    cameraComponent.aspectRatioMode = layer.oldAspectMode;
    cameraComponent.aspectRatio = layer.oldAspect;
};
/**
* @function
* @name pc.Picker#resize
* @description Sets the resolution of the pick buffer. The pick buffer resolution does not need
* to match the resolution of the corresponding frame buffer use for general rendering of the
* 3D scene. However, the lower the resolution of the pick buffer, the less accurate the selection
* results returned by pc.Picker#getSelection. On the other hand, smaller pick buffers will
* yield greater performance, so there is a trade off.
* @param {number} width - The width of the pick buffer in pixels.
* @param {number} height - The height of the pick buffer in pixels.
*/
Picker.prototype.resize = function (width, height) {
    // Only records the requested resolution; the pick render target itself is
    // rebuilt lazily by onLayerPreRender on the next prepare() call.
    this.width = width;
    this.height = height;
};
// Read-only accessor exposing the picker layer's render target (the pick buffer).
Object.defineProperty(Picker.prototype, 'renderTarget', {
    get: function () {
        return this.layer.renderTarget;
    }
});
export { Picker };
|
<gh_stars>10-100
#pragma once
#include <cstdint>
#include <memory>
#include <map>
#include <string>
#include <vector>
#include "capstone/capstone.h"
#include "analyzers_code/FuncBaseCA.hpp"
class SymResolver;
struct Block;
struct Symbol;
// Per-function code analyzer (see FuncBaseCA for the driver contract) that
// inspects call instructions with capstone. Based on the member names it
// appears to track calls to FORTIFY_SOURCE-hardened functions versus their
// unhardened counterparts — NOTE(review): semantics inferred from naming,
// confirm against the implementation.
class FortifySourceCA : public FuncBaseCA {
public:
    // arch/mode select the capstone disassembly target; resolver maps
    // addresses to symbols; is_macho toggles Mach-O specific handling.
    FortifySourceCA(cs_arch arch, cs_mode mode, std::shared_ptr<SymResolver> resolver, bool is_macho = false);
    // Called per analyzed call site (instruction, containing block, callee symbol).
    int run(cs_insn insn, const Block *block, const Symbol *call_sym) override;
    // Aggregates the data collected by run() into the final result.
    int process_results() override;
private:
    // Scans a basic block, optionally stopping at stop_addr.
    bool check_block(const Block *block, uint64_t stop_addr = 0);
    // Checks an alternative fortification pattern at the given instruction.
    bool check_alt_fort(const Block *block, cs_insn insn);
    std::vector<std::string> m_fort_targets;            // names of interest
    std::map<std::string, uint64_t> m_fort_funcs;       // fortified callees -> count/addr (confirm)
    std::map<std::string, uint64_t> m_unfort_funcs;     // unfortified callees -> count/addr (confirm)
    bool m_is_macho;
};
|
from synapse.api.errors import SynapseError
class RequestTimedOutError(SynapseError):
    """Exception representing timeout of an outbound request.

    Reported to clients as a 504 Gateway Timeout.
    """

    def __init__(self):
        # SynapseError's constructor takes the HTTP status code first, then the
        # message; the previous call passed only the message, which would have
        # been bound to the code argument (and left msg missing).
        super().__init__(504, "Request timed out")
<reponame>stefb965/JRAW
/**
* Contains classes related to authorizing clients with OAuth2
*/
package net.dean.jraw.http.oauth;
|
<filename>com.dubture.symfony.ui/src/com/dubture/symfony/ui/search/SymfonySearchParticipant.java<gh_stars>10-100
/*******************************************************************************
* This file is part of the Symfony eclipse plugin.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
******************************************************************************/
package com.dubture.symfony.ui.search;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.dltk.core.IModelElement;
import org.eclipse.dltk.core.IType;
import org.eclipse.dltk.core.search.IDLTKSearchConstants;
import org.eclipse.dltk.ui.search.ElementQuerySpecification;
import org.eclipse.dltk.ui.search.IMatchPresentation;
import org.eclipse.dltk.ui.search.IQueryParticipant;
import org.eclipse.dltk.ui.search.ISearchRequestor;
import org.eclipse.dltk.ui.search.QuerySpecification;
import org.eclipse.php.internal.core.typeinference.PHPModelUtils;
import org.eclipse.search.core.text.TextSearchEngine;
import org.eclipse.search.core.text.TextSearchMatchAccess;
import org.eclipse.search.core.text.TextSearchRequestor;
import org.eclipse.search.core.text.TextSearchScope;
import org.eclipse.search.ui.text.Match;
import com.dubture.symfony.core.model.Bundle;
import com.dubture.symfony.core.model.SymfonyModelAccess;
/**
* @author <NAME> (zulus)
*/
/**
 * DLTK search participant that extends PHP type searches into Symfony
 * configuration files: it text-scans app/config and every bundle's
 * Resources/config folder for occurrences of the type's fully-qualified name
 * and reports them as matches.
 */
public class SymfonySearchParticipant implements IQueryParticipant {

    @Override
    public void search(final ISearchRequestor requestor, final QuerySpecification querySpecification,
            IProgressMonitor monitor) throws CoreException {
        // Only reference / all-occurrence searches are supported here.
        if (querySpecification.getLimitTo() != IDLTKSearchConstants.REFERENCES
                && querySpecification.getLimitTo() != IDLTKSearchConstants.ALL_OCCURRENCES)
            return;
        if (querySpecification instanceof ElementQuerySpecification) {
            IModelElement element = ((ElementQuerySpecification) querySpecification).getElement();
            if (element instanceof IType) {
                IType type = (IType) element;
                // Folders to scan: the project's app/config plus each bundle's
                // Resources/config directory.
                final List<IResource> potentialResources = new ArrayList<IResource>();
                potentialResources.add(type.getScriptProject().getProject().getFolder("app/config")); //$NON-NLS-1$
                List<Bundle> findBundles = SymfonyModelAccess.getDefault().findBundles(type.getScriptProject());
                for (Bundle bundle : findBundles) {
                    potentialResources.add(ResourcesPlugin.getWorkspace().getRoot()
                            .getFolder(bundle.getPath().append("Resources").append("config"))); //$NON-NLS-1$ //$NON-NLS-2$
                }
                // Scan every file in those folders (".*" file-name pattern, recursive).
                TextSearchScope searchScope = TextSearchScope.newSearchScope(
                        potentialResources.toArray(new IResource[potentialResources.size()]), Pattern.compile(".*"),
                        true);
                Pattern search = TextSearchEngine.createPattern(PHPModelUtils.getFullName(type), false, false);
                TextSearchRequestor collector = new TextSearchRequestor() {
                    public boolean acceptPatternMatch(TextSearchMatchAccess matchAccess) throws CoreException {
                        IFile file = matchAccess.getFile();
                        if (matchAccess.getMatchOffset() == 0) {
                            return true;
                        }
                        // Reject matches embedded in a longer identifier: the
                        // preceding character must be a delimiter.
                        char fileContentChar = matchAccess.getFileContentChar(matchAccess.getMatchOffset() - 1);
                        if (!(Character.isWhitespace(fileContentChar) || fileContentChar == '\''
                                || fileContentChar == '"' || fileContentChar == ':' || fileContentChar == '>')) {
                            return false;
                        }
                        // Same check for the character following the match (if any).
                        int end = matchAccess.getMatchOffset() + matchAccess.getMatchLength();
                        if (end < matchAccess.getFileContentLength()) {
                            fileContentChar = matchAccess.getFileContentChar(end);
                            if (!(Character.isWhitespace(fileContentChar) || fileContentChar == '\''
                                    || fileContentChar == '"' || fileContentChar == '<')) {
                                return false;
                            }
                        }
                        // Report only files enclosed by the query's search scope.
                        if (querySpecification.getScope().encloses(file.getFullPath().toString())) {
                            requestor.reportMatch(
                                    new Match(file, matchAccess.getMatchOffset(), matchAccess.getMatchLength()));
                        }
                        return true;
                    }
                };
                TextSearchEngine.create().search(searchScope, collector, search, null);
            }
        }
    }

    @Override
    public int estimateTicks(QuerySpecification specification) {
        // Flat cost estimate for progress reporting.
        return 100;
    }

    @Override
    public IMatchPresentation getUIParticipant() {
        // Default presentation is used for the reported matches.
        return null;
    }
}
|
docker build -t majest/slim-upload-example .
|
<reponame>alexandremagro/tspga
#ifndef TSPLIB_H_INCLUDED
#define TSPLIB_H_INCLUDED

/* A TSPLIB problem instance: header strings from the file, the instance size,
 * the pairwise cost matrix and the node records. */
typedef struct {
    char name[256], edge_weight_type[256], edge_weight_format[256];
    int size;                /* number of nodes */
    double **cost_matrix;    /* pairwise costs -- NOTE(review): presumed size x size; confirm in the reader */
    Point *points;           /* NOTE(review): Point is not declared in this header; an include or forward
                              * declaration appears to be missing -- confirm against the including sources */
} Map;

/* Releases the heap data owned by the map. */
void free_map(Map *map);
/* Parses a TSPLIB-format file into a Map. */
Map read_tsp_lib(char *file_name);

#endif
package pulse.io.readers;
import static pulse.properties.NumericProperties.def;
import static pulse.properties.NumericProperties.isValueSensible;
import static pulse.properties.NumericPropertyKeyword.TEST_TEMPERATURE;
import static pulse.properties.NumericPropertyKeyword.findAny;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.StringTokenizer;
import pulse.input.Metadata;
import pulse.properties.Property;
import pulse.ui.Messages;
import pulse.util.ImmutableDataEntry;
import pulse.util.InstanceDescriptor;
/**
* An {@code AbstractPopulator} capable of handling metafiles.
* <p>
* Metafiles are ASCII files storing various experimental parameters for
* different instances of {@code ExperimentalData}. The {@code Metadata (.met)}
* file should be formatted to include a header of arbitrary length, which
* defines global parameters, and a table where a series of metaproperties is
* defined for each laser shot.
* </p>
* <p>
* Metadata for each shot should be recorded during the experiment in a tab
* delimited ASCII format, with a {@code .met} file suffix. Constant data should
* be recorded in tab-separated pairs at the top of the file, such as
* {@code Sample_Name}, {@code Thickness} (of the sample, in mm),
* {@code Diameter} (of the sample, in mm), {@code Spot_Diameter} (diameter of
* laser spot, in mm), {@code TemporalShape} (e.g. {@code TrapezoidalPulse},
* {@code RectangularPulse}) and {@code Detector_Iris}. Two line breaks below, a
* tab-delimited table with headers for variables should contain variable data
* for each shot. These variables should include ID (which should relate to the
* final number of the file name for each shot), Test_Temperature (in deg. C),
* Pulse_Width (the time width of the laser pulse, in ms), {@code Laser_Energy}
* (the energy transmitted by the laser, in J), and Detector_Gain (gain of the
* detector). If any of the “constants” listed above are variable, then they
* should be included in the variable table, and vice versa.
* </p>
* The full list of keywords for the {@code .met} files are listed in the
* {@code NumericPropertyKeyword} enum.
*
* <p>
* An example content of a valid {@code .met} file is provided below.
* </p>
*
* <pre>
* <code>
* Thickness 2.034
* Diameter 9.88
* Spot_Diameter 10.0
*
* Test_Temperature Pulse_Width Spot_Diameter Laser_Energy Detector_Gain TemporalShape Detector_Iris
* 200 200 5 2 31.81 50 TrapezoidalPulse 1
* 201 196 5 2 31.81 100 TrapezoidalPulse 1
* 202 198 5 2 31.81 100 TrapezoidalPulse 1
* 203 199 5 2 31.81 50 TrapezoidalPulse 1
* 204 199 5 2 31.81 50 TrapezoidalPulse 1
* 205 199 5 2 31.81 50 TrapezoidalPulse 1
* 206 200 5 2 31.81 50 TrapezoidalPulse 1
* 207 200 5 2 31.81 50 TrapezoidalPulse 1
* 208 400 5 2 31.81 50 TrapezoidalPulse 1
* 209 400 5 2 31.81 20 TrapezoidalPulse 1
* 210 400 5 2 31.81 10 TrapezoidalPulse 1
* </code>
* </pre>
*
* @see pulse.properties.NumericPropertyKeyword
* @see pulse.problem.laser.PulseTemporalShape
*/
public class MetaFilePopulator implements AbstractPopulator<Metadata> {

    // Singleton instance; the class has no state besides it.
    private static MetaFilePopulator instance = new MetaFilePopulator();
    // Offset added to convert metafile temperatures (deg. C) to kelvin.
    private final static double TO_KELVIN = 273;

    private MetaFilePopulator() {
        // intentionally blank
    }

    /**
     * Gets the single instance of this class.
     *
     * @return a static instance of {@code MetaFilePopulator}.
     */
    public static MetaFilePopulator getInstance() {
        return instance;
    }

    /**
     * Reads the metafile line by line. Two-token lines are header "key value"
     * pairs applied directly; longer lines are either the variable table's
     * header row (starting with "ID"), which defines the column layout, or a
     * data row, which is applied only when its ID matches the metadata's
     * external ID.
     *
     * @throws IOException if the file cannot be read
     */
    @Override
    public void populate(File file, Metadata met) throws IOException {
        Objects.requireNonNull(file, Messages.getString("MetaFileReader.1")); //$NON-NLS-1$
        // Column index -> header name of the variable table.
        Map<Integer, String> metaFormat = new HashMap<>();
        metaFormat.put(0, "ID"); // id must always be the first entry in the current row
        List<String> tokens = new LinkedList<>();
        try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                tokens.clear();
                // Whitespace-tokenize the current line.
                for (StringTokenizer st = new StringTokenizer(line); st.hasMoreTokens();) {
                    tokens.add(st.nextToken());
                }
                int size = tokens.size();
                if (size == 2) {
                    // Global "key value" pair from the file header.
                    processPair(tokens, met);
                } else if (size > 2) {
                    if (tokens.get(0).equalsIgnoreCase(metaFormat.get(0))) {
                        // Table header row: remember the column names.
                        for (int i = 1; i < size; i++) {
                            metaFormat.put(i, tokens.get(i));
                        }
                    } else if (Integer.compare(Integer.valueOf(tokens.get(0)), met.getExternalID()) == 0) {
                        // Data row belonging to this metadata's shot ID.
                        processList(tokens, met, metaFormat);
                    }
                }
            }
        }
    }

    // Applies a single header "key value" pair to the metadata.
    private void processPair(List<String> tokens, Metadata met) {
        List<ImmutableDataEntry<String, String>> val = new ArrayList<>();
        var entry = new ImmutableDataEntry<>(tokens.get(0), tokens.get(1));
        val.add(entry);
        try {
            translate(val, met);
        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
            System.err.println("Error changing property in Metadata object. Details below.");
            e.printStackTrace();
        }
    }

    // Applies a table data row: pairs each cell (from index 1 on) with the
    // column name recorded in metaFormat, then translates the batch.
    private void processList(List<String> tokens, Metadata met, Map<Integer, String> metaFormat) {
        int size = tokens.size();
        List<ImmutableDataEntry<String, String>> values = new ArrayList<>(size);
        for (int i = 1; i < size; i++) {
            values.add(new ImmutableDataEntry<>(metaFormat.get(i), tokens.get(i)));
        }
        try {
            translate(values, met);
        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
            System.err.println("Error changing property in Metadata object. Details below.");
            e.printStackTrace();
        }
    }

    // Converts key/value string pairs into metadata properties. Keys known to
    // NumericPropertyKeyword become numeric properties (temperatures shifted
    // to kelvin, values divided by the property's dimension factor); all other
    // keys are offered to the metadata's generic properties.
    private void translate(List<ImmutableDataEntry<String, String>> data, Metadata met)
            throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        for (var dataEntry : data) {
            var optional = findAny(dataEntry.getKey());
            // numeric properties
            if (optional.isPresent()) {
                var key = optional.get();
                double value = Double.valueOf(dataEntry.getValue());
                if (key == TEST_TEMPERATURE) {
                    value += TO_KELVIN;
                }
                var proto = def(key);
                // Stored values are in display units; rescale to internal units.
                value /= proto.getDimensionFactor().doubleValue();
                if (isValueSensible(proto, value)) {
                    proto.setValue(value);
                    met.set(key, proto);
                }
            } // generic properties
            else {
                for (Property genericEntry : met.genericProperties()) {
                    // NOTE(review): the '||' lets any InstanceDescriptor property
                    // attempt an update regardless of the key -- confirm intended.
                    if (genericEntry instanceof InstanceDescriptor
                            || dataEntry.getKey().equalsIgnoreCase(genericEntry.getClass().getSimpleName())) {
                        if (genericEntry.attemptUpdate(dataEntry.getValue())) {
                            met.updateProperty(instance, genericEntry);
                        }
                    }
                }
            }
        }
    }

    /**
     * @return {@code .met}, an internal PULsE meta-file format.
     */
    @Override
    public String getSupportedExtension() {
        return Messages.getString("MetaFileReader.0"); //$NON-NLS-1$
    }
}
|
#!/bin/bash
# Start a go-ethereum node on the test network with the HTTP JSON-RPC server
# enabled on port 8545 (net/eth/personal/admin/web3/db APIs) and log
# verbosity at its maximum level (6).
geth --testnet --rpc --rpcapi "net,eth,personal,admin,web3,db" --rpcport 8545 --verbosity 6
|
from rest_framework import serializers
from .models import Product # Assuming the Product model is defined in a separate file
class CustomSerializer(serializers.ModelSerializer):
    """Product serializer with explicit validation on the writable fields."""

    # Field overrides tightening validation beyond the model-derived defaults.
    name = serializers.CharField(min_length=2, max_length=100)
    price = serializers.DecimalField(max_digits=10, decimal_places=2)
    description = serializers.CharField(required=False, allow_blank=True)

    class Meta:
        model = Product
        fields = ['id', 'name', 'price', 'description']
description = serializers.CharField(required=False, allow_blank=True) |
#!/bin/sh
# Compile the secp256k1 library for crystal lang.
# Source: https://github.com/syalon/secp256k1-zkp.git
# by syalon

# 1. Clone the source code into a clean working directory.
SOURCE_CLONE_DIR="secp256k1-zkp"
rm -rf $SOURCE_CLONE_DIR
git clone https://github.com/syalon/secp256k1-zkp.git $SOURCE_CLONE_DIR && cd $SOURCE_CLONE_DIR

# 2. Build with the standard autotools flow and install the shared library.
./autogen.sh
./configure
make
sudo make install # for dynamic linking

# 3. Done.
echo "compile done. target dir: $SOURCE_CLONE_DIR, lib dir: $SOURCE_CLONE_DIR/.libs"
|
#!/bin/bash
# Run a Tacotron2 inference-performance check and compare the measured
# items_per_sec curve against a stored FP32 baseline. Fails if results fall
# below the baseline (within the given eps/damping/sigma tolerances).
set -e
## uncomment to generate new baseline; will be created in qa/baselines/ ##
## python inference_perf.py -m Tacotron2 -bs=20 --input-text qa/text_padded.pt --create-benchmark
python inference_perf.py -m Tacotron2 -bs=1 --decoder-no-early-stopping --input-text qa/text_padded.pt
python qa/check_curves.py \
-b qa/tacotron2_fp32-infer-bs1.json \
Tacotron2_infer_BS1_FP32_DGX1_16GB_1GPU_single.json \
-g "iter" \
-k "items_per_sec" \
--eps 0.001 \
--damping 1 \
--sigma 12.0 \
--fail low
|
package com.limpoxe.fairy.core.android;
import android.view.LayoutInflater;
import android.view.View;
import com.limpoxe.fairy.util.RefInvoker;
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.Map;
/**
 * Reflection helper around the hidden internals of android.view.LayoutInflater:
 * reads/replaces its static view-constructor cache and exposes the hidden
 * setPrivateFactory method. Used by the plugin framework to keep host and
 * plugin custom views from colliding in the shared constructor cache.
 */
public class HackLayoutInflater {

    private static final String ClassName = "android.view.LayoutInflater";
    private static final String Field_sConstructorMap = "sConstructorMap";
    private static final String Method_setPrivateFactory = "setPrivateFactory";

    // Boot classloader, used to decide whether a View class is a framework class.
    private static ClassLoader systemClassloader;

    // The wrapped LayoutInflater instance.
    private Object instance;

    public HackLayoutInflater(LayoutInflater instance) {
        this.instance = instance;
    }

    /** Reads LayoutInflater's static view-constructor cache via reflection. */
    public static Map getConstructorMap() {
        return (Map) RefInvoker.getField(null, ClassName, Field_sConstructorMap);
    }

    /** Replaces LayoutInflater's static view-constructor cache via reflection. */
    public static void setConstructorMap(Map map) {
        RefInvoker.setField(null, ClassName, Field_sConstructorMap, map);
    }

    /** Invokes the hidden LayoutInflater#setPrivateFactory(Factory2) on the wrapped instance. */
    public void setPrivateFactory(Object factory) {
        RefInvoker.invokeMethod(instance, ClassName, Method_setPrivateFactory, new Class[]{LayoutInflater.Factory2.class}, new Object[]{factory});
    }

    /**
     * Replaces LayoutInflater's global static view-constructor cache with a
     * filtering map. The shared cache causes a problem when both the host app
     * and an independent plugin bundle the same support library:
     * <p>
     * 1. The plugin ships supportV7 with custom views such as
     *    android.support.v7.internal.widget.ActionBarOverlayLayout.
     * 2. The host ships supportV7 with identically named custom views.
     * 3. Opening a host supportV7 page makes LayoutInflater load those view
     *    constructors with the host classloader and cache them.
     * 4. Opening a plugin supportV7 page then hits the cached HOST constructors
     *    first, so the host's classes shadow the plugin's identically named
     *    custom views.
     * <p>
     * The replacement map avoids this by refusing to cache non-framework views.
     */
    public static void installPluginCustomViewConstructorCache() {
        Map cache = getConstructorMap();
        if (cache != null) {
            ConstructorHashMap<String, Constructor<? extends View>> newCacheMap = new ConstructorHashMap<String, Constructor<? extends View>>();
            newCacheMap.putAll(cache);
            setConstructorMap(newCacheMap);
        }
    }

    /**
     * HashMap that only caches constructors of framework (boot-classloader)
     * view classes. Custom views — whether from the host or a plugin — are
     * stored as null so LayoutInflater re-resolves them with the caller's
     * classloader every time. Apps with many custom views pay a small
     * re-resolution cost for this correctness.
     */
    public static class ConstructorHashMap<K, V> extends HashMap<K, V> {
        @Override
        public V put(K key, V value) {
            if (systemClassloader == null) {
                systemClassloader = HackLayoutInflater.class.getClassLoader().getParent();
            }
            Constructor<? extends View> constructor = (Constructor<? extends View>) value;
            if (constructor.getDeclaringClass().getClassLoader() == systemClassloader) {
                return super.put(key, value);
            } else {
                return super.put(key, null);
            }
        }
    }
}
|
type FileHandle = string;
class FileManager implements FileManagerInterface {
    // Maps each issued handle to the name of the file it was opened for.
    private openFiles: Map<FileHandle, string>;

    constructor() {
        this.openFiles = new Map();
    }

    // Issues a fresh handle (timestamp + random suffix) and records the file name.
    openFile(fileName: string): FileHandle {
        const handle: FileHandle = `file_${Date.now()}_${Math.random()}`;
        this.openFiles.set(handle, fileName);
        return handle;
    }

    // Forgets the handle; unknown handles are reported but do not throw.
    closeFile(fileHandle: FileHandle): void {
        if (!this.openFiles.has(fileHandle)) {
            console.error(`Invalid file handle: ${fileHandle}`);
            return;
        }
        this.openFiles.delete(fileHandle);
    }
}
// Test the FileManager class: open two files, close one valid and one
// invalid handle (the latter should log an error, not throw).
const fileManager = new FileManager();
const file1 = fileManager.openFile("example.txt");
const file2 = fileManager.openFile("data.csv");
console.log(file1); // Output: file_<timestamp>_<random>
console.log(file2); // Output: file_<timestamp>_<random>
fileManager.closeFile(file1);
fileManager.closeFile("invalid_handle"); // Output: Invalid file handle: invalid_handle
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.graph;
import java.util.Arrays ;
import java.util.List ;
import org.apache.jena.atlas.iterator.Iter ;
import org.apache.jena.atlas.junit.BaseTest ;
import org.apache.jena.atlas.lib.StrUtils ;
import org.apache.jena.graph.Graph ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.NodeFactory ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.sparql.graph.GraphUnionRead ;
import org.apache.jena.sparql.sse.Item ;
import org.apache.jena.sparql.sse.SSE ;
import org.apache.jena.sparql.sse.builders.BuilderGraph ;
import org.junit.Test ;
/**
 * Tests for {@code GraphUnionRead}, a read-only union view over a chosen set
 * of named graphs of a dataset. The fixture dataset has a default graph and
 * three named graphs (g1, g2, g3); each named graph holds one distinct
 * literal triple plus one triple common to all three, so the expected counts
 * below reflect suppression of the duplicated common triple.
 */
public class TestGraphUnionRead extends BaseTest
{
    // Dataset in SSE syntax; parsed once into 'dsg' in the static initializer.
    private static String dataStr = StrUtils.strjoinNL(
        "(dataset" ,
        " (graph" ,
        " (triple <http://example/s> <http://example/p> 'dft')" ,
        " (triple <http://example/s> <http://example/p> <http://example/o>)" ,
        " )" ,
        " (graph <http://example/g1>",
        " (triple <http://example/s> <http://example/p> 'g1')",
        " (triple <http://example/s> <http://example/p> <http://example/o>)",
        " )",
        " (graph <http://example/g2>",
        " (triple <http://example/s> <http://example/p> 'g2')",
        " (triple <http://example/s> <http://example/p> <http://example/o>)",
        " )",
        " (graph <http://example/g3>",
        " (triple <http://example/s> <http://example/p> 'g3')",
        " (triple <http://example/s> <http://example/p> <http://example/o>)",
        " ))") ;

    private static DatasetGraph dsg = null ;
    static {
        Item item = SSE.parse(dataStr) ;
        dsg = BuilderGraph.buildDataset(item) ;
    }

    // Graph names used by the tests; gn9 does not exist in the dataset.
    private static Node gn1 = SSE.parseNode("<http://example/g1>") ;
    private static Node gn2 = SSE.parseNode("<http://example/g2>") ;
    private static Node gn3 = SSE.parseNode("<http://example/g3>") ;
    private static Node gn9 = SSE.parseNode("<http://example/g9>") ;

    // Union of g1+g2: 2 distinct literals + 1 shared triple = 3.
    @Test public void gr_union_01()
    {
        List<Node> gnodes = list(gn1, gn2) ;
        Graph g = new GraphUnionRead(dsg, gnodes) ;
        long x = Iter.count(g.find(null, null, null)) ;
        assertEquals(3, x) ;
    }

    // Same union, restricted to subject <s>: still all 3 triples.
    @Test public void gr_union_02()
    {
        List<Node> gnodes = list(gn1, gn2) ;
        Graph g = new GraphUnionRead(dsg, gnodes) ;
        Node s = NodeFactory.createURI("http://example/s") ;
        long x = Iter.count(g.find(s, null, null)) ;
        assertEquals(3, x) ;
    }

    // Unknown graph gn9 in the union contributes nothing; 'g2' occurs once.
    @Test public void gr_union_03()
    {
        List<Node> gnodes = list(gn1, gn2, gn9) ;
        Graph g = new GraphUnionRead(dsg, gnodes) ;
        Node o = NodeFactory.createLiteral("g2") ;
        long x = Iter.count(g.find(null, null, o)) ;
        assertEquals(1, x) ;
    }

    // Union over only a non-existent graph is empty.
    @Test public void gr_union_04()
    {
        List<Node> gnodes = list(gn9) ;
        Graph g = new GraphUnionRead(dsg, gnodes) ;
        long x = Iter.count(g.find(null, null, null)) ;
        assertEquals(0, x) ;
    }

    // Union over an empty graph list is empty.
    @Test public void gr_union_05()
    {
        List<Node> gnodes = list() ;
        Graph g = new GraphUnionRead(dsg, gnodes) ;
        long x = Iter.count(g.find(null, null, null)) ;
        assertEquals(0, x) ;
    }

    // Listing the same graph twice must not duplicate its 2 triples.
    @Test public void gr_union_06()
    {
        List<Node> gnodes = list(gn1, gn1) ;
        Graph g = new GraphUnionRead(dsg, gnodes) ;
        long x = Iter.count(g.find(null, null, null)) ;
        assertEquals(2, x) ;
    }

    // Varargs helper for building graph-name lists.
    static <T> List<T> list(@SuppressWarnings("unchecked") T...x)
    {
        return Arrays.asList(x) ;
    }
}
|
<reponame>andrewliebchen/reacticons-too
'use strict';
var React = require('react/addons');
var IconMixin = require('../icon_mixin');
// Heart icon component. IconMixin supplies the SVG wrapper and calls
// renderGraphic with an optional variant: no type -> outline heart,
// 'full' -> filled heart, 'half' -> half-filled heart.
module.exports = React.createClass({
mixins: [IconMixin],
renderGraphic: function(type) {
return (
<g>
{!type ? <path d="M 44.463,23.787L 24.00,45.00L 3.537,23.787c-4.686-4.686-4.686-12.285,0.00-16.971 c 4.686-4.686, 12.285-4.686, 16.971,0.00L 24.00,11.061l 3.492-4.242c 4.686-4.686, 12.285-4.686, 16.971,0.00C 49.149,11.502, 49.149,19.101, 44.463,23.787z M 31.734,11.061L 24.00,19.545L 16.266,11.061c-2.343-2.343-6.144-2.343-8.487,0.00c-2.34,2.343-2.34,6.141,0.00,8.484L 24.00,36.516l 16.221-16.971 c 2.34-2.343, 2.34-6.141,0.00-8.484C 37.878,8.715, 34.077,8.715, 31.734,11.061z" /> : null}
{type === 'full' ? <path d="M 44.463,23.787L 24.00,45.00L 3.537,23.787c-4.686-4.686-4.686-12.285,0.00-16.971 c 4.686-4.686, 12.285-4.686, 16.971,0.00L 24.00,11.061l 3.492-4.242c 4.686-4.686, 12.285-4.686, 16.971,0.00C 49.149,11.502, 49.149,19.101, 44.463,23.787z" /> : null}
{type === 'half' ? <path d="M 44.463,23.787L 24.00,45.00L 3.537,23.787c-4.686-4.686-4.686-12.285,0.00-16.971 c 4.686-4.686, 12.285-4.686, 16.971,0.00L 24.00,11.061l 3.492-4.242c 4.686-4.686, 12.285-4.686, 16.971,0.00C 49.149,11.502, 49.149,19.101, 44.463,23.787z M 24.00,36.516l 16.221-16.971c 2.34-2.343, 2.34-6.141,0.00-8.484c-2.343-2.343-6.144-2.343-8.487,0.00L 24.00,19.545L24.00,36.516 z" /> : null}
</g>
);
}
});
|
<reponame>NortonHua/vue-cms-miniprogram
const activityRouter = {
route: null,
name: null,
title: '活动管理',
type: 'folder',
icon: 'iconfont icon-huiyuanguanli',
filePath: 'view/activity/',
order: null,
inNav: true,
children: [
{
route: '/activity/list',
name: null,
title: '活动列表',
type: 'view', // 取 route 为默认加载页
icon: 'iconfont icon-huiyuanguanli',
filePath: 'view/activity/activity-list.vue',
inNav: true,
},
{
route: '/activity/create',
name: null,
title: '新建活动',
type: 'view', // 取 route 为默认加载页
icon: null,
filePath: 'view/activity/activity-create.vue',
inNav: true,
},
],
}
export default activityRouter
|
// Action creator for signing the current user out.
export const logout = () => {
    return { type: "LOG_OUT" };
};
// Auth reducer: LOG_OUT clears the session flags; every other action returns
// the previous state unchanged (same reference).
export const reducer = (prevState, action) => {
    switch (action.type) {
        case "LOG_OUT":
            return { ...prevState, logged: false, token: null };
        default:
            return prevState;
    }
};
|
/**
 * Demo: parses a fixed JSON document and prints its fields.
 * NOTE(review): {@code JSONObject} (org.json) is used without a visible
 * import; this file will not compile unless org.json is on the classpath and
 * imported — confirm the build configuration.
 */
public class ExtractData {
    public static void main(String[] args) {
        String jsonString =
            "{\"name\":\"bob\",\"age\":25,\"location\":\"san francisco\"}";
        // Create a JSON Object from the JSON string
        JSONObject jsonObject = new JSONObject(jsonString);
        // Get the values from the JSONObject
        String name = jsonObject.getString("name");
        int age = jsonObject.getInt("age");
        String location = jsonObject.getString("location");
        // Print the values
        System.out.println("Name: " + name);
        System.out.println("Age: " + age);
        System.out.println("Location: " + location);
    }
}
import asyncio
from multiprocessing import Queue
import pytest
from liualgotrader.common.types import QueueMapper, WSEventType
from liualgotrader.data.alpaca import AlpacaStream
alpaca_stream: AlpacaStream
queues: QueueMapper
@pytest.fixture
def event_loop():
    # Overrides pytest-asyncio's default loop fixture: builds the module-level
    # AlpacaStream and its QueueMapper, yields the loop the async tests run on,
    # then closes the loop when the test completes.
    global alpaca_stream
    global queues
    loop = asyncio.get_event_loop()
    queues = QueueMapper()
    alpaca_stream = AlpacaStream(queues)
    yield loop
    loop.close()
@pytest.mark.asyncio
@pytest.mark.devtest
async def test_apple_sec_agg():
    """Live smoke test: subscribe to minute-aggregate and trade events for a
    few symbols on the Alpaca stream, listen for a minute, then close.

    Requires the event_loop fixture above to have created alpaca_stream/queues.
    """
    # Fixed: the original declared `global polygon_stream`, a name that does
    # not exist in this module (copy-paste leftover); the stream in use here
    # is the module-level alpaca_stream.
    global alpaca_stream
    await alpaca_stream.run()
    print("going to subscribe")
    queues["JNUG"] = Queue()
    queues["GLD"] = Queue()
    queues["AAPL"] = Queue()
    status = await alpaca_stream.subscribe(
        ["JNUG", "GLD", "AAPL"], [WSEventType.MIN_AGG, WSEventType.TRADE]
    )
    print(f"subscribe result: {status}")
    if not status:
        raise AssertionError(f"Failed in alpaca_stream.subscribe w/ {status}")
    await asyncio.sleep(1 * 60)
    await alpaca_stream.close()
    return True
|
/*****************************************************************************
*
* Copyright (c) 2000 - 2006, The Regents of the University of California
* Produced at the Lawrence Livermore National Laboratory
* LLNL-CODE-400124
* All rights reserved.
*
* This file is part of VisIt. For details, see https://visit.llnl.gov/. The
* full copyright notice is contained in the file COPYRIGHT located at the root
* of the VisIt distribution or at http://www.llnl.gov/visit/copyright.html.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the disclaimer below.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the disclaimer (as noted below) in the
* documentation and/or other materials provided with the distribution.
* - Neither the name of the LLNS/LLNL nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL LAWRENCE LIVERMORE NATIONAL SECURITY,
* LLC, THE U.S. DEPARTMENT OF ENERGY OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
*****************************************************************************/
// ************************************************************************* //
// avtDyna3DFileFormat.h //
// ************************************************************************* //
#ifndef AVT_DYNA3D_FILE_FORMAT_H
#define AVT_DYNA3D_FILE_FORMAT_H
#include <visitstream.h>
#include <avtSTSDFileFormat.h>
#include <string>
#include <vector>
class vtkFloatArray;
// ****************************************************************************
// Class: avtDyna3DFileFormat
//
// Purpose:
// Reads in Dyna3D files as a plugin to VisIt.
//
// Programmer: <NAME>
// Creation: Mon Nov 27 14:00:02 PST 2006
//
// Modifications:
// <NAME>, Fri Aug 1 11:41:18 PDT 2008
// Added support for material strength.
//
// <NAME>, Wed Mar 11 16:18:45 PDT 2009
// I changed how materials are read.
//
// ****************************************************************************
// Single-timestep, single-domain (STSD) reader that parses Dyna3D text
// input decks into a VTK dataset plus material/velocity data.
class avtDyna3DFileFormat : public avtSTSDFileFormat
{
  public:
                       avtDyna3DFileFormat(const char *filename);
    virtual           ~avtDyna3DFileFormat();

    //
    // This is used to return unconvention data -- ranging from material
    // information to information about block connectivity.
    //
    virtual void      *GetAuxiliaryData(const char *var, const char *type,
                                        void *args, DestructorFunction &);

    //
    // These are used to declare what the current time and cycle are for the
    // file.  These should only be defined if the file format knows what the
    // time and/or cycle is.
    //
    // virtual bool      ReturnsValidCycle() const { return true; };
    // virtual int       GetCycle(void);
    // virtual bool      ReturnsValidTime() const { return true; };
    // virtual double    GetTime(void);
    //

    virtual const char    *GetType(void)   { return "Dyna3D input data"; };
    virtual void           FreeUpResources(void);
    virtual void           ActivateTimestep(void);

    // Plugin entry points used by VisIt to fetch the mesh and its variables.
    virtual vtkDataSet    *GetMesh(const char *);
    virtual vtkDataArray  *GetVar(const char *);
    virtual vtkDataArray  *GetVectorVar(const char *);

  protected:
    // Contents of control card 2: global counts/sizes from the deck header.
    typedef struct
    {
        int nMaterials;
        int nPoints;
        int nSolidHexes;
        int nBeamElements;
        int nShellElements4;
        int nShellElements8;
        int nInterfaceSegments;
        float interfaceInterval;
        float shellTimestep;
    } Card2_t;

    // Aggregate of all parsed control cards (currently only card 2 is kept).
    typedef struct
    {
        Card2_t card2;
    } ControlCards_t;

    // One material definition: number, name, density, and strength.
    struct MaterialCard_t
    {
        MaterialCard_t();

        int         materialNumber;
        std::string materialName;
        double      density;
        double      strength;
    };
    typedef std::vector<MaterialCard_t> MaterialCardVector;

    // Parsing helpers for the card-based Dyna3D text format
    // (implementations live in the corresponding .C file).
    void SkipComments(ifstream &ifile, const char *sectionName,
                      bool &, bool &);
    bool SkipToSection(ifstream &ifile, const char *section);
    void GetLine(ifstream &ifile);
    bool ReadControlCards(ifstream &ifile);
    bool ReadControlCard2(ifstream &);
    void ReadControlCard3(ifstream &);
    void ReadControlCard4(ifstream &);
    void ReadControlCard5(ifstream &);
    void ReadControlCard6(ifstream &);
    void ReadControlCard7(ifstream &);
    void ReadControlCard8(ifstream &);
    void ReadControlCard9(ifstream &);
    void ReadMaterialCards(ifstream &);
    void ReadOneMaterialCard(ifstream &, MaterialCard_t &);
    bool ReadFile(const char *, int nLines);

    // DATA MEMBERS
    vtkDataSet         *meshDS;         // mesh built from the deck (see GetMesh)
    ControlCards_t      cards;          // parsed control cards
    MaterialCardVector  materialCards;  // parsed material definitions
    int                *matNumbers;     // per-element material numbers -- TODO confirm indexing
    char               *line;           // scratch buffer used by GetLine
    vtkFloatArray      *velocity;       // nodal velocity vectors, if present

    virtual void        PopulateDatabaseMetaData(avtDatabaseMetaData *);
};
#endif
|
<filename>deno/methods/inspect.ts
import type { ForEachFn } from "../types/functions/mod.ts";
import { _curry } from "../lib/utils/mod.ts";
/**
 * Runs the side-effecting `fn` on each element of `iter` and yields the
 * element through unchanged (akin to Rust's `Iterator::inspect`).
 */
async function* _inspect_impl_fn<T>(
  fn: ForEachFn<T>,
  iter: AsyncIterable<T>,
): AsyncIterable<T> {
  for await (const item of iter) {
    await fn(item);
    yield item;
  }
}
/**
 * Curried signature of `inspect`: callable either with both arguments or
 * with only `fn`, returning a function awaiting the iterable.
 */
export interface Inspect {
  <T>(fn: ForEachFn<T>, iter: AsyncIterable<T>): AsyncIterable<T>;
  <T>(fn: ForEachFn<T>): (iter: AsyncIterable<T>) => AsyncIterable<T>;
}

// Public, curried entry point backed by the implementation above.
export const inspect: Inspect = _curry(_inspect_impl_fn);
|
#!/bin/sh
# SLURM batch script: train AttnGAN on the flowers dataset using 8 GPUs.
#SBATCH -N 1 # nodes requested
#SBATCH -n 1 # tasks requested
#SBATCH --partition=Standard
#SBATCH --gres=gpu:8
#SBATCH --mem=72000 # memory in Mb
#SBATCH --time=0-08:00:00

# Point the toolchain at the cluster's CUDA/cuDNN installations.
export CUDA_HOME=/opt/cuda-9.0.176.1/
export CUDNN_HOME=/opt/cuDNN-7.0/
export STUDENT_ID=s1786991
export LD_LIBRARY_PATH=${CUDNN_HOME}/lib64:${CUDA_HOME}/lib64:$LD_LIBRARY_PATH
export LIBRARY_PATH=${CUDNN_HOME}/lib64:$LIBRARY_PATH
export CPATH=${CUDNN_HOME}/include:$CPATH
export PATH=${CUDA_HOME}/bin:${PATH}
# NOTE(review): Python reads PYTHONPATH, not PYTHON_PATH, and assigning it
# $PATH looks unintentional — confirm whether this line is needed at all.
export PYTHON_PATH=$PATH

# Use node-local scratch space for temporary files and the dataset copy.
mkdir -p /disk/scratch/${STUDENT_ID}
export TMPDIR=/disk/scratch/${STUDENT_ID}/
export TMP=/disk/scratch/${STUDENT_ID}/
mkdir -p ${TMP}/datasets/
export DATASET_DIR=${TMP}/datasets/

# Activate the relevant virtual environment:
source /home/${STUDENT_ID}/anaconda3/bin/activate I2T2I
cd ..
python train.py --exp_name 'AttnGAN-Flowers' --n_gpu 8 --dataset_name 'flowers' --batch_size 16
|
#!/bin/bash
#####################################################
# DSI-CSA
# Apache Httpd evaluation script V1.0
# MODULE 6
# Date: 29/05/2022
# CSA: Maribel Hernandez Gtz
####################################################
# Fixed: the script uses bash-only features (declare, the `function`
# keyword, (( )) arithmetic), so the shebang must be bash, not sh.

# ANSI color escape strings (interpreted later by `echo -e`).
declare RED='\e[1;31m'
declare WHITE='\e[1;0m'
declare GREEN='\e[1;32m'
declare YELLOW='\e[1;33m'
#Modulo 6
function Mod6_1(){
    # CIS 6.1: LogLevel must be 'info' (or core:info) and ErrorLog must be
    # configured in the Apache config file referenced by the global $dirConf.
    echo -e "\n\n\n 6.1 Ensure the Error Log Filename and Severity Level Are Configured Correctly"
    echo "======================================================================================="
    ((++total))
    # Second field of any LogLevel line (case-insensitive).
    loglevel=$(grep -i \^LogLevel "$dirConf" | awk '{print $2}')
    loglevelcore=$(grep -i \^LogLevel.*core:info "$dirConf")
    errorlog=$(grep -i \^ErrorLog "$dirConf")
    local resultado
    # we captured output of the subshell, let's interpret it
    if ([ "$loglevel" == "info" ] || [ "$loglevelcore" != "" ]) && [ "$errorlog" != "" ]; then
        # echo "pass"
        echo -e " -------------------------------------------------------------------------------------------------------------------------${GREEN} Cumple ${WHITE}"
        resultado="Cumple"
        ((++pass))
    else
        # echo "fallo"
        echo -e " ------------------------------------------------------- -----------------------------------------------------------------${RED} No Cumple ${WHITE}"
        resultado="No Cumple"
        ((++fail))
    fi
    # JSON -------------------
    # NOTE(review): "$variable" is never assigned anywhere in this script, so
    # the 'evidencia' field is always empty — confirm what evidence should go here.
    # NOTE(review): the id= text is a long description rather than the control
    # id "6.1 ..." — looks like pasted rationale text; verify intended content.
    sal_61=$(jo id="The LogLevel directive is used to configure the severity level for the error logs. While the ErrorLog directive configures the error log file name. The log level values are the standard syslog levels of emerg, alert, crit, error, warn, notice, info and debug. The recommended level is notice for most modules, so that all errors from the emerg level through notice level will be logged. The recommended setting for the core module is info so that any not found requests will be included in the error logs." descripcion="The log_config module provides for flexible logging of client requests, and provides for the configuration of the information in each log" como_cumplirlo="If the module is correctly enabled, the output will include the module name and whether it is loaded statically or as a shared module" evidencia="$variable" estado="$resultado")
    #echo -e "$sal_61"
}
function Mod6_2(){
    # CIS 6.2: verify ErrorLog is routed to the syslog local1 facility.
    echo -e "\n\n\n 6.2 Ensure a Syslog Facility Is Configured for Error Logging"
    echo "======================================================================="
    ((++total))
    # NOTE(review): plain grep uses BRE, which does not interpret \s — confirm
    # this pattern actually matches ErrorLog "syslog:local1" lines as intended.
    errorlog=$(grep -i \^\s*ErrorLog.*syslog:local1\" "$dirConf")
    local resultado
    # we captured output of the subshell, let's interpret it
    if [ "$errorlog" != "" ]; then
        # echo "pass"
        echo -e " -------------------------------------------------------------------------------------------------------------------------${GREEN} Cumple ${WHITE}"
        resultado="Cumple"
        ((++pass))
    else
        # echo "fallo"
        echo -e " ------------------------------------------------------- -----------------------------------------------------------------${RED} No Cumple ${WHITE}"
        resultado="No Cumple"
        ((++fail))
    fi
    # JSON -------------------
    # NOTE(review): id/descripcion/como_cumplirlo are copy-pasted from check
    # 2.2 and "$variable" is never set (evidencia always empty) — needs fixing.
    sal_62=$(jo id="2.2 Ensure the Log Config Module Is Enabled" descripcion="The log_config module provides for flexible logging of client requests, and provides for the configuration of the information in each log" como_cumplirlo="If the module is correctly enabled, the output will include the module name and whether it is loaded statically or as a shared module" evidencia="$variable" estado="$resultado")
    #echo -e "$sal_62"
}
function Mod6_3(){
    # CIS 6.3: verify the 'combined' LogFormat is defined and used by a
    # CustomLog directive in $dirConf.
    echo -e "\n\n\n 6.3 Ensure the Server Access Log Is Configured Correctly"
    echo "==================================================================="
    ((++total))
    # Matches: LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
    logformat=$(grep -i "^\s*LogFormat\s\"\(%[h,l,u,t]\s\)\{4\}\\\\\"\%r\\\\\"\s\%>s\s\%b\s\\\\\"\%{Referer}i\\\\\"\s\\\\\"\%{User-Agent}i\\\\\"\"\scombined" "$dirConf")
    customlog=$(grep -i \^\\s*CustomLog.*combined "$dirConf")
    local resultado
    # we captured output of the subshell, let's interpret it
    if [ "$logformat" != "" ] && [ "$customlog" != "" ]; then
        # echo "pass"
        echo -e " -------------------------------------------------------------------------------------------------------------------------${GREEN} Cumple ${WHITE}"
        resultado="Cumple"
        ((++pass))
    else
        # echo "fallo"
        echo -e " ------------------------------------------------------- -----------------------------------------------------------------${RED} No Cumple ${WHITE}"
        resultado="No Cumple"
        ((++fail))
    fi
    # JSON -------------------
    # NOTE(review): id/descripcion copy-pasted from check 2.2; "$variable"
    # is never set — evidencia is always empty.
    sal_63=$(jo id="2.2 Ensure the Log Config Module Is Enabled" descripcion="The log_config module provides for flexible logging of client requests, and provides for the configuration of the information in each log" como_cumplirlo="If the module is correctly enabled, the output will include the module name and whether it is loaded statically or as a shared module" evidencia="$variable" estado="$resultado")
    #echo -e "$sal_63"
}
function Mod6_4(){
    # CIS 6.4: verify logrotate handles httpd logs (HUP reload hook present),
    # rotation is weekly, and at least 13 rotations (~3 months) are kept.
    echo -e "\n\n\n 6.4 Ensure Log Storage and Rotation Is Configured Correctly"
    echo "======================================================================"
    ((++total))
    logrotate=$(grep -i "^\s*/bin/kill -HUP 'cat /var/run/httpd.pid 2>/dev/null' 2> /dev/null || true" /etc/logrotate.d/httpd)
    weekly=$(grep -i ^weekly /etc/logrotate.conf)
    rotate=$(grep -i ^rotate /etc/logrotate.conf | awk '{print $2}')
    local resultado
    # we captured output of the subshell, let's interpret it
    # NOTE(review): if no 'rotate' line exists, [ $rotate -ge 13 ] errors on
    # an empty operand — consider defaulting rotate=0.
    if [ "$logrotate" != "" ] && [ "$weekly" != "" ] && [ $rotate -ge 13 ]; then
        # echo "pass"
        echo -e " -------------------------------------------------------------------------------------------------------------------------${GREEN} Cumple ${WHITE}"
        resultado="Cumple"
        ((++pass))
    else
        # echo "fallo"
        echo -e " ------------------------------------------------------- -----------------------------------------------------------------${RED} No Cumple ${WHITE}"
        resultado="No Cumple"
        ((++fail))
    fi
    # JSON -------------------
    # NOTE(review): id/descripcion copy-pasted from check 2.2; "$variable"
    # is never set — evidencia is always empty.
    sal_64=$(jo id="2.2 Ensure the Log Config Module Is Enabled" descripcion="The log_config module provides for flexible logging of client requests, and provides for the configuration of the information in each log" como_cumplirlo="If the module is correctly enabled, the output will include the module name and whether it is loaded statically or as a shared module" evidencia="$variable" estado="$resultado")
    #echo -e "$sal_64"
}
function Mod6_6(){
    # CIS 6.6: verify the ModSecurity module (security2_module) is loaded.
    # Requires the `httpd` binary to be on PATH.
    echo -e "\n\n\n 6.6 Ensure ModSecurity Is Installed and Enabled"
    echo "=========================================================="
    ((++total))
    output=$(httpd -M | grep -i security2_module)
    #echo $output
    local resultado
    # we captured output of the subshell, let's interpret it
    if [ "$output" != "" ] ; then
        # echo "pass"
        echo -e " -------------------------------------------------------------------------------------------------------------------------${GREEN} Cumple ${WHITE}"
        resultado="Cumple"
        ((++pass))
    else
        # echo "fallo"
        echo -e " ------------------------------------------------------- -----------------------------------------------------------------${RED} No Cumple ${WHITE}"
        resultado="No Cumple"
        ((++fail))
    fi
    # JSON -------------------
    # NOTE(review): id/descripcion copy-pasted from check 2.2; "$variable"
    # is never set — evidencia is always empty.
    sal_66=$(jo id="2.2 Ensure the Log Config Module Is Enabled" descripcion="The log_config module provides for flexible logging of client requests, and provides for the configuration of the information in each log" como_cumplirlo="If the module is correctly enabled, the output will include the module name and whether it is loaded statically or as a shared module" evidencia="$variable" estado="$resultado")
    #echo -e "$sal_66"
}
function Mod6_7(){
    # CIS 6.7: check the OWASP ModSecurity Core Rule Set is installed
    # (>325 SecRule lines) with sane anomaly thresholds.
    # Requires the global $APACHE_PREFIX to be set.
    echo -e "\n\n\n 6.7 Ensure the OWASP ModSecurity Core Rule Set Is Installed and Enabled"
    echo "=============================================================================="
    ((++total))
    RULE_DIR="$APACHE_PREFIX"/modsecurity.d/owasp-modsecurity-crs
    rulecount=$(find $RULE_DIR -name '*.conf' | xargs grep -i '^SecRule ' | wc -l)
    inanomaly=$(find $RULE_DIR -name '*.conf' | xargs egrep -v '^\s*#' | grep -i "setvar:'tx.inbound_anomaly_score_threshold" | cut -d"=" -f2 | sed "s/'\"//g")
    outanomaly=$(find $RULE_DIR -name '*.conf' | xargs egrep -v '^\s*#' | grep -i "setvar:'tx.outbound_anomaly_score_threshold" | cut -d"=" -f2 | sed "s/'\"//g")
    # NOTE(review): this line greps the same outbound threshold as the one
    # above — for a paranoia-level check it presumably should look for
    # tx.paranoia_level instead; confirm against the CIS benchmark.
    paranoia=$(find $RULE_DIR -name '*.conf' | xargs egrep -v '^\s*#' | grep -i "setvar:'tx.outbound_anomaly_score_threshold" | cut -d"=" -f2 | sed "s/'\"//g")
    local resultado
    # we captured output of the subshell, let's interpret it
    if [ $rulecount -gt 325 ] && [ $inanomaly -le 5 ] && [ $outanomaly -le 4 ] && [ $paranoia -ge 1 ]; then
        # echo "pass"
        echo -e " -------------------------------------------------------------------------------------------------------------------------${GREEN} Cumple ${WHITE}"
        resultado="Cumple"
        ((++pass))
    else
        # echo "fallo"
        echo -e " ------------------------------------------------------- -----------------------------------------------------------------${RED} No Cumple ${WHITE}"
        resultado="No Cumple"
        ((++fail))
    fi
    # JSON -------------------
    # NOTE(review): id/descripcion copy-pasted from check 2.2; "$variable"
    # is never set — evidencia is always empty.
    sal_67=$(jo id="2.2 Ensure the Log Config Module Is Enabled" descripcion="The log_config module provides for flexible logging of client requests, and provides for the configuration of the information in each log" como_cumplirlo="If the module is correctly enabled, the output will include the module name and whether it is loaded statically or as a shared module" evidencia="$variable" estado="$resultado")
    #echo -e "$sal_67"
}
function mod6(){
    # MODULO 6 driver
    #++++++++++
    # NOTE(review): only Mod6_1 is executed; Mod6_2..Mod6_7 are defined above
    # but never called, and only sal_61 is collected into sal_mod6 — confirm
    # whether the remaining checks should run and be aggregated here.
    Mod6_1
    sal_mod6="$sal_61"
    #echo -e "\n\n $sal_mod6"
}
mod6
|
<reponame>wongoo/alipay-sdk-java-all
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: zhima.credit.pe.zmgo.bizopt.close response.
 *
 * @author <NAME>
 * @since 1.0, 2021-02-24 19:05:15
 */
public class ZhimaCreditPeZmgoBizoptCloseResponse extends AlipayResponse {

    private static final long serialVersionUID = 6653362665137683749L;

    /** Application ticket number generated when the Zhima GO signing was requested. */
    @ApiField("biz_opt_no")
    private String bizOptNo;

    /** Merchant request serial number for this operation. */
    @ApiField("out_request_no")
    private String outRequestNo;

    /** Merchant ID. */
    @ApiField("partner_id")
    private String partnerId;

    /** Ant unified member (user) ID. */
    @ApiField("user_id")
    private String userId;

    public String getBizOptNo() {
        return this.bizOptNo;
    }

    public void setBizOptNo(String bizOptNo) {
        this.bizOptNo = bizOptNo;
    }

    public String getOutRequestNo() {
        return this.outRequestNo;
    }

    public void setOutRequestNo(String outRequestNo) {
        this.outRequestNo = outRequestNo;
    }

    public String getPartnerId() {
        return this.partnerId;
    }

    public void setPartnerId(String partnerId) {
        this.partnerId = partnerId;
    }

    public String getUserId() {
        return this.userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }
}
|
import { Component, OnInit } from '@angular/core';
/**
 * Template-driven form component: the template binds name and age inputs
 * via ngModel and invokes submit() with the NgForm template reference.
 */
@Component({
  selector: 'app-form',
  templateUrl: './form.component.html',
  styleUrls: ['./form.component.css']
})
export class FormComponent implements OnInit {

  constructor() { }

  ngOnInit() {
  }

  /**
   * Form submit handler.
   * @param formData the NgForm directive instance exported as #formData in
   *                 the template (use formData.value for the model object)
   */
  submit(formData) {
    // Handle form submission
  }
}
<!-- form.component.html -->
<!-- Fixed: bind (ngSubmit) rather than the native (submit) event. With the
     #formData='ngForm' directive, NgForm intercepts the submit event,
     prevents the default full-page reload, and re-emits it as ngSubmit. -->
<form (ngSubmit)='submit(formData)' #formData='ngForm'>
  <input type='text' name='name' ngModel>
  <input type='number' name='age' ngModel>
  <input type='submit' value='Submit'>
</form>
/**
 * Returns the sum of two numbers.
 * @param {number} num1
 * @param {number} num2
 * @return {number}
 */
function sum(num1, num2) {
  return num1 + num2;
}

const num1 = 5;
const num2 = 10;
const result = sum(num1, num2);
console.log(`The sum of ${num1} and ${num2} is ${result}.`);
/**
 * Looks an element up by its id attribute.
 * @param { string } id
 * @return { DOMElement }
 */
export const queryById = (id) => document.getElementById(id)
/**
 * Returns the first element matching a CSS selector.
 * @param {String} selector
 * @return {DOMElement}
 */
export const queryBySelector = (selector) => document.querySelector(selector)
/**
 * Utility to detect if the device supports touch input.
 * @returns { Boolean }
 */
export const isTouchDevice = () => {
  const hasTouchEvents = 'ontouchstart' in window
  const hasTouchPoints =
    navigator.maxTouchPoints > 0 || navigator.msMaxTouchPoints > 0
  return hasTouchEvents || hasTouchPoints
}
/**
 * Returns the current text selection, if any.
 * Note: despite the previous doc comment claiming a Boolean, this returns a
 * Selection (or legacy TextRange), possibly undefined — see hasTextSelection
 * for the Boolean wrapper.
 * @return {Selection|TextRange|undefined}
 */
export const getTextSelection = () => {
  if (window.getSelection) {
    return window.getSelection()
  }
  if (document.getSelection) {
    // Fixed: this branch tested document.getSelection but then called
    // window.getSelection() anyway; use the API that was detected.
    return document.getSelection()
  }
  if (document.selection) {
    // Legacy IE: only report the TextRange when it actually contains text.
    const selection = document.selection.createRange()
    if (selection.text) {
      return selection
    }
  }
}
/**
 * True when the user currently has non-empty text highlighted.
 * @return {Boolean}
 */
export const hasTextSelection = () => {
  const selection = getTextSelection()
  // No selection object at all -> nothing highlighted.
  // Otherwise: non-empty string content means there is a real selection.
  return selection ? Boolean(selection.toString()) : false
}
/**
 * @name addClass
 * @description Adds one or more space-separated classes to an element
 * @param { HTMLElement } element
 * @param { String } className
 */
export const addClass = (element, className) => {
  if (!element || !element.classList) {
    return
  }
  className.split(' ').forEach((name) => element.classList.add(name))
}
/**
 * @name toggleClass
 * @description Toggles one or more space-separated classes on an element
 * @param {HTMLElement} element
 * @param {String} className
 * @return {Void}
 */
export const toggleClass = (element, className) => {
  if (!element || !element.classList) {
    return
  }
  className.split(' ').forEach((name) => element.classList.toggle(name))
}
/**
 * @name removeClass
 * @description Removes one or more space-separated classes from an element
 * @param {HTMLElement} element
 * @param {String} className defaults to '' (a no-op removal of '')
 * @return {Void}
 */
export const removeClass = (element, className = '') => {
  if (!element || !element.classList) {
    return
  }
  className.split(' ').forEach((name) => element.classList.remove(name))
}
/**
 * Computes an element's document-relative position (viewport rect plus
 * scroll offsets, minus the root client edge).
 * @param {HTMLElement} elem
 * @return {Object} { top, left } rounded to whole pixels
 */
export const getTop = (elem) => {
  const box = elem.getBoundingClientRect()
  const body = document.body
  const docEl = document.documentElement
  const scrollTop = window.pageYOffset || docEl.scrollTop || body.scrollTop
  const scrollLeft = window.pageXOffset || docEl.scrollLeft || body.scrollLeft
  const clientTop = docEl.clientTop || body.clientTop || 0
  const clientLeft = docEl.clientLeft || body.clientLeft || 0
  return {
    top: Math.round(box.top + scrollTop - clientTop),
    left: Math.round(box.left + scrollLeft - clientLeft)
  }
}
/**
 * stopPropagation
 * @description Stops the event (native or React synthetic) from bubbling
 * to parent handlers; safe to call with a missing event.
 * @param {Event} e
 * @return {Void}
 */
export const stopPropagation = (e) => {
  if (!e) {
    return
  }
  if (e.stopPropagation) {
    e.stopPropagation()
  } else if (e.nativeEvent) {
    // React synthetic events expose the underlying DOM event here.
    e.nativeEvent.stopPropagation()
  }
}
/**
 * testForPassiveScroll
 * @description Feature-detects support for the `passive` addEventListener
 * option by registering a throwaway listener with a getter-trapped options
 * object; browsers that read `passive` support it.
 * @return {Boolean}
 */
export const testForPassiveScroll = () => {
  let passiveSupported = false
  try {
    const options = Object.defineProperty({}, 'passive', {
      get () {
        passiveSupported = true
      }
    })
    window.addEventListener('test', null, options)
    window.removeEventListener('test', null, options)
  } catch (e) {}
  return passiveSupported
}
|
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package com.amazon.dataprepper.plugins.processor.date;
import org.junit.jupiter.api.Test;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
class DateProcessorConfigTest {
@Test
void testDefaultConfig() {
final DateProcessorConfig dateProcessorConfig = new DateProcessorConfig();
assertThat(dateProcessorConfig.getDestination(), equalTo(DateProcessorConfig.DEFAULT_DESTINATION));
assertThat(dateProcessorConfig.getTimezone(), equalTo(DateProcessorConfig.DEFAULT_TIMEZONE));
assertThat(dateProcessorConfig.getLocale(), equalTo(DateProcessorConfig.DEFAULT_LOCALE));
}
} |
'use strict'
const { v4: uuid } = require('uuid');
const db = require('../database/database')
/**
 * In-memory product record; instances are assigned a fresh UUID and can be
 * persisted to the shared db array via save().
 */
class Product {
  constructor(name, price, brand) {
    this.productId = uuid();
    this.name = name;
    this.price = price;
    this.brand = brand;
  }

  /** Pushes this instance onto the db store and returns it for chaining. */
  save() {
    db.push(this);
    return this;
  }
}

module.exports = Product;
#!/bin/bash
# Description: Listing from script files
# Source guard: if _list_csv already exists this library was loaded before.
type _list_csv >/dev/null 2>&1 && return
# Pull in the file helper library (presumably provides _temp, _item,
# _chkfunc used below — confirm in func.file).
. func.file
#type -t _list_csv >/dev/null 2>&1 && return
# nullglob: unmatched globs expand to nothing; extglob: extended patterns.
shopt -s nullglob extglob
_list_csv(){ # Connect each line with ',' to csv line (a,b,c..) from <stdin>
    local _row _csv
    while read _row;do
        # Prepend a comma only once the accumulator is non-empty.
        _csv+="${_csv:+,}$_row"
    done
    echo "$_csv"
}
_add_list(){ # Add elements to a variable without duplication [varname] [elements...]
    # $1 must not be named '_k' or '_i' (would collide with the locals below).
    local _k=$1 _i;shift
    # Merge current contents (via ${!_k} indirection) with the new elements,
    # de-duplicated and sorted, into the positional parameters.
    set - $(for _i in ${!_k} $*;do echo $_i;done|sort -u)
    eval "$_k=\"$*\""
    # Fixed: the old code ended with 'return $_e', but _e was never assigned,
    # so the status was whatever the last command produced; make it explicit.
    return 0
}
_uniq(){ # Show uniq folded list without marked line from args
    # Arguments beginning with '#' are exclusion patterns; the rest are
    # candidate lines.  Output is the sorted unique candidates minus any
    # exact match of an exclusion.
    local _i _ex tmplist
    _temp tmplist
    for _i ; do
        if [[ $_i =~ ^# ]] ; then
            # Fixed: the separator used "$ex" (unset) instead of "$_ex",
            # which corrupted the alternation when several '#' args were
            # given (pattern became "^(|b)$", also matching empty lines).
            _ex="${_ex:+$_ex|}${_i#\#}"
        else
            echo $_i
        fi
    done > $tmplist
    if [ "$_ex" ] ; then
        egrep -v "^($_ex)$" $tmplist | sort -u
    else
        sort -u $tmplist
    fi
}
_colm(){ # Convert (item,desc) to folded list from <stdin>
    # if max width $1 is set, print multi column
    # mapfile invokes the __max callback for every line read, which updates
    # ilen (longest full line) and clen (longest caption) in this scope.
    local width=${1:-1} ilen=0 clen=0 row
    # Get max line length
    mapfile -C __max -c 1 -t
    (( $ilen > 0 )) || return
    # Print lines: 'row' entries per output row, derived from width/ilen.
    row=$(( $width / $ilen + 1 ))
    set - "${MAPFILE[@]}"
    while (( $# > 0 )); do
        echo -en "\t"
        for (( i=0 ; i < $row && $# > 0 ; i++ )); do
            # _item (presumably from func.file) renders one padded entry.
            printf "%-${ilen}s " "$(_item "$1" $clen)"
            shift
        done
        echo
    done
}
__max(){ # mapfile callback for _colm: track max widths as lines arrive
    # $2 is the line just read; ilen/clen are locals of the caller (_colm).
    (( ${#2} > $ilen )) && ilen=${#2}
    # Caption = text before the last comma.
    # NOTE(review): 'cap' is not declared local, so it leaks into the
    # caller's environment — confirm this is intentional.
    cap=${2%,*}
    (( ${#cap} > $clen )) && clen=${#cap}
}
_basename_list(){ # List of file basename
    # Strip directories, then strip the last extension from each name.
    local _names=( "${@##*/}" )
    echo -n "${_names[@]%.*}"
}
# Dispatch/self-test hook (presumably defined in func.file) — runs when the
# library is executed directly with a function name as argument.
_chkfunc $*
|
<gh_stars>10-100
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""
"""
#end_pymotw_header
# NOTE: Python 2 only (print statements; the xmlrpclib module was renamed
# to xmlrpc.client in Python 3).  Requires the companion demo server to be
# listening on localhost:9000.
import xmlrpclib

# With use_datetime=True the proxy converts XML-RPC date values into
# datetime.datetime instances.
server = xmlrpclib.ServerProxy('http://localhost:9000', use_datetime=True)
now = server.now()
print 'With:', now, type(now), now.__class__.__name__

# Without it, date values come back wrapped as xmlrpclib.DateTime.
server = xmlrpclib.ServerProxy('http://localhost:9000', use_datetime=False)
now = server.now()
print 'Without:', now, type(now), now.__class__.__name__
|
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.neighbors import NearestNeighbors
# Load data
dataset = pd.read_csv('movies.csv')

# Vectorize data: TF-IDF over word 1..3-grams of each movie's overview text.
tfv = TfidfVectorizer(analyzer='word', ngram_range=(1, 3), min_df=0, stop_words='english')
X = tfv.fit_transform(dataset['overview'])

# Compute similarities
# NOTE(review): cosine_sim is computed (O(n^2) memory) but never used below —
# the KNN model already uses the cosine metric; candidate for removal.
cosine_sim = cosine_similarity(X, X)

# Build model: brute-force cosine KNN; 11 neighbours = the movie itself
# plus its 10 recommendations.
knn = NearestNeighbors(n_neighbors=11, metric='cosine', algorithm='brute', n_jobs=-1)
knn.fit(X)
# Make recommendations
def recommend(movie):
    """Print the 10 movies whose overviews are most similar to ``movie``.

    Looks the title up in the module-level ``dataset``, queries the fitted
    ``knn`` model on the TF-IDF matrix ``X``, and prints each neighbour with
    its cosine distance.

    Raises:
        ValueError: if the title is not present in the dataset (previously
            this surfaced as an opaque IndexError).
    """
    # Find the index of the movie in the dataset.
    matches = dataset[dataset['title'] == movie].index
    if len(matches) == 0:
        raise ValueError('Movie not found in dataset: {0}'.format(movie))
    index = matches[0]
    # Get the 10 most similar films; the nearest hit is the movie itself.
    distances, indices = knn.kneighbors(X[index], n_neighbors=11)
    distances = distances.flatten()
    indices = indices.flatten()
    print('Recommendations for {0}:\n'.format(dataset['title'][index]))
    # Skip position 0 (the query movie) and print the 10 neighbours.
    for i in range(1, len(distances)):
        print('{0}: {1}, with distance of {2}:'.format(i, dataset['title'][indices[i]], distances[i]))

# Get recommendations
recommend('The Godfather')
<gh_stars>10-100
#!/usr/bin/env python
import logging
import os
import pickle
import time
from collections import defaultdict
from datetime import datetime
from json import dumps
import matplotlib
import matplotlib.pyplot as plt
import obspy
import pandas as pd
import requests
from kafka import KafkaProducer
from obspy.clients.fdsn import Client
from obspy.clients.seedlink.easyseedlink import EasySeedLinkClient, create_client
logging.basicConfig(level=logging.WARN)
# Headless plotting backend (no display on the server).
matplotlib.use("agg")

## realtime station information
# http://ds.iris.edu/gmap/#network=_REALTIME&starttime=2021-03-01&datacenter=IRISDMC&networktype=permanent&planet=earth
#
# http://ds.iris.edu/gmap/#network=_REALTIME&channel=HH*&starttime=2021-03-01&datacenter=IRISDMC&networktype=permanent&planet=earth

# Format a datetime as ISO-8601 with millisecond precision.
# NOTE(review): PEP 8 recommends a def over assigning a lambda to a name.
timestamp = lambda x: x.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3]
##################### Config #####################
pi = 3.1415926
degree2km = pi * 6371 / 180  # approximate km per degree of latitude

## Location: (longitude, latitude) of the region of interest
# center = (-115.53, 32.98) #salton sea
# center = (-117.504, 35.705) #ridgecrest
center = (-155.32, 19.39) # havaii
horizontal_degree = 2.0  # width of the selection box, degrees
vertical_degree = 2.0    # height of the selection box, degrees

## Time range
starttime = obspy.UTCDateTime("2021-01-01T00:00:00")
endtime = obspy.UTCDateTime(datetime.utcnow())

## seismic stations
network_list = ["HV", "PT"]
# channel_list = "HH*,HN*,BH*,EH*"
channel_list = "HH*"

## data center
client = "IRIS"

# Bundle the settings and persist them for downstream QuakeFlow components.
config = {}
config["center"] = center
config["xlim_degree"] = [center[0] - horizontal_degree / 2, center[0] + horizontal_degree / 2]
config["ylim_degree"] = [center[1] - vertical_degree / 2, center[1] + vertical_degree / 2]
config["degree2km"] = degree2km
config["starttime"] = starttime.datetime
config["endtime"] = endtime.datetime
config["networks"] = network_list
config["channels"] = channel_list
config["client"] = client
with open("config.pkl", "wb") as fp:
    pickle.dump(config, fp)
##################### realtime streaming station list #####################
# Parse the pipe-separated station listing (3 header rows skipped) and keep
# only the configured networks.
stations_total = pd.read_csv(
    "realtime-stations.txt",
    sep="|",
    header=None,
    skiprows=3,
    names=["network", "station", "latitude", "longitude", "elevation(m)", "location", "starttime", "endtime"],
)
stations_total = stations_total[stations_total["network"].isin(config["networks"])]

plt.figure()
plt.plot(stations_total["longitude"], stations_total["latitude"], '^')
# plt.axis("scaled")

# Keep only stations inside the configured lon/lat box.
stations_select = stations_total[
    (config["xlim_degree"][0] < stations_total["longitude"])
    & (stations_total["longitude"] < config["xlim_degree"][1])
    & (config["ylim_degree"][0] < stations_total["latitude"])
    & (stations_total["latitude"] < config["ylim_degree"][1])
]
stations_select = stations_select.reset_index()
print("Number of selected stations: ", len(stations_select))
##################### download station info #####################
# Fetch full station metadata (including instrument response) from IRIS for
# the selected stations inside the configured box.
stations = Client("IRIS").get_stations(
    network=",".join(config["networks"]),
    station=",".join(stations_select["station"]),
    starttime=config["starttime"],
    endtime=config["endtime"],
    minlongitude=config["xlim_degree"][0],
    maxlongitude=config["xlim_degree"][1],
    minlatitude=config["ylim_degree"][0],
    maxlatitude=config["ylim_degree"][1],
    channel=config["channels"],
    level="response",
)  # ,
# filename="stations.xml")
# stations = obspy.read_inventory("stations.xml")
print("Number of downloaded stations: {}".format(sum([len(x) for x in stations])))
# stations.plot('local', outfile="stations.png")
# stations.plot('local')

# Flatten the inventory into two lookups:
#   station_locs: one record per net.sta.loc.<chan-prefix> with components merged
#   station_resp: instrument sensitivity keyed by the full channel id
station_locs = defaultdict(dict)
station_resp = defaultdict(dict)
for network in stations:
    for station in network:
        for chn in station:
            # sid groups the components of one channel set: channel code
            # minus the trailing orientation letter (e.g. HHZ/HHN/HHE -> HH).
            sid = f"{network.code}.{station.code}.{chn.location_code}.{chn.code[:-1]}"
            station_resp[
                f"{network.code}.{station.code}.{chn.location_code}.{chn.code}"
            ] = chn.response.instrument_sensitivity.value
            if sid in station_locs:
                # Append this component and its sensitivity to the record.
                station_locs[sid]["component"] += f",{chn.code[-1]}"
                station_locs[sid]["response"] += f",{chn.response.instrument_sensitivity.value:.2f}"
            else:
                component = f"{chn.code[-1]}"
                response = f"{chn.response.instrument_sensitivity.value:.2f}"
                dtype = chn.response.instrument_sensitivity.input_units.lower()
                tmp_dict = {}
                tmp_dict["longitude"], tmp_dict["latitude"], tmp_dict["elevation(m)"] = (
                    chn.longitude,
                    chn.latitude,
                    chn.elevation,
                )
                tmp_dict["component"], tmp_dict["response"], tmp_dict["unit"] = component, response, dtype
                station_locs[sid] = tmp_dict

# Persist the flattened station table for other pipeline components.
station_locs = pd.DataFrame.from_dict(station_locs, orient='index')
station_locs.to_csv(
    "stations.csv",
    sep="\t",
    float_format="%.3f",
    index_label="station",
    columns=["longitude", "latitude", "elevation(m)", "unit", "component", "response"],
)
class Client(EasySeedLinkClient):
    """SeedLink client that forwards each received trace to Kafka.

    NOTE(review): this class shadows the obspy FDSN ``Client`` imported
    above; the FDSN client is only used before this definition, so behavior
    is unchanged, but a distinct name (e.g. StreamingClient) would be clearer.
    """

    def __init__(self, server_url, producer, autoconnect=True):
        # Fixed: EasySeedLinkClient's signature is (server_url, autoconnect);
        # previously the KafkaProducer was passed as the autoconnect flag,
        # silently ignoring the autoconnect parameter.
        super().__init__(server_url, autoconnect)
        self.producer = producer

    def on_data(self, trace):
        """Normalize an incoming trace and publish it to 'waveform_raw'."""
        if time.time() % 10 < 0.5:  ## throttle: print roughly once per 10 s window
            print(f'Received trace: {trace}')
        # Resample everything to a common 100 Hz rate.
        if trace.stats.sampling_rate != 100:
            trace = trace.interpolate(100, method="linear")
        if trace.stats.channel[1] == "N":  ## acceleration channel
            trace = trace.integrate()
        trace = trace.filter("highpass", freq=1.0)
        value = {
            "timestamp": timestamp(trace.stats.starttime.datetime),
            # Scale raw counts by the channel's instrument sensitivity.
            "vec": (trace.data / station_resp[trace.id]).tolist(),
        }
        self.producer.send('waveform_raw', key=trace.id, value=value)
if __name__ == '__main__':
    print('Connecting to Kafka cluster for producer...')
    # TODO Will need to clean up this with better env config
    try:
        # Try the in-cluster Kafka service first.
        BROKER_URL = 'quakeflow-kafka-headless:9092'
        # BROKER_URL = '172.16.31.10:9094'
        producer = KafkaProducer(
            bootstrap_servers=[BROKER_URL],
            key_serializer=lambda x: dumps(x).encode('utf-8'),
            value_serializer=lambda x: dumps(x).encode('utf-8'),
        )
    except Exception as error:
        # Fall back to a local broker for development.
        # NOTE(review): `error` is unused and the except is broad — consider
        # logging it and catching kafka.errors.NoBrokersAvailable instead.
        print('k8s kafka not found or connection failed, fallback to local')
        BROKER_URL = 'localhost:9092'
        producer = KafkaProducer(
            bootstrap_servers=[BROKER_URL],
            key_serializer=lambda x: dumps(x).encode('utf-8'),
            value_serializer=lambda x: dumps(x).encode('utf-8'),
        )
    print(f'Starting producer ...')
    # Subscribe to every selected station's channel set and stream forever.
    client = Client('rtserve.iris.washington.edu:18000', producer=producer)
    for x in station_locs.index:
        x = x.split(".")
        client.select_stream(x[0], x[1], x[-1] + "?")
    client.run()
|
package ttv.com.vn.stream;
/**
 * Small demo of lambdas versus anonymous classes for single-abstract-method
 * (functional) interfaces.
 */
public class LamdaEx {

    /** Placeholder for a functional-interface demo (intentionally empty). */
    public void lamdaFunInf(){
    }

    /**
     * Demonstrates passing lambdas where the functional interface
     * expected by {@code WedClass.takeCare} is required.
     */
    public void webEx(){
        WedClass wclass = new WedClass();
        // takeCare accepts an interface with a single abstract method, so a
        // lambda can supply the implementation; the method's parameter
        // becomes the lambda's input.
        // Fixed: removed the unused local `String s1 = "abc";`.
        wclass.takeCare(s -> {
            System.out.println(s);
            return s.concat(",");
        });
        wclass.takeCare(s -> s.concat(","));
        // A method reference works too:
        //wclass.takeCare(this::print);
    }

    private void print(String s){
        System.out.println("Print: " + s);
    }

    /** Same call as in webEx, spelled out with an explicit anonymous class. */
    public void webExWithoutLamda(){
        WedClass wclass = new WedClass();
        wclass.takeCare(new StudioInterface() {
            @Override
            public String takeImage(String s) {
                System.out.println(s);
                return s.concat(",");
            }
        });
    }

    public static void main(String... args){
    }
}
|
<gh_stars>0
import React from "react";
import { FcAcceptDatabase, FcAddressBook, FcFaq } from "react-icons/fc";
import { IconContext } from "react-icons/lib";
import {
PricingSection,
PricingWrapper,
PricingHeading,
PricingContainer,
PricingCard,
PricingCardInfo,
PricingCardIcon,
PricingCardPlan,
PricingCardLength,
PricingCardFeatures,
PricingCardFeature,
} from "./Pricing.elements";
import FadeInWhenVisible from "../animations/FadeInWhenVisible";
/**
 * Pricing/services section: renders three service cards (Sistemas, Soporte,
 * Base de datos) with Spanish UI copy, wrapped in a fade-in-on-scroll
 * animation.  Icon color/size are provided via IconContext.
 */
function Pricing() {
  return (
    <IconContext.Provider value={{ color: "#a9b3c1", size: 64 }}>
      <PricingSection>
        <FadeInWhenVisible>
          <PricingWrapper>
            <PricingHeading>Servicios</PricingHeading>
            <PricingContainer>
              <PricingCard to="/">
                <PricingCardInfo>
                  <PricingCardIcon>
                    <FcAddressBook />
                  </PricingCardIcon>
                  <PricingCardPlan>Sistemas</PricingCardPlan>
                  <PricingCardLength>Java / Javascript </PricingCardLength>
                  <PricingCardFeatures>
                    <PricingCardFeature>Desarollo de sistemas</PricingCardFeature>
                    <PricingCardFeature>Implementaciones</PricingCardFeature>
                    <PricingCardFeature>
                      Conexiones directas
                    </PricingCardFeature>
                  </PricingCardFeatures>
                  <a href="/"> Ver ahora</a>
                </PricingCardInfo>
              </PricingCard>
              <PricingCard to="/">
                <PricingCardInfo>
                  <PricingCardIcon>
                    <FcFaq />
                  </PricingCardIcon>
                  <PricingCardPlan>Soporte</PricingCardPlan>
                  <PricingCardLength>24/7</PricingCardLength>
                  <PricingCardFeatures>
                    <PricingCardFeature>Dudas del producto</PricingCardFeature>
                    <PricingCardFeature>Mantenimiento y actualizacion</PricingCardFeature>
                    <PricingCardFeature>Analisis profundo</PricingCardFeature>
                  </PricingCardFeatures>
                  <a href="/">Aprender como</a>
                </PricingCardInfo>
              </PricingCard>
              <PricingCard to="/">
                <PricingCardInfo>
                  <PricingCardIcon>
                    <FcAcceptDatabase />
                  </PricingCardIcon>
                  <PricingCardPlan>Base de datos</PricingCardPlan>
                  <PricingCardLength>Remota</PricingCardLength>
                  <PricingCardFeatures>
                    <PricingCardFeature>Usuarios ilimitados</PricingCardFeature>
                    <PricingCardFeature>Base de datos Nosql</PricingCardFeature>
                    <PricingCardFeature>Base de datos SQL</PricingCardFeature>
                  </PricingCardFeatures>
                  <a href="/"> Explorar soluciones</a>
                </PricingCardInfo>
              </PricingCard>
            </PricingContainer>
          </PricingWrapper>
        </FadeInWhenVisible>
      </PricingSection>
    </IconContext.Provider>
  );
}

export default Pricing;
|
#!/bin/bash
# Container entrypoint: install the environment-specific front-end config,
# then run nginx in the foreground so it stays the container's main process.
#
# Fixes: fail fast instead of silently continuing when ${env} is unset or the
# copy fails, and quote all expansions.
set -eu

html_dir=/usr/share/nginx/html

# ${env} must be supplied by the container environment (e.g. env=prod).
cp "${html_dir}/config.${env}.js" "${html_dir}/config.js"
# Drop the remaining per-environment configs. The glob requires an extra
# dot-delimited segment, so it does not match the freshly copied config.js.
rm "${html_dir}"/config.*.js
nginx -g "daemon off;"
|
#!/bin/sh
# Re-sign the patched app bundle with the location-faker dylib, repackage it
# as an IPA, and deploy/launch it on a connected iOS device.
#
# Fixes: the shebang was "#/bin/sh" (missing "!"), so the line was a no-op
# comment and the script ran under whatever shell invoked it; paths are now
# quoted to survive spaces (e.g. in $TARGET_BUILD_DIR).

IPA="$PWD/pokemongo_hook.ipa"
if [ -f "$IPA" ]; then
    echo "Delete old file..."
    rm "$IPA"
fi

# Hard-link the freshly built dylib into the app bundle, then re-sign both
# the dylib and the app (the app with its entitlements).
ln -f "$TARGET_BUILD_DIR/libLocationFaker.dylib" Payload/pokemongo.app/libLocationFaker.dylib
codesign -f -s "iPhone Developer: Lin Hong (CH4P9SEPHY)" ./Payload/pokemongo.app/libLocationFaker.dylib
codesign -f -s "iPhone Developer: Lin Hong (CH4P9SEPHY)" --entitlements Entitlements.plist ./Payload/pokemongo.app
xcrun -sdk iphoneos PackageApplication -v ./Payload/pokemongo.app -o "$IPA"

# ios-deploy is required for installation; bail out with guidance if missing.
command -v ios-deploy >/dev/null 2>&1 || { echo >&2 "Require ios-deploy but it's not installed. brew install ios-deploy."; exit 1; }
ios-deploy -c
ios-deploy -t 8 --justlaunch --no-wifi --bundle "$IPA"
echo "Deploy Finish!"
exit 0
from cyder.api.v1.endpoints.dhcp.vrf import api
|
#!/usr/bin/env bash
# Trains a liblinear pairwise re-ranker on transliteration n-best lists and
# evaluates the re-ranked output (NEWS shared-task style).
# Usage: <script> <language>   -- expects ./original/rerank.{trn,tst} and
# ${language}.tst* files to exist; relies on helper scripts (rr2svm.py,
# llnrpredictr.py, rerank.py, eval.py, mcNemars.py, evalDirecTL.sh) on PATH.
export PATH=$PATH:~/naacl12COPY/naacl12/util
TRAIN=~/liblinear-1.96/train
language=$1
# Below, when I refer to best C values, I use D for the denominator. D is the
# number of QIDs in the training set. I show the command in each case to find
# the value.
cd original
# Strip the trailing '#'-comment field and convert the rerank file to
# pairwise SVM training examples.
cut -d# -f 1 rerank.trn | rr2svm.py > pair.trn
# during dev we found the best C = 8/D
# to find D do:
tail -n 1 rerank.trn | cut -d ' ' -f 2 | cut -d ':' -f 2
# D = 25211, so C = 0.0003173218039744556
# ADAM: D = 2621, so C = 0.0030522701
$TRAIN -c 0.00305227012 pair.trn model
llnrpredictr.py rerank.tst model > rerank.tst.out
# Alternative "directlp" configuration, kept for reference but disabled:
#cd ../directlp
#cut -d# -f 1 rerank.trn | rr2svm.py > pair.trn
# during dev we found the best C = 4/D
# to find D do:
#tail -n 1 rerank.trn | cut -d ' ' -f 2 | cut -d ':' -f 2
# D = 25211, so C = 0.0001586609019872278 ADAM: D = 2621
#$TRAIN -c 0.00305227012 pair.trn model
#llnrpredictr.py rerank.tst model > rerank.tst.out
cd ..
# Get the results: join baseline scores with re-ranker scores, re-rank,
# evaluate accuracy, and run McNemar's significance test.
paste ${language}.tst.orig.sc.base original/rerank.tst.out | cut -f 1-3 | rerank.py | \
tee ${language}.tst.orig.sc.reranked | eval.py ${language}.tst; mcNemars.py ${language}.tst \
${language}.tst.orig.sc.{base,reranked}
# Reference numbers from a previous run:
#Word accuracy: 50.05%
#Character accuracy: 82.20%
#Oracly re-ranked word accuracy: 84.40%
#Total words: 2801
#Error reduction: 8.26%
#Performance increase: 9.87%
#Significant with p < 0.001
#p = 1.5761048022255864e-10
# NOTE: a bit weaker than in the paper
#paste ${language}.tst.dtlp.sc.base directlp/rerank.tst.out | cut -f 1-3 | rerank.py | \
# tee ${language}.tst.dtlp.sc.reranked | eval.py ${language}.tst; mcNemars.py ${language}.tst \
# ${language}.tst.dtlp.sc.{base,reranked}
#Word accuracy: 49.66%
#Character accuracy: 81.68%
#Oracly re-ranked word accuracy: 78.22%
#Total words: 2801
#Error reduction: 2.22%
#Performance increase: 2.35%
#Not statistically significant
#p = 0.027047248378120115
# NOTE: a bit weaker than in the paper
# ADAM
# evaluate results for NEWS results
#echo "NEWS results for dtl reranked"
#sed 's/ //g' ${language}.tst.dtlp.sc.reranked > tmp
#sed -e 's/#/_/g' -e 's/%/ /g' tmp > ${language}.dtlp.sc.final # convert back to spaces and underscores
#evalDirecTL.sh ${language}.dtlp.sc.final ../../dev.xml
echo "NEWS results for orig reranked"
sed 's/ //g' ${language}.tst.orig.sc.reranked > tmp
sed -e 's/#/_/g' -e 's/%/ /g' tmp > ${language}.orig.sc.final # convert back to spaces and underscores
evalDirecTL.sh ${language}.orig.sc.final ../../dev.xml
# Build the OpenMP PCA binary. Libraries (-lm) must follow the sources that
# reference them: the linker resolves -l options left to right, so listing
# -lm before the objects can leave math symbols unresolved on some toolchains.
g++ -fopenmp -Wall -g lab2_io.cpp lab2_omp.cpp main_omp.cpp -lm -o pca
|
package dev.rudrecciah.admincore.staffmode.items;
import dev.rudrecciah.admincore.playerdata.PlayerDataHandler;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.jetbrains.annotations.NotNull;
import java.util.*;
import java.util.ArrayList;
/**
 * Factory helpers for the item stacks shown in the staff-mode inventory.
 *
 * <p>All display names render as a bold blue {@code [name]}; an empty name
 * falls back to the {@code "NOREASON"} placeholder. The original class
 * duplicated that naming logic in all three factories; it now lives in one
 * place ({@link #createSimpleItemStack(Material, int, String)}).
 */
public class ItemCreator {

    /**
     * Creates an item with a formatted display name and one yellow lore line.
     *
     * @param type   material of the stack
     * @param amount stack size
     * @param name   display name; an empty string selects the "NOREASON" placeholder
     * @param lore   lore text; ignored when empty
     * @return the configured item stack
     */
    public static ItemStack createSimpleItemStack(Material type, int amount, String name, String lore) {
        // Delegate to the 3-arg overload so the display-name rules live in one place.
        ItemStack item = createSimpleItemStack(type, amount, name);
        if (!lore.isEmpty()) {
            ItemMeta itemMeta = item.getItemMeta();
            itemMeta.setLore(Collections.singletonList(ChatColor.YELLOW + lore));
            item.setItemMeta(itemMeta);
        }
        return item;
    }

    /**
     * Creates an item with a formatted display name and no lore.
     *
     * @param type   material of the stack
     * @param amount stack size
     * @param name   display name; an empty string selects the "NOREASON" placeholder
     * @return the configured item stack
     */
    public static ItemStack createSimpleItemStack(Material type, int amount, String name) {
        ItemStack item = new ItemStack(type, amount);
        ItemMeta itemMeta = item.getItemMeta();
        if (name.isEmpty()) {
            // Placeholder keeps the meta consistent for unnamed items.
            itemMeta.setDisplayName("NOREASON");
        } else {
            itemMeta.setDisplayName(ChatColor.BLUE + "" + ChatColor.BOLD + "[" + name + "]");
        }
        item.setItemMeta(itemMeta);
        return item;
    }

    /**
     * Creates the punishment-overview item for a target player, with the
     * player's ban and mute counts as yellow lore lines.
     *
     * @param type   material of the stack
     * @param amount stack size
     * @param name   display name; an empty string selects the "NOREASON" placeholder
     * @param target player whose ban/mute counts are shown
     * @return the configured item stack
     */
    public static ItemStack createBanItem(Material type, int amount, String name, Player target) {
        ItemStack item = createSimpleItemStack(type, amount, name);
        ItemMeta itemMeta = item.getItemMeta();
        List<String> lore = new ArrayList<>();
        lore.add(ChatColor.YELLOW + "Times Banned: " + PlayerDataHandler.getBans(target));
        lore.add(ChatColor.YELLOW + "Times Muted: " + PlayerDataHandler.getMutes(target));
        itemMeta.setLore(lore);
        item.setItemMeta(itemMeta);
        return item;
    }
}
|
<gh_stars>0
/**
 * Highlights the primary-navigation link that matches the current page by
 * adding the 'active' class to the link's parent <li>.
 *
 * Fix: removed a leftover console.log debug statement that printed the
 * current URL on every page load.
 */
class NavigationActiveClass {
    /**
     * @param {Element} navigationActiveClassElement navigation root element;
     *        stored for parity with callers, the lookup itself uses a jQuery
     *        selector on '.primary-nav li a'.
     */
    constructor( navigationActiveClassElement ) {
        this.navigationActiveClassElement = navigationActiveClassElement;
        this._addActiveClass();
    }

    // Compares the file-name portion (text after the last '/') of the current
    // URL against each nav link's href and flags the matching item.
    _addActiveClass() {
        let url = window.location.pathname;
        let activePage = url.substring( url.lastIndexOf( '/' ) + 1 );
        $( '.primary-nav li a' ).each( function() {
            let linkPage = this.href.substring( this.href.lastIndexOf( '/' ) + 1 );
            if ( activePage === linkPage ) {
                $( this ).parent().addClass( 'active' );
            }
        });
    }
}
module.exports = NavigationActiveClass;
|
"""
print("String : (((({}))))] Balanced or not?")
print(is_balanced_string("(((({}))))]"))
print("String : {1+[2*(3/4)-1]+2} Balanced or not?")
print(is_balanced_string("{1+[2*(3/4)-1]+2}"))
"""
class Stack:
    """LIFO stack backed by a Python list (the list's tail is the top)."""

    def __init__(self):
        # Internal storage; append/pop at the end give O(1) push/pop.
        self._data = []

    def is_empty(self):
        """Return True if the stack holds no items."""
        return not self._data

    def push(self, item):
        """Place ``item`` on top of the stack."""
        self._data.append(item)

    def pop(self):
        """Remove and return the top item.

        Raises:
            ValueError: if the stack is empty.
        """
        if not self._data:
            raise ValueError('Empty stack')
        return self._data.pop()

    def top(self):
        """Return the top item without removing it, or None when empty."""
        # NOTE: unlike pop(), an empty stack yields None here rather than
        # raising -- preserved from the original interface.
        return self._data[-1] if self._data else None

    def __len__(self):
        """Return the number of items on the stack."""
        return len(self._data)
def is_balanced_string(st):
    """Return True if every bracket in ``st`` is properly matched and nested.

    Recognized bracket pairs are (), {}, []; all other characters are
    ignored. An empty string is balanced.

    Args:
        st: the string to check.

    Returns:
        bool: True when balanced, False otherwise.
    """
    # Map each closer to its opener; a plain list serves as the stack,
    # removing the dependency on the custom Stack class.
    pairs = {')': '(', '}': '{', ']': '['}
    openers = set(pairs.values())
    stack = []
    for ch in st:
        if ch in openers:
            stack.append(ch)
        elif ch in pairs:
            # A closer with no pending opener, or the wrong opener, fails.
            if not stack or stack.pop() != pairs[ch]:
                return False
    # Balanced only if no opener is left waiting for its closer.
    return not stack
<gh_stars>1-10
package malte0811.controlengineering.datagen.recipes;
import net.minecraft.advancements.CriterionTriggerInstance;
import net.minecraft.advancements.critereon.ImpossibleTrigger;
import net.minecraft.data.recipes.FinishedRecipe;
import net.minecraft.data.recipes.ShapelessRecipeBuilder;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.level.ItemLike;
import net.minecraftforge.registries.RegistryObject;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.function.Consumer;
/**
 * A {@link ShapelessRecipeBuilder} variant used during datagen that produces
 * no recipe-unlock advancement and can attach an NBT tag to the result item.
 *
 * <p>The vanilla builder refuses to save without at least one unlock
 * criterion; this class satisfies that requirement with an impossible
 * trigger and then wraps the output so the advancement is dropped
 * (see {@code NoAdvancementFinishedRecipe}).
 */
public class NoAdvancementShapelessBuilder extends ShapelessRecipeBuilder {
    // Optional NBT applied to the crafted result; null means "no NBT".
    @Nullable
    private final CompoundTag nbt;
    public NoAdvancementShapelessBuilder(ItemLike pResult, int pCount, @Nullable CompoundTag nbt) {
        super(pResult, pCount);
        this.nbt = nbt;
    }
    // Result carries the stack's count and NBT tag.
    public static ShapelessRecipeBuilder shapeless(ItemStack result) {
        return new NoAdvancementShapelessBuilder(result.getItem(), result.getCount(), result.getTag());
    }
    // Single item, no NBT.
    public static ShapelessRecipeBuilder shapeless(ItemLike result) {
        return shapeless(result, 1);
    }
    // Convenience overload for deferred-registered items.
    public static ShapelessRecipeBuilder shapeless(RegistryObject<? extends ItemLike> regObject) {
        return shapeless(regObject.get());
    }
    public static ShapelessRecipeBuilder shapeless(ItemLike result, int pCount) {
        return new NoAdvancementShapelessBuilder(result, pCount, null);
    }
    /**
     * Unsupported: this builder deliberately generates no advancement, so
     * callers must not register unlock criteria.
     *
     * @throws UnsupportedOperationException always
     */
    @Nonnull
    @Override
    public ShapelessRecipeBuilder unlockedBy(
            @Nonnull String pCriterionName, @Nonnull CriterionTriggerInstance pCriterionTrigger
    ) {
        throw new UnsupportedOperationException();
    }
    public void save(@Nonnull Consumer<FinishedRecipe> out, @Nonnull ResourceLocation recipeId) {
        // Satisfy the superclass's "at least one criterion" check with an
        // impossible trigger; the wrapper below strips the advancement anyway.
        super.unlockedBy("dummy", new ImpossibleTrigger.TriggerInstance());
        super.save(recipe -> out.accept(new NoAdvancementFinishedRecipe(recipe, nbt)), recipeId);
    }
}
|
// Controller for the tag-details view: resolves the tag from the route,
// loads the posts carrying that tag (paged through the search API), and
// exposes stream / discussion / inheritance actions to the template.
angular.module("wust.components").controller("TagDetailsCtrl", TagDetailsCtrl);
TagDetailsCtrl.$inject = ["$stateParams", "Scope", "Search", "DiscourseNode", "StreamService", "Auth", "ContextService", "ModalEditService"];
function TagDetailsCtrl($stateParams, Scope, Search, DiscourseNode, StreamService, Auth, ContextService, ModalEditService) {
    let vm = this;
    // Pagination state for the contributions list.
    let postSize = 30;
    let postPage = 0;
    vm.loadMorePosts = loadMorePosts;
    vm.addTagStream = addTagStream;
    vm.changedInherits = changedInherits;
    vm.auth = Auth;
    vm.newDiscussion = newDiscussion;
    // Resolve the tag node for the current route id.
    vm.tag = Scope.$find($stateParams.id);
    // vm.tag.$then(() => {
    //     ContextService.setContext(vm.tag);
    // });
    //TODO: tags/id/posts should honor inherits relation
    // vm.contributions = vm.tag.posts.$search({
    //     page: postPage,
    //     size: postSize
    // });
    // for now use search api
    vm.contributions = Search.$search({
        label: DiscourseNode.Post.label,
        tagsAll: [$stateParams.id],
        sortOrder: wust.SortOrder().QUALITY,
        page: postPage,
        size: postSize
    });
    // Opens the edit modal pre-tagged with the current tag.
    function newDiscussion() {
        ModalEditService.show(undefined, false, [vm.tag.$encode()]);
    }
    // Fetches the next page of contributions; returns false before the tag
    // has resolved so infinite-scroll stops asking.
    function loadMorePosts() {
        if (vm.tag === undefined)
            return false;
        postPage++;
        return vm.contributions.$fetch({
            page: postPage,
            size: postSize
        });
    }
    // Adds a stream filtered to this tag and notifies the user.
    function addTagStream() {
        if(!vm.tag)
            return;
        StreamService.push({tagsAll: [vm.tag]});
        humane.success(`Added stream for '${vm.tag.title}'`);
    }
    // Persists an add/remove of an inherits relation, then refreshes the
    // contributions list so it reflects the new tag hierarchy.
    function changedInherits(list, type, tag) {
        switch (type) {
        case "add":
            list.$buildRaw(tag).$save({}).$then(() => vm.contributions.$refresh());
            break;
        case "remove":
            list.$buildRaw(tag).$destroy().$then(() => vm.contributions.$refresh());
            break;
        }
    }
}
|
#!/bin/bash
# Query the local release service for the macOS 64-bit build of farmer-gui.
endpoint="http://localhost:4000/release/"
query="os=osx64&project=farmer-gui"
curl -X GET "${endpoint}?${query}"
|
import * as types from '../../mutation-types'
import projects from '../../../api/projects'
import store from '../../../store/index'
import Vue from 'vue';
import Enum from 'enum';
import axios from 'axios';
import { coreEmailId, isProdEnv, cloudOptionsMap } from "../../../../config/config";
// Notification recipients keyed by cloud provider, used when publishing to
// that cloud. Addresses were scrubbed to <EMAIL> placeholders before this
// file was shared -- restore real recipients before use.
const mailIdsForClouds = Object.freeze({
    alicloud: ['<EMAIL>', '<EMAIL>']
});
// Vuex module state for the projects dashboard: the project/live-site lists,
// enum-style status maps, and per-workflow sub-states (upload, publish,
// download, optimize, domain verification, preview, rename, deactivate).
const state = {
    Projects : [],
    projectSearchQuery : '',
    // Overall project lifecycle states; negative values are error states.
    projectStatus : new Enum({ 'IDLE':0, 'PUBLISHING':1, 'BUILDING':2,
        'CRAWLING':3, 'QUEUED':4, 'PUBLISHINGERROR':-1, 'PREPARINGDOWNLOAD': 10,
        'BUILDINGERROR':-2, 'ERROR':-3 }),
    liveProjects : [],
    Errors : [],
    projectType : new Enum ({ CRAWL : 0, DRAGANDDROP : 1, NEWPROJECT : 2, WORDPRESS:3, ERROR : -1 }),
    // Progress of a drag-and-drop file upload for one project.
    uploadDetails: {
        projectId: '',
        isUploading: false,
        totalFiles: 0,
        filesUploaded: 0,
        filesFailed: []
    },
    // Everything the multi-step publish wizard needs.
    publishProject : {
        stage: 0,
        projectId : '',
        projectName : '',
        customerList: [],
        customerForPublishing: {},
        newCustomerDetails: {
            FullName: '',
            Email: '',
            PhoneNumber: 0,
            ProjectId: '',
            WebsiteTag: '',
            DeveloperId: '',
            Domain: ''
        },
        isNewCustomer: false,
        isCreatingCustomer: false,
        isCustomerCreationError : false,
        isPublishing: false,
        isGettingCustomerList : false,
        isAPIError: false,
        isEligibleForPublishing: true,
        aliCloudDetails: {},
        cloudSelected: '',
        cloudDetailsPresent: false,
        gcpCloudDetails: {},
        gcpTokenGeneratorUrl: '',
        gcpCredsFile: null,
        chooseOwnAccount: null,
        domainForCloud: ''
        // doesWebsiteTagExist: false
    },
    uploadArray : [],
    // Build-stage keys mapped to the labels shown in the UI.
    BuildStatus : new Enum( { 'Queued': 'Queued',
        'Compiled': 'compiling html',
        'QueuedBuild': 'in queue',
        'Analyzer': 'analyzing',
        'Optimizer': 'optimizing',
        'Replacer': 'updating asset versions',
        'Completed': 'completed',
        'Error': 'error' }),
    // Crawl-stage keys mapped to the labels shown in the UI.
    CrawlStatus : new Enum( {
        Initialising : 'initiating',
        IdentifyingAllAssetsAndDownloadingWebpage : 'analyzing',
        IdentifyingExternalDomains : 'action needed',
        DownloadingAllStaticAssetsToStorage : 'converting',
        UpdatingWebPagesWithNewStaticAssetUri : 'updating urls',
        Completed : 'completed',
        Error : 'error'
    }),
    // Guard so only one polling loop for in-process projects runs at a time.
    IsPollingForProjectsInProcess: false,
    downloadProject : {
        projectId : '',
        isLinkRequested : false,
        downloadLink : '',
        isPolling : false,
        isApiCallError : false
    },
    optimizeProject: {
        projectId : '',
        projectName : '',
        isApiCallCompleted: false,
        isApiCallSuccess:false
    },
    optimizationError: false,
    customerDetails: {
        customerId : '',
        customer : {},
        isFetchingCustomerDetails: false,
        areDetailsFetchedSuccessfully : false
    },
    verifyDomain:{
        customerId : '',
        isFetchingDetails: false,
        isApiCallSuccess : false,
        domainDetails : {}
    },
    // NOTE(review): getters reference state.dnsDetails.customerId, but only
    // websiteId is declared here -- confirm where customerId is set.
    dnsDetails:{
        websiteId: '',
        pendingDomains: [],
    },
    previewProject: {
        projectId: '',
        customerList: [],
        isFetchingDetails: false,
        projectName: ''
    },
    renameProject: {},
    deactivateProject: {
        name: '',
        websiteIds: []
    },
    isApiCallInProgress: false,
    lazyLoad: {
        showLoader: false,
        stop: false
    },
    showNotificationLiveSite: [ ...cloudOptionsMap.values() ].toString()
};
// getters
// Vuex getters for the projects module. Most are thin selectors over state.
//
// Fix: getCustomer used a braced arrow body without `return`, so it always
// evaluated to undefined; it now returns the matching live project.
const getters = {
    getAllProjects : state => state.Projects,
    getAllLiveProjects : state => state.liveProjects,
    getTotalProjectsCount : state => state.Projects.length,
    getTotalLiveProjectsCount : state => state.liveProjects.length,
    uploadDetails: state => state.uploadDetails,
    getPublishDetails: state => state.publishProject,
    getOptimizeDetails: state => state.optimizeProject,
    getProjectsStatus : state => state.projectStatus,
    getPreviewProjectDetails: state => state.previewProject,
    // Maps a numeric stage to its display label via the build or crawl enum.
    getStageName : state => (stage,isBuild) => {
        if(isBuild){
            return state.BuildStatus.getValue(stage);
        }
        else {
            return state.CrawlStatus.getValue(stage);
        }
    },
    // NOTE(review): state.dnsDetails declares websiteId, not customerId --
    // confirm customerId is populated elsewhere before relying on this getter.
    getCustomer : state => {
        return state.liveProjects.find((pro)=>{
            return pro.CustomerId === state.dnsDetails.customerId;
        });
    },
    getPublishCustomers: state => state.publishProject.customerList,
    getCustomerDetailsForLiveSite : state => state.customerDetails,
    getVerifyDomainDetails : state => state.verifyDomain,
    getDnsDetailsDetails : state => state.dnsDetails,
    getProjectTypes: state => state.projectType,
    // Case-insensitive filter of projects by the current search query.
    getProjectsBySearchQuery: state => {
        return state.Projects.filter(function (item) {
            return item.ProjectName.toLowerCase().indexOf(state.projectSearchQuery) >= 0;
        })
    },
    getCrawlStatus : state => state.CrawlStatus,
    getBuildStatus : state => state.BuildStatus,
    getPendingDomains: state => state.dnsDetails.pendingDomains,
    getOptimizationError: state => state.optimizationError,
    renameProjectDetails: state => state.renameProject,
    isApiCallInProgress: state => state.isApiCallInProgress,
    selectedCloud: state => state.publishProject.cloudSelected,
    aliCloudDetails: state => state.publishProject.aliCloudDetails,
    gcpCloudDetails: state => state.publishProject.gcpCloudDetails,
    areCloudDetailsPresent: state => state.publishProject.cloudDetailsPresent,
    gcpTokenGeneratorUrl: state => state.publishProject.gcpTokenGeneratorUrl,
    gcpCredsFile: state => state.publishProject.gcpCredsFile,
    chooseOwnAccount: state => state.publishProject.chooseOwnAccount,
    lazyLoad: state => state.lazyLoad,
    deactivateProject: state => state.deactivateProject,
    domainForCloud: state => state.publishProject.domainForCloud,
    showNotificationLiveSite: state => state.showNotificationLiveSite
};
// actions
const actions = {
downloadURI ({ commit },payload){
let link = document.getElementById("kdownload");
link.href = "";
link.href = payload;
link.click();
let project = state.Projects.filter(function(project){
return project.ProjectId == state.downloadProject.projectId
})[0];
let processProject = {
project,
status: 0
};
commit(types.setProjectStatus, processProject)
},
getAllProjects ({ commit,dispatch }, payload) {
commit(types.setLazyLoadDetails, { stop: false, showLoader: true });
projects.getAllProjectsfromApi(payload, (sucess, { Projects }) => {
if(sucess) {
const lazyLoad = payload ? payload.lazyLoad : false;
commit(types.getListOfAllProjects, { payload: Projects, lazyLoad: lazyLoad });
commit(types.projectListReceived);
commit(types.setCardsSkeletonLoaderComponentStatus, false);
dispatch('startPollingForProjectsInProcess');
const { uploadFiles } = store.getters;
if(uploadFiles.length === 0) {
store.dispatch('toggleLoader', false);
store.dispatch('toggleActionModal');
}
if (lazyLoad) {
if(Projects.length === 0 || Projects.length < payload.limit) {
commit(types.setLazyLoadDetails, { stop: true, showLoader: false });
} else {
commit(types.setLazyLoadDetails, { stop: false, showLoader: false });
}
} else {
commit(types.setLazyLoadDetails, { stop: false, showLoader: false });
}
} else {
store.dispatch('toggleLoader', false);
store.dispatch('toggleActionModal');
}
})
},
sortProjectsByCreatedOn({ commit }, payload) {
commit(types.sortAllProjectsByCreatedOn, payload)
},
optimizeProject({ commit,dispatch }) {
commit(types.setIsApiCallCompletedInOptimizeProject,false);
projects.optimizeProject(state.optimizeProject.projectId ,(success, response)=>{
commit(types.setIsApiCallCompletedInOptimizeProject,true);
if(success && response){
commit(types.setIsApiCallSuccessfullInOptimizeProject,true);
commit(types.setCardsSkeletonLoaderComponentStatus,true);
dispatch('startPollingForProjectsInProcess');
dispatch('checkBuildStats');
}else{
commit(types.setIsApiCallSuccessfullInOptimizeProject,false);
}
})
},
checkBuildStats({ commit, dispatch }) {
projects.checkBuildStats(state.optimizeProject.projectId, (success, response) => {
if(success) {
const { Stage, IsCompleted, Error } = response;
if(IsCompleted) {
return;
}
if(Stage >= 0 && Stage < 9) {
setTimeout(() => {
dispatch('checkBuildStats');
}, 1000)
}
if(Stage < 0) {
if(isProdEnv) {
dispatch('triggerEmail', {
To: [coreEmailId],
Subject: `[URGENT] build failed for ${store.state.app.UserEmail} on ${new Date().toLocaleString()}`,
EmailBody: `Project Name: ${state.optimizeProject.projectName}
<br />Project Id: ${state.optimizeProject.projectId}
<br />Developer Phone: ${store.state.user.user.PhoneNumber}
<br /><pre>${JSON.stringify(Error, null, 2)}</pre>`
});
}
commit(types.setOptimizeErrorStatus, true);
}
} else {
//TODO handler
console.log(response);
}
})
},
checkProjectCanBeDeletedOtNot({ commit,dispatch }) {
projects.checkProjectIsPublishedOrNot(store.state.app.deleteProject.projectId,(success,published)=>{
if (success) {
if (!published) {
dispatch('deleteProject');
}
else {
commit(types.setIsProjectPublishedInDeleteProject,true);
}
}
else {
dispatch('showToastr', {
isError : true,
title: 'Error',
message: 'error while checking project status.'
})
}
})
},
showToastr({ dispatch }, payload) {
const { toasterTypes } = store.state.toastr;
let toastrMessage = {
type : '',
title: '',
message : ''
};
if (payload.isError) {
toastrMessage.type = toasterTypes.ERROR;
} else {
toastrMessage.type = toasterTypes.SUCCESS;
}
toastrMessage.title = payload.title ? payload.title : "";
toastrMessage.message = payload.message ? payload.message : "";
dispatch('addToaster', toastrMessage);
},
deleteProject({ commit,dispatch }) {
projects.deleteProject(store.state.app.deleteProject.projectId,(success, res)=>{
let toasterTypes = store.state.toastr.toasterTypes;
if(success && res){
commit(types.setDeleteProjectComponentStatus, false);
commit(types.updateOverlayComponent, false);
dispatch('getAllProjects');
commit(types.setCardsSkeletonLoaderComponentStatus,true);
dispatch('addToaster',{
type : toasterTypes.SUCCESS,
title : 'project successfully deleted',
message: store.state.app.deleteProject.projectName
});
}
else {
dispatch('addToaster',{
type : toasterTypes.ERROR,
title : 'project deletetion failed',
message: store.state.app.deleteProject.projectName
});
}
})
},
setUploadDetails ({ commit }, payload) {
commit(types.setUploadDetails, payload)
},
checkEligibleForPublishingAndGetCustomerList({ commit,dispatch }){
let userBalance = store.getters.getUserNetBalance;
if(userBalance >= 100){
commit(types.setEligibleForPublish,true);
dispatch('getCustomerList');
}else{
commit(types.setEligibleForPublish,false);
}
},
getCustomerList ({ commit, dispatch }, payload) {
if (!payload || !payload.limit) {
commit(types.setIsGettingListOfCustomersForPublishProject, true);
}
payload = {
...payload,
projectId: state.publishProject.projectId
};
projects.customerListForProject(payload, (success, response) => {
if (!payload || !payload.limit) {
commit(types.setIsGettingListOfCustomersForPublishProject, false);
}
if (success) {
commit(types.setCustomerListInProjectDetails, response.Websites);
if(response.Websites.length === 0 || response.Websites.length < payload.limit) {
commit(types.setLazyLoadDetails, { stop: true, showLoader: false });
} else {
commit(types.setLazyLoadDetails, { stop: false, showLoader: false });
}
} else {
dispatch('showToastr', {
isError: true,
title : 'ERROR',
message : 'error while fetching customer list.'
})
}
})
},
setPublishProjectDetails ( { commit }, payload ) {
commit(types.setPublishProjectDetails, payload);
},
setPreviewProjectDetails( { commit }, payload ) {
commit(types.setProjectIdInProjectPreview, payload)
},
publish({ dispatch }){
const { isNewCustomer } = state.publishProject;
if(isNewCustomer){
dispatch('createNewCustomer');
}
else {
dispatch('publishNewProject');
}
},
createNewCustomer ({ commit, dispatch }) {
commit(types.setIsCreatingNewCustomerInPublishProject, true);
const { newCustomerDetails, cloudSelected } = state.publishProject;
const payload = {
...newCustomerDetails,
CloudProviderType: cloudSelected
};
projects.createCustomer(payload, (success, response)=>{
commit(types.setIsCreatingNewCustomerInPublishProject, false);
if(success){
commit(types.setIsCustomerCreationErrorForPublishProject, false);
commit(types.setCustomerForPublishProject, { WebsiteId : response });
dispatch('publishNewProject');
}
else {
commit(types.setIsCreatingNewCustomerInPublishProject, true);
commit(types.setIsAPIErrorInPublishProject, true);
commit(types.setIsCustomerCreationErrorForPublishProject, true);
dispatch('showToastr', {
isError : true,
title: 'error',
message: 'error while creating customer.'
})
}
});
},
publishNewProject ( { commit,dispatch }) {
commit(types.setIsPublishingForPublishProject, true);
commit(types.setStageForPublishProject, 0);
commit(types.setCardsSkeletonLoaderComponentStatus, true);
projects.publishProject(state.publishProject.customerForPublishing.WebsiteId,(success, response)=>{
if(success && !response.IsError){
setTimeout(() => {
dispatch('getAllProjects')
}, 2000);
setTimeout(() => {
dispatch('getAllLiveProjects')
}, 4000)
if(isProdEnv) {
const publishUrl = state.publishProject.isNewCustomer
? `${state.publishProject.newCustomerDetails.WebsiteTag}.getkitsune.com`
: `${state.publishProject.customerForPublishing.WebsiteDomain}`;
dispatch('triggerEmail', {
To: [coreEmailId],
Subject: `publish success for ${store.state.app.UserEmail} on ${new Date().toLocaleString()}`,
EmailBody: `Project Name: ${state.publishProject.projectName}
<br>Project Id: ${state.publishProject.projectId}
<br>Publish Url: ${publishUrl}`
})
}
}else{
commit(types.setIsAPIErrorInPublishProject, true);
commit(types.setCardsSkeletonLoaderComponentStatus, false);
const { Message : message } = response;
dispatch('showToastr',{
isError : true,
title : 'error in pubishing',
message : message
});
}
});
},
getPendingDomains({ commit }) {
return new Promise( (resolve, reject) => {
projects.getPendingDomains(state.dnsDetails.websiteId, (success, response) => {
if(success) {
resolve();
commit(types.setPendingDomains, response.RequestedDomains)
}else {
reject();
//TODO error handler
}
})
})
},
updateDomain({ commit }, payload) {
return new Promise((resolve, reject) => {
projects.updateDomain(payload, (success, response) => {
if(success) {
resolve();
}else {
reject();
//TODO error handler
}
})
})
},
setStageForPublishing({ commit }, payload) {
commit(types.setStageForPublishProject,payload);
},
resetStoreForPublishingProject ({ commit }) {
commit(types.resetStoreForPublishProject);
},
setCustomerForPublishingProject ({ commit },payload) {
commit(types.setCustomerForPublishProject,payload);
},
setDomainNameForNewCustomerInPublishProject ({ commit },payload) {
commit(types.setDomainNameForNewCustomerInPublishProject,payload);
},
setIsNewCustomerInPublishProject ({ commit },payload) {
commit(types.setIsNewCustomerInPublishProject,payload);
},
setCustomerDetailsForNewCustomerInPublishProject ({ commit }, payload) {
commit(types.setCustomerDetailsForNewCustomerInPublishProject,payload);
},
setIsCreatingNewCustomerInPublishProject ({ commit },payload) {
commit(types.setIsCreatingNewCustomerInPublishProject,payload);
},
getAllLiveProjects({ commit, dispatch }, payload) {
commit(types.setLazyLoadDetails, { stop: false, showLoader: true });
projects.getAllLiveProjects(payload, (success, response) => {
if(success) {
const { PageSize, TotalCount, LiveWebsites } = response;
const lazyLoad = payload && payload.lazyLoad;
if(lazyLoad) {
if(TotalCount < PageSize || LiveWebsites.length === 0) {
commit(types.setLazyLoadDetails, { stop: true, showLoader: false });
} else {
commit(types.setLazyLoadDetails, { stop: false, showLoader: false });
}
} else {
commit(types.setLazyLoadDetails, { stop: false, showLoader: false })
}
commit(types.getListOfAllLiveProjects, { payload: LiveWebsites, lazyLoad });
} else {
commit(types.setLazyLoadDetails, { stop: false, showLoader: false });
dispatch('showToastr', {
isError: true,
title : 'error',
message: 'error while fetching live sites.'
})
}
})
},
requestProjectDownloadLink ({ commit,dispatch },payload){
let project = state.Projects.filter(function(project){
return project.ProjectId == state.downloadProject.projectId
})[0];
let processProject = {
project,
status: 10
}
commit(types.setProjectStatus, processProject)
commit(types.setIsLinkRequestedInDownloadProject,true);
projects.getProjectDownlink({ projectId : state.downloadProject.projectId },(success,response)=>{
commit(types.setIsLinkRequestedInDownloadProject,false);
if(success && !response.IsError){
if(response.DownloadUrl != null || response.DownloadUrl != undefined){
commit(types.setDownloadLinkInDownloadProject,response.DownloadUrl);
dispatch('downloadURI',state.downloadProject.downloadLink);
}
else{
commit(types.setIsPollingInDownloadProject,true);
dispatch('getRequestedProjectDownloadLinkStatus');
}
//TODO HANDLE ERROR
} else {
let processProject = {
project,
status: 0
};
commit(types.setProjectStatus, processProject)
}
})
},
setProjectIdInDownloadProject ({ commit },payload){
commit(types.setProjectIdInDownloadProject,payload);
},
getRequestedProjectDownloadLinkStatus ( { commit, dispatch },payload ){
projects.getProjectDownlinkStatus({ projectId : state.downloadProject.projectId },(success,response)=>{
if(success){
if(response.LinkUrl != null){
commit(types.setDownloadLinkInDownloadProject,response.LinkUrl);
commit(types.setIsPollingInDownloadProject,false);
dispatch('downloadURI',state.downloadProject.downloadLink);
}else{
setTimeout(() => {
dispatch('getRequestedProjectDownloadLinkStatus');
}, 3000)
}
}
})
},
startPollingForProjectsInProcess({ commit,dispatch }){
if(!state.IsPollingForProjectsInProcess){
commit(types.setIsPollingForProjectsInProcess,true);
dispatch('getAllProjectsUnderProcessing');
}
},
getAllProjectsUnderProcessing({ commit,dispatch }) {
projects.getProjectsInProcess((success,response)=>{
commit(types.setPollingCompletedForBaseApiCalls,true);
if(success){
dispatch('updateAllProjectsForCurrentStage',response);
}
else{
//TODO error handler
}
})
},
updateAllProjectsForCurrentStage({ commit,dispatch }, payload){
const projects = payload.Projects;
let projectDownloadStatus = state.projectStatus.PREPARINGDOWNLOAD;
commit(types.setCardsSkeletonLoaderComponentStatus, false);
if(projects.length != 0){
for(let i=0; i<state.Projects.length;i++){
let projectInProcess = state.Projects[i];
let project = projects.find((pro)=>{
return pro.ProjectId == projectInProcess.ProjectId;
})
if(project != undefined){
commit(types.setProjectStatus,{ project : projectInProcess, status : project.ProjectStatus });
switch(project.ProjectStatus){
case state.projectStatus.BUILDING.key:
commit(types.setProjectStage,{ project: projectInProcess, stage: project.BuildStage })
break;
case state.projectStatus.CRAWLING.key:
commit(types.setProjectStage,{ project: projectInProcess, stage: project.CrawlStage })
break;
default:
commit(types.setProjectStage, { project: projectInProcess, stage: null } )
break;
}
}
else{
if(projectInProcess.ProjectStatus != projectDownloadStatus.key &&
projectInProcess.ProjectStatus != projectDownloadStatus.value){
commit(types.setProjectStatus,{ project : projectInProcess, status : state.projectStatus.IDLE.value });
}
}
}
setTimeout(() => {
dispatch('getAllProjectsUnderProcessing')
}, 3000)
}
else {
commit(types.setIsPollingForProjectsInProcess,false);
dispatch('resetStagesOfAllProjects');
}
},
resetStagesOfAllProjects({ commit }){
let projects = state.Projects;
let projectDownloadStatus = state.projectStatus.PREPARINGDOWNLOAD;
for(let i=0; i<projects.length;i++){
let currentProject = projects[i];
if(currentProject.ProjectStatus != projectDownloadStatus.key &&
currentProject.ProjectStatus != projectDownloadStatus.value)
commit(types.setProjectStatus, { project: currentProject, status: state.projectStatus.IDLE.value });
}
},
setProjectIdAndNameForOptimizeProjectAndStartBuild({ commit,dispatch }, payload) {
commit(types.setProjectIdAndNameInOptimizeProject,payload);
dispatch('optimizeProject');
},
getCustomerDetails({ commit }){
commit(types.setIsFetchingCustomerDetailsInCustomerDetails,true);
projects.getCustomerDetails({ websiteId : state.customerDetails.customerId }, (success,response)=>{
commit(types.setIsFetchingCustomerDetailsInCustomerDetails,false);
if(success){
commit(types.setAreDetailsFetchedSuccessfullyInCustomerDetails,true);
commit(types.setCustomerInCustomerDetails,response);
}else{
commit(types.setAreDetailsFetchedSuccessfullyInCustomerDetails,false);
}
})
},
// Clears any previously loaded customer details, then records the id of the
// customer whose details should be fetched next.
setCustomerIdForCustomerdetails({ commit },payload){
commit(types.resetCustomerDetails);
commit(types.setCustomerIdInCustomerDetails,payload);
},
// Asks the backend to verify/map the DNS domain for the current website and
// stores the result plus a success flag in the verifyDomain sub-state.
getMapDomainMappingAndMap({ commit }){
commit(types.setIsFetchingDetailsInVerifyDomain,true);
// NOTE(review): reads state.dnsDetails.websiteId but sends it under the key
// "customerId" — looks intentional, confirm against the API contract.
projects.checkAndMapDomain({ customerId : state.dnsDetails.websiteId },(success,response)=>{
commit(types.setIsFetchingDetailsInVerifyDomain,false);
if(success){
commit(types.setdomainDetailsInVerifyDomain,response);
commit(types.setIsApiCallSuccessInVerifyDomain,true);
}
else{
commit(types.setIsApiCallSuccessInVerifyDomain,false);
}
})
},
// Resets verify-domain state, then records the customer id to verify.
setCustomerIdForVerifyDomain({ commit },payload){
commit(types.resetVerifyDomain);
commit(types.setcustomerIdInVerifyDomain,payload);
},
// Commits an empty id first, then the real one — presumably to force watchers
// to re-fire even when the same website id is selected twice.
setWebsiteIdInDnsDetails({ commit },payload){
commit(types.setWebsiteIdInDnsDetails,'');
commit(types.setWebsiteIdInDnsDetails,payload);
},
// Stores the free-text search query used to filter the project list.
setProjectSearchQueryInProjects({ commit },payload){
commit(types.setProjectSearchQueryInProjects,payload);
},
// Loads the customers attached to the project being previewed, then opens the
// preview modal on success. Failure is currently silent (see TODO).
getCustomerListForProjectPreview ({ commit, dispatch }){
commit(types.setIsFetchingDetailsInProjectPreview,true);
projects.customerListForProject(state.previewProject.projectId,(success,response)=>{
commit(types.setIsFetchingDetailsInProjectPreview,false);
if(success){
const { Customers } = response;
commit(types.setCustomerListInProjectPreview, Customers);
dispatch('showPreviewModal', true);
}else {
// TODO error handler
}
})
},
// Promise wrapper around the website-tag existence check; resolves/rejects
// with the raw API response so callers can inspect it.
checkIfWebsiteTagExists({ commit }, payload) {
return new Promise((resolve, reject) => {
projects.checkIfWebsiteTagExists(payload, (success, response) => {
if(success) {
resolve(response);
}else {
reject(response);
// TODO error handler
}
})
})
},
// Simple pass-through setters.
setOptimizeErrorStatus({ commit }, payload) {
commit(types.setOptimizeErrorStatus, payload);
},
setRenameProject({ commit }, payload) {
commit(types.setProjectForRenaming, payload);
},
// Renames a project, shows a success/error toast, refreshes the project list,
// and exposes the outcome to the caller via a Promise.
renameProject({ dispatch, commit }, payload) {
commit(types.apiCallProgressStatus, true);
return new Promise((resolve, reject) => {
projects.renameProject(payload, (success, response) => {
if(success) {
const { ProjectName } = payload;
dispatch('showToastr', {
isError: false,
title : 'project renamed',
message : `'${ProjectName}' successfully renamed.`
});
commit(types.setCardsSkeletonLoaderComponentStatus, true);
dispatch('getAllProjects');
resolve();
} else {
dispatch('showToastr', {
isError: true,
title : 'ERROR',
message : 'error while renaming project'
});
reject(response);
}
// Always clear the progress flag, whether the rename succeeded or not.
commit(types.apiCallProgressStatus, false);
})
});
},
// Cloud-publishing pass-through setters.
setSelectedCloud({ commit }, payload) {
commit(types.setSelectedCloud, payload);
},
setAliCloudDetails({ commit }, payload) {
commit(types.setAliCloudDetails, payload);
},
setGCPDetails({ commit }, payload) {
commit(types.setGCPDetails, payload);
},
// Resolves with the K-Admin URL for the given payload, rejects on API failure.
getKAdminUrl({ commit }, payload) {
return new Promise((resolve, reject) => {
projects.getKAdminUrl(payload, (success, response) => {
success ? resolve(response) : reject(response);
})
})
},
// Publishes the project to a cloud provider. For a brand-new customer the
// cloud registration API is called first; existing customers go straight to
// the generic 'publish' action.
publishToCloud({ dispatch }, payload) {
const { isNewCustomer } = state.publishProject;
if(isNewCustomer) {
projects.publishToCloud(payload, (success, response) => {
if(success) {
dispatch('publish');
// NOTE(review): reads via the global store here but via the module-local
// `state` above — both should point at the same publishProject object.
const { cloudSelected, aliCloudDetails, newCustomerDetails } = store.state.projects.publishProject;
// Loose equality (==) against cloudOptionsMap.get(1); both sides are
// presumably strings — confirm before tightening to ===.
if(Object.keys(aliCloudDetails).length && cloudSelected == cloudOptionsMap.get(1)) {
if(isProdEnv) {
// In production, email the entered Ali-cloud + customer details to
// the ops mailbox configured for the selected cloud.
let data = { ...aliCloudDetails, ...newCustomerDetails };
dispatch('triggerEmail', {
To: mailIdsForClouds[cloudSelected],
Subject: `cloud details entered by ${store.state.app.UserEmail}`,
EmailBody: `<pre>
${JSON.stringify(data, null, 2)}
</pre>`
})
}
}
} else {
dispatch('showToastr', {
isError: true,
title : 'error while publishing',
message : `could not publish to ${payload.provider}`
});
}
})
} else {
dispatch('publish');
}
},
// Looks up any previously saved cloud-provider details and records whether
// they exist (used to decide if the cloud form should be pre-filled).
getCloudProviderDetails({ commit }, payload) {
projects.getCloudProviderDetails(payload, (success, response) => {
if(success && response.data) {
commit(types.setSelectedCloud, cloudOptionsMap.get(response.data.provider));
commit(types.setCloudDetailsPresent, true);
} else {
commit(types.setCloudDetailsPresent, false);
}
})
},
resetCloudDetails({ commit }) {
commit(types.resetCloudDetails);
},
// Fetches the OAuth URL used to generate a GCP token and caches it in state.
getGCPTokenGeneratorUrl({ commit }) {
projects.getUrlForGCPTokenGeneration((success, { auth }) => {
if(success) {
commit(types.gcpTokenGeneratorUrl, auth);
}
})
},
// Pass-through setters for the GCP / deactivate-site flows.
setGCPCredsFile({ commit }, payload) {
commit(types.setGCPCredsFile, payload);
},
setChooseOwnAccount({ commit }, payload) {
commit(types.setChooseOwnAccount, payload);
},
setDeactivateSiteDetails({ commit }, payload) {
commit(types.setDeactivateSiteDetails, payload);
},
// Deactivates the site(s) stored in state.deactivateProject, closes the
// confirmation overlay, refreshes the live-project list and toasts the result.
deactivateSite({ commit, dispatch }, event) {
commit(types.apiCallProgressStatus, true);
const { websiteIds, name } = state.deactivateProject;
projects.deactivateSite(websiteIds, (success, response) => {
commit(types.apiCallProgressStatus, false);
dispatch('toggleStatus', { components: ['overlay', 'deactivateSite'], event });
if(success) {
dispatch('getAllLiveProjects');
dispatch('showToastr', {
isError : false,
title: `${name}`,
message: 'successfully deactivated the site.'
})
} else {
dispatch('showToastr', {
isError : true,
title: `${name}`,
message: 'unable to deactivate the site.'
})
}
})
},
setDomainForCloud({ commit }, payload) {
commit(types.setDomainForCloud, payload);
},
setShowNotificationLiveSite({ commit }, payload) {
commit(types.setShowNotificationLiveSite, payload);
}
};
// mutations — every handler below receives (state, payload) from Vuex.
const mutations = {
// Project lists: append when lazy-loading, replace otherwise.
[types.getListOfAllProjects] (state, { payload, lazyLoad }) {
state.Projects = lazyLoad ? [...state.Projects, ...payload] : payload;
},
[types.getListOfAllLiveProjects] (state, { payload, lazyLoad }) {
state.liveProjects = lazyLoad ? [...state.liveProjects, ...payload] : payload;
},
// Sorts in place: ascending by CreatedOn when payload is truthy, else descending.
[types.sortAllProjectsByCreatedOn] (state, payload) {
if(payload)
state.Projects.sort( (a,b)=> {return new Date(a.CreatedOn).getTime() - new Date(b.CreatedOn).getTime();} );
else
state.Projects.sort( (a,b)=> {return new Date(b.CreatedOn).getTime() - new Date(a.CreatedOn).getTime();} );
},
// Either resets the upload tracker (resetFlag) or merges in partial details.
[types.setUploadDetails] (state, { details, totalFiles, resetFlag }) {
state.uploadDetails = resetFlag ? ({
projectId: '',
isUploading: false,
totalFiles: 0,
filesUploaded: 0,
filesFailed: []
}) : { ...state.uploadDetails, ...details, totalFiles };
},
// NOTE(review): mutates another module's state via the global store object.
[types.setDeleteProjectComponentStatus] (state,payload) {
store.state.app.componentIsActive.deleteproject = payload;
},
[types.setPublishProjectDetails] (state,payload) {
state.publishProject.projectId = payload.projectId;
state.publishProject.projectName = payload.projectName;
},
[types.setProjectIdInProjectPreview] (state, payload) {
state.previewProject.projectId = payload.projectId;
state.previewProject.projectName = payload.projectName;
},
[types.setIsGettingListOfCustomersForPublishProject] (state,payload) {
state.publishProject.isGettingCustomerList = payload;
},
[types.setIsFetchingDetailsInProjectPreview] (state, payload) {
state.previewProject.isFetchingDetails = payload;
},
// Appends to the existing customer list (pagination-style accumulation).
[types.setCustomerListInProjectDetails] (state,payload) {
state.publishProject.customerList = [
...state.publishProject.customerList,
...payload
];
},
[types.setCustomerListInProjectPreview] (state, payload) {
state.previewProject.customerList = payload;
},
[types.setIsPublishingForPublishProject] (state,payload) {
state.publishProject.isPublishing = payload;
},
[types.setCustomerForPublishProject] (state, payload){
state.publishProject.customerForPublishing = payload;
},
[types.setStageForPublishProject] (state,payload){
state.publishProject.stage = payload;
},
[types.setDomainNameForNewCustomerInPublishProject] (state,payload){
const { subDomain, domain } = payload;
state.publishProject.newCustomerDetails =
{ ...state.publishProject.newCustomerDetails, WebsiteTag: subDomain, Domain: domain };
},
[types.setPendingDomains] (state, payload) {
state.dnsDetails.pendingDomains = payload;
},
[types.setIsNewCustomerInPublishProject] (state,payload){
state.publishProject.isNewCustomer = payload;
},
// Copies form fields into newCustomerDetails; the developer id is taken from
// the Authorization header currently set on axios.
[types.setCustomerDetailsForNewCustomerInPublishProject] (state, payload){
let developerId = axios.defaults.headers.common['Authorization'];
state.publishProject.newCustomerDetails.FullName = payload.customerName;
state.publishProject.newCustomerDetails.Email = payload.customerEmail;
state.publishProject.newCustomerDetails.ProjectId = state.publishProject.projectId;
state.publishProject.newCustomerDetails.PhoneNumber = payload.phoneNumber;
state.publishProject.newCustomerDetails.DeveloperId = developerId;
},
// Restores the whole publish-project sub-state to its defaults.
[types.resetStoreForPublishProject] (state){
const { publishProject } = state;
publishProject.stage = 0;
publishProject.isPublishing = false;
publishProject.customerForPublishing = '';
publishProject.customerList = [];
publishProject.isGettingCustomerList = false;
publishProject.projectName = '';
publishProject.projectId = '';
publishProject.isCreatingCustomer = false;
publishProject.isNewCustomer = false;
publishProject.newCustomerDetails.DeveloperId = '';
publishProject.newCustomerDetails.Email = '';
publishProject.newCustomerDetails.PhoneNumber = '';
publishProject.newCustomerDetails.FullName = '';
publishProject.newCustomerDetails.ProjectId = '';
publishProject.newCustomerDetails.WebsiteTag = '';
publishProject.isAPIError = false;
publishProject.isEligibleForPublishing = false;
publishProject.isCustomerCreationError = false;
},
[types.setIsCreatingNewCustomerInPublishProject] (state,payload) {
state.publishProject.isCreatingCustomer = payload;
},
[types.setIsAPIErrorInPublishProject] (state,payload){
state.publishProject.isAPIError = payload;
},
// Vue.set keeps reactivity when adding a property to a project object.
[types.setProjectStatus] (state,payload){
Vue.set(payload.project, 'ProjectStatus' , payload.status);
},
[types.setProjectStage] (state,payload){
Vue.set(payload.project, 'ProjectStage' , payload.stage);
},
[types.setIsPollingForProjectsInProcess] (state,payload) {
state.IsPollingForProjectsInProcess = payload;
},
[types.setPollingCompletedForBaseApiCalls] (payload) {
store.state.app.baseApiCalls.pollingProjectsInProcess = payload;
},
// Download-project sub-state setters.
[types.setIsLinkRequestedInDownloadProject] (state,payload){
state.downloadProject.isLinkRequested = payload;
},
[types.setProjectIdInDownloadProject] (state,payload){
state.downloadProject.projectId = payload;
},
[types.setIsPollingInDownloadProject] (state,payload){
state.downloadProject.isPolling = payload;
},
[types.setDownloadLinkInDownloadProject] (state,payload){
state.downloadProject.downloadLink = payload;
},
[types.setIsApiCallErrorInDownloadProject] (state,payload){
state.downloadProject.isApiCallError = payload;
},
// Optimize-project sub-state setters.
[types.setProjectIdAndNameInOptimizeProject] (state,payload) {
state.optimizeProject.projectId = payload.projectId;
state.optimizeProject.projectName = payload.projectName;
},
[types.setIsApiCallCompletedInOptimizeProject] (state,payload) {
state.optimizeProject.isApiCallCompleted = payload;
},
[types.setIsApiCallSuccessfullInOptimizeProject] (state,payload) {
state.optimizeProject.isApiCallSuccess = payload;
},
// NOTE(review): writes another module's state via the global store object.
[types.setOptimizeProjectInComponentStatus] (state,payload){
store.state.app.componentIsActive.optimizeProject = payload;
},
// Customer-details sub-state setters and reset.
[types.setCustomerInCustomerDetails] (state, payload) {
state.customerDetails.customer = payload;
},
[types.setIsFetchingCustomerDetailsInCustomerDetails] (state,payload){
state.customerDetails.isFetchingCustomerDetails = payload;
},
[types.setAreDetailsFetchedSuccessfullyInCustomerDetails] (state,payload){
state.customerDetails.areDetailsFetchedSuccessfully = payload;
},
[types.setCustomerIdInCustomerDetails] (state,payload){
state.customerDetails.customerId = payload;
},
[types.resetCustomerDetails] (state) {
state.customerDetails.customerId = '';
state.customerDetails.areDetailsFetchedSuccessfully = false;
state.customerDetails.isFetchingCustomerDetails = false;
state.customerDetails.customer = {};
},
// Verify-domain sub-state setters and reset.
[types.setcustomerIdInVerifyDomain] (state,payload){
state.verifyDomain.customerId = payload;
},
[types.setdomainDetailsInVerifyDomain] (state,payload){
state.verifyDomain.domainDetails = payload;
},
[types.setIsApiCallSuccessInVerifyDomain] (state,payload){
state.verifyDomain.isApiCallSuccess = payload;
},
[types.setIsFetchingDetailsInVerifyDomain] (state,payload){
state.verifyDomain.isFetchingDetails = payload;
},
[types.resetVerifyDomain] (state){
state.verifyDomain.isFetchingDetails = false;
state.verifyDomain.isApiCallSuccess = false;
state.verifyDomain.customerId = '';
state.verifyDomain.domainDetails = {};
},
[types.setWebsiteIdInDnsDetails] (state,payload){
state.dnsDetails.websiteId = payload;
},
[types.setEligibleForPublish] (state,payload){
state.publishProject.isEligibleForPublishing = payload;
},
[types.setProjectSearchQueryInProjects] (state,payload){
state.projectSearchQuery = payload;
},
[types.setIsCustomerCreationErrorForPublishProject] (state, payload) {
state.publishProject.isCustomerCreationError = payload;
},
[types.setOptimizeErrorStatus] (state, payload) {
state.optimizationError = payload;
},
[types.setProjectForRenaming] (state, payload) {
state.renameProject = payload;
},
[types.apiCallProgressStatus] (state, payload) {
state.isApiCallInProgress = payload;
},
[types.addFailedFiles] (state, payload) {
state.uploadDetails.filesFailed.push(payload);
},
// Sets the uploaded-file counter to payload when given, otherwise increments it.
[types.setUploadedFilesCount] (state, payload) {
state.uploadDetails.filesUploaded = payload ? payload : ++state.uploadDetails.filesUploaded;
},
// Cloud-publishing sub-state setters and reset.
[types.setSelectedCloud] (state, payload) {
state.publishProject.cloudSelected = payload;
},
[types.setAliCloudDetails] (state, payload) {
state.publishProject.aliCloudDetails = { ...payload }
},
[types.setGCPDetails] (state, payload) {
state.publishProject.gcpCloudDetails = { ...payload };
},
[types.setCloudDetailsPresent] (state, payload) {
state.publishProject.cloudDetailsPresent = payload;
},
[types.resetCloudDetails] (state) {
state.publishProject.aliCloudDetails = {};
state.publishProject.cloudDetailsPresent = false;
state.publishProject.cloudSelected = '';
state.publishProject.gcpCloudDetails = {};
state.publishProject.gcpTokenGeneratorUrl = '';
state.publishProject.gcpCredsFile = null;
state.publishProject.chooseOwnAccount = null;
},
[types.gcpTokenGeneratorUrl] (state, payload) {
state.publishProject.gcpTokenGeneratorUrl = payload
},
[types.setGCPCredsFile] (state, payload) {
state.publishProject.gcpCredsFile = payload;
},
[types.setChooseOwnAccount] (state, payload) {
state.publishProject.chooseOwnAccount = payload;
},
[types.setLazyLoadDetails] (state, payload) {
state.lazyLoad = { ...payload };
},
[types.setDeactivateSiteDetails] (state, payload) {
state.deactivateProject = payload;
},
[types.setDomainForCloud] (state, payload) {
state.publishProject.domainForCloud = payload;
},
[types.setShowNotificationLiveSite] (state, payload) {
state.showNotificationLiveSite = payload;
}
};
// Standard Vuex module export.
export default {
state,
getters,
actions,
mutations
}
|
# Extract every email-like token (non-space@non-space.non-space) from the
# sample text and print one match per line.
text = "My email is example@example.com and my friend's email is friend@example.net"
email_pattern = /\S+@\S+\.\S+/
found_emails = text.scan(email_pattern)
puts found_emails
#!/bin/bash
# desktop.sh — Packer provisioner: installs ubuntu-desktop, enables autologin
# for the SSH user (GDM3 or LightDM, whichever is present) and disables
# screen blanking. Skipped for Amazon EBS builds and non-desktop builds.

if [[ $PACKER_BUILDER_TYPE =~ amazon-ebs ]]; then
    echo "==> Amazon EBS build. Exiting desktop.sh"
    exit
fi

# FIX: the original pattern was ^(true|yes|on|1|TRUE|YES|ON])$ — the stray "]"
# after ON meant DESKTOP=ON was never recognised as a desktop build.
if [[ ! "$DESKTOP" =~ ^(true|yes|on|1|TRUE|YES|ON)$ ]]; then
    echo "==> Not a desktop build. Exiting desktop.sh"
    exit
fi

SSH_USER=${SSH_USERNAME:-vagrant}

echo "==> Checking version of Ubuntu"
. /etc/lsb-release

echo "==> Installing ubuntu-desktop"
apt-get install -y ubuntu-desktop

USERNAME=${SSH_USER}
LIGHTDM_CONFIG=/etc/lightdm/lightdm.conf
GDM_CUSTOM_CONFIG=/etc/gdm3/custom.conf

# GDM3 (newer Ubuntu): rewrite custom.conf with automatic login enabled.
# Note: only runs when the file already exists, i.e. GDM3 is installed.
if [ -f "$GDM_CUSTOM_CONFIG" ]; then
    mkdir -p "$(dirname "${GDM_CUSTOM_CONFIG}")"
    > "$GDM_CUSTOM_CONFIG"
    echo "[daemon]" >> "$GDM_CUSTOM_CONFIG"
    echo "# Enabling automatic login" >> "$GDM_CUSTOM_CONFIG"
    echo "AutomaticLoginEnable = true" >> "$GDM_CUSTOM_CONFIG"
    echo "AutomaticLogin = ${USERNAME}" >> "$GDM_CUSTOM_CONFIG"
fi

# LightDM (older Ubuntu): append autologin settings.
if [ -f "$LIGHTDM_CONFIG" ]; then
    echo "==> Configuring lightdm autologin"
    echo "[SeatDefaults]" >> "$LIGHTDM_CONFIG"
    echo "autologin-user=${USERNAME}" >> "$LIGHTDM_CONFIG"
    echo "autologin-user-timeout=0" >> "$LIGHTDM_CONFIG"
fi

# Autostart entry that turns off DPMS/screen blanking for every session.
if [ -d /etc/xdg/autostart/ ]; then
    echo "==> Disabling screen blanking"
    NODPMS_CONFIG=/etc/xdg/autostart/nodpms.desktop
    echo "[Desktop Entry]" >> "$NODPMS_CONFIG"
    echo "Type=Application" >> "$NODPMS_CONFIG"
    echo "Exec=xset -dpms s off s noblank s 0 0 s noexpose" >> "$NODPMS_CONFIG"
    echo "Hidden=false" >> "$NODPMS_CONFIG"
    echo "NoDisplay=false" >> "$NODPMS_CONFIG"
    echo "X-GNOME-Autostart-enabled=true" >> "$NODPMS_CONFIG"
    echo "Name[en_US]=nodpms" >> "$NODPMS_CONFIG"
    echo "Name=nodpms" >> "$NODPMS_CONFIG"
    echo "Comment[en_US]=" >> "$NODPMS_CONFIG"
    echo "Comment=" >> "$NODPMS_CONFIG"
fi
|
/** Demonstrates {@link String#toUpperCase()} by printing a sample string in its
 *  original and upper-case forms. */
public class UpperCase {
    public static void main(String[] args) {
        final String original = "This is a test string";
        final String upperCased = original.toUpperCase();
        System.out.println("Original String: " + original);
        System.out.println("String in Upper Case: " + upperCased);
    }
}
#!/bin/sh
# Launch the locally installed Reactotron debugger.
# exec replaces this shell with the reactotron process so signals
# (e.g. Ctrl-C) are delivered directly to it instead of the wrapper.
exec node_modules/.bin/reactotron
#!/bin/sh
#
# The MIT License (MIT)
#
# Copyright (c) 2016-present IxorTalk CVBA
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#

# Entrypoint: fetch Prometheus config + rule files from the IxorTalk config
# server, then hand control to the Prometheus binary.

# Exit on non-zero return values
set -e

IXORTALK_PROFILE=${IXORTALK_PROFILE:="dev"}
IXORTALK_CONFIG_SERVER_LABEL=${IXORTALK_CONFIG_SERVER_LABEL:="master"}
IXORTALK_CONFIG_SERVER_URL=${IXORTALK_CONFIG_SERVER_URL:="http://ixortalk-config-server:8899/config"}

echo "Executing wget ${IXORTALK_CONFIG_SERVER_URL}/ixortalk.prometheus/${IXORTALK_PROFILE}/${IXORTALK_CONFIG_SERVER_LABEL}/prometheus.yml -O /etc/prometheus/config.yml"
# Quote the URLs so unusual characters in the env vars cannot word-split.
wget "${IXORTALK_CONFIG_SERVER_URL}/ixortalk.prometheus/${IXORTALK_PROFILE}/${IXORTALK_CONFIG_SERVER_LABEL}/prometheus.yml" -O /etc/prometheus/config.yml
wget "${IXORTALK_CONFIG_SERVER_URL}/ixortalk.prometheus/${IXORTALK_PROFILE}/${IXORTALK_CONFIG_SERVER_LABEL}/alert.rules.yml" -O /etc/prometheus/alert.rules.yml
wget "${IXORTALK_CONFIG_SERVER_URL}/ixortalk.prometheus/${IXORTALK_PROFILE}/${IXORTALK_CONFIG_SERVER_LABEL}/recording.rules.yml" -O /etc/prometheus/recording.rules.yml

# FIX: bare $@ word-splits arguments containing spaces; "$@" forwards them
# verbatim. exec makes prometheus PID 1 so it receives container signals.
exec /bin/prometheus "$@"
|
const cleanAndSort = (input) => {
// Create new Set to remove duplicates
const integers = new Set(input);
// Sort the Set in ascending order
const sorted = [...integers].sort((a, b) => a - b);
// Return the sorted array
return sorted;
};
let array = [9, 4, 7, 1, 5, 6, 4, 9];
console.log(cleanAndSort(array)); // [1, 4, 5, 6, 7, 9] |
package uk.ac.cam.ch.wwmm.opsin;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static uk.ac.cam.ch.wwmm.opsin.XmlDeclarations.*;
import org.junit.jupiter.api.Test;
/** Miscellaneous component-generation tests for salt components. */
public class ComponentGeneration_MiscTest {
// A bare salt-component group (e.g. "hydrate") with no multiplier or partner
// must be rejected by component generation.
@Test()
public void testRejectSingleComponentSaltComponent() {
assertThrows(ComponentGenerationException.class, () -> {
// reject "hydrate"
Element molecule = new GroupingEl(MOLECULE_EL);
Element wordRule = new GroupingEl(WORDRULE_EL);
Element word = new GroupingEl(WORD_EL);
Element root = new GroupingEl(ROOT_EL);
Element group = new TokenEl(GROUP_EL);
group.addAttribute(new Attribute(TYPE_ATR, SIMPLEGROUP_TYPE_VAL));
group.addAttribute(new Attribute(SUBTYPE_ATR, SALTCOMPONENT_SUBTYPE_VAL));
root.addChild(group);
word.addChild(root);
wordRule.addChild(word);
molecule.addChild(wordRule);
processComponents(molecule);
});
}
// A salt component whose value starts with a count (e.g. "2hcl") should be
// split into a multiplier element ("2") followed by the group ("hcl").
// The molecule gets an extra (empty) word rule first so the salt component
// is not the sole component.
@Test
public void testNumericallyMultipliedSaltComponent() throws ComponentGenerationException {
Element molecule = new GroupingEl(MOLECULE_EL);
molecule.addChild(new GroupingEl(WORDRULE_EL));
Element wordRule = new GroupingEl(WORDRULE_EL);
Element word = new GroupingEl(WORD_EL);
Element root = new GroupingEl(ROOT_EL);
Element group = new TokenEl(GROUP_EL);
group.addAttribute(new Attribute(TYPE_ATR, SIMPLEGROUP_TYPE_VAL));
group.addAttribute(new Attribute(SUBTYPE_ATR, SALTCOMPONENT_SUBTYPE_VAL));
group.setValue("2hcl");
root.addChild(group);
word.addChild(root);
wordRule.addChild(word);
molecule.addChild(wordRule);
processComponents(molecule);
assertEquals(2, root.getChildCount());
Element multiplier = root.getChild(0);
assertEquals(MULTIPLIER_EL, multiplier.getName());
assertEquals("2", multiplier.getAttributeValue(VALUE_ATR));
assertEquals("2", multiplier.getValue());
Element updatedGroup = root.getChild(1);
assertEquals("hcl", updatedGroup.getValue());
}
// Runs the OPSIN component-generation stage over the given parse tree.
private void processComponents(Element parse) throws ComponentGenerationException {
new ComponentGenerator(new BuildState(new NameToStructureConfig())).processParse(parse);
}
}
|
<reponame>giorgosmamakoukas/DerivativeBasedKoopman
# Trains a discrete-time Koopman operator Kd for a single pendulum by sampling
# random (state, input) pairs, simulating one time step, and solving the
# least-squares problem A = Kd G. All names (zeros, uniform, odeint, Psi_k,
# A_and_G, NKoopman, Samples, ts, ...) come from the project's settings module.
from settings import *

seed(1)  # deterministic sampling run-to-run
# Samples = Number_of_Samples
A = zeros((NKoopman, NKoopman))
G = zeros((NKoopman, NKoopman))
# Basis-function evaluations at t = 0 (Ps0) and after one step t = ts (Psi).
Ps0_list = empty((Samples, NKoopman))
Psi_list = empty((Samples, NKoopman))

for i in range(Samples):
    # Sample states and input uniformly over the pendulum's operating range.
    th0 = uniform(-2 * pi, 2 * pi)
    dth0 = uniform(-5, 5)
    s0 = [th0, dth0]
    u10 = uniform(-5, 5)

    # Simulate system forward one time step ts.
    sn = odeint(single_pendulum, s0, [0, ts], args=(u10,))
    sn = sn[-1, :]

    # Evaluate basis functions at t = 0 and t = ts.
    Ps0_list[i, :] = Psi_k(s0, u10).T
    Psi_list[i, :] = Psi_k(sn, u10).T

    Atemp, Gtemp = A_and_G(s0, sn, u10)
    A = A + Atemp
    G = G + Gtemp

# FIX: scipy.linalg.pinv2 was deprecated in SciPy 1.7 and removed in 1.12;
# linalg.pinv is the equivalent SVD-based pseudo-inverse.
Kd = dot(A, linalg.pinv(G))
print(Kd)

## Measure maximum local (across one time step) errors in Ψ(s_{k+1}) - Kd*Ψ(s_k)
local_errors = empty([Samples, NKoopman])
for i in range(Samples):
    local_errors[i, :] = abs(Psi_list[i, :] - dot(Kd, Ps0_list[i, :]))
max_local_errors = amax(local_errors, axis=0)
print('Max local errors in theta: %.5f and dtheta: %.5f ' % tuple(max_local_errors[0:2]))

# Save trained Koopman and basis functions measurements used to obtain it
io.savemat('Data - Koopman_and_BasisFunctions.mat', {'Kd': Kd, 'max_local_errors': max_local_errors})  # save variables to Matlab file
|
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author <NAME>, <NAME>
* @version 1.3
* @date Tue Nov 1 19:12:16 EDT 2016
* @see LICENSE (MIT style license file).
*
* Multi-Graph 'MuGraph' Data Structure Using Mutable Sets
*/
package scalation.graphalytics.multi
import scala.collection.mutable.Map
import scala.collection.mutable.{Set => SET}
//import scala.collection.mutable.{HashSet => SET}
import scala.reflect.ClassTag
import scalation.graphalytics.{Pair, Tree}
import scalation.graphalytics.mutable.{Graph, MGraph, MGraphGen}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MuGraph` class stores vertex/edge-labeled multi-directed graphs using
* an adjacency set 'ch' representation, e.g., 'ch = { {1, 2}, {0}, {1} }' means
* that the graph has the following edges { (0, 1), (0, 2), (1, 0), (2, 1) }.
* Optionally, inverse adjacency via the 'pa' array can be stored at the cost
* of nearly doubling the storage requirements.
*----------------------------------------------------------------------------
* @param ch the array of child (adjacency) vertex sets (outgoing edges)
* @param label the array of vertex labels: v -> vertex label
* @param elabel the map of edge labels: (u, v) -> edge label
* @param inverse whether to store inverse adjacency sets (parents)
* @param name the name of the multi-digraph
* @param schema optional schema: map from label to label type
*/
class MuGraph [TLabel: ClassTag] (ch: Array [SET [Int]],
                                  label: Array [TLabel],
                                  val elabel: Map [Pair, SET [TLabel]],
                                  inverse: Boolean = false,
                                  name: String = "g",
                                  schema: Map [TLabel, String] = null)
      extends Graph [TLabel] (ch, label, inverse, name) with Cloneable
{
    /** Map from schema label type to set of labels
     */
    val schemaMap = if (schema == null) null else buildSchemaMap (schema)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the children of vertex 'u' that are connected via an edge labelled
     *  'elab'.
     *  @param u     the source vertex
     *  @param elab  the edge label
     */
    def children (u: Int, elab: TLabel): SET [Int] =
    {
        for (v <- ch(u) if elabel((u, v)) contains elab) yield v
    } // children

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the parents of vertex 'v' that are connected via an edge labelled
     *  'elab'.  Requires the parents 'pa' to be added (@see `Graph`).
     *  @param v     the destination vertex
     *  @param elab  the edge label
     */
    def parents (v: Int, elab: TLabel): SET [Int] =
    {
        for (u <- pa(v) if elabel((u, v)) contains elab) yield u
    } // parents                    // FIX: trailing comment said "children"

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Build the schema map from label type to set of labels.
     *  @param schema  the schema - map of label types: label -> label type
     */
    def buildSchemaMap (schema: Map [TLabel, String]): Map [String, SET [TLabel]] =
    {
        val schMap = Map [String, SET [TLabel]] ()
        for ((lab, typ) <- schema) schMap += typ -> (schMap.getOrElse (typ, SET ()) + lab)
        schMap
    } // buildSchemaMap

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Clone (make a deep copy) of 'this' multi-digraph.
     */
    override def clone: MuGraph [TLabel] =
    {
        val ch2    = Array.ofDim [SET [Int]] (ch.length)
        val label2 = Array.ofDim [TLabel] (ch.length)
        for (i <- ch2.indices) {
            ch2(i)    = SET (ch(i).toArray: _*)
            label2(i) = label(i)
        } // for
        // FIX: also pass 'schema' so the clone rebuilds its schemaMap; the
        // original dropped it, leaving the clone with a null schemaMap.
        new MuGraph (ch2, label2, elabel.clone, inverse, name, schema)
    } // clone

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Check whether the edges in the 'elabel' map correspond to edges in the
     *  the adjacency list.
     */
    def checkElabels: Boolean =
    {
        for ((u, v) <- elabel.keys if ! (ch(u) contains v)) {
            println (s"checkElabels: no such edge from $u to $v")
            return false
        } // for
        true
    } // checkElabels

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Make this multi-directed graph work like an undirected graph by making
     *  sure that for every edge 'u -> v', there is a 'v -> u' edge and that
     *  they have same edge label.
     */
    override def makeUndirected (): MuGraph [TLabel] =
    {
        super.makeUndirected ()
        val edges = elabel.clone.keys            // snapshot keys before mutating
        for ((u, v) <- edges) elabel += (v, u) -> elabel(u, v)
        this
    } // makeUndirected

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert 'this' multi-digraph to a string in a shallow sense.
     *  Large arrays are not converted.  Use 'print' to show all information.
     */
    override def toString: String =
    {
        // FIX: the original omitted the ", " between the two concatenated
        // fragments, producing "...label.length = 5elabel.size = 3...".
        s"MuGraph (ch.length = ${ch.length}, label.length = ${label.length}, " +
        s"elabel.size = ${elabel.size}, inverse = $inverse, name = $name)"
    } // toString

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the 'i'th row/line of 'this' multi-digraph to a string.
     *  @param i     the 'i'th row/line
     *  @param clip  whether to clip out "Set(" and ")"
     */
    override def toLine (i: Int, clip: Boolean = true): String =
    {
        val ch_i = ch(i).toString.replace ("Set(", "").replace (")", "")
        s"$i, ${label(i)}, $ch_i"
    } // toLine

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Print 'this' multi-digraph in a deep sense with all the information.
     *  @param clip  whether to clip out "Set(" and ")"
     */
    override def printG (clip: Boolean = true)
    {
        println (s"MuGraph ($name, $inverse, $size")
        for (i <- ch.indices) println (toLine (i))
        for ((k, v) <- elabel) println (s"$k -> $v")
        println (")")
    } // printG

} // MuGraph class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MuGraph` companion object provides builder methods and example query
* multi-digraphs.
*/
object MuGraph
{
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Build an `MuGraph` from a `Graph`.
* @param gr the base `Graph` for building the `MuGraph`
* @param eLab the edge labels
* @param name the name for the new multi-digraph
*/
def apply [TLabel: ClassTag] (gr: Graph [TLabel], eLab: Map [Pair, SET [TLabel]], name: String): MuGraph [TLabel] =
{
new MuGraph (gr.ch, gr.label, eLab, gr.inverse, name)
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Build an `MuGraph` from a `MGraph` (single edge labels are wrapped in sets).
* @param mgr the base `MGraph` for building the `MuGraph`
* @param name the name for the new multi-digraph
*/
def apply [TLabel: ClassTag] (mgr: MGraph [TLabel], name: String): MuGraph [TLabel] =
{
new MuGraph (mgr.ch, mgr.label, ν(mgr.elabel), mgr.inverse, name)
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Build an `MuGraph` from a `Tree`.
* @param tree the base `Tree` for building the `MuGraph`
* @param name the name for the new multi-digraph
* @param inverse whether to add parent references
*/
def apply (tree: Tree, name: String = "t", inverse: Boolean = false): MuGraph [Double] =
{
MuGraph (Graph (tree, name, inverse), ν (tree.labelMap), name)
} // apply
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Collect the label(s) into a set. ('ν' is the Greek letter nu.)
* @param label the given label(s)
*/
def ν [TLabel: ClassTag] (label: TLabel*): SET [TLabel] = SET (label:_*)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Collect the labels in the map into sets (each single label becomes a
* singleton set keyed by the same vertex pair).
* @param labelMap the initial label map
*/
def ν [TLabel: ClassTag] (labelMap: Map [Pair, TLabel]): Map [Pair, SET [TLabel]] =
{
val vmap = Map [Pair, SET [TLabel]] ()
for ((k, v) <- labelMap) vmap += k -> SET (v)
vmap
} // ν
} // MuGraph object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MuGraphTest` object is used to test the `MuGraph` class using examples
* from the `ExampleMuGraphI` object, which contains multi-digraphs whose vertex
* and edge labels are of type `Int`.
* > run-main scalation.graphalytics.mutable.MuGraphTest
*
object MuGraphTest extends App
{
import ExampleMuGraphI._
g1.printG ()
q1.printG ()
g2.printG ()
q2.printG ()
} // MuGraphTest
*/
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MuGraphTest2` object is used to test the `MuGraph` class using examples
* from the `ExampleMuGraphD` object, which contains multi-digraphs whose vertex
* and edge labels are of type `Double`.
* > run-main scalation.graphalytics.mutable.MuGraphTest2
*/
object MuGraphTest2 extends App
{
// Print the example data (g1, g2) and query (q1, q2) multi-digraphs with
// Double vertex/edge labels.
import ExampleMuGraphD._
g1.printG ()
q1.printG ()
g2.printG ()
q2.printG ()
} // MuGraphTest2
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MuGraphTest3` object is used to test the `MuGraph` class using a
* randomly generated multi-digraph.
* > run-main scalation.graphalytics.mutable.MuGraphTest3
*/
object MuGraphTest3 extends App
{
// Generate a random MGraph, wrap it as a MuGraph and print it.
val mgGen = new MGraphGen [Double]
private val nVertices = 20 // number of vertices
private val nLabels = 5 // number of distinct vertex labels
private val eLabels = 3 // number of distinct edge labels
private val outDegree = 2 // average out degree
private val inverse = false // whether inverse adjacency is used (parents)
private val name = "gr" // name of the graph
val mGraph = mgGen.genRandomGraph (nVertices, nLabels, eLabels, outDegree, inverse, name)
MuGraph (mGraph, "mu" + name).printG ()
} // MuGraphTest3
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.