text stringlengths 1 1.05M |
|---|
#!/usr/bin/env sh
set -evx
# Install mongod of specified version, unpack and create a link to the bin
# directory: ${TRAVIS_BUILD_DIR}/t/Travis-ci/MongoDB
version=$1
# Fail early when no version argument was supplied.
# (-z with quotes is the robust POSIX spelling of the old `[ ! $version ]`.)
if [ -z "$version" ]
then
    echo "No version given to go for"
    exit 1
fi
echo Installing MongoDB version "$version"
# Default to the current directory when not running under Travis.
# NOTE: POSIX `[` only guarantees `=`; the original `==` fails under dash.
if [ "${TRAVIS_BUILD_DIR}x" = "x" ]
then
    TRAVIS_BUILD_DIR='.'
fi
cd "${TRAVIS_BUILD_DIR}/t/Travis-ci"
#/bin/ls -l
# Download the tarball only when it is not already cached.
if [ ! -e "mongodb-linux-x86_64-${version}.tgz" ]
then
    curl -O "https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${version}.tgz"
fi
# Only get mongod and mongos server programs
tar xvfz "mongodb-linux-x86_64-${version}.tgz" "mongodb-linux-x86_64-${version}/bin/mongod"
tar xvfz "mongodb-linux-x86_64-${version}.tgz" "mongodb-linux-x86_64-${version}/bin/mongos"
# Replace any previously unpacked copy of this version.
if [ -e "${version}" ]
then
    rm -rf "${version}"
fi
mv "mongodb-linux-x86_64-${version}/bin" "${version}"
rmdir "mongodb-linux-x86_64-${version}"
|
import sys
import tensorflow as tf
import tensorflow_addons as tfa
from encoder import Encoder
from decoder import Decoder
import utils
class QG(tf.keras.Model):
    """Seq2seq question-generation model: Encoder + attentional Decoder.

    Training runs with teacher forcing on (encoder_input, decoder_input)
    pairs; evaluation/prediction decodes with beam search, which requires
    tiling the encoder output to ``beam_width`` (see :meth:`call`).
    """

    # Special vocabulary token ids shared by encoder and decoder.
    PAD = 0
    GO = 1
    EOS = 2
    UNK = 3

    def __init__(self, params):
        """Build encoder and decoder from a flat ``params`` dict.

        The dict keys read below (voca_size, embedding_size, hidden_size,
        cell_type, ...) are required; no defaults are applied.
        """
        super(QG, self).__init__()
        self.vocab_size = params['voca_size']
        self.embedding_size = params['embedding_size']
        self.hidden_size = params['hidden_size']
        self.cell_type = params['cell_type']
        self.pre_embedding = params['pre_embedding']
        self.embedding_trainable = params['embedding_trainable']
        self.enc_type = params['enc_type']
        self.num_layer = params['num_layer']
        # for loss calculation
        self.batch_size = params['batch_size']
        self.maxlen_s = params['maxlen_s']
        self.maxlen_dec_train = params['maxlen_dec_train']
        self.maxlen_dec_dev = params['maxlen_dec_dev']  # for loss calculation
        self.rnn_dropout = params['dropout']
        self.attn = params['attn']
        self.beam_width = params['beam_width']
        self.length_penalty_weight = params['length_penalty_weight']
        self.encoder = Encoder(pre_embedding=self.pre_embedding, vocab_size=self.vocab_size, embedding_dim=self.embedding_size,
                               embedding_trainable=self.embedding_trainable, enc_type=self.enc_type,
                               num_layer=self.num_layer, hidden_size=self.hidden_size,
                               cell_type=self.cell_type, dropout=self.rnn_dropout, batch_sz=self.batch_size
                               )
        # A bidirectional encoder doubles the state width seen by the decoder.
        if (self.enc_type == 'bi'):
            hidden_size = 2 * self.hidden_size
        else:
            hidden_size = self.hidden_size
        self.decoder = Decoder(pre_embedding=self.pre_embedding, vocab_size=self.vocab_size, embedding_dim=self.embedding_size, enc_type=self.enc_type,
                               attn_type=self.attn,
                               beam_width=self.beam_width, length_penalty_weight=self.length_penalty_weight,
                               num_layer=self.num_layer, dec_units=hidden_size,
                               cell_type=self.cell_type, dropout=self.rnn_dropout, batch_sz=self.batch_size,
                               max_length_input=self.maxlen_s, max_length_output=self.maxlen_dec_train, embedding_trainable=self.embedding_trainable)

    def call(self, inputs, training=False):
        """Run encoder + decoder.

        When ``training`` is True, ``inputs`` is an
        (encoder_input, decoder_input) pair (teacher forcing); otherwise
        ``inputs`` is the encoder input alone and the decoder runs beam
        search from the GO token.
        """
        if training:
            enc_inp, dec_input = inputs
        else:  # EVAL / PREDICT: decoder input is generated, not given.
            enc_inp = inputs
            dec_input = None
        # NOTE(review): these prints execute only at tf.function trace time.
        print("CALL - model - TRAINING: ", training)
        enc_hidden = self.encoder.initialize_hidden_state()
        print("CALL - model - enc_inp.shape: ", enc_inp.shape)
        enc_output, enc_hidden = self.encoder(
            enc_inp, enc_hidden, training=training)
        print("CALL - model - enc_hidden: [batch_size, hidden_size]", enc_hidden.shape)
        print("CALL - model - enc_output [batch_size, max_time, hidden_size]: ", enc_output.shape)
        if training:
            # Set the AttentionMechanism object with encoder_outputs
            self.decoder.attention_mechanism.setup_memory(enc_output)
        else:
            # From official documentation
            # NOTE If you are using the BeamSearchDecoder with a cell wrapped in AttentionWrapper, then you must ensure that:
            # The encoder output has been tiled to beam_width via tfa.seq2seq.tile_batch (NOT tf.tile).
            # The batch_size argument passed to the get_initial_state method of this wrapper is equal to true_batch_size * beam_width.
            # The initial state created with get_initial_state above contains a cell_state value containing properly tiled final state from the encoder.
            enc_out = tfa.seq2seq.tile_batch(
                enc_output, multiplier=self.beam_width)
            print("CALL - model - enc_out.shape = beam_with * [batch_size, max_length_input, rnn_units] :", enc_out.shape)
            self.decoder.attention_mechanism.setup_memory(enc_out)
        # Create AttentionWrapperState as initial_state for decoder
        pred = self.decoder(dec_input, enc_hidden, start_token=self.GO, end_token=self.EOS, training=training)
        return pred

    @tf.function
    def train_step(self, data):
        """One optimization step with teacher forcing.

        ``data`` is (encoder_input, target); the target is shifted to make
        the decoder input (drop last token) and the labels (drop first
        token). Uses ``self.loss`` / ``self.optimizer`` set via compile().
        """
        encoder_inp, targ = data
        loss = 0
        with tf.GradientTape() as tape:
            dec_input = targ[:, :-1]  # Ignore <end> token
            real = targ[:, 1:]  # ignore <start> token
            pred = self((encoder_inp, dec_input),
                        training=True)  # Forward pass
            print("train_step - pred: ", pred)
            loss = self.loss(real, pred)
        # Gradients are computed outside the tape context, over both subnets.
        variables = self.encoder.trainable_variables + self.decoder.trainable_variables
        gradients = tape.gradient(loss, variables)
        self.optimizer.apply_gradients(zip(gradients, variables))
        return {"loss": loss}
# # Add attention wrapper to decoder cell
# self.decoder.set_attention_cell(
# encoder_outputs, enc_input_length, encoder_state, self.enc_layer)
# if not (mode == tf.estimator.ModeKeys.PREDICT and self.beam_width > 0):
# logits = self.decoder.call(
# dec_inputs, self.dec_input_length, self.GO, self.EOS)
# predictions = tf.argmax(input=self.logits, axis=-1)
# else: # Beam decoding
# predictions = self.decoder.call(
# dec_inputs, dec_input_length, self.GO, self.EOS)
# self._calculate_loss(predictions, labels, mode)
# return self._update_or_output(mode)
# def _calculate_length(self, inputs):
# input_length = tf.reduce_sum(
# input_tensor=tf.cast(tf.not_equal(inputs, self.PAD), tf.int32), axis=-1)
# return input_length
# def _calculate_loss(self, dec_inputs, mode):
# if mode == tf.estimator.ModeKeys.PREDICT:
# return
# self.labels = tf.concat([dec_inputs[:, 1:], tf.zeros(
# [self.batch_size, 1], dtype=tf.int32)], 1, name='labels')
# maxlen_label = self.maxlen_dec_train if mode == tf.estimator.ModeKeys.TRAIN else self.maxlen_dec_dev
# current_length = tf.shape(input=self.logits)[1]
# def concat_padding():
# num_pad = maxlen_label - current_length
# padding = tf.zeros(
# [self.batch_size, num_pad, self.voca_size], dtype=self.dtype)
# return tf.concat([self.logits, padding], 1)
# def slice_to_maxlen():
# return tf.slice(self.logits, [0, 0, 0], [self.batch_size, maxlen_label, self.voca_size])
# self.logits = tf.cond(pred=current_length < maxlen_label,
# true_fn=concat_padding,
# false_fn=slice_to_maxlen)
# weight_pad = tf.sequence_mask(
# self.dec_input_length, maxlen_label, self.dtype)
# self.loss = tfa.seq2seq.sequence_loss(
# self.logits,
# self.labels,
# weight_pad,
# average_across_timesteps=True,
# average_across_batch=True,
# softmax_loss_function=None # default : sparse_softmax_cross_entropy
# )
|
#!/bin/bash -eu
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# Build one go-fuzz target and link it against the fuzzing engine.
# $1: Go package path, $2: fuzz entry function, $3: output fuzzer name.
function compile_fuzzer {
  path=$1
  function=$2
  fuzzer=$3
  # Compile and instrument all Go files relevant to this fuzz target.
  go-fuzz -func "$function" -o "$fuzzer.a" "$path"
  # Link Go code ($fuzzer.a) with fuzzing engine to produce fuzz target binary.
  # $CXXFLAGS / $LIB_FUZZING_ENGINE stay unquoted on purpose: they may carry
  # multiple whitespace-separated flags.
  $CXX $CXXFLAGS $LIB_FUZZING_ENGINE "$fuzzer.a" -o "$OUT/$fuzzer"
}
compile_fuzzer . Fuzz fuzz_json
|
import { customElement } from '../../../../../../runtime';
// Minimal custom element used in routing tests: static text, an input to
// hold focus, and a link navigating to the "alpha" route.
@customElement({ name: 'beta', template: `<template>BETA <input> <a href="alpha">Alpha</a></template>` })
export class Beta { }
|
// Action type identifiers for the system-comparison state. Each constant's
// value equals its name so reducers and action creators stay in sync.
const OPEN_ADD_SYSTEM_MODAL = 'OPEN_ADD_SYSTEM_MODAL';
const SELECT_ACTIVE_TAB = 'SELECT_ACTIVE_TAB';
const SET_SELECTED_SYSTEMS_COMPARISON = 'SET_SELECTED_SYSTEMS_COMPARISON';
const HANDLE_SYSTEM_SELECTION = 'HANDLE_SYSTEM_SELECTION';
const HANDLE_BASELINE_SELECTION = 'HANDLE_BASELINE_SELECTION';
const HANDLE_HSP_SELECTION = 'HANDLE_HSP_SELECTION';
const CLEAR_ALL_SELECTIONS = 'CLEAR_ALL_SELECTIONS';

/** Single lookup object so all consumers share identical string values. */
const actionTypes = {
  OPEN_ADD_SYSTEM_MODAL,
  SELECT_ACTIVE_TAB,
  SET_SELECTED_SYSTEMS_COMPARISON,
  HANDLE_SYSTEM_SELECTION,
  HANDLE_BASELINE_SELECTION,
  HANDLE_HSP_SELECTION,
  CLEAR_ALL_SELECTIONS,
};

export default actionTypes;
|
import Dialog from '@material-ui/core/Dialog';
import Box from '@material-ui/core/Box';
import Button from '@material-ui/core/Button';
function UserDeleteDialog(props) {
if (!props.open) {
return null;
}
return <Dialog PaperProps={{className: props.classes.dialogPaper}} open={props.open} onClose={props.onClose}>
<Box className={props.classes.deleteDialog}>
<h2>
{props.t('Do you want to delete user ') + props.user.common.name + '?'}
</h2>
<div>
<Button onClick={()=>props.deleteUser(props.user._id)}>Delete</Button>
<Button onClick={props.onClose}>Cancel</Button>
</div>
</Box>
</Dialog>;
}
export default UserDeleteDialog; |
#!/bin/bash
#
# CIS Debian 7 Hardening
#
#
# 6.14 Ensure SNMP Server is not enabled (Not Scored)
#
set -e # One error, it's over
set -u # One variable unset, it's over

# Package(s) whose presence violates this CIS rule.
PACKAGES='snmpd'

# This function will be called if the script status is on enabled / audit mode
audit () {
    # $PACKAGES is intentionally unquoted: word splitting yields one package
    # per iteration. $FNRET is set by is_pkg_installed (from the sourced lib).
    for PACKAGE in $PACKAGES; do
        is_pkg_installed "$PACKAGE"
        if [ "$FNRET" = 0 ]; then
            crit "$PACKAGE is installed!"
        else
            ok "$PACKAGE is absent"
        fi
    done
}

# This function will be called if the script status is on enabled mode
apply () {
    for PACKAGE in $PACKAGES; do
        is_pkg_installed "$PACKAGE"
        if [ "$FNRET" = 0 ]; then
            crit "$PACKAGE is installed, purging it"
            apt-get purge "$PACKAGE" -y
            apt-get autoremove
        else
            ok "$PACKAGE is absent"
        fi
    done
}

# This function will check config parameters required
check_config() {
    :
}

# Source Root Dir Parameter
if [ ! -r /etc/default/cis-hardening ]; then
    echo "There is no /etc/default/cis-hardening file, cannot source CIS_ROOT_DIR variable, aborting"
    exit 128
else
    . /etc/default/cis-hardening
    # Quoted expansion so an empty/whitespace value cannot break the test.
    if [ -z "${CIS_ROOT_DIR:-}" ]; then
        echo "No CIS_ROOT_DIR variable, aborting"
        exit 128
    fi
fi

# Main function, will call the proper functions given the configuration (audit, enabled, disabled)
if [ -r "$CIS_ROOT_DIR"/lib/main.sh ]; then
    . "$CIS_ROOT_DIR"/lib/main.sh
else
    echo "Cannot find main.sh, have you correctly defined your root directory? Current value is $CIS_ROOT_DIR in /etc/default/cis-hardening"
    exit 128
fi
|
const db = require('../../data/db-config');

/** Resolve to all rows in the books table. */
const findAll = () => {
  return db('books');
};

/** Resolve to all books matching the given column/value filter object. */
const findBy = (filter) => {
  return db('books').where(filter);
};

/** Resolve to a single book by primary key, or undefined when absent. */
const findById = (id) => {
  return db('books').where({ id }).first();
};

/** Insert a book and resolve to the newly created row. */
const create = async (book) => {
  const [id] = await db('books').insert(book).returning('id');
  return findById(id);
};

/**
 * Update a book by id and resolve to the updated row(s).
 * Bug fix: the original chained `.first()` before `.update()`, which turns
 * the builder into a limited select before switching to an update and can
 * generate invalid SQL; `.update()` must follow `.where()` directly.
 */
const update = (id, book) => {
  return db('books').where({ id }).update(book).returning('*');
};

/** Delete a book by id; resolves to the number of deleted rows. */
const remove = (id) => {
  return db('books').where({ id }).del();
};

module.exports = { findAll, findBy, findById, create, update, remove };
|
#!/bin/bash
# Simple script just to generate the initial template, which
# 1. creates IAM Users
# 2. assigns IAM User to the same existing IAM Group(s)
# 3. creates IAM Access Key and Secret for each IAM User
# Generated CloudFormation template path.
OUTPUT_FILE=tmp_users.template

# One entry per IAM user, as "Firstname Lastname".
declare -a USERS=(
    "Firstname Lastname"
)

# Turn "Firstname Lastname" into "firstname.lastname".
# Bug fix: the original piped into bare `python`, which is absent on
# python3-only hosts; plain `tr` does the lowercasing portably (and the
# logic is no longer duplicated between the Resources and Outputs loops).
lowercase_user_name() {
    echo "${1// /.}" | tr '[:upper:]' '[:lower:]'
}

# Add Header
cat > ${OUTPUT_FILE} << EOF
AWSTemplateFormatVersion: '2010-09-09'
Description: >-
  Create IAM Users and assign users to the corresponding IAM Groups.
Resources:
EOF

# Add Resources
for user in "${USERS[@]}"
do
    echo "Adding $user"
    name_no_space=${user//[ ]/}
    user_name=$(lowercase_user_name "$user")
    cat >> ${OUTPUT_FILE} << EOF
  ${name_no_space}IamUser:
    Type: AWS::IAM::User
    Properties:
      UserName: ${user_name}
      Groups:
        - Developers
  ${name_no_space}IamAccessKey:
    DependsOn: ${name_no_space}IamUser
    Type: AWS::IAM::AccessKey
    Properties:
      UserName: ${user_name}
EOF
done

# Add Outputs
echo "Outputs:" >> ${OUTPUT_FILE}
for user in "${USERS[@]}"
do
    name_no_space=${user//[ ]/}
    cat >> ${OUTPUT_FILE} << EOF
  ${name_no_space}IamAccessKeyId:
    Value: !Ref ${name_no_space}IamAccessKey
  ${name_no_space}IamSecretAccessKey:
    Value: !GetAtt ${name_no_space}IamAccessKey.SecretAccessKey
EOF
done
|
<reponame>diegosiqueir4/wmss
package de.wwu.wmss.core;
/**
 * Lightweight value object describing a linked resource: a URL with a
 * display label, plus optional action and type descriptors. All fields
 * default to the empty string, never null.
 */
public class Resource {

    private String resourceURL = "";
    private String resourceLabel = "";
    private String action = "";
    private String resourceType = "";

    /** Creates an empty resource; every field starts as "". */
    public Resource() {
        super();
    }

    /** Creates a resource pointing at {@code url}, displayed as {@code label}. */
    public Resource(String url, String label) {
        super();
        this.resourceURL = url;
        this.resourceLabel = label;
    }

    public String getUrl() { return resourceURL; }

    public void setUrl(String url) { this.resourceURL = url; }

    public String getLabel() { return resourceLabel; }

    public void setLabel(String label) { this.resourceLabel = label; }

    public String getAction() { return action; }

    public void setAction(String action) { this.action = action; }

    public String getType() { return resourceType; }

    public void setType(String type) { this.resourceType = type; }
}
|
<filename>AP2/src/teorica/thread/BarraProgresso.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package teorica.thread;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author PauloCésar
*/
/**
 * Demo thread simulating a progress bar: prints a fixed message 1000 times
 * and then pauses for five seconds.
 */
public class BarraProgresso extends Thread {

    /** Prints the progress message 1000 times, then sleeps for 5 seconds. */
    public void inicia() {
        int count = 0;
        while (count < 1000) {
            System.out.println("Barra de progresso...");
            count++;
        }
        try {
            Thread.sleep(5000);
        } catch (InterruptedException ex) {
            Logger.getLogger(BarraProgresso.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Thread entry point; delegates to {@link #inicia()}. */
    @Override
    public void run() {
        inicia();
    }
}
|
package restauthserver;
import fontysmultipurposelibrary.logging.Logger;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
 * Utility for reading settings from /config.properties on the classpath.
 */
public class PropertyFileHelper {

    private PropertyFileHelper() {}

    /**
     * Reads the "ConnectionString" property from /config.properties.
     * Errors are logged and result in a null return value.
     */
    public static String getDbConnectionString() {
        Properties properties = new Properties();
        String fileName = "/config.properties";
        // Bug fix: the original called PropertyFileHelper.class.getClass(),
        // which is java.lang.Class and resolves resources against the
        // bootstrap class loader, so the application's config file is never
        // found. Resolve against this class instead, and close the stream.
        try (InputStream in = PropertyFileHelper.class.getResourceAsStream(fileName)) {
            if (in == null) {
                // Previously this case caused a NullPointerException inside load().
                throw new IOException("Resource not found on classpath: " + fileName);
            }
            properties.load(in);
        } catch (IOException e) {
            Logger.getInstance().log(e);
        }
        return properties.getProperty("ConnectionString");
    }
}
|
<filename>FEBioXML/FEBModel.cpp
/*This file is part of the FEBio source code and is licensed under the MIT license
listed below.
See Copyright-FEBio.txt for details.
Copyright (c) 2020 University of Utah, The Trustees of Columbia University in
the City of New York, and others.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.*/
#include "stdafx.h"
#include "FEBModel.h"
#include <FECore/FEModel.h>
#include <FECore/FECoreKernel.h>
#include <FECore/FEMaterial.h>
#include <FECore/FEDomain.h>
#include <FECore/FEShellDomain.h>
#include <FECore/log.h>
//=============================================================================
// NodeSet: a named list of node IDs belonging to a part.
FEBModel::NodeSet::NodeSet() {}

// Copy constructor: copies both name and node list.
FEBModel::NodeSet::NodeSet(const FEBModel::NodeSet& set)
{
	m_name = set.m_name;
	m_node = set.m_node;
}

FEBModel::NodeSet::NodeSet(const string& name) : m_name(name) {}

// Trivial accessors for the set name and its node ID list.
void FEBModel::NodeSet::SetName(const string& name) { m_name = name; }
const string& FEBModel::NodeSet::Name() const { return m_name; }
void FEBModel::NodeSet::SetNodeList(const vector<int>& node) { m_node = node; }
const vector<int>& FEBModel::NodeSet::NodeList() const { return m_node; }
//=============================================================================
// ElementSet: a named list of element IDs belonging to a part.
FEBModel::ElementSet::ElementSet() {}

// Copy constructor: copies both name and element list.
FEBModel::ElementSet::ElementSet(const FEBModel::ElementSet& set)
{
	m_name = set.m_name;
	m_elem = set.m_elem;
}

FEBModel::ElementSet::ElementSet(const string& name) : m_name(name) {}

// Trivial accessors for the set name and its element ID list.
void FEBModel::ElementSet::SetName(const string& name) { m_name = name; }
const string& FEBModel::ElementSet::Name() const { return m_name; }
void FEBModel::ElementSet::SetElementList(const vector<int>& elem) { m_elem = elem; }
const vector<int>& FEBModel::ElementSet::ElementList() const { return m_elem; }
//=============================================================================
// SurfacePair: names a (primary, secondary) surface pairing used by contact.
FEBModel::SurfacePair::SurfacePair() {}

// Copy constructor: copies the pair name and both surface names.
FEBModel::SurfacePair::SurfacePair(const SurfacePair& surfPair)
{
	m_name = surfPair.m_name;
	m_primary = surfPair.m_primary;
	m_secondary = surfPair.m_secondary;
}

const string& FEBModel::SurfacePair::Name() const { return m_name; }
//=============================================================================
// Domain: a named element block with an element spec, a material name,
// and (for shells) an optional default thickness.
FEBModel::Domain::Domain()
{
	// 0.0 means "no default shell thickness assigned".
	m_defaultShellThickness = 0.0;
}

// Copy constructor: copies spec, names, elements, and shell thickness.
FEBModel::Domain::Domain(const FEBModel::Domain& dom)
{
	m_spec = dom.m_spec;
	m_name = dom.m_name;
	m_matName = dom.m_matName;
	m_Elem = dom.m_Elem;
	m_defaultShellThickness = dom.m_defaultShellThickness;
}

FEBModel::Domain::Domain(const FE_Element_Spec& spec) : m_spec(spec)
{
	m_defaultShellThickness = 0.0;
}

// Trivial accessors.
const string& FEBModel::Domain::Name() const { return m_name; }
void FEBModel::Domain::SetName(const string& name) { m_name = name; }
void FEBModel::Domain::SetMaterialName(const string& name) { m_matName = name; }
const string& FEBModel::Domain::MaterialName() const { return m_matName; }
void FEBModel::Domain::SetElementList(const vector<ELEMENT>& el) { m_Elem = el; }
const vector<FEBModel::ELEMENT>& FEBModel::Domain::ElementList() const { return m_Elem; }
//=============================================================================
// Surface: a named list of facets.
FEBModel::Surface::Surface() {}

// Copy constructor: copies name and facet list.
FEBModel::Surface::Surface(const FEBModel::Surface& surf)
{
	m_name = surf.m_name;
	m_Face = surf.m_Face;
}

FEBModel::Surface::Surface(const string& name) : m_name(name) {}

// Trivial accessors.
const string& FEBModel::Surface::Name() const { return m_name; }
void FEBModel::Surface::SetName(const string& name) { m_name = name; }
void FEBModel::Surface::SetFacetList(const vector<FEBModel::FACET>& face) { m_Face = face; }
const vector<FEBModel::FACET>& FEBModel::Surface::FacetList() const { return m_Face; }
//=============================================================================
// DiscreteSet: a named list of two-node discrete elements (e.g. springs).
FEBModel::DiscreteSet::DiscreteSet() {}

// Copy constructor: copies name and element list.
FEBModel::DiscreteSet::DiscreteSet(const FEBModel::DiscreteSet& set)
{
	m_name = set.m_name;
	m_elem = set.m_elem;
}

void FEBModel::DiscreteSet::SetName(const string& name) { m_name = name; }
const string& FEBModel::DiscreteSet::Name() const { return m_name; }

// Appends one discrete element connecting nodes n0 and n1.
void FEBModel::DiscreteSet::AddElement(int n0, int n1) { m_elem.push_back(ELEM{ n0, n1 } ); }
const vector<FEBModel::DiscreteSet::ELEM>& FEBModel::DiscreteSet::ElementList() const { return m_elem; }
//=============================================================================
// Part: a named mesh fragment owning nodes plus heap-allocated domains,
// surfaces, node/element sets, surface pairs, and discrete sets.
FEBModel::Part::Part() {}
FEBModel::Part::Part(const std::string& name) : m_name(name) {}

// Copy constructor: deep-copies every owned component so each Part instance
// owns (and is responsible for deleting) its own allocations.
FEBModel::Part::Part(const FEBModel::Part& part)
{
	m_name = part.m_name;
	m_Node = part.m_Node;
	for (size_t i=0; i<part.m_Dom.size() ; ++i) AddDomain (new Domain (*part.m_Dom[i]));
	for (size_t i=0; i<part.m_Surf.size(); ++i) AddSurface(new Surface(*part.m_Surf[i]));
	for (size_t i=0; i<part.m_NSet.size(); ++i) AddNodeSet(new NodeSet(*part.m_NSet[i]));
	for (size_t i=0; i<part.m_ESet.size(); ++i) AddElementSet(new ElementSet(*part.m_ESet[i]));
	for (size_t i = 0; i < part.m_SurfPair.size(); ++i) AddSurfacePair(new SurfacePair(*part.m_SurfPair[i]));
	for (size_t i = 0; i < part.m_DiscSet.size(); ++i) AddDiscreteSet(new DiscreteSet(*part.m_DiscSet[i]));
}
// Destructor: releases every owned component.
// Bug fix: m_ESet entries are allocated by the copy constructor (and by
// callers of AddElementSet) but were never deleted here, leaking every
// element set.
FEBModel::Part::~Part()
{
	for (size_t i=0; i<m_NSet.size(); ++i) delete m_NSet[i];
	for (size_t i=0; i<m_ESet.size(); ++i) delete m_ESet[i];
	for (size_t i=0; i<m_Dom.size(); ++i) delete m_Dom[i];
	for (size_t i = 0; i<m_Surf.size(); ++i) delete m_Surf[i];
	for (size_t i = 0; i < m_SurfPair.size(); ++i) delete m_SurfPair[i];
	for (size_t i = 0; i < m_DiscSet.size(); ++i) delete m_DiscSet[i];
}
void FEBModel::Part::SetName(const std::string& name) { m_name = name; }
const string& FEBModel::Part::Name() const { return m_name; }

// Appends `nodes` to this part's node list; existing nodes are preserved.
void FEBModel::Part::AddNodes(const std::vector<NODE>& nodes)
{
	size_t N0 = m_Node.size();
	size_t N = nodes.size();
	if (N > 0)
	{
		m_Node.resize(N0 + N);
		for (int i=0; i<N; ++i)
		{
			m_Node[N0 + i] = nodes[i];
		}
	}
}

// Linear search for a domain by name; returns 0 (null) when not found.
FEBModel::Domain* FEBModel::Part::FindDomain(const string& name)
{
	for (size_t i = 0; i<m_Dom.size(); ++i)
	{
		Domain* dom = m_Dom[i];
		if (dom->Name() == name) return dom;
	}
	return 0;
}

// The part takes ownership of pointers passed to the Add* methods.
void FEBModel::Part::AddDomain(FEBModel::Domain* dom) { m_Dom.push_back(dom); }
void FEBModel::Part::AddSurface(FEBModel::Surface* surf) { m_Surf.push_back(surf); }

// Linear search for a surface by name; returns nullptr when not found.
FEBModel::Surface* FEBModel::Part::FindSurface(const string& name)
{
	for (size_t i = 0; i < m_Surf.size(); ++i)
	{
		Surface* surf = m_Surf[i];
		if (surf->Name() == name) return surf;
	}
	return nullptr;
}
//=============================================================================
FEBModel::FEBModel()
{
}

// Destructor: deletes all owned parts.
FEBModel::~FEBModel()
{
	for (size_t i=0; i<m_Part.size(); ++i) delete m_Part[i];
	m_Part.clear();
}

// Number of parts currently in the model.
size_t FEBModel::Parts() const
{
	return m_Part.size();
}

// Unchecked index access to a part.
FEBModel::Part* FEBModel::GetPart(int i)
{
	return m_Part[i];
}

// Creates a new named (empty) part, retains ownership, and returns it.
FEBModel::Part* FEBModel::AddPart(const std::string& name)
{
	Part* part = new Part(name);
	m_Part.push_back(part);
	return part;
}

// Takes ownership of an externally created part.
void FEBModel::AddPart(FEBModel::Part* part)
{
	m_Part.push_back(part);
}

// Linear search by part name; returns 0 (null) when not found.
FEBModel::Part* FEBModel::FindPart(const string& name)
{
	for (size_t i=0; i<m_Part.size(); ++i)
	{
		Part* p = m_Part[i];
		if (p->Name() == name) return p;
	}
	return 0;
}
// Instantiates `part` into the FEModel's mesh: creates nodes (transformed by
// T), optionally the domains/elements, then node sets, surfaces, element
// sets, surface pairs, and discrete sets. Node/element IDs from the input
// file are remapped to mesh indices via the NLT/ELT lookup tables built
// below. Returns false when a material, domain, or facet set cannot be
// resolved.
bool FEBModel::BuildPart(FEModel& fem, Part& part, bool buildDomains, const FETransform& T)
{
	// we'll need the kernel for creating domains
	FECoreKernel& febio = FECoreKernel::GetInstance();
	FEMesh& mesh = fem.GetMesh();

	// build node-index lookup table
	// noff is the smallest file node ID; NLT[id - noff] maps to a mesh index.
	int noff = -1, maxID = 0;
	int N0 = mesh.Nodes();
	int NN = part.Nodes();
	for (int i=0; i<NN; ++i)
	{
		int nid = part.GetNode(i).id;
		if ((noff < 0) || (nid < noff)) noff = nid;
		if (nid > maxID) maxID = nid;
	}
	vector<int> NLT(maxID - noff + 1, -1);
	for (int i=0; i<NN; ++i)
	{
		int nid = part.GetNode(i).id - noff;
		NLT[nid] = i + N0;
	}

	// build element-index lookup table (same scheme as NLT, over all domains)
	int eoff = -1; maxID = 0;
	int E0 = mesh.Elements();
	int NDOM = part.Domains();
	for (int i=0; i<NDOM; ++i)
	{
		const Domain& dom = part.GetDomain(i);
		int NE = dom.Elements();
		for (int j=0; j<NE; ++j)
		{
			int eid = dom.GetElement(j).id;
			if ((eoff < 0) || (eid < eoff)) eoff = eid;
			if (eid > maxID) maxID = eid;
		}
	}
	vector<int> ELT(maxID - eoff + 1, -1);
	int ecount = E0;
	for (int i = 0; i<NDOM; ++i)
	{
		const Domain& dom = part.GetDomain(i);
		int NE = dom.Elements();
		for (int j = 0; j<NE; ++j)
		{
			int eid = dom.GetElement(j).id - eoff;
			ELT[eid] = ecount++;
		}
	}

	// create the nodes (positions are mapped through the transform T)
	int nid = N0;
	mesh.AddNodes(NN);
	int n = 0;
	for (int j = 0; j<NN; ++j)
	{
		NODE& partNode = part.GetNode(j);
		FENode& meshNode = mesh.Node(N0 + n++);
		meshNode.SetID(++nid);
		meshNode.m_r0 = T.Transform(partNode.r);
		meshNode.m_rt = meshNode.m_r0;
	}
	assert(n == NN);

	// get the part name; used as a "PartName." prefix for all created items
	string partName = part.Name();
	if (partName.empty() == false) partName += ".";

	// process domains
	if (buildDomains)
	{
		int eid = E0;
		for (int i = 0; i < NDOM; ++i)
		{
			const Domain& partDomain = part.GetDomain(i);

			// element count
			int elems = partDomain.Elements();

			// get the element spect
			FE_Element_Spec spec = partDomain.ElementSpec();

			// get the material
			string matName = partDomain.MaterialName();
			FEMaterial* mat = fem.FindMaterial(matName.c_str());
			if (mat == 0) return false;

			// create the domain
			FEDomain* dom = febio.CreateDomain(spec, &mesh, mat);
			if (dom == 0) return false;

			if (dom->Create(elems, spec) == false)
			{
				return false;
			}
			dom->SetMatID(mat->GetID() - 1);
			string domName = partName + partDomain.Name();
			dom->SetName(domName);

			// process element data: remap file node IDs to mesh node indices
			for (int j = 0; j < elems; ++j)
			{
				const ELEMENT& domElement = partDomain.GetElement(j);
				FEElement& el = dom->ElementRef(j);
				el.SetID(++eid);
				int ne = el.Nodes();
				for (int n = 0; n < ne; ++n) el.m_node[n] = NLT[domElement.node[n] - noff];
			}

			// apply a default shell thickness, when one was specified
			if (partDomain.m_defaultShellThickness != 0.0)
			{
				double h0 = partDomain.m_defaultShellThickness;
				FEShellDomain* shellDomain = dynamic_cast<FEShellDomain*>(dom);
				if (shellDomain)
				{
					int ne = shellDomain->Elements();
					for (int n = 0; n < ne; ++n)
					{
						FEShellElement& el = shellDomain->Element(n);
						for (int k = 0; k < el.Nodes(); ++k) el.m_h0[k] = h0;
					}
				}
				else
				{
					FEModel* pfem = &fem;
					feLogWarningEx(pfem, "Shell thickness assigned on non-shell part %s", partDomain.Name().c_str());
				}
			}

			// add the domain to the mesh
			mesh.AddDomain(dom);

			// initialize material point data
			dom->CreateMaterialPointData();
		}
	}

	// create node sets
	int NSets = part.NodeSets();
	for (int i = 0; i<NSets; ++i)
	{
		NodeSet* set = part.GetNodeSet(i);

		// create a new node set
		FENodeSet* feset = fecore_alloc(FENodeSet, &fem);

		// add the name
		string name = partName + set->Name();
		feset->SetName(name.c_str());

		// copy indices (file node IDs remapped to mesh indices)
		vector<int> nodeList = set->NodeList();
		int nn = (int)nodeList.size();
		for (int j=0; j<nn; ++j) nodeList[j] = NLT[nodeList[j] - noff];
		feset->Add(nodeList);

		// add it to the mesh
		mesh.AddNodeSet(feset);
	}

	// create surfaces
	int Surfs = part.Surfaces();
	for (int i=0; i<Surfs; ++i)
	{
		Surface* surf = part.GetSurface(i);
		int faces = surf->Facets();

		// create a new facet set
		FEFacetSet* fset = fecore_alloc(FEFacetSet, &fem);
		string name = partName + surf->Name();
		fset->SetName(name.c_str());

		// copy data
		fset->Create(faces);
		for (int j=0; j<faces; ++j)
		{
			FACET& srcFacet = surf->GetFacet(j);
			FEFacetSet::FACET& face = fset->Face(j);
			face.ntype = srcFacet.ntype;
			int nf = srcFacet.ntype; // we assume that the type also identifies the number of nodes
			for (int n=0; n<nf; ++n) face.node[n] = NLT[srcFacet.node[n] - noff];
		}

		// add it to the mesh
		mesh.AddFacetSet(fset);
	}

	// create element sets
	int ESets = part.ElementSets();
	for (int i=0; i<ESets; ++i)
	{
		ElementSet& eset = *part.GetElementSet(i);
		vector<int> elist = eset.ElementList();
		int ne = (int) elist.size();

		FEElementSet* feset = new FEElementSet(&fem);

		string name = partName + eset.Name();
		feset->SetName(name);

		// If a domain exists with the same name, we assume
		// that this element set refers to the that domain (TODO: should actually check this!)
		FEDomain* dom = mesh.FindDomain(name);
		if (dom) feset->Create(dom);
		else
		{
			// A domain with the same name is not found, but it is possible that this
			// set still coincides with a domain, so let's see if we can find it.
			// see if all elements belong to the same domain
			bool oneDomain = true;
			FEElement* el = mesh.FindElementFromID(elist[0]); assert(el);
			FEDomain* dom = dynamic_cast<FEDomain*>(el->GetMeshPartition());
			for (int i = 1; i < elist.size(); ++i)
			{
				FEElement* el_i = mesh.FindElementFromID(elist[i]); assert(el);
				FEDomain* dom_i = dynamic_cast<FEDomain*>(el_i->GetMeshPartition());
				if (dom != dom_i)
				{
					oneDomain = false;
					break;
				}
			}

			// assign indices to element set
			if (oneDomain && (dom->Elements() == elist.size()))
				feset->Create(dom, elist);
			else
			{
				// Couldn't find a single domain.
				// But maybe this set encompasses the entire mesh?
				if (elist.size() == mesh.Elements())
				{
					FEDomainList allDomains;
					for (int i = 0; i < mesh.Domains(); ++i) allDomains.AddDomain(&mesh.Domain(i));
					feset->Create(allDomains);
				}
				else
				{
					feset->Create(elist);
				}
			}
		}
		mesh.AddElementSet(feset);
	}

	// create surface pairs (both facet sets must already exist in the mesh)
	int SPairs = part.SurfacePairs();
	for (int i = 0; i < SPairs; ++i)
	{
		SurfacePair& spair = *part.GetSurfacePair(i);
		string name = partName + spair.Name();

		FESurfacePair* fesurfPair = new FESurfacePair(&mesh);
		mesh.AddSurfacePair(fesurfPair);
		fesurfPair->SetName(name);

		FEFacetSet* surf1 = mesh.FindFacetSet(spair.m_primary);
		if (surf1 == nullptr) return false;
		fesurfPair->SetPrimarySurface(surf1);

		FEFacetSet* surf2 = mesh.FindFacetSet(spair.m_secondary);
		if (surf2 == nullptr) return false;
		fesurfPair->SetSecondarySurface(surf2);
	}

	// create discrete element sets (node pairs remapped via NLT)
	int DSets = part.DiscreteSets();
	for (int i = 0; i < DSets; ++i)
	{
		DiscreteSet& dset = *part.GetDiscreteSet(i);
		string name = partName + dset.Name();

		FEDiscreteSet* fedset = new FEDiscreteSet(&mesh);
		mesh.AddDiscreteSet(fedset);
		fedset->SetName(name);

		const std::vector<DiscreteSet::ELEM>& elemList = dset.ElementList();
		for (int j = 0; j < elemList.size(); ++j)
		{
			int n0 = NLT[elemList[j].node[0] - noff];
			int n1 = NLT[elemList[j].node[1] - noff];
			fedset->add(n0, n1);
		}
	}

	return true;
}
|
#!/usr/bin/env sh

# This script is used for testing using Travis
# It is intended to work on their VM set up: Ubuntu 12.04 LTS
# A minimal current TL is installed adding only the packages that are
# required

# See if there is a cached version of TL available
# (the cache, when present, lives in /tmp/texlive between builds)
export PATH=/tmp/texlive/bin/x86_64-linux:$PATH
if ! command -v texlua > /dev/null; then
  # Obtain TeX Live
  wget https://mirror.ctan.org/systems/texlive/tlnet/install-tl-unx.tar.gz
  tar -xzf install-tl-unx.tar.gz
  # The unpacked directory is dated, e.g. install-tl-20240301.
  cd install-tl-20*

  # Install a minimal system
  ./install-tl --profile=../support/texlive.profile

  cd ..
fi

# Update tlmgr itself
tlmgr update --self

# The test framework itself
tlmgr install l3build

# Required to build plain and LaTeX formats including (u)pLaTeX
tlmgr install latex-bin luahbtex platex uplatex tex xetex

# Then get the rest of required LaTeX
tlmgr install amsmath tools

# Assuming a 'basic' font set up, metafont is required to avoid
# warnings with some packages and errors with others
tlmgr install metafont mfware

# Dependencies for tests that are not auto-resolved
tlmgr install bibtex lualatex-math

# For the doc target and testing l3doc
tlmgr install \
  alphalph \
  amsfonts \
  bookmark \
  booktabs \
  catchfile \
  colortbl \
  csquotes \
  dvips \
  ec \
  enumitem \
  epstopdf \
  epstopdf-pkg \
  everysel \
  fancyvrb \
  hologo \
  hyperref \
  lipsum \
  listings \
  makeindex \
  mathpazo \
  metalogo \
  oberdiek \
  pgf \
  psnfss \
  ragged2e \
  siunitx \
  times \
  underscore \
  units

# Keep no backups (not required, simply makes cache bigger)
tlmgr option -- autobackup 0

# Update the TL install but add nothing new
tlmgr update --all --no-auto-install
|
<filename>shaders/blurbycoch.c
#include "blurbycoc.c"
#ifdef FRAGMENT_SHADER
// Fragment entry point: writes the gaussian-blurred color for this texel.
void main(void)
{
	// One-texel horizontal step.
	// Bug fix: the original declaration was missing its terminating ';',
	// which is a GLSL compile error.
	// NOTE(review): `d` is not passed to gaussblur() below — confirm whether
	// blurbycoc.c is expected to consume it, or whether it is dead code.
	vec2 d = vec2(1.0/scrnsz.x, 0.0);
	gl_FragColor = vec4(gaussblur(g_texcoord).rgb, 1.0);
}
#endif
|
<reponame>wujia28762/Tmate
package com.honyum.elevatorMan.adapter;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.honyum.elevatorMan.R;
import com.honyum.elevatorMan.data.RepairTaskInfo;
import java.util.List;
/**
* Created by 李有鬼 on 2017/1/9 0009
*/
/**
 * List adapter for repair-task rows. Uses the standard ViewHolder pattern so
 * recycled item views do not repeat findViewById lookups.
 */
public class RepairTaskAdapter extends MyBaseAdapter<RepairTaskInfo> {

    public RepairTaskAdapter(Context context, List<RepairTaskInfo> dataSource) {
        super(context, dataSource);
    }

    /** Binds one repair task to a (possibly recycled) row view. */
    @Override
    public View getItemView(int position, View convertView, ViewGroup parent) {
        ViewHolder vh;
        if (convertView == null) {
            convertView = inflater.inflate(R.layout.layout_repair_task_item, parent, false);
            vh = new ViewHolder();
            vh.tvNumber = (TextView) convertView.findViewById(R.id.tv_number);
            vh.tvName = (TextView) convertView.findViewById(R.id.tv_name);
            vh.tvTel = (TextView) convertView.findViewById(R.id.tv_tel);
            vh.tvState = (TextView) convertView.findViewById(R.id.tv_state);
            convertView.setTag(vh);
        } else {
            vh = (ViewHolder) convertView.getTag();
        }
        RepairTaskInfo info = getItem(position);
        // 1-based row number. (Was built as "" + n + ""; String.valueOf is
        // the equivalent, idiomatic form.)
        vh.tvNumber.setText(String.valueOf(position + 1));
        // NOTE(review): this field-to-view mapping looks shifted — the "name"
        // view shows the phone number, the "tel" view shows the phenomenon,
        // and the "state" view shows the creation time. Confirm against
        // layout_repair_task_item before changing it; kept as-is here.
        vh.tvName.setText(info.getTel());
        vh.tvTel.setText(info.getPhenomenon());
        vh.tvState.setText(info.getCreateTime());
        return convertView;
    }

    /** Cached row sub-views; static so it holds no reference to the adapter. */
    private static class ViewHolder {
        private TextView tvNumber, tvName, tvTel, tvState;
    }
}
|
#!/usr/bin/env bash
# Copyright 2017 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
#
# This script will build perfops-cli and calculate hash for each
# (PERFOPS_BUILD_PLATFORMS, PERFOPS_BUILD_ARCHS) pair.
# PERFOPS_BUILD_PLATFORMS="linux" PERFOPS_BUILD_ARCHS="amd64" ./hack/build-all.sh
# can be called to build only for linux-amd64
set -e
# Version metadata baked into the binary via -ldflags.
VERSION=$(git describe --tags --dirty)
COMMIT_HASH=$(git rev-parse --short HEAD 2>/dev/null)
# NOTE(review): `date --iso-8601` is GNU-only and fails on macOS/BSD date —
# confirm this only ever runs on Linux CI.
DATE=$(date --iso-8601)
VERSION_PKG="github.com/ProspectOne/perfops-cli/cmd"
API_PKG="github.com/ProspectOne/perfops-cli/api"
GO_BUILD_CMD="go build -a -installsuffix cgo"
GO_BUILD_LDFLAGS="-s -w -X $VERSION_PKG.commitHash=$COMMIT_HASH -X $VERSION_PKG.buildDate=$DATE -X $VERSION_PKG.version=$VERSION -X $API_PKG.libVersion=$VERSION"
# Defaults when the caller did not restrict platforms/architectures.
if [ -z "$PERFOPS_BUILD_PLATFORMS" ]; then
    PERFOPS_BUILD_PLATFORMS="linux darwin"
fi
if [ -z "$PERFOPS_BUILD_ARCHS" ]; then
    PERFOPS_BUILD_ARCHS="amd64"
fi
mkdir -p release
# The variables are plain space-separated strings; the unquoted ${var[@]}
# expansion relies on word splitting to iterate the entries.
for OS in ${PERFOPS_BUILD_PLATFORMS[@]}; do
    for ARCH in ${PERFOPS_BUILD_ARCHS[@]}; do
        echo "Building for $OS/$ARCH"
        # Static (CGO-disabled) cross-compile; hash recorded next to the binary.
        GOARCH=$ARCH GOOS=$OS CGO_ENABLED=0 $GO_BUILD_CMD -ldflags "$GO_BUILD_LDFLAGS"\
            -o "release/perfops-$OS-$ARCH" .
        sha256sum "release/perfops-$OS-$ARCH" > "release/perfops-$OS-$ARCH".sha256
    done
done
|
// message.model.ts
import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose';
import { Document } from 'mongoose';
/**
 * Mongoose document representing one message sent from `sender` to
 * `recipient`.
 */
@Schema()
export class Message extends Document {
  /** Required sender identifier. */
  @Prop({ required: true })
  sender: string;

  /** Required recipient identifier. */
  @Prop({ required: true })
  recipient: string;

  /** Required message text. */
  @Prop({ required: true })
  content: string;

  /** Creation time; defaults to insertion time via Date.now. */
  @Prop({ default: Date.now })
  timestamp: Date;
}

export const MessageSchema = SchemaFactory.createForClass(Message);
// messages.service.ts
import { Injectable, NotFoundException } from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { Model } from 'mongoose';
import { Message } from './message.model';
@Injectable()
export class MessagesService {
constructor(@InjectModel(Message.name) private messageModel: Model<Message>) {}
async createMessage(sender: string, recipient: string, content: string): Promise<Message> {
const createdMessage = new this.messageModel({ sender, recipient, content });
return createdMessage.save();
}
async getMessagesByRecipient(recipient: string): Promise<Message[]> {
return this.messageModel.find({ recipient }).exec();
}
async updateMessage(id: string, content: string): Promise<Message> {
return this.messageModel.findByIdAndUpdate(id, { content }, { new: true }).exec();
}
async deleteMessage(id: string): Promise<Message> {
return this.messageModel.findByIdAndDelete(id).exec();
}
}
// messages.controller.ts
import { Controller, Get, Post, Patch, Delete, Param, Body } from '@nestjs/common';
import { MessagesService } from './messages.service';
import { Message } from './message.model';
/**
 * REST endpoints for the Message resource; persistence is delegated to
 * MessagesService.
 */
@Controller('messages')
export class MessagesController {
  constructor(private readonly messagesService: MessagesService) {}

  /** POST /messages — creates a message from the request body. */
  @Post()
  async createMessage(@Body() messageData: Message): Promise<Message> {
    // Only these three fields are forwarded; other body properties are ignored.
    const { sender, recipient, content } = messageData;
    return this.messagesService.createMessage(sender, recipient, content);
  }

  /** GET /messages/:recipient — lists messages addressed to a recipient. */
  @Get(':recipient')
  async getMessagesByRecipient(@Param('recipient') recipient: string): Promise<Message[]> {
    return this.messagesService.getMessagesByRecipient(recipient);
  }

  /** PATCH /messages/:id — replaces a message's content. */
  @Patch(':id')
  async updateMessage(@Param('id') id: string, @Body('content') content: string): Promise<Message> {
    return this.messagesService.updateMessage(id, content);
  }

  /** DELETE /messages/:id — removes a message by id. */
  @Delete(':id')
  async deleteMessage(@Param('id') id: string): Promise<Message> {
    return this.messagesService.deleteMessage(id);
  }
}
A hash table is a suitable data structure for efficiently looking up objects that stand in a one-to-many relationship. A hash table stores data as searchable key-value pairs: a hash function maps each key to an integer index that identifies a slot in the table. To look up an object, the table applies the hash function to the key and jumps directly to the matching slot. Because each slot can be linked to multiple data items (for example, via chaining), a single key can efficiently map to many associated objects.
# Run frame selection on the DAVIS 2017 validation split with a pretrained
# model on GPU 0; inputs are resized to 448x832.
# NOTE(review): --select_file presumably names the output list — confirm
# against selection.py's argument parsing.
python selection.py \
--data_root=data/DAVIS-2017-trainval/ \
--data_list=data/list/DAVIS_2017_val.txt \
--resize_h=448 \
--resize_w=832 \
--gpu='0' \
--select_file=select_files/davis17.txt \
--restore_select=pretrained_models/davis17.pth
|
import Airtable from 'airtable'
// NOTE(review): '<KEY>' is a placeholder API key committed to source; the
// real key should come from configuration/environment, not the bundle.
// @ts-ignore
const airtable = new Airtable({ apiKey: '<KEY>' })
const base = airtable.base('appjycbdxoV3Nsovw');
// The table names in airtable
export type TableName =
'Technology Type' |
'Purpose' |
'Data Type' |
'Data Process' |
'Access' |
'Storage' |
'Accountability'

// One selectable entry read from an Airtable table.
export interface Option {
name: string;
iconShortname: string;
description: string;
}

// All option lists the app consumes, one property per Airtable table.
export interface AirtableData {
techType: Option[];
purpose: Option[];
dataType: Option[];
dataProcess: Option[];
access: Option[];
storage: Option[];
accountable: Option[];
}
/**
 * Fetches every record of one Airtable table and maps each record to an
 * Option.  Wraps Airtable's callback-based pagination API in a Promise
 * (legitimate use of the Promise constructor: adapting a callback API).
 *
 * @param tableName - the Airtable table to read.
 * @returns all options in the table, in Airtable's page order.
 */
export async function getAirtableOptionsForTable(tableName: TableName): Promise<Option[]> {
  const options: Array<Option> = []
  return new Promise<Option[]>(function (resolve, reject) {
    base(tableName).select().eachPage(function page(records: Array<any>, fetchNextPage: () => void) {
      records.forEach((record) => {
        const option: Option = {
          name: record.get('Name'),
          description: record.get('Description'),
          iconShortname: record.get('Icon Shortname'),
        }
        options.push(option)
      });
      fetchNextPage();
    }, function done(err: Error) {
      if (err) {
        // Fixed: the original fell through and also called resolve() after
        // reject(); the early return makes the settle path unambiguous.
        reject(err);
        return;
      }
      resolve(options)
    });
  });
}
export async function getAirtableData(): Promise<AirtableData> {
// return a local cache if it exists, else query Airtable
const airtableData = sessionStorage.getItem('airtabledata')
if (airtableData) {
return JSON.parse(airtableData) as AirtableData
} else {
const [techType, purpose, dataType, access, storage, dataProcess, accountable] = await Promise.all([
await getAirtableOptionsForTable('Technology Type'),
await getAirtableOptionsForTable('Purpose'),
await getAirtableOptionsForTable('Data Type'),
await getAirtableOptionsForTable('Access'),
await getAirtableOptionsForTable('Storage'),
await getAirtableOptionsForTable('Data Process'),
await getAirtableOptionsForTable('Accountability'),
])
const airtableDataObject: AirtableData = { techType, purpose, dataType, access, storage, dataProcess, accountable }
// cache data for later
sessionStorage.setItem('airtabledata', JSON.stringify(airtableDataObject));
return airtableDataObject
}
} |
package com.honyum.elevatorMan.net;
import com.honyum.elevatorMan.net.base.RequestBean;
import com.honyum.elevatorMan.net.base.RequestHead;
import java.io.Serializable;
/**
 * Request payload for uploading a recorded video to the server; follows the
 * head/body envelope defined by RequestBean.
 */
public class UploadVideoRequest extends RequestBean {
private RequestHead head;
private RequestBody body;
public RequestHead getHead() {
return head;
}
public void setHead(RequestHead head) {
this.head = head;
}
public RequestBody getBody() {
return body;
}
public void setBody(RequestBody body) {
this.body = body;
}
// NOTE(review): this is a non-static inner class implementing Serializable;
// serializing it also drags in the enclosing UploadVideoRequest instance.
// Consider making it static — confirm no caller relies on inner-class
// construction syntax.
public class RequestBody implements Serializable {
public String getVideo() {
return video;
}
public void setVideo(String video) {
this.video = video;
}
/**
 * Base64 encoding of the local media file.  (The original Chinese comment
 * said "audio" although the field is named video — NOTE(review): confirm
 * which media type is actually carried.)
 */
private String video;
}
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2865-1
#
# Security announcement date: 2016-01-08 00:00:00 UTC
# Script generation date: 2017-02-01 21:03:30 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: i386
#
# Vulnerable packages fix on version:
# - libgnutlsxx27:2.12.14-5ubuntu3.11
# - libgnutls26:2.12.14-5ubuntu3.11
# - libgnutls-openssl27:2.12.14-5ubuntu3.11
#
# Last versions recommended by security team:
# - libgnutlsxx27:2.12.14-5ubuntu3.13
# - libgnutls26:2.12.14-5ubuntu3.13
# - libgnutls-openssl27:2.12.14-5ubuntu3.13
#
# CVE List:
# - CVE-2015-7575
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Fixed: the generated script listed libgnutls26 and libgnutls-openssl27
# twice; each package is now upgraded exactly once (same end state).
sudo apt-get install --only-upgrade libgnutlsxx27=2.12.14-5ubuntu3.13 -y
sudo apt-get install --only-upgrade libgnutls26=2.12.14-5ubuntu3.13 -y
sudo apt-get install --only-upgrade libgnutls-openssl27=2.12.14-5ubuntu3.13 -y
|
#!/bin/sh
set -eu
# Run lint and coverage tests inside a pinned Node 12 Alpine container,
# mounting the working tree at /var/task so results land in the host repo.
docker run -v "$(pwd):/var/task" -w /var/task mhart/alpine-node:12 /bin/sh -c \
'yarn lint && yarn test --coverage'
|
package com.prisma.integration
import com.prisma.IgnoreSQLite
import org.scalatest.{FlatSpec, Matchers}
/**
 * Integration tests for migrations that change a field between a scalar
 * column and an inline relation, in both directions, with and without
 * pre-existing rows.  Migrations that would drop a populated column must be
 * forced and are expected to emit a warning (deploySchemaThatMustWarn).
 */
class ChangingFromRelationToScalarOrBackSpec extends FlatSpec with Matchers with IntegrationBaseSpec {
// No rows exist yet, so replacing the scalar column with a relation is safe.
"Changing a field from scalar to relation" should "work when there is no data yet" taggedAs (IgnoreSQLite) in {
val schema =
"""type A {
| id: ID! @id
| a: String! @unique
| b: String
|}
|
|type B {
| id: ID! @id
| b: String! @unique
|}"""
val (project, _) = setupProject(schema)
val schema1 =
"""type A {
| id: ID! @id
| a: String! @unique
| b: B @relation(link: INLINE)
|}
|
|type B {
| id: ID! @id
| b: String! @unique
|}"""
deployServer.deploySchema(project, schema1)
}
// With a row present, the scalar column is dropped: the deploy must be
// forced and must warn about the data-destroying change.
"Changing a field from scalar to relation" should "work when there is already data and should delete the old column" taggedAs (IgnoreSQLite) in {
val schema =
"""type A {
| id: ID! @id
| a: String! @unique
| b: String
|}
|
|type B {
| id: ID! @id
| b: String! @unique
|}"""
val (project, _) = setupProject(schema)
apiServer.query("""mutation{createA(data:{a:"A", b: "B"}){a}}""", project)
val as = apiServer.query("""{as{a}}""", project)
as.toString should be("""{"data":{"as":[{"a":"A"}]}}""")
val schema1 =
"""type A {
| id: ID! @id
| a: String! @unique
| b: B @relation(link: INLINE)
|}
|
|type B {
| id: ID! @id
| b: String! @unique
|}"""
deployServer.deploySchemaThatMustWarn(project, schema1, force = true)
}
// Reverse direction, empty database: relation column becomes a scalar.
"Changing a relation to scalar" should "work when there is no data yet" in {
val schema =
"""type A {
| id: ID! @id
| a: String! @unique
| b: B @relation(link: INLINE)
|}
|
|type B {
| id: ID! @id
| b: String! @unique
|}"""
val (project, _) = setupProject(schema)
val schema1 =
"""type A {
| id: ID! @id
| a: String! @unique
| b: String
|}
|
|type B {
| id: ID! @id
| b: String! @unique
|}"""
deployServer.deploySchema(project, schema1)
}
// Reverse direction with a connected row: must be forced and must warn.
"Changing a relation to scalar" should "work when there is already data" in {
val schema =
"""type A {
| id: ID! @id
| a: String! @unique
| b: B @relation(link: INLINE)
|}
|
|type B {
| id: ID! @id
| b: String! @unique
|}"""
val (project, _) = setupProject(schema)
apiServer.query("""mutation{createA(data:{a:"A", b: {create:{b: "B"}}}){a}}""", project)
val as = apiServer.query("""{as{a, b{b}}}""", project)
as.toString should be("""{"data":{"as":[{"a":"A","b":{"b":"B"}}]}}""")
val schema1 =
"""type A {
| id: ID! @id
| a: String! @unique
| b: String
|}
|
|type B {
| id: ID! @id
| b: String! @unique
|}"""
deployServer.deploySchemaThatMustWarn(project, schema1, force = true)
}
}
|
#!/bin/bash
# Generate test result data for xstormy16 GAS testing.
# This script is machine generated.
# It is intended to be run in the testsuite source directory.
#
# Syntax: build.sh /path/to/build/gas
#
# Fixed: the original first line read "#/bin/sh" (missing '!'), so it was a
# plain comment, not a shebang.  bash is named explicitly because the script
# uses the non-POSIX `function` keyword further down.
if [ $# = 0 ] ; then
  if [ ! -x ../gas/as-new ] ; then
    echo "Usage: $0 [/path/to/gas/build]"
  else
    BUILD=`pwd`/../gas
  fi
else
  BUILD=$1
fi
# Quoted to behave sanely if the path contains spaces or BUILD is unset.
if [ ! -x "$BUILD/as-new" ] ; then
  echo "$BUILD is not a gas build directory"
  exit 1
fi
# Put results here, so we preserve the existing set for comparison.
rm -rf tmpdir
mkdir tmpdir
cd tmpdir
# gentest NAME - assemble NAME.s and write NAME.d, the expected-output file
# consumed by the GAS testsuite (objdump disassembly with regex-metacharacter
# escaping).  Uses POSIX function syntax instead of the bash-only `function`
# keyword, so the definition also works under plain sh.
gentest () {
  rm -f a.out
  $BUILD/as-new ${1}.s -o a.out
  echo "#as:" >${1}.d
  echo "#objdump: -dr" >>${1}.d
  echo "#name: $1" >>${1}.d
  $BUILD/../binutils/objdump -dr a.out | sed -e 's/(/\\(/g' -e 's/)/\\)/g' -e 's/\$/\\$/g' -e 's/\[/\\\[/g' -e 's/\]/\\\]/g' -e 's/[+]/\\+/g' -e 's/[.]/\\./g' -e 's/[*]/\\*/g' | sed -e 's/^.*file format.*$/.*: +file format .*/' >>${1}.d
  rm -f a.out
}
# Now come all the testcases.
cat > allinsn.s <<EOF
.data
foodata: .word 42
.text
footext:
.text
.global movlmemimm
movlmemimm:
mov.b 0,#0
mov.w 255,#65535
mov.w 128,#32768
mov.b 127,#32767
mov.w 1,#1
mov.w 81,#64681
mov.w 247,#42230
mov.b 84,#16647
.text
.global movhmemimm
movhmemimm:
mov.b 0x7f00+0,#0
mov.w 0x7f00+255,#65535
mov.w 0x7f00+128,#32768
mov.b 0x7f00+127,#32767
mov.w 0x7f00+1,#1
mov.b 0x7f00+165,#1944
mov.w 0x7f00+186,#11517
mov.b 0x7f00+63,#25556
.text
.global movlgrmem
movlgrmem:
mov.b r0,0
mov.w r7,255
mov.w r4,128
mov.b r3,127
mov.w r1,1
mov.w r6,179
mov.w r0,183
mov.b r3,41
.text
.global movhgrmem
movhgrmem:
mov.b r0,0x7f00+0
mov.w r7,0x7f00+255
mov.w r4,0x7f00+128
mov.b r3,0x7f00+127
mov.w r1,0x7f00+1
mov.b r2,0x7f00+114
mov.w r2,0x7f00+210
mov.w r5,0x7f00+181
.text
.global movlmemgr
movlmemgr:
mov.b 0,r0
mov.w 255,r7
mov.w 128,r4
mov.b 127,r3
mov.w 1,r1
mov.w 137,r0
mov.w 26,r0
mov.b 127,r4
.text
.global movhmemgr
movhmemgr:
mov.b 0x7f00+0,r0
mov.w 0x7f00+255,r7
mov.w 0x7f00+128,r4
mov.b 0x7f00+127,r3
mov.w 0x7f00+1,r1
mov.w 0x7f00+98,r3
mov.w 0x7f00+135,r7
mov.b 0x7f00+229,r2
.text
.global movgrgri
movgrgri:
mov.b r0,(r0)
mov.w r7,(r15)
mov.w r4,(r8)
mov.b r3,(r7)
mov.w r1,(r1)
mov.w r6,(r4)
mov.b r0,(r12)
mov.w r5,(r9)
.text
.global movgrgripostinc
movgrgripostinc:
mov.b r0,(r0++)
mov.w r7,(r15++)
mov.w r4,(r8++)
mov.b r3,(r7++)
mov.w r1,(r1++)
mov.w r4,(r8++)
mov.w r3,(r12++)
mov.b r6,(r4++)
.text
.global movgrgripredec
movgrgripredec:
mov.b r0,(--r0)
mov.w r7,(--r15)
mov.w r4,(--r8)
mov.b r3,(--r7)
mov.w r1,(--r1)
mov.w r5,(--r9)
mov.w r4,(--r14)
mov.b r4,(--r7)
.text
.global movgrigr
movgrigr:
mov.b (r0),r0
mov.w (r15),r7
mov.w (r8),r4
mov.b (r7),r3
mov.w (r1),r1
mov.w (r4),r3
mov.b (r3),r6
mov.w (r7),r0
.text
.global movgripostincgr
movgripostincgr:
mov.b (r0++),r0
mov.w (r15++),r7
mov.w (r8++),r4
mov.b (r7++),r3
mov.w (r1++),r1
mov.w (r12++),r5
mov.b (r4++),r2
mov.b (r11++),r6
.text
.global movgripredecgr
movgripredecgr:
mov.b (--r0),r0
mov.w (--r15),r7
mov.w (--r8),r4
mov.b (--r7),r3
mov.w (--r1),r1
mov.b (--r8),r3
mov.b (--r11),r4
mov.w (--r1),r6
.text
.global movgrgrii
movgrgrii:
mov.b r0,(r0,0)
mov.w r7,(r15,-1)
mov.w r4,(r8,-2048)
mov.b r3,(r7,2047)
mov.w r1,(r1,1)
mov.w r6,(r8,-452)
mov.w r4,(r11,572)
mov.b r1,(r1,-1718)
.text
.global movgrgriipostinc
movgrgriipostinc:
mov.b r0,(r0++,0)
mov.w r7,(r15++,-1)
mov.w r4,(r8++,-2048)
mov.b r3,(r7++,2047)
mov.w r1,(r1++,1)
mov.w r6,(r0++,-64)
mov.b r7,(r15++,1060)
mov.b r0,(r7++,847)
.text
.global movgrgriipredec
movgrgriipredec:
mov.b r0,(--r0,0)
mov.w r7,(--r15,-1)
mov.w r4,(--r8,-2048)
mov.b r3,(--r7,2047)
mov.w r1,(--r1,1)
mov.w r0,(--r15,1780)
mov.w r6,(--r1,1506)
mov.w r7,(--r3,-2033)
.text
.global movgriigr
movgriigr:
mov.b (r0,0),r0
mov.w (r15,-1),r7
mov.w (r8,-2048),r4
mov.b (r7,2047),r3
mov.w (r1,1),r1
mov.w (r7,1948),r5
mov.b (r3,-844),r4
mov.w (r15,1704),r0
.text
.global movgriipostincgr
movgriipostincgr:
mov.b (r0++,0),r0
mov.w (r15++,-1),r7
mov.w (r8++,-2048),r4
mov.b (r7++,2047),r3
mov.w (r1++,1),r1
mov.w (r2++,-176),r7
mov.w (r8++,1389),r4
mov.b (r3++,47),r0
.text
.global movgriipredecgr
movgriipredecgr:
mov.b (--r0,0),r0
mov.w (--r15,-1),r7
mov.w (--r8,-2048),r4
mov.b (--r7,2047),r3
mov.w (--r1,1),r1
mov.b (--r8,1004),r4
mov.w (--r14,-1444),r2
mov.b (--r5,-927),r4
.text
.global movgrgr
movgrgr:
mov r0,r0
mov r15,r15
mov r8,r8
mov r7,r7
mov r1,r1
mov r9,r14
mov r7,r15
mov r12,r15
.text
.global movimm8
movimm8:
mov Rx,#0
mov Rx,#255
mov Rx,#128
mov Rx,#127
mov Rx,#1
mov Rx,#136
mov Rx,#83
mov Rx,#104
.text
.global movwimm8
movwimm8:
mov.w Rx,#0
mov.w Rx,#255
mov.w Rx,#128
mov.w Rx,#127
mov.w Rx,#1
mov.w Rx,#92
mov.w Rx,#97
mov.w Rx,#4
.text
.global movgrimm8
movgrimm8:
mov r0,#0
mov r7,#255
mov r4,#128
mov r3,#127
mov r1,#1
mov r2,#206
mov r4,#55
mov r2,#3
.text
.global movwgrimm8
movwgrimm8:
mov.w r0,#0
mov.w r7,#255
mov.w r4,#128
mov.w r3,#127
mov.w r1,#1
mov.w r4,#243
mov.w r3,#55
mov.w r2,#108
.text
.global movgrimm16
movgrimm16:
mov r0,#0
mov r15,#65535
mov r8,#32768
mov r7,#32767
mov r1,#1
mov r4,#20066
mov r3,#7190
mov r2,#15972
.text
.global movwgrimm16
movwgrimm16:
mov.w r0,#0
mov.w r15,#65535
mov.w r8,#32768
mov.w r7,#32767
mov.w r1,#1
mov.w r6,#16648
mov.w r8,#26865
mov.w r10,#20010
.text
.global movlowgr
movlowgr:
mov.b r0,RxL
mov.b r15,RxL
mov.b r8,RxL
mov.b r7,RxL
mov.b r1,RxL
mov.b r11,RxL
mov.b r5,RxL
mov.b r2,RxL
.text
.global movhighgr
movhighgr:
mov.b r0,RxH
mov.b r15,RxH
mov.b r8,RxH
mov.b r7,RxH
mov.b r1,RxH
mov.b r2,RxH
mov.b r7,RxH
mov.b r2,RxH
.text
.global movfgrgri
movfgrgri:
movf.b r0,(r0)
movf.w r7,(r15)
movf.w r4,(r8)
movf.b r3,(r7)
movf.w r1,(r1)
movf.b r6,(r15)
movf.b r1,(r10)
movf.b r6,(r1)
.text
.global movfgrgripostinc
movfgrgripostinc:
movf.b r0,(r0++)
movf.w r7,(r15++)
movf.w r4,(r8++)
movf.b r3,(r7++)
movf.w r1,(r1++)
movf.b r2,(r5++)
movf.w r5,(r10++)
movf.w r7,(r5++)
.text
.global movfgrgripredec
movfgrgripredec:
movf.b r0,(--r0)
movf.w r7,(--r15)
movf.w r4,(--r8)
movf.b r3,(--r7)
movf.w r1,(--r1)
movf.w r6,(--r10)
movf.b r1,(--r14)
movf.w r3,(--r7)
.text
.global movfgrigr
movfgrigr:
movf.b (r0),r0
movf.w (r15),r7
movf.w (r8),r4
movf.b (r7),r3
movf.w (r1),r1
movf.b (r5),r4
movf.b (r3),r4
movf.w (r12),r3
.text
.global movfgripostincgr
movfgripostincgr:
movf.b (r0++),r0
movf.w (r15++),r7
movf.w (r8++),r4
movf.b (r7++),r3
movf.w (r1++),r1
movf.b (r9++),r5
movf.w (r10++),r4
movf.b (r9++),r1
.text
.global movfgripredecgr
movfgripredecgr:
movf.b (--r0),r0
movf.w (--r15),r7
movf.w (--r8),r4
movf.b (--r7),r3
movf.w (--r1),r1
movf.b (--r0),r2
movf.w (--r11),r2
movf.b (--r10),r5
.text
.global movfgrgrii
movfgrgrii:
movf.b r0,(r8,r0,0)
movf.w r7,(r15,r15,-1)
movf.w r4,(r12,r8,-2048)
movf.b r3,(r11,r7,2047)
movf.w r1,(r9,r1,1)
movf.b r7,(r15,r0,1473)
movf.w r2,(r8,r9,-1522)
movf.w r2,(r13,r1,480)
.text
.global movfgrgriipostinc
movfgrgriipostinc:
movf.b r0,(r8,r0++,0)
movf.w r7,(r15,r15++,-1)
movf.w r4,(r12,r8++,-2048)
movf.b r3,(r11,r7++,2047)
movf.w r1,(r9,r1++,1)
movf.b r1,(r8,r2++,1398)
movf.w r4,(r8,r9++,-778)
movf.w r1,(r13,r14++,1564)
.text
.global movfgrgriipredec
movfgrgriipredec:
movf.b r0,(r8,--r0,0)
movf.w r7,(r15,--r15,-1)
movf.w r4,(r12,--r8,-2048)
movf.b r3,(r11,--r7,2047)
movf.w r1,(r9,--r1,1)
movf.b r6,(r8,--r7,254)
movf.w r5,(r12,--r12,1673)
movf.b r0,(r8,--r10,-38)
.text
.global movfgriigr
movfgriigr:
movf.b (r8,r0,0),r0
movf.w (r15,r15,-1),r7
movf.w (r12,r8,-2048),r4
movf.b (r11,r7,2047),r3
movf.w (r9,r1,1),r1
movf.w (r15,r2,-1636),r3
movf.w (r14,r12,1626),r1
movf.b (r11,r14,1540),r0
.text
.global movfgriipostincgr
movfgriipostincgr:
movf.b (r8,r0++,0),r0
movf.w (r15,r15++,-1),r7
movf.w (r12,r8++,-2048),r4
movf.b (r11,r7++,2047),r3
movf.w (r9,r1++,1),r1
movf.b (r15,r13++,466),r3
movf.b (r11,r11++,250),r4
movf.b (r10,r10++,-1480),r7
.text
.global movfgriipredecgr
movfgriipredecgr:
movf.b (r8,--r0,0),r0
movf.w (r15,--r15,-1),r7
movf.w (r12,--r8,-2048),r4
movf.b (r11,--r7,2047),r3
movf.w (r9,--r1,1),r1
movf.b (r13,--r10,-608),r0
movf.b (r9,--r11,831),r7
movf.w (r15,--r15,-2036),r6
.text
.global maskgrgr
maskgrgr:
mask r0,r0
mask r15,r15
mask r8,r8
mask r7,r7
mask r1,r1
mask r4,r0
mask r6,r11
mask r8,r4
.text
.global maskgrimm16
maskgrimm16:
mask r0,#0
mask r15,#65535
mask r8,#32768
mask r7,#32767
mask r1,#1
mask r7,#18153
mask r15,#7524
mask r14,#34349
.text
.global pushgr
pushgr:
push r0
push r15
push r8
push r7
push r1
push r9
push r4
push r3
.text
.global popgr
popgr:
pop r0
pop r15
pop r8
pop r7
pop r1
pop r3
pop r2
pop r12
.text
.global swpn
swpn:
swpn r0
swpn r15
swpn r8
swpn r7
swpn r1
swpn r15
swpn r4
swpn r3
.text
.global swpb
swpb:
swpb r0
swpb r15
swpb r8
swpb r7
swpb r1
swpb r2
swpb r12
swpb r2
.text
.global swpw
swpw:
swpw r0,r0
swpw r15,r15
swpw r8,r8
swpw r7,r7
swpw r1,r1
swpw r12,r4
swpw r8,r2
swpw r5,r13
.text
.global andgrgr
andgrgr:
and r0,r0
and r15,r15
and r8,r8
and r7,r7
and r1,r1
and r2,r2
and r15,r5
and r7,r5
.text
.global andimm8
andimm8:
and Rx,#0
and Rx,#255
and Rx,#128
and Rx,#127
and Rx,#1
and Rx,#206
and Rx,#11
and Rx,#232
.text
.global andgrimm16
andgrimm16:
and r0,#0
and r15,#65535
and r8,#32768
and r7,#32767
and r1,#1
and r10,#17229
and r11,#61451
and r5,#46925
.text
.global orgrgr
orgrgr:
or r0,r0
or r15,r15
or r8,r8
or r7,r7
or r1,r1
or r3,r5
or r14,r15
or r5,r12
.text
.global orimm8
orimm8:
or Rx,#0
or Rx,#255
or Rx,#128
or Rx,#127
or Rx,#1
or Rx,#4
or Rx,#38
or Rx,#52
.text
.global orgrimm16
orgrimm16:
or r0,#0
or r15,#65535
or r8,#32768
or r7,#32767
or r1,#1
or r2,#64563
or r2,#18395
or r1,#63059
.text
.global xorgrgr
xorgrgr:
xor r0,r0
xor r15,r15
xor r8,r8
xor r7,r7
xor r1,r1
xor r14,r1
xor r9,r9
xor r12,r8
.text
.global xorimm8
xorimm8:
xor Rx,#0
xor Rx,#255
xor Rx,#128
xor Rx,#127
xor Rx,#1
xor Rx,#208
xor Rx,#126
xor Rx,#55
.text
.global xorgrimm16
xorgrimm16:
xor r0,#0
xor r15,#65535
xor r8,#32768
xor r7,#32767
xor r1,#1
xor r15,#56437
xor r3,#901
xor r2,#37017
.text
.global notgr
notgr:
not r0
not r15
not r8
not r7
not r1
not r4
not r3
not r3
.text
.global addgrgr
addgrgr:
add r0,r0
add r15,r15
add r8,r8
add r7,r7
add r1,r1
add r12,r7
add r1,r10
add r14,r14
.text
.global addgrimm4
addgrimm4:
add r0,#0
add r15,#15
add r8,#8
add r7,#7
add r1,#1
add r7,#0
add r10,#9
add r7,#8
.text
.global addimm8
addimm8:
add Rx,#0
add Rx,#255
add Rx,#128
add Rx,#127
add Rx,#1
add Rx,#25
add Rx,#247
add Rx,#221
.text
.global addgrimm16
addgrimm16:
add r0,#0
add r15,#255
add r8,#128
add r7,#127
add r1,#1
add r3,#99
add r0,#15
add r7,#214
.text
.global adcgrgr
adcgrgr:
adc r0,r0
adc r15,r15
adc r8,r8
adc r7,r7
adc r1,r1
adc r2,r13
adc r14,r10
adc r2,r15
.text
.global adcgrimm4
adcgrimm4:
adc r0,#0
adc r15,#15
adc r8,#8
adc r7,#7
adc r1,#1
adc r15,#1
adc r1,#3
adc r6,#11
.text
.global adcimm8
adcimm8:
adc Rx,#0
adc Rx,#255
adc Rx,#128
adc Rx,#127
adc Rx,#1
adc Rx,#225
adc Rx,#75
adc Rx,#18
.text
.global adcgrimm16
adcgrimm16:
adc r0,#0
adc r15,#65535
adc r8,#32768
adc r7,#32767
adc r1,#1
adc r13,#63129
adc r3,#23795
adc r11,#49245
.text
.global subgrgr
subgrgr:
sub r0,r0
sub r15,r15
sub r8,r8
sub r7,r7
sub r1,r1
sub r8,r8
sub r9,r9
sub r9,r15
.text
.global subgrimm4
subgrimm4:
sub r0,#0
sub r15,#15
sub r8,#8
sub r7,#7
sub r1,#1
sub r2,#15
sub r12,#9
sub r8,#4
.text
.global subimm8
subimm8:
sub Rx,#0
sub Rx,#255
sub Rx,#128
sub Rx,#127
sub Rx,#1
sub Rx,#205
sub Rx,#153
sub Rx,#217
.text
.global subgrimm16
subgrimm16:
sub r0,#0
sub r15,#65535
sub r8,#32768
sub r7,#32767
sub r1,#1
sub r3,#51895
sub r11,#23617
sub r10,#7754
.text
.global sbcgrgr
sbcgrgr:
sbc r0,r0
sbc r15,r15
sbc r8,r8
sbc r7,r7
sbc r1,r1
sbc r11,r2
sbc r9,r1
sbc r4,r15
.text
.global sbcgrimm4
sbcgrimm4:
sbc r0,#0
sbc r15,#15
sbc r8,#8
sbc r7,#7
sbc r1,#1
sbc r10,#11
sbc r11,#10
sbc r13,#10
.text
.global sbcgrimm8
sbcgrimm8:
sbc Rx,#0
sbc Rx,#255
sbc Rx,#128
sbc Rx,#127
sbc Rx,#1
sbc Rx,#137
sbc Rx,#224
sbc Rx,#156
.text
.global sbcgrimm16
sbcgrimm16:
sbc r0,#0
sbc r15,#65535
sbc r8,#32768
sbc r7,#32767
sbc r1,#1
sbc r0,#32507
sbc r7,#8610
sbc r14,#20373
.text
.global incgr
incgr:
inc r0
inc r15
inc r8
inc r7
inc r1
inc r13
inc r1
inc r11
.text
.global incgrimm2
incgrimm2:
inc r0,#0
inc r15,#3
inc r8,#2
inc r7,#1
inc r1,#1
inc r14,#1
inc r5,#0
inc r12,#3
.text
.global decgr
decgr:
dec r0
dec r15
dec r8
dec r7
dec r1
dec r12
dec r8
dec r10
.text
.global decgrimm2
decgrimm2:
dec r0,#0
dec r15,#3
dec r8,#2
dec r7,#1
dec r1,#1
dec r5,#0
dec r13,#0
dec r13,#2
.text
.global rrcgrgr
rrcgrgr:
rrc r0,r0
rrc r15,r15
rrc r8,r8
rrc r7,r7
rrc r1,r1
rrc r8,r4
rrc r10,r14
rrc r15,r9
.text
.global rrcgrimm4
rrcgrimm4:
rrc r0,#0
rrc r15,#15
rrc r8,#8
rrc r7,#7
rrc r1,#1
rrc r11,#3
rrc r14,#12
rrc r2,#15
.text
.global rlcgrgr
rlcgrgr:
rlc r0,r0
rlc r15,r15
rlc r8,r8
rlc r7,r7
rlc r1,r1
rlc r15,r3
rlc r15,r7
rlc r15,r10
.text
.global rlcgrimm4
rlcgrimm4:
rlc r0,#0
rlc r15,#15
rlc r8,#8
rlc r7,#7
rlc r1,#1
rlc r8,#2
rlc r2,#6
rlc r6,#10
.text
.global shrgrgr
shrgrgr:
shr r0,r0
shr r15,r15
shr r8,r8
shr r7,r7
shr r1,r1
shr r13,r2
shr r7,r8
shr r6,r8
.text
.global shrgrimm
shrgrimm:
shr r0,#0
shr r15,#15
shr r8,#8
shr r7,#7
shr r1,#1
shr r9,#13
shr r2,#7
shr r8,#8
.text
.global shlgrgr
shlgrgr:
shl r0,r0
shl r15,r15
shl r8,r8
shl r7,r7
shl r1,r1
shl r2,r3
shl r0,r3
shl r2,r1
.text
.global shlgrimm
shlgrimm:
shl r0,#0
shl r15,#15
shl r8,#8
shl r7,#7
shl r1,#1
shl r6,#13
shl r3,#6
shl r15,#15
.text
.global asrgrgr
asrgrgr:
asr r0,r0
asr r15,r15
asr r8,r8
asr r7,r7
asr r1,r1
asr r5,r10
asr r3,r5
asr r6,r11
.text
.global asrgrimm
asrgrimm:
asr r0,#0
asr r15,#15
asr r8,#8
asr r7,#7
asr r1,#1
asr r13,#4
asr r0,#13
asr r6,#3
.text
.global set1grimm
set1grimm:
set1 r0,#0
set1 r15,#15
set1 r8,#8
set1 r7,#7
set1 r1,#1
set1 r6,#10
set1 r13,#1
set1 r13,#15
.text
.global set1grgr
set1grgr:
set1 r0,r0
set1 r15,r15
set1 r8,r8
set1 r7,r7
set1 r1,r1
set1 r6,r0
set1 r6,r7
set1 r14,r2
.text
.global set1lmemimm
set1lmemimm:
set1 0,#0
set1 255,#7
set1 128,#4
set1 127,#3
set1 1,#1
set1 244,#3
set1 55,#7
set1 252,#5
.text
.global set1hmemimm
set1hmemimm:
set1 0x7f00+0,#0
set1 0x7f00+255,#7
set1 0x7f00+128,#4
set1 0x7f00+127,#3
set1 0x7f00+1,#1
set1 0x7f00+10,#3
set1 0x7f00+99,#4
set1 0x7f00+148,#3
.text
.global clr1grimm
clr1grimm:
clr1 r0,#0
clr1 r15,#15
clr1 r8,#8
clr1 r7,#7
clr1 r1,#1
clr1 r12,#0
clr1 r8,#11
clr1 r7,#7
.text
.global clr1grgr
clr1grgr:
clr1 r0,r0
clr1 r15,r15
clr1 r8,r8
clr1 r7,r7
clr1 r1,r1
clr1 r3,r3
clr1 r0,r1
clr1 r15,r0
.text
.global clr1lmemimm
clr1lmemimm:
clr1 0,#0
clr1 255,#7
clr1 128,#4
clr1 127,#3
clr1 1,#1
clr1 114,#7
clr1 229,#4
clr1 86,#1
.text
.global clr1hmemimm
clr1hmemimm:
clr1 0x7f00+0,#0
clr1 0x7f00+255,#7
clr1 0x7f00+128,#4
clr1 0x7f00+127,#3
clr1 0x7f00+1,#1
clr1 0x7f00+44,#3
clr1 0x7f00+212,#5
clr1 0x7f00+67,#7
.text
.global cbwgr
cbwgr:
cbw r0
cbw r15
cbw r8
cbw r7
cbw r1
cbw r8
cbw r11
cbw r3
.text
.global revgr
revgr:
rev r0
rev r15
rev r8
rev r7
rev r1
rev r1
rev r1
rev r14
.text
.global bgr
bgr:
br r0
br r15
br r8
br r7
br r1
br r0
br r15
br r12
.text
.global jmp
jmp:
jmp r8,r0
jmp r9,r15
jmp r9,r8
jmp r8,r7
jmp r9,r1
jmp r9,r7
jmp r9,r5
jmp r8,r12
.text
.global jmpf
jmpf:
jmpf 0
jmpf 16777215
jmpf 8388608
jmpf 8388607
jmpf 1
jmpf 10731629
jmpf 15094866
jmpf 1464024
.text
.global callrgr
callrgr:
callr r0
callr r15
callr r8
callr r7
callr r1
callr r1
callr r12
callr r8
.text
.global callgr
callgr:
call r8,r0
call r9,r15
call r9,r8
call r8,r7
call r9,r1
call r9,r6
call r9,r14
call r8,r12
.text
.global callfimm
callfimm:
callf 0
callf 16777215
callf 8388608
callf 8388607
callf 1
callf 13546070
callf 10837983
callf 15197875
.text
.global icallrgr
icallrgr:
icallr r0
icallr r15
icallr r8
icallr r7
icallr r1
icallr r15
icallr r12
icallr r9
.text
.global icallgr
icallgr:
icall r8,r0
icall r9,r15
icall r9,r8
icall r8,r7
icall r9,r1
icall r9,r10
icall r8,r15
icall r8,r10
.text
.global icallfimm
icallfimm:
icallf 0
icallf 16777215
icallf 8388608
icallf 8388607
icallf 1
icallf 9649954
icallf 1979758
icallf 7661640
.text
.global iret
iret:
iret
.text
.global ret
ret:
ret
.text
.global mul
mul:
mul
.text
.global div
div:
div
.text
.global sdiv
sdiv:
sdiv
.text
.global divlh
divlh:
divlh
.text
.global sdivlh
sdivlh:
sdivlh
.text
.global nop
nop:
nop
ret
.text
.global halt
halt:
halt
.text
.global hold
hold:
hold
.text
.global holdx
holdx:
holdx
.text
.global brk
brk:
brk
.text
.global bccgrgr
bccgrgr:
bge r0,r0,0+(.+4)
bz r15,r15,-1+(.+4)
bpl r8,r8,-2048+(.+4)
bls r7,r7,2047+(.+4)
bnc r1,r1,1+(.+4)
bc r3,r13,1799+(.+4)
bge r1,r10,-2019+(.+4)
bz r0,r5,-1132+(.+4)
.text
.global bccgrimm8
bccgrimm8:
bge r0,#0,0+(.+4)
bz r7,#255,-1+(.+4)
bpl r4,#128,-2048+(.+4)
bls r3,#127,2047+(.+4)
bnc r1,#1,1+(.+4)
bnc r3,#8,1473+(.+4)
bnz.b r5,#203,1619+(.+4)
bc r7,#225,978+(.+4)
.text
.global bccimm16
bccimm16:
bge Rx,#0,0+(.+4)
bz Rx,#65535,-1+(.+4)
bpl Rx,#32768,-128+(.+4)
bls Rx,#32767,127+(.+4)
bnc Rx,#1,1+(.+4)
bz.b Rx,#30715,4+(.+4)
bnv Rx,#62266,-13+(.+4)
bnv Rx,#48178,108+(.+4)
.text
.global bngrimm4
bngrimm4:
bn r0,#0,0+(.+4)
bn r15,#15,-1+(.+4)
bn r8,#8,-2048+(.+4)
bn r7,#7,2047+(.+4)
bn r1,#1,1+(.+4)
bn r11,#3,-1975+(.+4)
bn r15,#4,-1205+(.+4)
bn r10,#8,1691+(.+4)
.text
.global bngrgr
bngrgr:
bn r0,r0,0+(.+4)
bn r15,r15,-1+(.+4)
bn r8,r8,-2048+(.+4)
bn r7,r7,2047+(.+4)
bn r1,r1,1+(.+4)
bn r4,r3,1181+(.+4)
bn r5,r2,77+(.+4)
bn r3,r7,631+(.+4)
.text
.global bnlmemimm
bnlmemimm:
bn 0,#0,0+(.+4)
bn 255,#7,-1+(.+4)
bn 128,#4,-2048+(.+4)
bn 127,#3,2047+(.+4)
bn 1,#1,1+(.+4)
bn 153,#7,-847+(.+4)
bn 204,#0,-1881+(.+4)
bn 242,#7,1396+(.+4)
.text
.global bnhmemimm
bnhmemimm:
bn 0x7f00+0,#0,0+(.+4)
bn 0x7f00+255,#7,-1+(.+4)
bn 0x7f00+128,#4,-2048+(.+4)
bn 0x7f00+127,#3,2047+(.+4)
bn 0x7f00+1,#1,1+(.+4)
bn 0x7f00+185,#3,-614+(.+4)
bn 0x7f00+105,#1,-668+(.+4)
bn 0x7f00+79,#7,1312+(.+4)
.text
.global bpgrimm4
bpgrimm4:
bp r0,#0,0+(.+4)
bp r15,#15,-1+(.+4)
bp r8,#8,-2048+(.+4)
bp r7,#7,2047+(.+4)
bp r1,#1,1+(.+4)
bp r0,#12,1075+(.+4)
bp r1,#5,551+(.+4)
bp r6,#8,1588+(.+4)
.text
.global bpgrgr
bpgrgr:
bp r0,r0,0+(.+4)
bp r15,r15,-1+(.+4)
bp r8,r8,-2048+(.+4)
bp r7,r7,2047+(.+4)
bp r1,r1,1+(.+4)
bp r4,r9,-614+(.+4)
bp r9,r10,-1360+(.+4)
bp r4,r1,407+(.+4)
.text
.global bplmemimm
bplmemimm:
bp 0,#0,0+(.+4)
bp 255,#7,-1+(.+4)
bp 128,#4,-2048+(.+4)
bp 127,#3,2047+(.+4)
bp 1,#1,1+(.+4)
bp 193,#3,-398+(.+4)
bp 250,#2,-1553+(.+4)
bp 180,#6,579+(.+4)
.text
.global bphmemimm
bphmemimm:
bp 0x7f00+0,#0,0+(.+4)
bp 0x7f00+255,#7,-1+(.+4)
bp 0x7f00+128,#4,-2048+(.+4)
bp 0x7f00+127,#3,2047+(.+4)
bp 0x7f00+1,#1,1+(.+4)
bp 0x7f00+195,#1,-432+(.+4)
bp 0x7f00+129,#5,-1508+(.+4)
bp 0x7f00+56,#3,1723+(.+4)
.text
.global bcc
bcc:
bge 0+(.+2)
bz -1+(.+2)
bpl -128+(.+2)
bls 127+(.+2)
bnc 1+(.+2)
bnz.b 48+(.+2)
bnc -7+(.+2)
bnz.b 74+(.+2)
.text
.global br
br:
br 0+(.+2)
br -2+(.+2)
br -2048+(.+2)
br 2046+(.+2)
br 1+(.+2)
br 1472+(.+2)
br 1618+(.+2)
br 978+(.+2)
.text
.global callrimm
callrimm:
callr 0+(.+2)
callr -2+(.+2)
callr -2048+(.+2)
callr 2046+(.+2)
callr 1+(.+2)
callr 1472+(.+2)
callr 1618+(.+2)
callr 978+(.+2)
movgrgrsi:
mov.b r0,(r0,extsym)
mov.w r7,(r15,extsym-1)
mov.w r4,(r8,extsym-2048)
mov.b r3,(r7,extsym+2047)
mov.w r1,(r1,extsym+1)
mov.w r6,(r8,extsym-452)
mov.w r4,(r11,extsym+572)
mov.b r1,(r1,extsym-1718)
.text
.global movgrgriipostinc
movgrgrsipostinc:
mov.b r0,(r0++,extsym)
mov.w r7,(r15++,extsym-1)
mov.w r4,(r8++,extsym-2048)
mov.b r3,(r7++,extsym+2047)
mov.w r1,(r1++,extsym+1)
mov.w r6,(r0++,extsym-64)
mov.b r7,(r15++,extsym+1060)
mov.b r0,(r7++,extsym+847)
.text
.global movgrgriipredec
movgrgrsipredec:
mov.b r0,(--r0,extsym)
mov.w r7,(--r15,extsym-1)
mov.w r4,(--r8,extsym-2048)
mov.b r3,(--r7,extsym+2047)
mov.w r1,(--r1,extsym+1)
mov.w r0,(--r15,extsym+1780)
mov.w r6,(--r1,extsym+1506)
mov.w r7,(--r3,extsym-2033)
.text
.global movgriigr
movgrsigr:
mov.b (r0,extsym),r0
mov.w (r15,extsym-1),r7
mov.w (r8,extsym-2048),r4
mov.b (r7,extsym+2047),r3
mov.w (r1,extsym+1),r1
mov.w (r7,extsym+1948),r5
mov.b (r3,extsym-844),r4
mov.w (r15,extsym+1704),r0
.text
.global movgriipostincgr
movgrsipostincgr:
mov.b (r0++,extsym),r0
mov.w (r15++,extsym-1),r7
mov.w (r8++,extsym-2048),r4
mov.b (r7++,extsym+2047),r3
mov.w (r1++,extsym+1),r1
mov.w (r2++,extsym-176),r7
mov.w (r8++,extsym+1389),r4
mov.b (r3++,extsym+47),r0
.text
.global movgriipredecgr
movgrsipredecgr:
mov.b (--r0,extsym),r0
mov.w (--r15,extsym-1),r7
mov.w (--r8,extsym-2048),r4
mov.b (--r7,extsym+2047),r3
mov.w (--r1,extsym+1),r1
mov.b (--r8,extsym+1004),r4
mov.w (--r14,extsym-1444),r2
mov.b (--r5,extsym-927),r4
EOF
# Finally, generate the .d expected-output file for the single testcase
# (allinsn.s) written by the heredoc above.
gentest allinsn
|
<gh_stars>10-100
#include "filelist.hpp"
// Returns a copy of `str` with every character converted to lower case.
// FIX: std::tolower has undefined behavior for negative char values; the
// character is cast to unsigned char before the call. Uses std::transform
// instead of a manual loop.
static std::string ToLowercase (const std::string& str)
{
	std::string res = str;
	std::transform (res.begin (), res.end (), res.begin (), [] (unsigned char c) {
		return static_cast<char> (std::tolower (c));
	});
	return res;
}
// Default-constructs an empty File (empty path and empty content buffer).
File::File () :
	path (),
	content ()
{
}

// Constructs a File from a path and its binary content (copied).
File::File (const std::string& path, const Buffer& content) :
	path (path),
	content (content)
{
}

// Returns the stored path exactly as it was given (not lower-cased).
const std::string& File::GetPath () const
{
	return path;
}

#ifdef EMSCRIPTEN
// Exposes the file content to JavaScript as a Uint8Array.
// NOTE(review): typed_memory_view aliases the underlying buffer — the
// returned view is only valid while this File object stays alive.
emscripten::val File::GetContentEmscripten () const
{
	emscripten::val Uint8Array = emscripten::val::global ("Uint8Array");
	return Uint8Array.new_ (emscripten::typed_memory_view (content.size (), content.data ()));
}
#endif
// Constructs an empty file list.
FileList::FileList () :
	files ()
{
}

// Appends a new File built from `path` and `content` to the list.
void FileList::AddFile (const std::string& path, const Buffer& content)
{
	files.push_back (File (path, content));
}

// Number of files currently stored.
size_t FileList::FileCount () const
{
	return files.size ();
}

// Unchecked indexed access; `index` must be < FileCount().
File& FileList::GetFile (size_t index)
{
	return files[index];
}

// Finds a file whose file-name component matches that of `path`
// (comparison is case-insensitive and ignores directories, via GetFileName).
// Returns nullptr when no file matches.
File* FileList::GetFile (const std::string& path)
{
	std::string fileName = GetFileName (path);
	for (File& file : files) {
		std::string currFileName = GetFileName (file.path);
		if (currFileName == fileName) {
			return &file;
		}
	}
	return nullptr;
}

// Const overload; delegates to the non-const version (which does not mutate).
const File& FileList::GetFile (size_t index) const
{
	return const_cast<FileList*> (this)->GetFile (index);
}

// Const overload; delegates to the non-const version (which does not mutate).
const File* FileList::GetFile (const std::string& path) const
{
	return const_cast<FileList*> (this)->GetFile (path);
}

#ifdef EMSCRIPTEN
// Bridge for JavaScript callers: converts a JS typed array / array-like
// value into a Buffer and stores it under `path`.
void FileList::AddFileEmscripten (const std::string& path, const emscripten::val& content)
{
	Buffer contentArr = emscripten::vecFromJSArray<std::uint8_t> (content);
	AddFile (path, contentArr);
}
#endif
// Extracts the file-name component of `path` (supporting both '/' and '\\'
// separators) and returns it lower-cased. When no separator is present the
// whole path is treated as a file name.
// FIX: the original compared against std::wstring::npos; although the value
// is identical, the correct constant for std::string is std::string::npos.
std::string GetFileName (const std::string& path)
{
	size_t lastSeparator = path.find_last_of ('/');
	if (lastSeparator == std::string::npos) {
		lastSeparator = path.find_last_of ('\\');
	}
	if (lastSeparator == std::string::npos) {
		return ToLowercase (path);
	}
	// substr with one argument runs to the end of the string, which is what
	// the original length arithmetic computed.
	return ToLowercase (path.substr (lastSeparator + 1));
}
|
<reponame>rcarlosdasilva/weixin<gh_stars>1-10
package io.github.rcarlosdasilva.weixin.core.exception;
/**
 * Thrown when the open-platform licensor access token cannot be fetched
 * from the Weixin (WeChat) API.
 */
public class CanNotFetchOpenPlatformLicensorAccessTokenException extends RuntimeException {
  // Serialization version for this unchecked exception.
  private static final long serialVersionUID = 2813748303142990709L;
}
|
package edu.ncsu.csc316.hub_manager.ui;
import java.util.Scanner;
import edu.ncsu.csc316.hub_manager.manager.AirlineHubManager;
/**
 * Console UI for AirlineHubManager: prompts for an airports input file,
 * shows a three-option menu and prints the selected report.
 *
 * @author <NAME>
 */
public class AirlineHubManagerUI {

	/**
	 * The main method which starts interaction between the user
	 * and the implemented methods within AirlineHubManager.
	 *
	 * @param args the argument parameter (unused)
	 */
	public static void main(String[] args) {
		@SuppressWarnings("resource")
		Scanner console = new Scanner(System.in);
		boolean filesRead = false;
		AirlineHubManager manager = null;
		// Keep prompting until the manager reports a successful file read.
		do {
			System.out.print("Enter name of file: ");
			String pathToAirportsFile = "input/" + console.next();
			manager = new AirlineHubManager(pathToAirportsFile);
			filesRead = manager.validRead();
		} while (!filesRead);
		boolean valid = true;
		int option = 0;
		System.out.print("Menu: \n[1] Generate Flight Connections\n[2] Produce Hub Report\n[3] Exit ");
		System.out.print("\nOption -> ");
		// Re-prompt until the user enters an integer in the range [1, 3].
		do {
			if (!console.hasNextInt()) {
				console.next();
				valid = false;
				System.out.println("Option is not an integer.");
				System.out.print("\nMenu: \n[1] Generate Flight Connections\n[2] Produce Hub Report\n[3] Exit ");
				System.out.print("\nOption -> ");
			} else {
				option = console.nextInt();
				if (option < 1 || option > 3) {
					valid = false;
					System.out.println("Invalid option.");
					System.out.print("\nMenu: \n[1] Generate Flight Connections\n[2] Produce Hub Report\n[3] Exit ");
					System.out.print("\nOption -> ");
				} else {
					valid = true;
				}
			}
		} while(!valid);
		// Dispatch to the selected report; option 3 simply exits.
		if (option == 1) {
			System.out.print("\n" + manager.getMinimumFlights());
		} else if (option == 2) {
			System.out.print("\n" + manager.getPossibleHubs());
		} else {
			System.out.println("\nGoodbye!");
		}
	}
}
|
using System;
using System.Collections.Generic;
/// <summary>Simple mutable data record describing a product.</summary>
public class Product
{
    public string Id { get; set; }     // Unique identifier (used as the repository key).
    public string Name { get; set; }   // Human-readable product name.
    public decimal Price { get; set; } // Unit price.
}
/// <summary>
/// In-memory product repository keyed by product id.
/// </summary>
public class AlternateRepository
{
    // Backing store; one entry per product id.
    private Dictionary<string, Product> products;

    public AlternateRepository()
    {
        products = new Dictionary<string, Product>();
    }

    /// <summary>Adds a new product.</summary>
    /// <exception cref="ArgumentException">Thrown when the id is already taken.</exception>
    public void AddProduct(string id, string name, decimal price)
    {
        if (products.ContainsKey(id))
        {
            throw new ArgumentException("Product with the same id already exists");
        }
        products[id] = new Product { Id = id, Name = name, Price = price };
    }

    /// <summary>Updates an existing product's name and price.</summary>
    /// <exception cref="KeyNotFoundException">Thrown when the id is unknown.</exception>
    public void UpdateProduct(string id, string name, decimal price)
    {
        // FIX: single TryGetValue lookup instead of ContainsKey followed by
        // two indexer lookups.
        if (!products.TryGetValue(id, out Product product))
        {
            throw new KeyNotFoundException("Product with the given id does not exist");
        }
        product.Name = name;
        product.Price = price;
    }

    /// <summary>Returns the product with the given id, or null when absent.</summary>
    public Product GetProduct(string id)
    {
        return products.TryGetValue(id, out Product product) ? product : null;
    }

    /// <summary>Returns all products with minPrice &lt;= Price &lt;= maxPrice (inclusive).</summary>
    public List<Product> GetProductsInPriceRange(decimal minPrice, decimal maxPrice)
    {
        List<Product> productsInPriceRange = new List<Product>();
        foreach (var product in products.Values)
        {
            if (product.Price >= minPrice && product.Price <= maxPrice)
            {
                productsInPriceRange.Add(product);
            }
        }
        return productsInPriceRange;
    }
}
# Returns a copy of +string+ with every lowercase vowel replaced by "-".
# Uppercase vowels are left untouched, matching the original behavior.
# Uses the idiomatic String#tr instead of a manual per-character loop.
def replaceVowels(string)
  string.tr("aeiou", "-")
end

puts replaceVowels("Hello World!")
#!/usr/bin/env bash

# Runs every integration test directory with a fresh local yarn cache and
# tracks payload sizes for selected tests.
set -e -o pipefail

cd `dirname $0`

# Track payload size functions
source ../scripts/ci/payload-size.sh
source ./_payload-limits.sh

# Workaround https://github.com/yarnpkg/yarn/issues/2165
# Yarn will cache file://dist URIs and not update Angular code
readonly cache=.yarn_local_cache
function rm_cache {
  rm -rf $cache
}
rm_cache
mkdir $cache
# Ensure the scratch cache is removed even when a test fails (set -e).
trap rm_cache EXIT

# We need to install `ng` but don't want to do it globally so we place it into `.ng-cli` folder.
(
  mkdir -p .ng-cli
  cd .ng-cli
  # workaround for https://github.com/yarnpkg/yarn/pull/4464 which causes cli to be installed into the root node_modules
  echo '{"name": "ng-cli"}' > package.json
  yarn init -y
  yarn add @angular/cli@$ANGULAR_CLI_VERSION --cache-folder ../$cache
)

./ng-cli-create.sh cli-hello-world

# Each sibling directory (except node_modules) is treated as one test.
for testDir in $(ls | grep -v node_modules) ; do
  [[ -d "$testDir" ]] || continue
  echo "#################################"
  echo "Running integration test $testDir"
  echo "#################################"
  (
    cd $testDir
    # Workaround for https://github.com/yarnpkg/yarn/issues/2256
    rm -f yarn.lock
    rm -rf dist
    yarn install --cache-folder ../$cache
    yarn test || exit 1
    # Track payload size for cli-hello-world and hello_world__closure
    if [[ $testDir == cli-hello-world ]] || [[ $testDir == hello_world__closure ]]; then
      if [[ $testDir == cli-hello-world ]]; then
        yarn build
      fi
      trackPayloadSize "$testDir" "dist/*.js" true false
    fi
  )
done

trackPayloadSize "umd" "../dist/packages-dist/*/bundles/*.umd.min.js" false false
|
<reponame>mikrowelt/tmc24-components
// Package entry point: re-export the Sidebar component as the default export.
import Sidebar from './src/sidebar';
export default Sidebar;
|
def find_min_max(a, b, c):
    """Return a tuple ``(minimum, maximum)`` of the three arguments."""
    values = (a, b, c)
    return min(values), max(values)
if __name__ == '__main__':
    # Demo: print the extremes of three sample values.
    a = 4
    b = 3
    c = 7
    min_num, max_num = find_min_max(a, b, c)
    print(f"Minimum number is {min_num} and maximum number is {max_num}")
<filename>app/controllers/menu/iosNavBar.js
// Alloy (Titanium) controller for the iOS navigation bar.
// args: controller arguments supplied by Alloy as arguments[0].
var args = arguments[0] || {};
var parentWindow = args.parentWindow;

// Closes the parent window and clears the global navigation-window reference.
// NOTE(review): assumes Alloy.Globals.navigationWindow refers to the window
// stack owning parentWindow — confirm against the callers of this controller.
function closeWindow(){
	Alloy.Globals.navigationWindow = null;
	parentWindow.close();
}
#pragma once

#include <thread>

#include "../handles.h"
#include "../repositories/system_status_repository.h"
#include "basic_command.h"

/**
 * @file fan_command.h
 * @author Group 7 - Informatica
 *
 * NOTE(review): the @file tag says fan_command.h but the class declared here
 * synchronizes the whole system status (parameterized by fan thresholds) —
 * confirm the doc tag matches the actual file name.
 */
namespace goliath::commands {
    /**
     * Command that updates the shared system-status repository and carries the
     * temperature thresholds used to enable/disable the fan.
     */
    class SynchronizeSystemStatusCommand : public BasicCommand {
    public:
        /**
         * @param id                     command identifier
         * @param systemStatusRepository shared repository receiving status updates
         * @param enableFanThreshold     temperature threshold for enabling the fan
         * @param disableFanThreshold    temperature threshold for disabling the fan
         */
        SynchronizeSystemStatusCommand(const size_t &id,
                                       std::shared_ptr<repositories::SystemStatusRepository> systemStatusRepository,
                                       double enableFanThreshold,
                                       double disableFanThreshold);

    private:
        // File the temperature is read from (value defined in the .cpp).
        static const std::string TEMPERATURE_FILE;
        std::shared_ptr<repositories::SystemStatusRepository> systemStatusRepository;
        double enableFanThreshold;   // fan-on threshold — exact comparison lives in execute(); TODO confirm
        double disableFanThreshold;  // fan-off threshold — exact comparison lives in execute(); TODO confirm
        void execute(handles::HandleMap &handles, const proto::CommandMessage &message) override;
    };
}
|
<filename>tests/pipeline_test.py<gh_stars>0
import math
import os
import unittest
from pipeline import Pipeline
import pandas as pd
from spacy.tokens.doc import Doc as sp_Doc
from utilities.spacy_utilities import Spacy_Manager
class PipelineTests(unittest.TestCase):
    """Unit tests for the Pipeline batching / feature-extraction workflow.

    Shape expectations are asserted inside the ``save_fn`` callbacks handed to
    Pipeline. NOTE(review): this assumes Pipeline invokes ``data_save_fn`` for
    every run — confirm against the Pipeline implementation.
    """

    # Set up and helper functions
    def setUp(self) -> None:
        # 6 rows x 5 columns; 'test_col' doubles as the text and ngram column.
        test_data = [
            [0, 0, 0, 0, 0],
            [1, 1, 1, 1, 1],
            [2, 2, 2, 2, 2],
            [3, 3, 3, 3, 3],
            [4, 4, 4, 4, 4],
            [5, 5, 5, 5, 5]
        ]
        # Single extra column, row-aligned with test_data.
        test_secondary_data = [
            [10],
            [11],
            [12],
            [13],
            [14],
            [15]
        ]
        self.test_df = pd.DataFrame(test_data, columns=['test_col', 'm1', 'm2', 'm3', 'm4'])
        self.secondary_test_df = pd.DataFrame(test_secondary_data, columns=['a'])
        self._log_path = './test_logs.json'
        return super().setUp()

    def tearDown(self) -> None:
        # Best-effort removal of the log file the Pipeline writes.
        try:
            os.remove(self._log_path)
        except:
            print('Error raised when deleting log file.')
        return super().tearDown()

    @staticmethod
    def simple_extraction_fn(data):
        # Identity "extraction": passes the batch through unchanged.
        return data

    # Test functions
    def test_standard_configuration(self):
        # Pre/post processing hooks plus identity extraction must preserve
        # the frame's shape end-to-end.
        pre_extraction_fns = [
            lambda x: x + 1,
            lambda x: x * 2
        ]
        post_extraction_fns = [
            lambda x: x - 1
        ]
        batch_size = 3

        def save_fn(df: pd.DataFrame):
            assert(df.shape[1] == self.test_df.shape[1])
            assert(df.shape[0] == self.test_df.shape[0])

        p = Pipeline(
            data_save_fn=save_fn,
            pre_extraction_fns=pre_extraction_fns,
            feature_extraction_fn=PipelineTests.simple_extraction_fn,
            post_extraction_fns=post_extraction_fns,
            text_column_name='test_col',
            ngram_column_name='test_col',
            batch_size=batch_size,
            log_filepath=self._log_path
        )
        p.start([self.test_df.copy(deep=True)])

    def test_no_processing_fns(self):
        # Empty hook lists must be accepted and leave the data unchanged.
        pre_extraction_fns = []
        post_extraction_fns = []
        batch_size = 3

        def save_fn(df: pd.DataFrame):
            assert(df.shape[1] == self.test_df.shape[1])
            assert(df.shape[0] == self.test_df.shape[0])

        p = Pipeline(
            data_save_fn=save_fn,
            pre_extraction_fns=pre_extraction_fns,
            feature_extraction_fn=PipelineTests.simple_extraction_fn,
            post_extraction_fns=post_extraction_fns,
            text_column_name='test_col',
            ngram_column_name='test_col',
            batch_size=batch_size,
            log_filepath=self._log_path
        )
        p.start([self.test_df.copy(deep=True)])

    def test_multiple_dataframes(self):
        # A secondary frame supplied via an iterator must be column-joined
        # onto the primary frame before saving.
        pre_extraction_fns = [
            lambda x: x + 1,
            lambda x: x * 2
        ]
        post_extraction_fns = [
            lambda x: x - 1
        ]
        batch_size = 3

        def save_fn(df: pd.DataFrame):
            assert(df.shape[1] == self.test_df.shape[1] + self.secondary_test_df.shape[1])
            assert(df.shape[0] == self.test_df.shape[0])
            assert(df.shape[0] == self.secondary_test_df.shape[0])

        p = Pipeline(
            data_save_fn=save_fn,
            pre_extraction_fns=pre_extraction_fns,
            feature_extraction_fn=PipelineTests.simple_extraction_fn,
            post_extraction_fns=post_extraction_fns,
            text_column_name='test_col',
            ngram_column_name='test_col',
            batch_size=batch_size,
            log_filepath=self._log_path
        )
        p.start([self.test_df.copy(deep=True)], [iter([self.secondary_test_df.copy(deep=True)])])

    def test_sp_docs(self):
        # With use_spacy=True the pipeline must add one spaCy-doc column
        # ('<text_column>_spdocs') holding spacy Doc objects.
        batch_size = 3
        test_text_df = pd.DataFrame([
            ["Lorem ipsum dolor sit amet consectetur adipiscing", 0],
            ["elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua", 1],
            ["Ut enim ad minim veniam quis nostrud exercitation", 2],
            ["ullamco laboris nisi ut aliquip ex ea commodo consequat", 3],
            ["Duis aute irure dolor in reprehenderit in voluptate", 4],
            ["velit esse cillum dolore eu fugiat nulla pariatur", 5]
        ], columns=['text', 'm1'])

        def save_fn(df: pd.DataFrame):
            assert(df.shape[0] == test_text_df.shape[0])
            assert(df.shape[1] == test_text_df.shape[1] + 1)
            assert(df.loc[:, 'text_spdocs'].dtype == sp_Doc)

        p = Pipeline(
            data_save_fn=save_fn,
            pre_extraction_fns=[],
            feature_extraction_fn=PipelineTests.simple_extraction_fn,
            post_extraction_fns=[],
            text_column_name='text',
            ngram_column_name='text',
            batch_size=batch_size,
            use_spacy=True,
            log_filepath=self._log_path
        )
        p.start([test_text_df.copy(deep=True)])

    def test_split_df(self):
        # _split_df must partition rows into ceil(rows / batch_size) chunks
        # whose concatenation reproduces the original frame exactly.
        batch_size = 4
        p = Pipeline(
            data_save_fn=None,
            pre_extraction_fns=[],
            feature_extraction_fn=PipelineTests.simple_extraction_fn,
            post_extraction_fns=[],
            text_column_name='',
            ngram_column_name='',
            batch_size=batch_size,
            log_filepath=self._log_path
        )
        res = p._split_df(self.test_df)
        res_concat = pd.concat(res, axis=0, ignore_index=True)
        assert(len(res) == math.ceil(self.test_df.shape[0] / batch_size))
        assert(sum([len(x) for x in res]) == self.test_df.shape[0])
        assert(res_concat.shape[0] == self.test_df.shape[0])
        assert(res_concat.shape[1] == self.test_df.shape[1])
        assert((res_concat == self.test_df).all(axis=None))
|
<gh_stars>0
// Fetches all feedback entries from the server and appends one collapsible
// card per entry (with resolved / not-resolved radio buttons) to #feedback.
function getAllFeedback() {
    $.get("/feedback/all", function (data) {
        if (!data) {
            return;
        }
        data.forEach(feedback => {
            // Pre-check the radio button matching the feedback's status.
            var radio_resolved_checked = "";
            var radio_unresolved_checked = "";
            if (feedback.status) {
                radio_resolved_checked = "checked";
            } else {
                radio_unresolved_checked = "checked";
            }
            // FIX: the original markup was missing the closing quote on
            // class='with-gap' for the first radio input and never closed
            // the <p> element, producing invalid HTML.
            var bstring = "\
            <div class='col s6'>\
                <ul class='collapsible curved'>\
                    <li>\
                        <div class='collapsible-header '><i class='material-icons'>filter_drama</i>"+feedback.user.name+"</div>\
                        <div class='collapsible-body '><span>"+feedback.description+"</span>\
                            <!-- radio buttons -->\
                            <form action='#'>\
                                <p>\
                                    <label>\
                                        <input name='group1' "+radio_resolved_checked+" type='radio' class='with-gap' />\
                                        <span>Resolved</span>\
                                    </label>\
                                    <label>\
                                        <input name='group1' "+radio_unresolved_checked+" type='radio' class='with-gap' />\
                                        <span>Not Resolved</span>\
                                    </label>\
                                </p>\
                            </form>\
                        </div>\
                    </li>\
                </ul>\
            </div>";
            $("#feedback").append(bstring);
        });
    });
}
// Reacts to pointer events; currently only mouse button presses are handled.
class PointerEventHandler {
    /// Dispatches a pointer event: mouse button presses are logged to stdout,
    /// every other pointer event is ignored.
    /// NOTE(review): `PointerEvent` is declared elsewhere — the nested
    /// `.Mouse(.Button(...))` pattern assumes that enum's shape.
    func handle(event: PointerEvent) {
        switch event {
        case .Mouse(.Button(let buttonEvent)):
            // Inner switch selects the message per mouse button.
            switch buttonEvent {
            case .Left:
                print("Left mouse button clicked")
            case .Right:
                print("Right mouse button clicked")
            case .Middle:
                print("Middle mouse button clicked")
            }
        default:
            break
        }
    }
}
#!/bin/bash

# Registers a named address on an ICON network via tbears, using the
# operator keystore of the chosen network.
. ./scripts/utils/utils.sh

function print_usage {
    usage_header ${0}
    usage_option " -n <network> : Network to use (localhost, yeouido, euljiro or mainnet)"
    usage_option " -r : Name of the address"
    usage_option " -a : Address value"
    usage_footer
    exit 1
}

function process {
    # All three parameters are mandatory.
    if [[ ("$network" == "") || ("$name" == "") || ("$address" == "") ]]; then
        print_usage
    fi

    package="address_registrar"

    # Inject the network-specific operator keystore into the tbears CLI config
    # and write the result to a temp file (cli_config_file).
    cli_config=$(cat ./config/${package}/${network}/tbears_cli_config.json | jq '.keyStore = "./config/keystores/'${network}'/operator.icx"')
    echo $cli_config >"${cli_config_file:=$(mktemp)}"

    # Build the sendtx command; ${var@Q} shell-quotes the values (bash >= 4.4).
    command=$(cat <<-COMMAND
tbears sendtx <(
python3 ./scripts/score/dynamic_call/register.py
${network@Q}
${name@Q}
${address@Q}
)
-c ${cli_config_file}
COMMAND
)

    txresult=$(./scripts/icon/txresult.sh -n "${network}" -c "${command}" -p address_registrar)
    echo -e "${txresult}"
}

# Parameters
while getopts "n:r:a:" option; do
    case "${option}" in
        n)
            network=${OPTARG}
            ;;
        r)
            name=${OPTARG}
            ;;
        a)
            address=${OPTARG}
            ;;
        *)
            print_usage
            ;;
    esac
done
shift $((OPTIND-1))

process
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.nacos.client.logger.option;
import java.util.List;
import java.util.Map;
import com.alibaba.nacos.client.logger.Level;
import com.alibaba.nacos.client.logger.Logger;
/**
 * <pre>
 * Options for activating a Logger, covering:
 *     Appender/Layout
 *     Level
 *     Additivity
 *     Async
 * See the concrete implementations for the exact behavior.
 * </pre>
 *
 * @author zhuyong 2014-03-20 10:20:51
 */
public interface ActivateOption {

    /**
     * Activates a ConsoleAppender. Use with caution in production.
     *
     * @param target   System.out or System.err
     * @param encoding character encoding
     */
    void activateConsoleAppender(String target, String encoding);

    /**
     * Activates a FileAppender whose log file rolls over daily.
     *
     * @param productName middleware product name, e.g. hsf, tddl
     * @param file        log file name, e.g. hsf.log; subdirectories such as client/hsf.log are supported
     * @param encoding    character encoding
     */
    void activateAppender(String productName, String file, String encoding);

    /**
     * Activates an AsyncAppender wrapping a DailyRollingFileAppender (daily
     * rollover); see {@link ActivateOption#activateAsync(int, int)}.
     *
     * @param productName middleware product name, e.g. hsf, tddl
     * @param file        log file name, e.g. hsf.log; subdirectories supported
     * @param encoding    character encoding
     */
    @Deprecated
    void activateAsyncAppender(String productName, String file, String encoding);

    /**
     * Activates an AsyncAppender wrapping a DailyRollingFileAppender (daily
     * rollover) with explicit queue settings; see
     * {@link ActivateOption#activateAsync(int, int)}.
     *
     * @param productName         middleware product name, e.g. hsf, tddl
     * @param file                log file name, e.g. hsf.log; subdirectories supported
     * @param encoding            character encoding
     * @param queueSize           size of the pending-event queue
     * @param discardingThreshold discarding threshold; only effective for the logback implementation, ignored by log4j and log4j2
     */
    @Deprecated
    void activateAsyncAppender(String productName, String file, String encoding, int queueSize,
                               int discardingThreshold);

    /**
     * Activates rolling by day and by file size.
     *
     * @param productName middleware product name, e.g. hsf, tddl
     * @param file        log file name, e.g. hsf.log; subdirectories supported
     * @param encoding    character encoding
     * @param size        file size such as 300MB; KB, MB and GB are supported; not effective for the log4j implementation (log4j2 and logback only)
     */
    void activateAppenderWithTimeAndSizeRolling(String productName, String file, String encoding, String size);

    /**
     * <pre>
     * Activates rolling by date pattern and file size.
     * Note: for Log4j the date pattern is not effective — only size-based
     * rolling applies and no backup file is kept (the file is truncated when
     * the size is reached). If backup files are needed, use the overload that
     * takes a maxBackupIndex parameter.
     * </pre>
     *
     * @param productName middleware product name, e.g. hsf, tddl
     * @param file        log file name, e.g. hsf.log; subdirectories supported
     * @param encoding    character encoding
     * @param size        file size such as 300MB; KB, MB and GB are supported
     * @param datePattern date pattern such as yyyy-MM-dd or yyyy-MM (caller must ensure validity); not effective for log4j (log4j2 and logback only)
     */
    void activateAppenderWithTimeAndSizeRolling(String productName, String file, String encoding, String size,
                                                String datePattern);

    /**
     * <pre>
     * Activates rolling by date pattern, file size and maximum backup count.
     * Notes:
     * 1. For Log4j the date pattern is not effective — rolling is by size and
     *    backup count only; maxBackupIndex must be an integer >= 0, where 0
     *    means truncate without keeping backups.
     * 2. Backup file naming:
     *    Log4j:   notify.log.1, notify.log.2 — suffixes .1, .2 starting at 1
     *    Logback: notify.log.2014-09-19.0, notify.log.2014-09-19.1 — a date
     *             component is included and numbering starts at 0
     * </pre>
     *
     * @param productName    middleware product name, e.g. hsf, tddl
     * @param file           log file name, e.g. hsf.log; subdirectories supported
     * @param encoding       character encoding
     * @param size           file size such as 300MB; KB, MB and GB are supported
     * @param datePattern    date pattern such as yyyy-MM-dd or yyyy-MM (caller must ensure validity); not effective for log4j (log4j2 and logback only)
     * @param maxBackupIndex maximum number of backup files, e.g. 10 (for Logback this keeps 10 days of files, each day rolling by size)
     */
    void activateAppenderWithTimeAndSizeRolling(String productName, String file, String encoding, String size,
                                                String datePattern, int maxBackupIndex);

    /**
     * <pre>
     * Activates rolling by file size and maximum backup count.
     * Notes:
     * 1. For Log4j, maxBackupIndex must be an integer >= 0, where 0 means
     *    truncate without keeping backups.
     * 2. Backup file naming:
     *    Log4j:   notify.log.1, notify.log.2 — suffixes .1, .2 starting at 1
     *    Logback: notify.log.1, notify.log.1
     * </pre>
     *
     * @param productName    middleware product name, e.g. hsf, tddl
     * @param file           log file name, e.g. hsf.log; subdirectories supported
     * @param encoding       character encoding
     * @param size           file size such as 300MB; KB, MB and GB are supported
     * @param maxBackupIndex maximum number of backup files, e.g. 10
     */
    void activateAppenderWithSizeRolling(String productName, String file, String encoding, String size,
                                         int maxBackupIndex);

    /**
     * Replaces this logger's appender with an asynchronous appender.
     * Note: the logger's appender must have been initialized beforehand.
     *
     * @param queueSize           size of the pending-event queue
     * @param discardingThreshold discarding threshold; only effective for the logback implementation, ignored by log4j and log4j2
     * @since 0.2.2
     */
    void activateAsync(int queueSize, int discardingThreshold);

    /**
     * Replaces this logger's appender with an asynchronous appender.
     * Note: the logger's appender must have been initialized beforehand.
     *
     * @param args AsyncAppender configuration; the caller must ensure correctness.
     *             Each Object[] has 3 elements: the setter name, the parameter
     *             type array and the corresponding values, e.g.
     *             args.add(new Object[] { "setBufferSize", new Class&lt;?&gt;[] { int.class }, queueSize });
     * @since 0.2.3
     */
    void activateAsync(List<Object[]> args);

    /**
     * Initializes this logger using the appenders of the given logger.
     *
     * @param logger the logger whose appenders are reused
     */
    void activateAppender(Logger logger);

    /**
     * Sets the log level.
     *
     * @param level log level
     * @see Level
     */
    void setLevel(Level level);

    /**
     * Returns the current log level.
     *
     * @return level
     */
    Level getLevel();

    /**
     * Sets whether log events are also forwarded to the parent logger.
     *
     * @param additivity true or false
     */
    void setAdditivity(boolean additivity);

    /**
     * Returns the product name this logger belongs to.
     *
     * @return the owning product name
     */
    String getProductName();
}
|
<reponame>zouvier/BlockChain-Voting
import type { ProjectPathsConfig, SolcConfig } from "../../types";

/** Cache metadata recorded for a single Solidity source file. */
export interface CacheEntry {
    lastModificationDate: number;   // mtime (ms since epoch) when the entry was recorded
    contentHash: string;            // hash of the file's content at cache time
    sourceName: string;
    solcConfig: SolcConfig;         // compiler settings the file was compiled with
    imports: string[];
    versionPragmas: string[];
    artifacts: string[];
}

/** On-disk cache layout: a format/version marker plus one entry per file. */
export interface Cache {
    _format: string;
    files: Record<string, CacheEntry>;
}

/** Declaration of the Solidity compilation cache (implementation elsewhere). */
export declare class SolidityFilesCache {
    private _cache;
    static createEmpty(): SolidityFilesCache;
    static readFromFile(solidityFilesCachePath: string): Promise<SolidityFilesCache>;
    constructor(_cache: Cache);
    removeNonExistingFiles(): Promise<void>;
    writeToFile(solidityFilesCachePath: string): Promise<void>;
    addFile(absolutePath: string, entry: CacheEntry): void;
    getEntries(): CacheEntry[];
    getEntry(file: string): CacheEntry | undefined;
    removeEntry(file: string): void;
    hasFileChanged(absolutePath: string, contentHash: string, solcConfig?: SolcConfig): boolean;
}

/** Resolves the path of the cache file for the given project layout. */
export declare function getSolidityFilesCachePath(paths: ProjectPathsConfig): string;
//# sourceMappingURL=solidity-files-cache.d.ts.map |
// Mongoose model for a movie record.
const mongoose = require("mongoose");
const Schema = mongoose.Schema;

// Fields: numeric id, unique name, release year, plot summary, poster URL,
// and a creation timestamp defaulting to the insertion time.
const MovieSchema = new Schema({
    id: Number,
    moviename: {
        type: String,
        unique: true
    },
    year: String,
    plot: String,
    poster: String,
    date: { type: Date, default: Date.now }
});

module.exports = mongoose.model("Movie", MovieSchema);
|
#!/bin/sh

# Usage: release.sh <version>
# Updates the podspec version line, commits/tags the release and pushes
# the pod to the CocoaPods trunk.
framework_version="$1"
podspec_path="PhyKitCocoapod.podspec"

echo "$framework_version"

# FIX: `function name {` is a bashism that fails under a POSIX /bin/sh;
# the portable `name() {` form is used instead throughout.
updatePodspec() {
    i_podspec_path="$1"
    i_version="$2"
    version_string="automaticVersion = '$i_version'"
    echo "Updating podspec version to $i_version"
    # Replace line 1 of the podspec with the new version assignment.
    # (sed -i '' is the BSD/macOS form, matching this script's target platform.)
    sed -i '' "1s/.*/${version_string}/" "$i_podspec_path"
}

updateGit() {
    i_version_tag="$1"
    # Stage all changes
    git add -A
    # Commit and push changes
    git commit -e
    git push -u origin HEAD
    # Delete current version tag from any commits if it exists
    # git tag -d "$framework_version"
    # git fetch
    # git push origin --delete "$framework_version"
    # git tag -d "$framework_version"
    # Update tag (force, so re-releasing the same version moves the tag)
    git tag -fa "$i_version_tag" -m "Automatically updating tag to $i_version_tag"
    git push origin "refs/tags/$i_version_tag"
}

pushPod() {
    i_podspec_path="$1"
    pod trunk push "$i_podspec_path"
}

updatePodspec "$podspec_path" "$framework_version"
updateGit "$framework_version"
pushPod "$podspec_path"
|
<gh_stars>0
import React, {PropTypes} from "react";
import {bindActionCreators} from "redux";
import {connect} from "react-redux";
import * as dialogActions from "../../actions/dialogActions";
import {
Button, ButtonType,
Dialog, DialogType
} from "office-ui-fabric-react";
/**
 * Dialog Demo
 *
 * Presentational component: a button that opens a blocking Fabric dialog.
 * Open/closed state lives in the redux store (state.dialog.showDialog).
 * NOTE(review): PropTypes is imported from "react" at the top of this file,
 * which only works on React < 15.5 — confirm the project's React version.
 */
const DialogDemo = (props) => {
    let {showDialog} = props;

    // Method to hide the dialog
    let hide = () => {
        // Hide the dialog (dispatches the redux "hide" action)
        props.actions.hide();
    }

    // The button click event
    let onClick = (event) => {
        // Disable postback
        event.preventDefault();
        // Show the dialog (dispatches the redux "show" action)
        props.actions.show();
    };

    // Render the component
    return (
        <div>
            <Button
                buttonType={ButtonType.normal}
                onClick={event => onClick(event)}
            >Open Dialog</Button>
            <Dialog
                onDismiss={hide}
                isBlocking={true}
                isOpen={showDialog}
                title="Dialog Example"
                type={DialogType.close}>
                <h5>This is where you dialog components go.</h5>
            </Dialog>
        </div>
    );
};

/**
 * Properties
 */
DialogDemo.propTypes = {
    showDialog: PropTypes.bool
};

/**
 * Connections: maps redux state/actions onto the component's props.
 */
export default connect(
    /**
     * State to Property Mapper
     */
    (state, ownProps) => {
        return {
            showDialog: state.dialog.showDialog,
        };
    },
    /**
     * Actions Mapper
     */
    (dispatch) => {
        return {
            actions: bindActionCreators(dialogActions, dispatch)
        };
    }
)(DialogDemo);
<gh_stars>0
'use strict';

const util = require('util');
const path = require('path');
const fs = require('fs');
const flatten = require('uvwlib/lib/utils/flatten');

/**
 * Resolves any mix of path fragments (including nested arrays of fragments)
 * to an absolute path.
 */
function resolve () {
  return path.resolve.apply(null, flatten(arguments));
}

const utils = {
  /** util.inspect with unlimited depth by default. */
  inspect: function (obj, opts = { showHidden: false, depth: null }) {
    return util.inspect(obj, opts);
  },

  /** console.log, but every argument is rendered through utils.inspect. */
  log: function () {
    console.log.apply(console, Array.prototype.map.call(arguments, x => utils.inspect(x)));
  },

  resolve: resolve,

  /** True when the resolved path exists and is a directory. */
  isDirectory: function () {
    try {
      return fs.statSync(resolve.apply(null, arguments)).isDirectory();
    } catch (ex) {
      return false;
    }
  },

  /** True when the resolved path exists and is a regular file. */
  isFile: function () {
    try {
      return fs.statSync(resolve.apply(null, arguments)).isFile();
    } catch (ex) {
      return false;
    }
  },

  /**
   * Returns { file, ext } for the first extension in `exts` such that
   * basedir/fname + ext is an existing file; undefined when none match.
   */
  hasFileExt: function (basedir, fname, exts) {
    for (var i = 0; i < exts.length; i++) {
      var f = path.resolve(basedir, fname + exts[i]);
      if (utils.isFile(f)) return {
        file: f,
        ext: exts[i]
      };
    }
  },

  /**
   * Reads a file assembled from path fragments. The last argument may be an
   * options object: { relPath, defaultFile, extName, encoding }.
   * Returns the file contents, or undefined when the file is unreadable.
   */
  readFile: function () {
    var len = arguments.length;
    if (len === 0) return;
    // FIX: `opts` was declared twice with `var` in the original version.
    var opts = arguments[len - 1];
    // A trailing object (or falsy value) is treated as the options argument.
    if (!opts || typeof opts === 'object') len--;
    opts = opts || {};
    var names = [].slice.call(arguments, 0, len);
    if (opts.relPath) names.push(opts.relPath);
    var fpath = resolve.apply(null, names);
    try {
      var stats = fs.statSync(fpath);
      if (stats.isDirectory() && opts.defaultFile) {
        fpath = path.join(fpath, opts.defaultFile);
        stats = fs.statSync(fpath);
      }
      var extName = opts.extName;
      if (extName && extName[0] !== '.') extName = '.' + extName;
      if (extName && stats.isFile()) {
        // Force the configured extension, re-stating the adjusted path.
        var ext = path.extname(fpath);
        if (!ext || ext !== extName) {
          var fname = path.basename(fpath, ext) + extName;
          fpath = path.resolve(path.dirname(fpath), fname);
          stats = fs.statSync(fpath);
        }
      }
      if (stats.isFile()) return fs.readFileSync(fpath, opts.encoding || 'utf8');
    } catch (ex) {
      console.error(ex);
    }
    // undefined otherwise
  },
};

// Aliases for isDirectory.
utils.isDir = utils.isFolder = utils.isDirectory;

module.exports = utils;
|
package com.ur.urcap.examples.idletime.impl;
import com.ur.urcap.api.contribution.ProgramNodeContribution;
import com.ur.urcap.api.contribution.ProgramNodeService;
import com.ur.urcap.api.domain.URCapAPI;
import com.ur.urcap.api.domain.data.DataModel;
import java.io.InputStream;
/**
 * URCap program-node service registering the "Idle Time" node: provides its
 * id, title, HTML view and contribution factory.
 */
public class IdleTimeProgramNodeService implements ProgramNodeService {

	/** Unique id of this program node type. */
	@Override
	public String getId() {
		return "IdleTime";
	}

	@Override
	public boolean isDeprecated() {
		return false;
	}

	/** Child program nodes may be nested under this node. */
	@Override
	public boolean isChildrenAllowed() {
		return true;
	}

	/** Title shown in the program tree. */
	@Override
	public String getTitle() {
		return "Idle Time";
	}

	/** Loads the node's HTML view from the bundled resource. */
	@Override
	public InputStream getHTML() {
		return this.getClass().getResourceAsStream("/com/ur/urcap/examples/idletime/impl/IdleTimeProgramNode.html");
	}

	/** Creates the per-instance contribution backing a node in a program. */
	@Override
	public ProgramNodeContribution createNode(URCapAPI api, DataModel model) {
		return new IdleTimeProgramNodeContribution(api, model);
	}
}
|
export DOWNLOAD_PATH="data/"
#wget https://www.mpi-inf.mpg.de/departments/computer-vision-and-multimodal-computing/research/gaze-based-human-computer-interaction/appearance-based-gaze-estimation-in-the-wild-mpiigaze/ -P $download_path

# test download path
wget https://www.mpi-inf.mpg.de/fileadmin/inf/d2/xucong/MPIIGaze/model.png -P $DOWNLOAD_PATH
# FIX: the original read `if [-f $file];` (missing spaces around `[`, a syntax
# error) and tested the absolute path /data/model.png although wget saves into
# the relative data/ directory.
file="${DOWNLOAD_PATH}model.png"
if [ -f "$file" ];
then
	echo "successfully downloaded"
fi

wget http://datasets.d2.mpi-inf.mpg.de/MPIIGaze/MPIIFaceGaze.zip -P $DOWNLOAD_PATH
# FIX: the archives are saved under ${DOWNLOAD_PATH}, not code/.
unzip "${DOWNLOAD_PATH}MPIIFaceGaze.zip"
wget http://datasets.d2.mpi-inf.mpg.de/MPIIGaze/MPIIFaceGaze_normalized.zip -P $DOWNLOAD_PATH
unzip "${DOWNLOAD_PATH}MPIIFaceGaze_normalized.zip"
|
package com.twu.biblioteca;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.ByteArrayInputStream;
import java.util.Scanner;
import static com.twu.biblioteca.Library.*;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
/**
* Created by alexa on 1/02/2019.
*/
public class LibraryTest {
Library library;
@Rule
public final ExpectedException failure = ExpectedException.none();
@Before
public void setUp() {
library = new Library();
}
/**
* The tests below are for a list of books up until the next commenting
*/
@Test
public void shouldPrintOneBookDetailsWhenThereIsOneBook() {
library.addBook(new Book("Title 1", "Author 1", "2000"));
assertThat(library.getLibraryItems(BOOKS),
is(" | 1 | Title 1 | Author 1 | 2000 |\n"));
}
@Test
public void shouldPrintBothBookDetailsWhenThereAreTwoBooks() {
library.addBook(new Book("Title 1", "Author 1", "2000"));
library.addBook(new Book("Title 2", "Author 2", "2002"));
assertThat(library.getLibraryItems(BOOKS),
is(" | 1 | Title 1 | Author 1 | 2000 |\n" +
" | 2 | Title 2 | Author 2 | 2002 |\n"));
}
@Test
public void shouldPrintBookDetailsWithColumns() {
library.addBook(new Book("Title 3", "Author 3", "2019"));
//60
String expected =
" | No | Title | Author | Year |\n" +
"---------------------------------------------------------\n" +
" | 1 | Title 3 | Author 3 | 2019 |\n";
assertThat(library.getAllBookDetailsWithColumn(), is(expected));
}
@Test
public void shouldPrintOnlyAvailableBooks() {
library.addBook(new Book("Title 2", "Author 2", "2000", false));
library.addBook(new Book("Title 3", "Author 3", "2001", true));
library.addBook(new Book("Title 4", "Author 4", "2002", true));
String expected =
" | 1 | Title 3 | Author 3 | 2001 |\n" +
" | 2 | Title 4 | Author 4 | 2002 |\n";
assertThat(library.getLibraryItems(BOOKS), is(expected));
}
@Test
public void shouldReturnTheCorrectBookAccordingToTitle() {
Book book = new Book("Title 6", "Author 6", "2019");
library.addBook(new Book("Title 5", "Author 5", "2019"));
library.addBook(book);
library.addBook(new Book("Title 7", "Author 7", "2019"));
assertThat(library.findItem("Title 6", BOOKS).getFullDetail(),
is(book.getFullDetail()));
}
@Test
public void shouldReturnNullWhenThereIsNoBookWithDesiredTitle() {
assertThat(library.findItem("", BOOKS), is(nullValue()));
}
@Test
public void shouldGetUserInputAndCheckOutABookCorrectly() {
Book wantToBorrow = new Book("Title 9", "Author 9", "2019");
library.addBook(wantToBorrow);
String data = "Title 9";
System.setIn(new ByteArrayInputStream(data.getBytes()));
library.borrowProcess(new Scanner(System.in), null, BOOKS);
assertThat(wantToBorrow.isAvailable(), is(false));
}
@Test
public void shouldGetUserInputAndReturnABookCorrectly() {
Book wantToBorrow = new Book("Title 10", "Author 9", "2019", false);
library.addBook(wantToBorrow);
String data = "Title 10";
System.setIn(new ByteArrayInputStream(data.getBytes()));
library.borrowProcess(new Scanner(System.in), null, BOOKS);
assertThat(wantToBorrow.isAvailable(), is(false));
}
@Test
public void shouldShowBorrowedBookWithLibraryNumber() {
Book book = new Book("Title 1", "Author 1", "2019", false);
Customer customer = new Customer("123-4567", "name name", "phone", "email", "pass");
book.setBorrowedBy(customer);
Book book2 = new Book("Title 2", "Author 2", "2019", false);
Customer customer2 = new Customer("123-4568", "name name", "phone", "email", "pass");
book2.setBorrowedBy(customer2);
library.addBook(book);
library.addBook(book2);
String expected =
" | 1 | 123-4567 | Title 1 | Author 1 | 2019 |\n" +
" | 2 | 123-4568 | Title 2 | Author 2 | 2019 |\n";
assertThat(library.printBorrowedBooks(), is(expected));
}
/**
* The tests below are for a list of movies
*/
@Test
public void shouldPrintOneMovieDetailsWhenThereIsOneBook() {
library.addMovie(new Movie("Title 1", "2000", "Director 1", 10));
assertThat(library.getLibraryItems(MOVIES),
is(" | 1 | Title 1 | 2000 | Director 1 | 10 |\n"));
}
@Test
public void shouldPrintBothMovieDetailsWhenThereAreTwoBooks() {
library.addMovie(new Movie("Title 1", "2000", "Director 1", 1));
library.addMovie(new Movie("Title 2", "2002", "Director 2", 5));
assertThat(library.getLibraryItems(MOVIES),
is(" | 1 | Title 1 | 2000 | Director 1 | 1 |\n" +
" | 2 | Title 2 | 2002 | Director 2 | 5 |\n"));
}
@Test
public void shouldPrintOnlyAvailableMovies() {
library.addMovie(new Movie("Title 2", "2000", "Director 2", false));
library.addMovie(new Movie("Title 3", "2000", "Director 3", true));
library.addMovie(new Movie("Title 4", "2000", "Director 4", true));
String expected =
" | 1 | Title 3 | 2000 | Director 3 | Unrated |\n" +
" | 2 | Title 4 | 2000 | Director 4 | Unrated |\n";
assertThat(library.getLibraryItems(MOVIES), is(expected));
}
@Test
public void shouldPrintMovieDetailsWithColumns() {
library.addMovie(new Movie("Title 3", "2003", "Director 3", 7));
//65
// Header row, then a separator line (the "//65" above appears to note its
// dash count — TODO confirm), then one formatted row per movie.
String expected =
" | No | Title | Year | Director | Rating |\n" +
"-----------------------------------------------------------------\n" +
" | 1 | Title 3 | 2003 | Director 3 | 7 |\n";
assertThat(library.getAllMovieDetailsWithColumn(), is(expected));
}
@Test
// findItem(title, MOVIES) must locate the movie with a matching title among
// several; equality is asserted via getFullDetail() rather than identity.
public void shouldReturnTheCorrectMovieAccordingToTitle() {
Movie movie = new Movie("Title 2", "2019", "Director 2");
library.addMovie(new Movie("Title 1", "2019", "Director 1"));
library.addMovie(movie);
library.addMovie(new Movie("Title 3", "2019", "Director 3"));
assertThat(library.findItem("Title 2", MOVIES).getFullDetail(),
is(movie.getFullDetail()));
}
@Test
// Searching an empty collection (or a non-matching title) yields null.
public void shouldReturnNullWhenThereIsNoMovieWithDesiredTitle() {
assertThat(library.findItem("", MOVIES), is(nullValue()));
}
@Test
public void shouldGetUserInputAndCheckOutAMovieCorrectly() {
Movie wantToBorrow = new Movie("Title 1", "2000", "Director 1");
library.addMovie(wantToBorrow);
// Simulate the user typing the title on stdin; borrowProcess reads it via Scanner.
String data = "Title 1";
System.setIn(new ByteArrayInputStream(data.getBytes()));
// NOTE(review): System.in is replaced globally and never restored — fine while
// each test installs its own stream, but worth a tearDown if tests share state.
library.borrowProcess(new Scanner(System.in), null, MOVIES);
// A successful checkout flips the movie's availability flag to false.
assertThat(wantToBorrow.isAvailable(), is(false));
}
@Test
// With both a book and a movie in the library, getLibraryItems(BOOKS)
// must list only the book.
public void shouldReturnOnlyBooks() {
library.addBook(new Book("Title 1", "Author 1", "2000"));
library.addMovie(new Movie("Title 3", "2003", "Director 3", 7));
assertThat(library.getLibraryItems(BOOKS),
is(" | 1 | Title 1 | Author 1 | 2000 |\n"));
}
}
|
#!/bin/bash
# Benchmark driver: runs SubGorder-eval-final twice over the selected graph
# (once with the requested minimum ratio, once with ratio 1 as a baseline)
# and times Ligra's PageRank / PageRankDelta on every sampled subgraph.
#
# Usage: <script> <graph> <sample-ratio> <parallel> <minimum-ratio> <average>
CORE=`lscpu | grep socket | grep -o "[0-9]*"`
HYPER=`lscpu | grep core | grep -o "[0-9]*"`
MAX_THREAD=$(( CORE * HYPER ))
SELECT_GRAPH=$1
SAMPLE_RATIO=$2
PARALLEL=$3
MINIMUM_RATIO=$4
AVERAGE=$5

# Remove intermediate files; -f keeps rm quiet when none exist.
clean() {
    rm -f *.txt *.ans *.exectime *.rwtime
}

# Convert each sampled subgraph to Ligra's adjacency format and time PageRank
# and PageRankDelta on it, then drop the intermediates. Sample files are
# prefixed with $PARALLEL, matching the naming used by SubGorder-eval-final
# (NOTE(review): confirm the prefix is indeed the thread count, not the graph).
run_benchmarks() {
    for j in `seq 1 $AVERAGE`
    do
        ./../ligra/utils/SNAPtoAdj $PARALLEL"-"$j"-sample.txt" $PARALLEL"-"$j".adj"
        ./../ligra/apps/PageRank $PARALLEL"-"$j".adj" | grep -o "[0-9.]*"
        ./../ligra/apps/PageRankDelta $PARALLEL"-"$j".adj" | grep -o "[0-9.]*"
    done
    rm -f *.adj
    clean
}

clean
# One MPI rank per worker thread plus one coordinator rank must fit on the box.
if [ "$(($PARALLEL+1))" -gt "$MAX_THREAD" ]
then
    echo "Exceed Max Thread $MAX_THREAD"
else
    echo "My Proposal"
    mpirun -np $(($PARALLEL+1)) ./SubGorder-eval-final $SELECT_GRAPH $SAMPLE_RATIO $PARALLEL $MINIMUM_RATIO $AVERAGE
    run_benchmarks
    echo "Before"
    # Baseline: identical run but with the minimum ratio forced to 1.
    mpirun -np $(($PARALLEL+1)) ./SubGorder-eval-final $SELECT_GRAPH $SAMPLE_RATIO $PARALLEL 1 $AVERAGE
    run_benchmarks
fi
|
"""A DLite storage plugin for BSON written in Python."""
import os
import bson as pybson # Must be pymongo.bson
import dlite
from dlite.options import Options
from dlite.utils import instance_from_dict
class bson(DLiteStorageBase):
    """DLite storage plugin for BSON."""

    def open(self, uri, options=None):
        """Open `uri`.

        The `options` argument provides additional input to the driver.
        Which options that are supported varies between the plugins.
        It should be a valid URL query string of the form:

            key1=value1;key2=value2...

        An ampersand (&) may be used instead of the semicolon (;).

        Typical options supported by most drivers include:
        - mode : append (default) | r | w
          Valid values are:
          - append   Append to existing file or create new file
          - r        Open existing file for read-only
          - w        Truncate existing file or create new file

        After the options are passed, this method may set attribute
        `writable` to True if it is writable and to False otherwise.
        If `writable` is not set, it is assumed to be True.

        The BSON data is translated to JSON.
        """
        self.options = Options(options, defaults='mode=append')
        self.mode = dict(r='rb', w='wb', append='rb+')[self.options.mode]
        if self.mode == 'rb' and not os.path.exists(uri):
            raise FileNotFoundError(f"Did not find URI '{uri}'")
        # Only pure read-only mode is non-writable.  (The original
        # `'rb' in self.mode` also matched 'rb+', which made the default
        # append mode read-only and turned close()'s write path into dead
        # code.)
        self.writable = self.mode != 'rb'
        self.uri = uri
        self.d = {}
        # Load existing content for read and append modes.  In append mode
        # the file may legitimately not exist yet — close() creates it —
        # so skip reading in that case instead of crashing in open().
        if self.mode in ('rb', 'rb+') and os.path.exists(uri):
            with open(uri, self.mode) as f:
                bson_data = f.read()
            if not pybson.is_valid(bson_data):
                raise EOFError(f"Invalid BSON data in source '{uri}'")
            self.d = pybson.decode(bson_data)
            if not self.d:
                raise EOFError(f"Failed to read BSON data from '{uri}'")

    def close(self):
        """Close this storage and write the data to file.

        Assumes the data to store is in JSON format.
        """
        if self.writable:
            # In append mode the target may not exist yet; create it.
            if self.mode == 'rb+' and not os.path.exists(self.uri):
                mode = 'wb'
            else:
                mode = self.mode
            for uuid in self.queue():
                props = self.d[uuid]['properties']
                if isinstance(props, dict):  # Metadata props is list
                    for key in props.keys():
                        # BSON cannot round-trip raw binary here; store hex.
                        if isinstance(props[key], (bytearray, bytes)):
                            props[key] = props[key].hex()
                    self.d[uuid]['properties'] = props
            with open(self.uri, mode) as f:
                f.write(pybson.encode(self.d))

    def load(self, uuid):
        """Load `uuid` from current storage and return it
        as a new instance.

        Raises KeyError if `uuid` is not in the storage.
        """
        if uuid in self.d:
            return instance_from_dict(self.d[uuid])
        raise KeyError(f"Instance with id '{uuid}' not found")

    def save(self, inst):
        """Store `inst` in the current storage (keyed by its UUID)."""
        self.d[inst.uuid] = inst.asdict()

    def queue(self, pattern=None):
        """Generator method that iterates over all UUIDs in
        the storage who's metadata URI matches glob pattern
        `pattern` (all UUIDs when `pattern` is None).
        """
        for uuid, d in self.d.items():
            # NOTE(review): this skips entries for which dlite.globmatch()
            # is truthy — consistent with a C-style globmatch returning 0
            # on match, but verify against dlite's documentation.
            if pattern and dlite.globmatch(pattern, d['meta']):
                continue
            yield uuid
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Run a Hadoop command on all worker hosts.
# Usage text for this deprecated wrapper around "hdfs --workers --daemon".
function hadoop_usage
{
echo "Usage: hadoop-daemons.sh [--config confdir] [--hosts hostlistfile] (start|stop|status) <hadoop-command> <args...>"
}
# Resolve this script's real directory so libexec can be located relative to it.
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
# shellcheck disable=SC2034
HADOOP_NEW_CONFIG=true
if [[ -f "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh" ]]; then
  . "${HADOOP_LIBEXEC_DIR}/hdfs-config.sh"
else
  # Fixed: error text now goes to stderr (was "2>&1", which sent it to stdout).
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hdfs-config.sh." 1>&2
  exit 1
fi
if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi
# First argument is the daemon mode (start|stop|status); the rest is passed on.
daemonmode=$1
shift
if [[ -z "${HADOOP_HDFS_HOME}" ]]; then
  hdfsscript="${HADOOP_HOME}/bin/hdfs"
else
  hdfsscript="${HADOOP_HDFS_HOME}/bin/hdfs"
fi
hadoop_error "WARNING: Use of this script to ${daemonmode} HDFS daemons is deprecated."
hadoop_error "WARNING: Attempting to execute replacement \"hdfs --workers --daemon ${daemonmode}\" instead."
#
# Original input was usually:
#  hadoop-daemons.sh (shell options) (start|stop) (datanode|...) (daemon options)
# we're going to turn this into
#  hdfs --workers --daemon (start|stop) (rest of options)
#
# Drop the mode word from the saved user parameters so it is not passed twice.
for (( i = 0; i < ${#HADOOP_USER_PARAMS[@]}; i++ ))
do
  if [[ "${HADOOP_USER_PARAMS[$i]}" =~ ^start$ ]] ||
     [[ "${HADOOP_USER_PARAMS[$i]}" =~ ^stop$ ]] ||
     [[ "${HADOOP_USER_PARAMS[$i]}" =~ ^status$ ]]; then
    unset HADOOP_USER_PARAMS[$i]
  fi
done
${hdfsscript} --workers --daemon "${daemonmode}" "${HADOOP_USER_PARAMS[@]}"
|
from bs4 import BeautifulSoup

# Given HTML code snippet
html_code = '''
</div>
</div>
</div>
</div>
</td>
</tr>
@endforeach
</tbody>
</table>
</div>
</div>
</div>
'''


def extract_rows(markup):
    """Parse `markup` and return the first table's rows, each row being a
    list of stripped cell texts (both <td> and <th> cells). Returns an
    empty list when no table is present."""
    parsed = BeautifulSoup(markup, 'html.parser')
    first_table = parsed.find('table')
    if not first_table:
        return []
    return [
        [cell.get_text(strip=True) for cell in tr.find_all(['td', 'th'])]
        for tr in first_table.find_all('tr')
    ]


# Output the extracted table data, one tab-separated line per row.
for extracted_row in extract_rows(html_code):
    print('\t'.join(extracted_row))
<gh_stars>1-10
package im.status.keycard.applet;
import java.util.Arrays;
/**
* Tiny BER-TLV implementation. Not for general usage, but fast and easy to use for this project.
*/
public class TinyBERTLV {
  public static final byte TLV_BOOL = (byte) 0x01;
  public static final byte TLV_INT = (byte) 0x02;
  public static final int END_OF_TLV = (int) 0xffffffff;

  /** Raw TLV data being parsed. */
  private byte[] buffer;
  /** Current read offset into {@link #buffer}. */
  private int pos;

  public TinyBERTLV(byte[] buffer) {
    this.buffer = buffer;
    this.pos = 0;
  }

  /**
   * Enters a constructed TLV with the given tag
   *
   * @param tag the tag to enter
   * @return the length of the TLV
   * @throws IllegalArgumentException if the next tag does not match the given one
   */
  public int enterConstructed(int tag) throws IllegalArgumentException {
    checkTag(tag, readTag());
    return readLength();
  }

  /**
   * Reads a primitive TLV with the given tag
   *
   * @param tag the tag to read
   * @return the body of the TLV
   * @throws IllegalArgumentException if the next tag does not match the given one
   */
  public byte[] readPrimitive(int tag) throws IllegalArgumentException {
    checkTag(tag, readTag());
    int len = readLength();
    pos += len;
    return Arrays.copyOfRange(buffer, (pos - len), pos);
  }

  /**
   * Reads a boolean TLV.
   *
   * @return the boolean value of the TLV
   * @throws IllegalArgumentException if the next tag is not a boolean
   */
  public boolean readBoolean() throws IllegalArgumentException {
    byte[] val = readPrimitive(TLV_BOOL);
    // Only 0xFF counts as true; anything else (typically 0x00) is false.
    return ((val[0] & 0xff) == 0xff);
  }

  /**
   * Reads an integer TLV. Values are treated as unsigned big-endian, up to 4 bytes.
   *
   * @return the integer value of the TLV
   * @throws IllegalArgumentException if the next tlv is not an integer or is of unsupported length
   */
  public int readInt() throws IllegalArgumentException {
    byte[] val = readPrimitive(TLV_INT);
    switch (val.length) {
      case 1:
        return val[0] & 0xff;
      case 2:
        return ((val[0] & 0xff) << 8) | (val[1] & 0xff);
      case 3:
        return ((val[0] & 0xff) << 16) | ((val[1] & 0xff) << 8) | (val[2] & 0xff);
      case 4:
        return ((val[0] & 0xff) << 24) | ((val[1] & 0xff) << 16) | ((val[2] & 0xff) << 8) | (val[3] & 0xff);
      default:
        throw new IllegalArgumentException("Integers of length " + val.length + " are unsupported");
    }
  }

  /**
   * Low-level method to unread the last read tag. Only valid if the previous call was readTag(). Does nothing if the
   * end of the TLV has been reached.
   */
  public void unreadLastTag() {
    if (pos < buffer.length) {
      pos--;
    }
  }

  /**
   * Reads the next tag. The current implementation only reads tags on one byte. Can be extended if needed.
   * Note: the returned value is the sign-extended byte; tags &gt;= 0x80 come back negative.
   * {@link #checkTag(int, int)} normalizes both sides, so callers of the typed read methods are unaffected.
   *
   * @return the tag, or END_OF_TLV when the buffer is exhausted
   */
  public int readTag() {
    return (pos < buffer.length) ? buffer[pos++] : END_OF_TLV;
  }

  /**
   * Reads the next length. Supports the BER short form and the long forms on
   * one (0x81) and two (0x82) bytes. Can be extended if needed.
   *
   * @return the length
   */
  public int readLength() {
    int len = buffer[pos++] & 0xff;
    if (len == 0x81) {
      len = buffer[pos++] & 0xff;
    } else if (len == 0x82) {
      // Two-byte length (BER definite long form), big-endian.
      len = ((buffer[pos++] & 0xff) << 8) | (buffer[pos++] & 0xff);
    }
    return len;
  }

  /**
   * Compares an expected tag with the tag actually read, unreading the tag and
   * throwing on mismatch. Tags may be supplied either as sign-extended bytes
   * (e.g. (byte) 0xA1 == -95) or as unsigned int literals (0xA1 == 161); the
   * comparison is done on the low 8 bits so both conventions match. (The
   * previous raw == comparison made enterConstructed(0xA1) always fail,
   * because readTag() sign-extends bytes &gt;= 0x80.)
   */
  private void checkTag(int expected, int actual) throws IllegalArgumentException {
    if (actual == END_OF_TLV || (expected & 0xff) != (actual & 0xff)) {
      unreadLastTag();
      throw new IllegalArgumentException("Expected tag: " + expected + ", received: " + actual);
    }
  }
}
|
def sort_list(List):
    """Sort `List` in place into descending order and return it.

    Replaces the original hand-rolled O(n^2) bubble sort with the built-in
    Timsort (O(n log n)); like the original, the input list is mutated and
    then returned.  The parameter name `List` is kept for interface
    compatibility, though it shadows no builtin it is unconventional.
    """
    List.sort(reverse=True)
    return List

List = [3, 4, 1, 2]
sorted_list = sort_list(List)
print(sorted_list)  # [4, 3, 2, 1]
<gh_stars>1-10
class ApplicationController < ActionController::Base
# Require a signed-in user for every action; Devise controllers additionally
# accept the custom :role parameter at sign-up (see configure_permitted_parameters).
before_action :authenticate_user!
before_action :configure_permitted_parameters, if: :devise_controller?
private
# Allow the extra :role attribute through Devise's strong-parameter filter
# during sign-up.
def configure_permitted_parameters
devise_parameter_sanitizer.permit(:sign_up, keys: [:role])
end
# Post-login routing: producers go to their producer page (or to the
# new-producer form if no producer record is linked yet); everyone else
# goes to the pantries index.
# NOTE(review): assumes `resource` responds to #role and #producer_user —
# confirm against the User model.
def after_sign_in_path_for(resource)
if resource.role == 'producer'
if resource.producer_user
producer_id = resource.producer_user.producer_id
producer_path(producer_id)
else
new_producer_path()
end
else
pantries_path
end
end
end
|
#!/bin/bash
function help() {
  # Usage text; the option names below mirror the case labels in the argument
  # parser (the previous text advertised --nagents / --log-level-a style flags
  # that the parser never accepted).
  echo "USAGE: scripts/startLocalCluster.sh [options]"
  echo ""
  echo "OPTIONS:"
  echo "  -a/--agency-size      # agents            (odd integer   default: 1)"
  echo "  -c/--ncoordinators    # coordinators      (odd integer   default: 1)"
  echo "  -d/--ndbservers       # db servers        (odd integer   default: 2)"
  echo "  -s/--secondaries      Start secondaries   (0|1           default: 0)"
  echo "  -t/--transport        Protocol            (ssl|tcp       default: tcp)"
  echo "  -j/--jwt-secret       JWT-Secret          (string        default: )"
  echo "  --log-level-agency    Log level (agency)  (INFO|DEBUG|TRACE default: INFO)"
  echo "  --log-level-cluster   Log level (cluster) (INFO|DEBUG|TRACE default: INFO)"
  echo "  -i/--interactive      Interactive mode    (C|D|R         default: '')"
  echo "  -x/--xterm            XTerm command       (default: xterm)"
  echo "  -o/--xterm-options    XTerm options       (default: --geometry=80x43)"
  echo "  -B/--build            Build directory     (default: build)"
  echo ""
  echo "EXAMPLES:"
  echo "  scripts/startLocalCluster.sh"
  echo "  scripts/startLocalCluster.sh -a 1 -c 1 -d 3 -t ssl"
  echo "  scripts/startLocalCluster.sh -a 3 -c 1 -d 2 -t tcp -i C"
}
# defaults
NRAGENTS=1
NRDBSERVERS=2
NRCOORDINATORS=1
POOLSZ=""
TRANSPORT="tcp"
LOG_LEVEL="INFO"
XTERM="x-terminal-emulator"
XTERMOPTIONS="--geometry=80x43"
SECONDARIES=0
BUILD="build"
JWT_SECRET=""
# Parse options; every flag consumes a value, so each case shifts once here
# and once at the bottom of the loop.
while [[ ${1} ]]; do
    case "${1}" in
    -a|--agency-size)
        NRAGENTS=${2}
        shift
        ;;
    -c|--ncoordinators)
        NRCOORDINATORS=${2}
        shift
        ;;
    -d|--ndbservers)
        NRDBSERVERS=${2}
        shift
        ;;
    -s|--secondaries)
        SECONDARIES=${2}
        shift
        ;;
    -t|--transport)
        TRANSPORT=${2}
        shift
        ;;
    --log-level-agency)
        LOG_LEVEL_AGENCY=${2}
        shift
        ;;
    --log-level-cluster)
        LOG_LEVEL_CLUSTER=${2}
        shift
        ;;
    -i|--interactive)
        INTERACTIVE_MODE=${2}
        shift
        ;;
    -j|--jwt-secret)
        JWT_SECRET=${2}
        shift
        ;;
    -x|--xterm)
        XTERM=${2}
        shift
        ;;
    -o|--xterm-options)
        XTERMOPTIONS=${2}
        shift
        ;;
    -h|--help)
        help
        exit 1
        ;;
    -B|--build)
        BUILD=${2}
        shift
        ;;
    *)
        echo "Unknown parameter: ${1}" >&2
        help
        exit 1
        ;;
    esac
    if ! shift; then
        # Fixed: this is a script, not a sourced file or function, so
        # `return` is invalid at top level — exit instead.
        echo 'Missing parameter argument.' >&2
        exit 1
    fi
done
# The agency pool defaults to the number of agents.
if [ "$POOLSZ" == "" ] ; then
    POOLSZ=$NRAGENTS
fi
printf "Starting agency ... \n"
printf " # agents: %s," "$NRAGENTS"
printf " # db servers: %s," "$NRDBSERVERS"
printf " # coordinators: %s," "$NRCOORDINATORS"
printf " transport: %s\n" "$TRANSPORT"
# Sanity checks (the source-directory check was previously duplicated; one
# check suffices).  RAFT requires an odd agency size for clean majorities.
if [ ! -d arangod ] || [ ! -d arangosh ] || [ ! -d UnitTests ] ; then
    echo "Must be started in the main ArangoDB source directory! Bailing out."
    exit 1
fi
if [[ $(( $NRAGENTS % 2 )) == 0 ]]; then
    echo "**ERROR: Number of agents must be odd! Bailing out."
    exit 1
fi
# Interactive modes: C = coordinator console, D = gdb debugger, R = rr tracer.
if [ ! -z "$INTERACTIVE_MODE" ] ; then
    if [ "$INTERACTIVE_MODE" == "C" ] ; then
        COORDINATORCONSOLE=1
        echo "Starting one coordinator in terminal with --console"
    elif [ "$INTERACTIVE_MODE" == "D" ] ; then
        CLUSTERDEBUGGER=1
        echo Running cluster in debugger.
    elif [ "$INTERACTIVE_MODE" == "R" ] ; then
        RRDEBUGGER=1
        echo Running cluster in rr with --console.
    fi
fi
# Agency election timeouts, supervision frequency, compaction step size and
# the first agency port.
MINP=0.5
MAXP=2.5
SFRE=5.0
COMP=1000
BASE=4001
NATH=$(( $NRDBSERVERS + $NRCOORDINATORS + $NRAGENTS ))
# Fresh working directory, optionally seeded from cluster-init.
rm -rf cluster
if [ -d cluster-init ];then
    cp -a cluster-init cluster
fi
mkdir -p cluster
# Authentication: disabled unless a JWT secret was supplied.
if [ -z "$JWT_SECRET" ];then
    AUTHENTICATION="--server.authentication false"
    AUTHORIZATION_HEADER=""
else
    AUTHENTICATION="--server.jwt-secret $JWT_SECRET"
    AUTHORIZATION_HEADER="Authorization: bearer $(jwtgen -a HS256 -s $JWT_SECRET -c 'iss=arangodb' -c 'preferred_username=root')"
fi
if [ "$TRANSPORT" == "ssl" ]; then
    SSLKEYFILE="--ssl.keyfile UnitTests/server.pem"
    CURL="curl --insecure $CURL_AUTHENTICATION -s -f -X GET https:"
else
    SSLKEYFILE=""
    CURL="curl -s -f $CURL_AUTHENTICATION -X GET http:"
fi
echo Starting agency ...
# One arangod per agent on ports BASE..BASE+NRAGENTS-1; the first agent's
# port (BASE) doubles as the bootstrap --agency.endpoint for all of them.
# Logs go to cluster/<port>.log, stdout/stderr to cluster/<port>.stdout.
for aid in `seq 0 $(( $NRAGENTS - 1 ))`; do
port=$(( $BASE + $aid ))
${BUILD}/bin/arangod \
-c none \
--agency.activate true \
--agency.compaction-step-size $COMP \
--agency.election-timeout-min $MINP \
--agency.election-timeout-max $MAXP \
--agency.endpoint $TRANSPORT://localhost:$BASE \
--agency.my-address $TRANSPORT://localhost:$port \
--agency.pool-size $NRAGENTS \
--agency.size $NRAGENTS \
--agency.supervision true \
--agency.supervision-frequency $SFRE \
--agency.supervision-grace-period 15 \
--agency.wait-for-sync false \
--database.directory cluster/data$port \
--javascript.app-path ./js/apps \
--javascript.startup-directory ./js \
--javascript.module-directory ./enterprise/js \
--javascript.v8-contexts 1 \
--server.endpoint $TRANSPORT://0.0.0.0:$port \
--server.statistics false \
--server.threads 16 \
--log.file cluster/$port.log \
--log.force-direct true \
$AUTHENTICATION \
$SSLKEYFILE \
> cluster/$port.stdout 2>&1 &
done
start() {
    # Launch a dbserver or coordinator in the background.
    #   $1: "dbserver" | "coordinator"
    #   $2: port to listen on
    if [ "$1" == "dbserver" ]; then
        ROLE="PRIMARY"
    elif [ "$1" == "coordinator" ]; then
        ROLE="COORDINATOR"
    fi
    TYPE=$1
    PORT=$2
    # -p: the data directory may already exist when seeded from cluster-init.
    mkdir -p cluster/data$PORT
    echo Starting $TYPE on port $PORT
    mkdir -p cluster/apps$PORT
    ${BUILD}/bin/arangod \
        -c none \
        --database.directory cluster/data$PORT \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4001 \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4002 \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4003 \
        --cluster.my-address $TRANSPORT://127.0.0.1:$PORT \
        --server.endpoint $TRANSPORT://0.0.0.0:$PORT \
        --cluster.my-local-info $TYPE:127.0.0.1:$PORT \
        --cluster.my-role $ROLE \
        --log.file cluster/$PORT.log \
        --log.level info \
        --server.statistics true \
        --server.threads 5 \
        --javascript.startup-directory ./js \
        --javascript.module-directory ./enterprise/js \
        --javascript.app-path cluster/apps$PORT \
        --log.force-direct true \
        $AUTHENTICATION \
        $SSLKEYFILE \
        > cluster/$PORT.stdout 2>&1 &
}
startTerminal() {
    # Launch a dbserver or coordinator in a terminal window with --console.
    #   $1: "dbserver" | "coordinator"
    #   $2: port to listen on
    if [ "$1" == "dbserver" ]; then
        ROLE="PRIMARY"
    elif [ "$1" == "coordinator" ]; then
        ROLE="COORDINATOR"
    fi
    TYPE=$1
    PORT=$2
    # -p: tolerate a pre-existing data directory (cluster-init seed).
    mkdir -p cluster/data$PORT
    echo Starting $TYPE on port $PORT
    $XTERM $XTERMOPTIONS -e ${BUILD}/bin/arangod \
        -c none \
        --database.directory cluster/data$PORT \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4001 \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4002 \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4003 \
        --cluster.my-address $TRANSPORT://127.0.0.1:$PORT \
        --server.endpoint $TRANSPORT://0.0.0.0:$PORT \
        --cluster.my-local-info $TYPE:127.0.0.1:$PORT \
        --cluster.my-role $ROLE \
        --log.file cluster/$PORT.log \
        --log.level info \
        --server.statistics true \
        --server.threads 5 \
        --javascript.startup-directory ./js \
        --javascript.module-directory ./enterprise/js \
        --javascript.app-path ./js/apps \
        $AUTHENTICATION \
        $SSLKEYFILE \
        --console &
}
startDebugger() {
    # Launch a dbserver or coordinator in the background and attach gdb to it
    # (via $!) in a terminal window.
    #   $1: "dbserver" | "coordinator"
    #   $2: port to listen on
    if [ "$1" == "dbserver" ]; then
        ROLE="PRIMARY"
    elif [ "$1" == "coordinator" ]; then
        ROLE="COORDINATOR"
    fi
    TYPE=$1
    PORT=$2
    # -p: tolerate a pre-existing data directory (cluster-init seed).
    mkdir -p cluster/data$PORT
    echo Starting $TYPE on port $PORT with debugger
    ${BUILD}/bin/arangod \
        -c none \
        --database.directory cluster/data$PORT \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4001 \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4002 \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4003 \
        --cluster.my-address $TRANSPORT://127.0.0.1:$PORT \
        --server.endpoint $TRANSPORT://0.0.0.0:$PORT \
        --cluster.my-local-info $TYPE:127.0.0.1:$PORT \
        --cluster.my-role $ROLE \
        --log.file cluster/$PORT.log \
        --log.level info \
        --server.statistics false \
        --server.threads 5 \
        --javascript.startup-directory ./js \
        --javascript.module-directory ./enterprise/js \
        --javascript.app-path ./js/apps \
        $SSLKEYFILE \
        $AUTHENTICATION &
    $XTERM $XTERMOPTIONS -e gdb ${BUILD}/bin/arangod -p $! &
}
startRR() {
    # Launch a dbserver or coordinator under the rr tracer in a terminal
    # window with --console.
    #   $1: "dbserver" | "coordinator"
    #   $2: port to listen on
    if [ "$1" == "dbserver" ]; then
        ROLE="PRIMARY"
    elif [ "$1" == "coordinator" ]; then
        ROLE="COORDINATOR"
    fi
    TYPE=$1
    PORT=$2
    # -p: tolerate a pre-existing data directory (cluster-init seed).
    mkdir -p cluster/data$PORT
    echo Starting $TYPE on port $PORT with rr tracer
    $XTERM $XTERMOPTIONS -e rr ${BUILD}/bin/arangod \
        -c none \
        --database.directory cluster/data$PORT \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4001 \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4002 \
        --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4003 \
        --cluster.my-address $TRANSPORT://127.0.0.1:$PORT \
        --server.endpoint $TRANSPORT://0.0.0.0:$PORT \
        --cluster.my-local-info $TYPE:127.0.0.1:$PORT \
        --cluster.my-role $ROLE \
        --log.file cluster/$PORT.log \
        --log.level info \
        --server.statistics true \
        --server.threads 5 \
        --javascript.startup-directory ./js \
        --javascript.module-directory ./enterprise/js \
        --javascript.app-path ./js/apps \
        $AUTHENTICATION \
        $SSLKEYFILE \
        --console &
}
# DB servers listen on ports 8629..8629+NRDBSERVERS-1; launch each with the
# starter that matches the selected interactive mode.
PORTTOPDB=`expr 8629 + $NRDBSERVERS - 1`
for p in `seq 8629 $PORTTOPDB` ; do
if [ "$CLUSTERDEBUGGER" == "1" ] ; then
startDebugger dbserver $p
elif [ "$RRDEBUGGER" == "1" ] ; then
startRR dbserver $p
else
start dbserver $p
fi
done
# Coordinators listen on ports 8530..8530+NRCOORDINATORS-1; in mode C the
# first coordinator (8530) runs in a terminal with --console.
PORTTOPCO=`expr 8530 + $NRCOORDINATORS - 1`
for p in `seq 8530 $PORTTOPCO` ; do
if [ "$CLUSTERDEBUGGER" == "1" ] ; then
startDebugger coordinator $p
elif [ $p == "8530" -a ! -z "$COORDINATORCONSOLE" ] ; then
startTerminal coordinator $p
elif [ "$RRDEBUGGER" == "1" ] ; then
startRR coordinator $p
else
start coordinator $p
fi
done
# In debugger mode, pause so the user can set breakpoints before the probes run.
if [ "$CLUSTERDEBUGGER" == "1" ] ; then
echo Waiting for you to setup debugger windows, hit RETURN to continue!
read
fi
echo Waiting for cluster to come up...
# Poll a server's /_api/version endpoint once per second until it responds.
#   $1: port to probe
# Uses the pre-built $CURL command and, when a JWT secret is configured,
# the $AUTHORIZATION_HEADER assembled earlier.
testServer() {
PORT=$1
while true ; do
if [ -z "$AUTHORIZATION_HEADER" ]; then
${CURL}//127.0.0.1:$PORT/_api/version > /dev/null 2>&1
else
${CURL}//127.0.0.1:$PORT/_api/version -H "$AUTHORIZATION_HEADER" > /dev/null 2>&1
fi
if [ "$?" != "0" ] ; then
echo Server on port $PORT does not answer yet.
else
echo Server on port $PORT is ready for business.
break
fi
sleep 1
done
}
# Block until every db server and coordinator answers on /_api/version.
for p in `seq 8629 $PORTTOPDB` ; do
    testServer $p
done
for p in `seq 8530 $PORTTOPCO` ; do
    testServer $p
done
# Optionally register and start one secondary (ports 8729+) per db server.
if [ "$SECONDARIES" == "1" ] ; then
    let index=1
    PORTTOPSE=`expr 8729 + $NRDBSERVERS - 1`
    for PORT in `seq 8729 $PORTTOPSE` ; do
        mkdir -p cluster/data$PORT
        CLUSTER_ID="Secondary$index"
        # Tell the cluster (via a coordinator) that this secondary backs DBServer$index.
        echo Registering secondary $CLUSTER_ID for "DBServer$index"
        curl -f -X PUT --data "{\"primary\": \"DBServer$index\", \"oldSecondary\": \"none\", \"newSecondary\": \"$CLUSTER_ID\"}" -H "Content-Type: application/json" localhost:8530/_admin/cluster/replaceSecondary
        echo Starting Secondary $CLUSTER_ID on port $PORT
        ${BUILD}/bin/arangod \
            -c none \
            --database.directory cluster/data$PORT \
            --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4001 \
            --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4002 \
            --cluster.agency-endpoint $TRANSPORT://127.0.0.1:4003 \
            --cluster.my-address $TRANSPORT://127.0.0.1:$PORT \
            --server.endpoint $TRANSPORT://0.0.0.0:$PORT \
            --cluster.my-id $CLUSTER_ID \
            --log.file cluster/$PORT.log \
            --server.statistics true \
            --javascript.startup-directory ./js \
            --javascript.module-directory ./enterprise/js \
            $AUTHENTICATION \
            $SSLKEYFILE \
            --javascript.app-path ./js/apps \
            > cluster/$PORT.stdout 2>&1 &
        let index=$index+1
    done
fi
echo Done, your cluster is ready at
for p in `seq 8530 $PORTTOPCO` ; do
    echo "   ${BUILD}/bin/arangosh --server.endpoint $TRANSPORT://127.0.0.1:$p"
done
|
/*
*
* Copyright © ${year} ${name}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gwtproject.resources.rg;
import static org.gwtproject.resources.client.ClientBundle.Source;
import static org.gwtproject.resources.client.CssResource.ClassName;
import static org.gwtproject.resources.client.CssResource.Import;
import static org.gwtproject.resources.client.CssResource.ImportedWithPrefix;
import static org.gwtproject.resources.client.CssResource.NotStrict;
import static org.gwtproject.resources.client.CssResource.Shared;
import static org.gwtproject.resources.ext.TreeLogger.ERROR;
import static org.gwtproject.resources.ext.TreeLogger.Type;
import static org.gwtproject.resources.rg.resource.ConfigurationProperties.KEY_CSS_RESOURCE_ALLOWED_AT_RULES;
import static org.gwtproject.resources.rg.resource.ConfigurationProperties.KEY_CSS_RESOURCE_ALLOWED_FUNCTIONS;
import static org.gwtproject.resources.rg.resource.ConfigurationProperties.KEY_CSS_RESOURCE_CONVERSION_MODE;
import static org.gwtproject.resources.rg.resource.ConfigurationProperties.KEY_CSS_RESOURCE_ENABLE_GSS;
import static org.gwtproject.resources.rg.resource.ConfigurationProperties.KEY_CSS_RESOURCE_OBFUSCATION_PREFIX;
import static org.gwtproject.resources.rg.resource.ConfigurationProperties.KEY_CSS_RESOURCE_STYLE;
import static org.gwtproject.resources.rg.resource.ConfigurationProperties.KEY_GSS_DEFAULT_IN_UIBINDER;
import com.google.auto.common.MoreElements;
import com.google.auto.common.MoreTypes;
import com.google.common.base.CaseFormat;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.css.MinimalSubstitutionMap;
import com.google.common.css.PrefixingSubstitutionMap;
import com.google.common.css.SourceCode;
import com.google.common.css.SourceCodeLocation;
import com.google.common.css.SubstitutionMap;
import com.google.common.css.compiler.ast.CssCompilerPass;
import com.google.common.css.compiler.ast.CssCompositeValueNode;
import com.google.common.css.compiler.ast.CssDefinitionNode;
import com.google.common.css.compiler.ast.CssNumericNode;
import com.google.common.css.compiler.ast.CssTree;
import com.google.common.css.compiler.ast.CssValueNode;
import com.google.common.css.compiler.ast.ErrorManager;
import com.google.common.css.compiler.ast.GssError;
import com.google.common.css.compiler.ast.GssFunction;
import com.google.common.css.compiler.ast.GssParser;
import com.google.common.css.compiler.ast.GssParserException;
import com.google.common.css.compiler.passes.AbbreviatePositionalValues;
import com.google.common.css.compiler.passes.CheckDependencyNodes;
import com.google.common.css.compiler.passes.CollectConstantDefinitions;
import com.google.common.css.compiler.passes.CollectMixinDefinitions;
import com.google.common.css.compiler.passes.ColorValueOptimizer;
import com.google.common.css.compiler.passes.ConstantDefinitions;
import com.google.common.css.compiler.passes.CreateComponentNodes;
import com.google.common.css.compiler.passes.CreateConditionalNodes;
import com.google.common.css.compiler.passes.CreateConstantReferences;
import com.google.common.css.compiler.passes.CreateDefinitionNodes;
import com.google.common.css.compiler.passes.CreateForLoopNodes;
import com.google.common.css.compiler.passes.CreateMixins;
import com.google.common.css.compiler.passes.CreateStandardAtRuleNodes;
import com.google.common.css.compiler.passes.CreateVendorPrefixedKeyframes;
import com.google.common.css.compiler.passes.CssClassRenaming;
import com.google.common.css.compiler.passes.DisallowDuplicateDeclarations;
import com.google.common.css.compiler.passes.EliminateEmptyRulesetNodes;
import com.google.common.css.compiler.passes.EliminateUnitsFromZeroNumericValues;
import com.google.common.css.compiler.passes.EliminateUselessRulesetNodes;
import com.google.common.css.compiler.passes.HandleUnknownAtRuleNodes;
import com.google.common.css.compiler.passes.MarkNonFlippableNodes;
import com.google.common.css.compiler.passes.MarkRemovableRulesetNodes;
import com.google.common.css.compiler.passes.MergeAdjacentRulesetNodesWithSameDeclarations;
import com.google.common.css.compiler.passes.MergeAdjacentRulesetNodesWithSameSelector;
import com.google.common.css.compiler.passes.ProcessComponents;
import com.google.common.css.compiler.passes.ProcessKeyframes;
import com.google.common.css.compiler.passes.ProcessRefiners;
import com.google.common.css.compiler.passes.ReplaceConstantReferences;
import com.google.common.css.compiler.passes.ReplaceMixins;
import com.google.common.css.compiler.passes.ResolveCustomFunctionNodes;
import com.google.common.css.compiler.passes.SplitRulesetNodes;
import com.google.common.css.compiler.passes.UnrollLoops;
import com.google.common.css.compiler.passes.ValidatePropertyValues;
import com.google.common.io.ByteSource;
import com.google.common.io.Resources;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.Adler32;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.MirroredTypesException;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import org.apache.commons.io.IOUtils;
import org.gwtproject.i18n.shared.cldr.LocaleInfo;
import org.gwtproject.resources.client.ClientBundle;
import org.gwtproject.resources.client.CssResource;
import org.gwtproject.resources.client.CssResourceBase;
import org.gwtproject.resources.context.AptContext;
import org.gwtproject.resources.converter.Css2Gss;
import org.gwtproject.resources.converter.Css2GssConversionException;
import org.gwtproject.resources.ext.ConfigurationProperty;
import org.gwtproject.resources.ext.DefaultConfigurationProperty;
import org.gwtproject.resources.ext.PropertyOracle;
import org.gwtproject.resources.ext.ResourceContext;
import org.gwtproject.resources.ext.ResourceGeneratorUtil;
import org.gwtproject.resources.ext.ResourceOracle;
import org.gwtproject.resources.ext.SelectionProperty;
import org.gwtproject.resources.ext.TreeLogger;
import org.gwtproject.resources.ext.UnableToCompleteException;
import org.gwtproject.resources.rg.gss.BooleanConditionCollector;
import org.gwtproject.resources.rg.gss.CollectAndRemoveConstantDefinitions;
import org.gwtproject.resources.rg.gss.CreateRuntimeConditionalNodes;
import org.gwtproject.resources.rg.gss.CssPrinter;
import org.gwtproject.resources.rg.gss.ExtendedEliminateConditionalNodes;
import org.gwtproject.resources.rg.gss.ExternalClassesCollector;
import org.gwtproject.resources.rg.gss.GwtGssFunctionMapProvider;
import org.gwtproject.resources.rg.gss.ImageSpriteCreator;
import org.gwtproject.resources.rg.gss.PermutationsCollector;
import org.gwtproject.resources.rg.gss.RecordingBidiFlipper;
import org.gwtproject.resources.rg.gss.RenamingSubstitutionMap;
import org.gwtproject.resources.rg.gss.RuntimeConditionalBlockCollector;
import org.gwtproject.resources.rg.gss.ValidateRuntimeConditionalNode;
import org.gwtproject.resources.rg.util.SourceWriter;
import org.gwtproject.resources.rg.util.StringSourceWriter;
import org.gwtproject.resources.rg.util.Util;
/** @author <NAME> Created by treblereel 12/1/18 */
public class GssResourceGenerator extends AbstractCssResourceGenerator {
  // To be sure to avoid conflict during the style classes renaming between different GssResources,
  // we will create a different prefix for each GssResource. We use a MinimalSubstitutionMap
  // that will create a String with 1-6 characters in length but keeping the length of the prefix
  // as short as possible. For instance if we have two GssResources to compile, the prefix
  // for the first resource will be 'a' and the prefix for the second resource will be 'b' and so on
  private static final SubstitutionMap resourcePrefixBuilder = new MinimalSubstitutionMap();
  // Keys for generator state stored in the ResourceContext's cached-data map.
  // NOTE(review): their consumers are outside this chunk — confirm usage before changing.
  private static final String KEY_CLASS_PREFIX = "cssResourcePrefix";
  private static final String KEY_BY_CLASS_AND_METHOD = "cssResourceClassAndMethod";
  private static final String KEY_HAS_CACHED_DATA = "hasCachedData";
  private static final String KEY_SHARED_METHODS = "sharedMethods";
  // 32-symbol alphabet (25 letters + 7 digits, avoiding look-alikes such as 'Z'/'2').
  // NOTE(review): presumably used to base32-encode a hash elsewhere in this class — confirm.
  private static final char[] BASE32_CHARS =
      new char[] {
        'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
        'S', 'T', 'U', 'V', 'W', 'X', 'Y', '0', '1', '2', '3', '4', '5', '6'
      };
  // We follow CSS specification to detect the charset:
  // - Authors using an @charset rule must place the rule at the very beginning of the style sheet,
  // preceded by no characters.
  // - @charset must be written literally, i.e., the 10 characters '@charset "' (lowercase, no
  // backslash escapes), followed by the encoding name, followed by '";'.
  // see: http://www.w3.org/TR/CSS2/syndata.html#charset
  private static final Pattern CHARSET = Pattern.compile("^@charset \"([^\"]*)\";");
  // Length of a bare @charset rule with an empty encoding name; readUrlContent() adds the
  // actual charset name's length to this to know how many characters to blank out.
  private static final int CHARSET_MIN_LENGTH = "@charset \"\";".length();
  /*
   * TODO(dankurka): This is a nasty hack to get the compiler to output all @def's
   * it has seen in a compile. Once GSS migration is done this needs to be removed.
   */
  // True when the "emitGssVarNameFile" system property names a dump file (see static init below).
  private static boolean shouldEmitVariables;
  // Writer for the @def dump file; only non-null when shouldEmitVariables is true.
  private static PrintWriter printWriter;
  // Tracks @def names already dumped so each name is written at most once per compile.
  private static Set<String> writtenAtDefs = new HashSet<>();
  // If the "emitGssVarNameFile" system property is set, eagerly open (creating if necessary) that
  // file so every @def seen during the compile can be appended to it via write(Set). A failure to
  // open the file is fatal: the dump was explicitly requested, so exiting is preferable to
  // silently producing an incomplete variable list.
  static {
    String varFileName = System.getProperty("emitGssVarNameFile");
    shouldEmitVariables = varFileName != null;
    if (shouldEmitVariables) {
      try {
        File file = new File(varFileName);
        file.createNewFile();
        printWriter = new PrintWriter(new FileOutputStream(file));
      } catch (Exception e) {
        System.err.println("Error while opening file");
        e.printStackTrace();
        System.exit(-1);
      }
    }
  }
  // Options controlling GSS support: enabled flag, CSS->GSS auto-conversion mode, UiBinder default.
  private final GssOptions gssOptions;
  // Parse result per bundle method; filled by prepare() and consumed by createAssignment() and
  // getCssExpression(). NOTE(review): its initialization is not visible in this chunk —
  // presumably assigned in an init hook elsewhere in this class; confirm before relying on it.
  private Map<ExecutableElement, CssParsingResult> cssParsingResultMap;
  // Non-standard GSS function names that the function resolver should accept without error.
  private Set<String> allowedNonStandardFunctions;
  // Collects errors from Closure Stylesheets passes; checked via checkErrors().
  private LoggerErrorManager errorManager;
  // Method names excluded from class-name obfuscation (see computeReplacementsForType).
  private List<String> ignoredMethods = new ArrayList<>();
  // Prefix prepended to every obfuscated class name.
  private String obfuscationPrefix;
  // Strategy for rendering obfuscated names (pretty/debug/stable/etc.).
  private CssObfuscationStyle obfuscationStyle;
  // At-rules that HandleUnknownAtRuleNodes should pass through instead of rejecting.
  private Set<String> allowedAtRules;
  // Obfuscation caches: per-interface renaming maps, and the chosen replacement for methods
  // declared on @Shared interfaces so every resource agrees on the same obfuscated name.
  private Map<TypeElement, Map<String, String>> replacementsByClassAndMethod;
  private Map<ExecutableElement, String> replacementsForSharedMethods;
  /** Creates a generator that honors the given GSS options. */
  public GssResourceGenerator(GssOptions gssOptions) {
    this.gssOptions = gssOptions;
  }
public static GssOptions getGssOptions(ResourceContext context, TreeLogger logger)
throws UnableToCompleteException {
PropertyOracle propertyOracle = context.getGeneratorContext().getPropertyOracle();
boolean gssEnabled =
propertyOracle
.getConfigurationProperty(logger, KEY_CSS_RESOURCE_ENABLE_GSS)
.asSingleBooleanValue();
boolean gssDefaultInUiBinder =
propertyOracle
.getConfigurationProperty(logger, KEY_GSS_DEFAULT_IN_UIBINDER)
.asSingleBooleanValue();
AutoConversionMode conversionMode =
Enum.valueOf(
AutoConversionMode.class,
propertyOracle
.getConfigurationProperty(logger, KEY_CSS_RESOURCE_CONVERSION_MODE)
.asSingleValue()
.toUpperCase(Locale.ROOT));
return new GssOptions(gssEnabled, conversionMode, gssDefaultInUiBinder);
}
private static boolean checkPropertyIsSingleValueAndBoolean(
ConfigurationProperty property, TreeLogger logger) {
List<String> values = property.getValues();
if (values.size() > 1) {
logger.log(
Type.ERROR,
"The configuration property "
+ property.getName()
+ " is used in "
+ "a conditional css and cannot be a multi-valued property");
return false;
}
String value = values.get(0);
if (!"true".equals(value) && !"false".equals(value)) {
logger.log(
Type.ERROR,
"The configuration property "
+ property.getName()
+ " is used in "
+ "a conditional css. Its value must be either \"true\" or \"false\"");
return false;
}
return true;
}
  /**
   * Parses the stylesheet(s) backing {@code method} and caches the result for later code
   * generation (see {@code createAssignment} / {@code getCssExpression}).
   *
   * @throws UnableToCompleteException if the return type is not an interface, no source file can
   *     be found, or the stylesheet fails to convert/parse
   */
  @Override
  public void prepare(TreeLogger logger, ResourceContext context, ExecutableElement method)
      throws UnableToCompleteException {
    if (!MoreTypes.asElement(method.getReturnType()).getKind().equals(ElementKind.INTERFACE)) {
      logger.log(ERROR, "Return type must be an interface");
      throw new UnableToCompleteException();
    }
    // Prefer .gss or .css source files depending on whether GSS is enabled.
    URL[] resourceUrls = findResources(logger, context, method, gssOptions.isEnabled());
    if (resourceUrls.length == 0) {
      logger.log(ERROR, "At least one source must be specified");
      throw new UnableToCompleteException();
    }
    CssParsingResult cssParsingResult =
        parseResources(Lists.newArrayList(resourceUrls), context, logger);
    // NOTE(review): cssParsingResultMap's initialization is not visible in this chunk.
    cssParsingResultMap.put(method, cssParsingResult);
  }
  /**
   * Temporary method needed when GSS and the old CSS syntax are both supported by the sdk. It aims
   * to choose the right resource file according to whether gss is enabled or not. If gss is
   * enabled, it will try to find the resource file ending by .gss first. If GSS is disabled it will
   * try to find the .css file. This logic is applied even if a {@link ClientBundle.Source}
   * annotation is used to define the resource file.
   *
   * <p>This method can be deleted once the support for the old CssResource is removed and use
   * directly ResourceGeneratorUtil.findResources().
   */
  static URL[] findResources(
      TreeLogger logger, ResourceContext context, ExecutableElement method, boolean gssEnabled)
      throws UnableToCompleteException {
    boolean isSourceAnnotationUsed = method.getAnnotation(Source.class) != null;
    ResourceOracle resourceOracle = context.getGeneratorContext().getResourcesOracle();
    if (!isSourceAnnotationUsed) {
      // ResourceGeneratorUtil will try to find automatically the resource file. Give it the right
      // extension to try first.
      String[] extensions =
          gssEnabled ? new String[] {".gss", ".css"} : new String[] {".css", ".gss"};
      return resourceOracle.findResources(logger, method, extensions);
    }
    // find the original resource files specified by the @Source annotation
    URL[] originalResources = resourceOracle.findResources(logger, method);
    URL[] resourcesToUse = new URL[originalResources.length];
    String preferredExtension = gssEnabled ? ".gss" : ".css";
    // Try to find all the resources by using the preferred extension according to whether gss is
    // enabled or not. If one file with the preferred extension is missing, return the original
    // resource files otherwise return the preferred files.
    String[] sourceFiles = method.getAnnotation(Source.class).value();
    for (int i = 0; i < sourceFiles.length; i++) {
      String original = sourceFiles[i];
      // length() > 4 keeps the substring below from underflowing on names shorter than a
      // 4-character extension (".css"/".gss").
      if (!original.endsWith(preferredExtension) && original.length() > 4) {
        String preferredFile = original.substring(0, original.length() - 4) + preferredExtension;
        // try to find the resource relative to the package
        String path =
            MoreElements.getPackage(method).getQualifiedName().toString().replace('.', '/') + '/';
        URL preferredUrl = resourceOracle.findResource(path + preferredFile);
        if (preferredUrl == null) {
          // if it doesn't exist, assume it is absolute
          preferredUrl = resourceOracle.findResource(preferredFile);
        }
        if (preferredUrl == null) {
          // avoid to mix gss and css, if one file with the preferred extension is missing
          return originalResources;
        }
        logger.log(
            Type.DEBUG,
            "Preferred resource file found: "
                + preferredFile
                + ". This file "
                + "will be used in replacement of "
                + original);
        resourcesToUse[i] = preferredUrl;
      } else {
        // gss and css files shouldn't be used together for the same resource. So if one of the
        // files is already using the preferred extension, return the original resources. If the
        // dev has mixed gss and css files, that will fail later.
        return originalResources;
      }
    }
    return resourcesToUse;
  }
private CssParsingResult parseResources(
List<URL> resources, ResourceContext context, TreeLogger logger)
throws UnableToCompleteException {
List<SourceCode> sourceCodes = new ArrayList<>(resources.size());
ImmutableMap.Builder<String, String> constantNameMappingBuilder = ImmutableMap.builder();
// assert that we only support either gss or css on one resource.
boolean css = ensureEitherCssOrGss(resources, logger);
if (css && gssOptions.isAutoConversionOff()) {
logger.log(
Type.ERROR,
"Your ClientBundle is referencing css files instead of gss. "
+ "You will need to either convert these files to gss using the "
+ "converter tool or turn on auto convertion in your gwt.xml file. "
+ "Note: Autoconversion will be removed in the next version of GWT, "
+ "you will need to move to gss."
+ "Add this line to your gwt.xml file to temporary avoid this:"
+ "<set-configuration-property name=\"CssResource.conversionMode\""
+ " value=\"strict\" /> "
+ "Details on how to migrate to GSS can be found at: http://goo.gl/tEQnmJ");
throw new UnableToCompleteException();
}
if (css) {
String concatenatedCss = concatCssFiles(resources, logger);
ConversionResult result = convertToGss(concatenatedCss, context, logger);
if (shouldEmitVariables) {
write(result.defNameMapping.keySet());
}
String gss = result.gss;
String name = "[auto-converted gss files from : " + resources + "]";
sourceCodes.add(new SourceCode(name, gss));
constantNameMappingBuilder.putAll(result.defNameMapping);
} else {
for (URL stylesheet : resources) {
sourceCodes.add(readUrlContent(stylesheet, logger));
}
}
CssTree tree;
try {
tree = new GssParser(sourceCodes).parse();
} catch (GssParserException e) {
logger.log(ERROR, "Unable to parse CSS", e);
throw new UnableToCompleteException();
}
// create more explicit nodes
finalizeTree(tree);
checkErrors();
// collect boolean conditions that have to be mapped to configuration properties
BooleanConditionCollector booleanConditionCollector =
new BooleanConditionCollector(tree.getMutatingVisitController());
booleanConditionCollector.runPass();
// collect permutations axis used in conditionals.
PermutationsCollector permutationsCollector =
new PermutationsCollector(tree.getMutatingVisitController());
permutationsCollector.runPass();
return new CssParsingResult(
tree,
permutationsCollector.getPermutationAxes(),
booleanConditionCollector.getBooleanConditions(),
constantNameMappingBuilder.build());
}
  /**
   * Runs the Closure Stylesheets passes that desugar the raw parse tree into explicit nodes
   * (definitions, mixins, conditionals, for-loops, components, keyframes, ...). Pass order is
   * load-bearing: later passes consume nodes created by earlier ones, so do not reorder without
   * checking the pass dependencies.
   */
  private void finalizeTree(CssTree cssTree) throws UnableToCompleteException {
    new CheckDependencyNodes(cssTree.getMutatingVisitController(), errorManager, false).runPass();
    // Don't continue if errors exist
    checkErrors();
    new CreateStandardAtRuleNodes(cssTree.getMutatingVisitController(), errorManager).runPass();
    new CreateMixins(cssTree.getMutatingVisitController(), errorManager).runPass();
    new CreateDefinitionNodes(cssTree.getMutatingVisitController(), errorManager).runPass();
    new CreateConstantReferences(cssTree.getMutatingVisitController()).runPass();
    new CreateConditionalNodes(cssTree.getMutatingVisitController(), errorManager).runPass();
    new CreateRuntimeConditionalNodes(cssTree.getMutatingVisitController()).runPass();
    new CreateForLoopNodes(cssTree.getMutatingVisitController(), errorManager).runPass();
    new CreateComponentNodes(cssTree.getMutatingVisitController(), errorManager).runPass();
    new ValidatePropertyValues(cssTree.getVisitController(), errorManager).runPass();
    new HandleUnknownAtRuleNodes(
            cssTree.getMutatingVisitController(), errorManager, allowedAtRules, true, false)
        .runPass();
    new ProcessKeyframes(cssTree.getMutatingVisitController(), errorManager, true, true).runPass();
    new CreateVendorPrefixedKeyframes(cssTree.getMutatingVisitController(), errorManager).runPass();
    new UnrollLoops(cssTree.getMutatingVisitController(), errorManager).runPass();
    new ProcessRefiners(cssTree.getMutatingVisitController(), errorManager, true).runPass();
    new MarkNonFlippableNodes(cssTree.getMutatingVisitController(), errorManager).runPass();
  }
private void checkErrors() throws UnableToCompleteException {
if (errorManager.hasErrors()) {
throw new UnableToCompleteException();
}
}
private static synchronized void write(Set<String> variables) {
for (String atDef : variables) {
if (writtenAtDefs.add(atDef)) {
printWriter.println("@def " + atDef + " 1px;");
}
}
printWriter.flush();
}
  /**
   * Reads a stylesheet into a {@code SourceCode}, honoring an optional leading {@code @charset}
   * at-rule. The at-rule itself is blanked out with spaces (preserving character offsets, so GSS
   * error locations stay correct) because the GSS compiler would otherwise reject it.
   *
   * @throws UnableToCompleteException on I/O failure or if the declared charset is unsupported
   */
  public static SourceCode readUrlContent(URL fileUrl, TreeLogger logger)
      throws UnableToCompleteException {
    TreeLogger branchLogger =
        logger.branch(TreeLogger.DEBUG, "Reading GSS stylesheet " + fileUrl.toExternalForm());
    try {
      ByteSource byteSource = Resources.asByteSource(fileUrl);
      // default charset
      Charset charset = Charsets.UTF_8;
      // check if the stylesheet doesn't include a @charset at-rule
      String styleSheetCharset = extractCharset(byteSource);
      if (styleSheetCharset != null) {
        try {
          charset = Charset.forName(styleSheetCharset);
        } catch (UnsupportedCharsetException e) {
          logger.log(Type.ERROR, "Unsupported charset found: " + styleSheetCharset);
          throw new UnableToCompleteException();
        }
      }
      String fileContent = byteSource.asCharSource(charset).read();
      // If the stylesheet specified a charset, we have to remove the at-rule otherwise the GSS
      // compiler will fail.
      if (styleSheetCharset != null) {
        int charsetAtRuleLength = CHARSET_MIN_LENGTH + styleSheetCharset.length();
        // replace charset at-rule by blanks to keep correct source location of the rest of
        // the stylesheet.
        fileContent =
            Strings.repeat(" ", charsetAtRuleLength) + fileContent.substring(charsetAtRuleLength);
      }
      return new SourceCode(fileUrl.getFile(), fileContent);
    } catch (IOException e) {
      branchLogger.log(TreeLogger.ERROR, "Unable to parse CSS", e);
    }
    throw new UnableToCompleteException();
  }
private static String extractCharset(ByteSource byteSource) throws IOException {
String firstLine = byteSource.asCharSource(Charsets.UTF_8).readFirstLine();
if (firstLine != null) {
Matcher matcher = CHARSET.matcher(firstLine);
if (matcher.matches()) {
return matcher.group(1);
}
}
return null;
}
  /**
   * Auto-converts concatenated legacy CSS to GSS via the Css2Gss converter. The CSS is first
   * written to a temporary file because the converter API consumes a URL.
   *
   * @return the converted GSS text plus the mapping of renamed {@code @def} constant names
   * @throws UnableToCompleteException on conversion failure, configuration-property mismatch, or
   *     I/O error while writing the temp file
   */
  private ConversionResult convertToGss(
      String concatenatedCss, ResourceContext context, TreeLogger logger)
      throws UnableToCompleteException {
    File tempFile = null;
    FileOutputStream fos = null;
    try {
      // We actually need a URL for the old CssResource to work. So create a temp file.
      tempFile = File.createTempFile(UUID.randomUUID() + "css_converter", "css.tmp");
      fos = new FileOutputStream(tempFile);
      IOUtils.write(concatenatedCss, fos);
      // Closed eagerly so the converter can read the fully-flushed file; the closeQuietly in the
      // finally block is then a harmless double close.
      fos.close();
      ConfigurationPropertyMatcher configurationPropertyMatcher =
          new ConfigurationPropertyMatcher(context, logger);
      Css2Gss converter =
          new Css2Gss(
              tempFile.toURI().toURL(),
              logger,
              gssOptions.isLenientConversion(),
              configurationPropertyMatcher);
      String gss = converter.toGss();
      if (configurationPropertyMatcher.error) {
        throw new UnableToCompleteException();
      }
      return new ConversionResult(gss, converter.getDefNameMapping());
    } catch (Css2GssConversionException e) {
      String message = "An error occurs during the automatic conversion: " + e.getMessage();
      if (!gssOptions.isLenientConversion()) {
        message +=
            "\n You should try to change the faulty css to fix this error. If you are "
                + "unable to change the css, you can setup the automatic conversion to be lenient. Add "
                + "the following line to your gwt.xml file: "
                + "<set-configuration-property name=\"CssResource.conversionMode\" value=\"lenient\" />";
      }
      logger.log(Type.ERROR, message, e);
      throw new UnableToCompleteException();
    } catch (IOException e) {
      logger.log(Type.ERROR, "Error while writing temporary css file", e);
      throw new UnableToCompleteException();
    } finally {
      if (tempFile != null) {
        tempFile.delete();
      }
      if (fos != null) {
        IOUtils.closeQuietly(fos);
      }
    }
  }
public static String concatCssFiles(List<URL> resources, TreeLogger logger)
throws UnableToCompleteException {
StringBuffer buffer = new StringBuffer();
for (URL stylesheet : resources) {
try {
String fileContent = Resources.asByteSource(stylesheet).asCharSource(Charsets.UTF_8).read();
buffer.append(fileContent);
buffer.append("\n");
} catch (IOException e) {
logger.log(ERROR, "Unable to parse CSS", e);
throw new UnableToCompleteException();
}
}
return buffer.toString();
}
private boolean ensureEitherCssOrGss(List<URL> resources, TreeLogger logger)
throws UnableToCompleteException {
boolean css = resources.get(0).toString().endsWith(".css");
for (URL stylesheet : resources) {
if (css && !stylesheet.toString().endsWith(".css")) {
logger.log(Type.ERROR, "Only either css files or gss files are supported on one interface");
throw new UnableToCompleteException();
} else if (!css && !stylesheet.toString().endsWith(".gss")) {
logger.log(Type.ERROR, "Only either css files or gss files are supported on one interface");
throw new UnableToCompleteException();
}
}
return css;
}
  /**
   * Returns a Java source expression evaluating to the compiled CSS text. The LTR version is
   * printed first; then the bidi flipper mutates the tree in place, and if anything was actually
   * flipped, a runtime locale-direction conditional selecting between the two versions is emitted.
   */
  @Override
  protected String getCssExpression(
      TreeLogger logger, ResourceContext context, ExecutableElement method) {
    CssTree cssTree = cssParsingResultMap.get(method).tree;
    // Must be printed BEFORE the flipper runs: the flipper mutates this same tree.
    String standard = printCssTree(cssTree);
    // TODO add configuration properties for swapLtrRtlInUrl, swapLeftRightInUrl and
    // shouldFlipConstantReferences booleans
    RecordingBidiFlipper recordingBidiFlipper =
        new RecordingBidiFlipper(cssTree.getMutatingVisitController(), false, false, true);
    recordingBidiFlipper.runPass();
    if (recordingBidiFlipper.nodeFlipped()) {
      String reversed = printCssTree(cssTree);
      return LocaleInfo.class.getName()
          + ".getCurrentLocale().isRTL() ? "
          + reversed
          + " : "
          + standard;
    } else {
      return standard;
    }
  }
// TODO FIX REPLACEMENT
private String printCssTree(CssTree tree) {
CssPrinter cssPrinterPass = new CssPrinter(tree);
cssPrinterPass.runPass();
return cssPrinterPass.getCompactPrintedString();
}
  /**
   * Generates the anonymous-class implementation of the bundle method's CssResource interface:
   * obfuscates class names, optimizes the tree, validates {@code @external} usage, then writes an
   * implementation for every interface method.
   *
   * @return Java source for a {@code new Foo() { ... }} expression
   */
  @Override
  public String createAssignment(
      TreeLogger logger, ResourceContext context, ExecutableElement method)
      throws UnableToCompleteException {
    CssParsingResult cssParsingResult = cssParsingResultMap.get(method);
    CssTree cssTree = cssParsingResult.tree;
    RenamingResult renamingResult = doClassRenaming(cssTree, method, logger, context);
    // TODO : Should we foresee configuration properties for simplifyCss and eliminateDeadCode
    // booleans ?
    ConstantDefinitions constantDefinitions =
        optimizeTree(cssParsingResult, context, true, true, logger);
    checkErrors();
    // Renaming is reverted only now because @external rules inside conditional blocks are only
    // resolvable after the conditionals have been evaluated by optimizeTree.
    Set<String> externalClasses = revertRenamingOfExternalClasses(cssTree, renamingResult);
    checkErrors();
    // Validate that classes not assigned to one of the interface methods are external
    validateExternalClasses(externalClasses, renamingResult.externalClassCandidate, method, logger);
    SourceWriter sw = new StringSourceWriter();
    sw.println("new " + method.getReturnType() + "() {");
    sw.indent();
    writeMethods(
        logger,
        context,
        method,
        sw,
        constantDefinitions,
        cssParsingResult.originalConstantNameMapping,
        renamingResult.mapping);
    sw.outdent();
    sw.println("}");
    // CssResourceGenerator.outputCssMapArtifact(logger, context, method, actualReplacements);
    return sw.toString();
  }
  /**
   * Runs the full optimization pipeline on the parse tree: mixin expansion, compile-time
   * conditional elimination, constant substitution, image sprites, custom GSS functions, and the
   * optional simplification / dead-code-elimination phases. Pass order is load-bearing.
   *
   * @param simplifyCss run the CSS simplification passes (empty rules, zero units, colors, ...)
   * @param eliminateDeadStyles run the dead-code elimination / rule-merging passes
   * @return the final values of the {@code @def} constants, collected after substitution
   */
  private ConstantDefinitions optimizeTree(
      CssParsingResult cssParsingResult,
      ResourceContext context,
      boolean simplifyCss,
      boolean eliminateDeadStyles,
      TreeLogger logger)
      throws UnableToCompleteException {
    CssTree cssTree = cssParsingResult.tree;
    // Collect mixin definitions and replace mixins
    CollectMixinDefinitions collectMixinDefinitions =
        new CollectMixinDefinitions(cssTree.getMutatingVisitController(), errorManager);
    collectMixinDefinitions.runPass();
    new ReplaceMixins(
            cssTree.getMutatingVisitController(),
            errorManager,
            collectMixinDefinitions.getDefinitions())
        .runPass();
    new ProcessComponents<>(cssTree.getMutatingVisitController(), errorManager).runPass();
    // Runtime conditionals must be preserved through compile-time elimination, so collect them
    // first and pass them to the eliminator below.
    RuntimeConditionalBlockCollector runtimeConditionalBlockCollector =
        new RuntimeConditionalBlockCollector(cssTree.getVisitController());
    runtimeConditionalBlockCollector.runPass();
    // Conditions that are true for THIS permutation: current deferred-binding axis values plus
    // configuration properties whose value is "true".
    Set<String> trueCompileTimeConditions =
        ImmutableSet.<String>builder()
            .addAll(
                getCurrentDeferredBindingProperties(
                    context, cssParsingResult.permutationAxes, logger))
            .addAll(
                getTrueConfigurationProperties(context, cssParsingResult.trueConditions, logger))
            .build();
    new ExtendedEliminateConditionalNodes(
            cssTree.getMutatingVisitController(),
            trueCompileTimeConditions,
            runtimeConditionalBlockCollector.getRuntimeConditionalBlock())
        .runPass();
    new ValidateRuntimeConditionalNode(
            cssTree.getVisitController(), errorManager, gssOptions.isLenientConversion())
        .runPass();
    // Don't continue if errors exist
    checkErrors();
    CollectConstantDefinitions collectConstantDefinitionsPass =
        new CollectConstantDefinitions(cssTree);
    collectConstantDefinitionsPass.runPass();
    ReplaceConstantReferences replaceConstantReferences =
        new ReplaceConstantReferences(
            cssTree,
            collectConstantDefinitionsPass.getConstantDefinitions(),
            false,
            errorManager,
            false);
    replaceConstantReferences.runPass();
    new ImageSpriteCreator(cssTree.getMutatingVisitController(), context, errorManager).runPass();
    Map<String, GssFunction> gssFunctionMap = new GwtGssFunctionMapProvider(context).get();
    new ResolveCustomFunctionNodes(
            cssTree.getMutatingVisitController(),
            errorManager,
            gssFunctionMap,
            true,
            allowedNonStandardFunctions)
        .runPass();
    // collect the final value of the constants and remove them.
    collectConstantDefinitionsPass = new CollectAndRemoveConstantDefinitions(cssTree);
    collectConstantDefinitionsPass.runPass();
    if (simplifyCss) {
      // Eliminate empty rules.
      new EliminateEmptyRulesetNodes(cssTree.getMutatingVisitController()).runPass();
      // Eliminating units for zero values.
      new EliminateUnitsFromZeroNumericValues(cssTree.getMutatingVisitController()).runPass();
      // Optimize color values.
      new ColorValueOptimizer(cssTree.getMutatingVisitController()).runPass();
      // Compress redundant top-right-bottom-left value lists.
      new AbbreviatePositionalValues(cssTree.getMutatingVisitController()).runPass();
    }
    if (eliminateDeadStyles) {
      // Report errors for duplicate declarations
      new DisallowDuplicateDeclarations(cssTree.getVisitController(), errorManager).runPass();
      // Split rules by selector and declaration.
      new SplitRulesetNodes(cssTree.getMutatingVisitController()).runPass();
      // Dead code elimination.
      new MarkRemovableRulesetNodes(cssTree).runPass();
      new EliminateUselessRulesetNodes(cssTree).runPass();
      // Merge of rules with same selector.
      new MergeAdjacentRulesetNodesWithSameSelector(cssTree).runPass();
      new EliminateUselessRulesetNodes(cssTree).runPass();
      // Merge of rules with same styles.
      new MergeAdjacentRulesetNodesWithSameDeclarations(cssTree).runPass();
      new EliminateUselessRulesetNodes(cssTree).runPass();
      new MarkNonFlippableNodes(cssTree.getMutatingVisitController(), errorManager).runPass();
    }
    return collectConstantDefinitionsPass.getConstantDefinitions();
  }
private Set<String> getTrueConfigurationProperties(
ResourceContext context, Set<String> configurationProperties, TreeLogger logger)
throws UnableToCompleteException {
ImmutableSet.Builder<String> setBuilder = ImmutableSet.builder();
PropertyOracle oracle = context.getGeneratorContext().getPropertyOracle();
for (String property : configurationProperties) {
SelectionProperty confProp = oracle.getSelectionProperty(logger, property);
if (!"true".equals(confProp.getCurrentValue())
&& !"false".equals(confProp.getCurrentValue())) {
logger.log(
Type.ERROR,
"The eval property "
+ confProp.getName()
+ " is used in "
+ "a conditional css. Its value must be either \"true\" or \"false\"");
throw new UnableToCompleteException();
}
if ("true".equals(confProp.getCurrentValue())) {
setBuilder.add(property);
}
}
return setBuilder.build();
}
private Set<String> getCurrentDeferredBindingProperties(
ResourceContext context, List<String> permutationAxes, TreeLogger logger)
throws UnableToCompleteException {
PropertyOracle oracle = context.getGeneratorContext().getPropertyOracle();
ImmutableSet.Builder<String> setBuilder = ImmutableSet.builder();
for (String permutationAxis : permutationAxes) {
SelectionProperty selProp = oracle.getSelectionProperty(logger, permutationAxis);
String propValue = selProp.getCurrentValue();
setBuilder.add(permutationAxis + ":" + propValue);
}
return setBuilder.build();
}
private void validateExternalClasses(
Set<String> externalClasses,
Set<String> externalClassCandidates,
ExecutableElement method,
TreeLogger logger)
throws UnableToCompleteException {
if (!isStrictResource(method)) {
return;
}
boolean hasError = false;
for (String candidate : externalClassCandidates) {
if (!externalClasses.contains(candidate)) {
logger.log(
Type.ERROR,
"The following non-obfuscated class is present in a strict "
+ "CssResource: "
+ candidate
+ ". Fix by adding String accessor "
+ "method(s) to the CssResource interface for obfuscated classes, "
+ "or use an @external declaration for unobfuscated classes.");
hasError = true;
}
}
if (hasError) {
throw new UnableToCompleteException();
}
}
private boolean isStrictResource(ExecutableElement method) {
NotStrict notStrict = method.getAnnotation(NotStrict.class);
return notStrict == null;
}
  /**
   * When the tree is fully processed, we can now collect the external classes and revert the
   * renaming for these classes. We cannot collect the external classes during the original renaming
   * because some external at-rule could be located inside a conditional block and could be removed
   * when these blocks are evaluated.
   */
  private Set<String> revertRenamingOfExternalClasses(
      CssTree cssTree, RenamingResult renamingResult) {
    ExternalClassesCollector externalClassesCollector =
        new ExternalClassesCollector(cssTree.getMutatingVisitController(), errorManager);
    externalClassesCollector.runPass();
    Map<String, String> styleClassesMapping = renamingResult.mapping;
    // set containing all the style classes before the renaming.
    Set<String> allStyleClassSet = Sets.newHashSet(styleClassesMapping.keySet());
    // add the style classes that aren't associated to a method
    allStyleClassSet.addAll(renamingResult.externalClassCandidate);
    Set<String> externalClasses =
        externalClassesCollector.getExternalClassNames(
            allStyleClassSet, renamingResult.externalClassCandidate);
    // Build the obfuscated-name -> original-name map BEFORE overwriting the entry below, since
    // both operations read/write the same mapping.
    final Map<String, String> revertMap = new HashMap<>(externalClasses.size());
    for (String external : externalClasses) {
      revertMap.put(styleClassesMapping.get(external), external);
      // override the mapping so the accessor method returns the original (external) name
      styleClassesMapping.put(external, external);
    }
    // Second renaming pass: rewrite the obfuscated names back to their originals in the tree.
    SubstitutionMap revertExternalClasses = key -> revertMap.get(key);
    new CssClassRenaming(cssTree.getMutatingVisitController(), revertExternalClasses, null)
        .runPass();
    return externalClasses;
  }
private RenamingResult doClassRenaming(
CssTree cssTree, ExecutableElement method, TreeLogger logger, ResourceContext context)
throws UnableToCompleteException {
Map<String, Map<String, String>> replacementsWithPrefix =
computeReplacements(method, logger, context);
RenamingSubstitutionMap substitutionMap = new RenamingSubstitutionMap(replacementsWithPrefix);
new CssClassRenaming(cssTree.getMutatingVisitController(), substitutionMap, null).runPass();
Map<String, String> mapping = replacementsWithPrefix.get("");
mapping =
Maps.newHashMap(Maps.filterKeys(mapping, Predicates.in(substitutionMap.getStyleClasses())));
return new RenamingResult(mapping, substitutionMap.getExternalClassCandidates());
}
  /**
   * Builds the obfuscation maps for the resource interface and any {@code @Import}ed interfaces.
   * The empty-string key holds the resource's own (unprefixed) replacements; each imported
   * interface is stored under its import prefix (see {@link #getImportPrefix}).
   *
   * @throws UnableToCompleteException if two imports would share the same prefix
   */
  private Map<String, Map<String, String>> computeReplacements(
      ExecutableElement method, TreeLogger logger, ResourceContext context)
      throws UnableToCompleteException {
    Map<String, Map<String, String>> replacementsWithPrefix = new HashMap<>();
    Elements elements = context.getGeneratorContext().getAptContext().elements;
    replacementsWithPrefix.put(
        "",
        computeReplacementsForType(
            (TypeElement) MoreTypes.asElement(method.getReturnType()),
            context.getGeneratorContext().getAptContext()));
    // Process the Import annotation if any
    Import imp = method.getAnnotation(Import.class);
    if (imp != null) {
      boolean fail = false;
      for (TypeMirror type : getImportType(imp)) {
        TypeElement importType = elements.getTypeElement(type.toString());
        String prefix = getImportPrefix(importType);
        // Map.put returns the previous value: non-null means two imports collide on a prefix.
        if (replacementsWithPrefix.put(
                prefix,
                computeReplacementsForType(
                    importType, context.getGeneratorContext().getAptContext()))
            != null) {
          logger.log(ERROR, "Multiple imports that would use the prefix " + prefix);
          fail = true;
        }
      }
      if (fail) {
        throw new UnableToCompleteException();
      }
    }
    return replacementsWithPrefix;
  }
  /**
   * Returns the {@link TypeMirror}s listed in an {@code @Import} annotation's {@code value()}.
   *
   * <p>Reading a Class[]-valued annotation member inside an annotation processor always throws
   * {@link MirroredTypesException}, because the referenced classes may not be loadable at
   * processing time. The call below deliberately triggers that exception and harvests the type
   * mirrors from it — this is the standard javax.lang.model idiom, not a bug.
   */
  private static List<? extends TypeMirror> getImportType(Import annotation) {
    try {
      annotation.value();
    } catch (MirroredTypesException mte) {
      return mte.getTypeMirrors();
    }
    // Unreachable under normal annotation processing: value() should always throw above.
    return null;
  }
/** Returns the import prefix for a type, including the trailing hyphen. */
public static String getImportPrefix(TypeElement importType) {
String prefix = importType.getSimpleName().toString();
ImportedWithPrefix exp = importType.getAnnotation(ImportedWithPrefix.class);
if (exp != null) {
prefix = exp.value();
}
return prefix + "-";
}
  /**
   * Computes (and caches per interface) the style-class -> obfuscated-name map for a CssResource
   * interface: one entry per accessor method, keyed by the method's CSS class name. Methods on
   * {@code @Shared} interfaces reuse a single replacement across all resources so the obfuscated
   * name stays stable everywhere.
   */
  private Map<String, String> computeReplacementsForType(
      TypeElement cssResource, AptContext aptContext) {
    Map<String, String> replacements = replacementsByClassAndMethod.get(cssResource);
    Types types = aptContext.types;
    Elements elements = aptContext.elements;
    if (replacements == null) {
      replacements = new HashMap<>();
      replacementsByClassAndMethod.put(cssResource, replacements);
      // Short per-resource prefix keeping obfuscated names unique across resources.
      String resourcePrefix = resourcePrefixBuilder.get(cssResource.toString());
      // This substitution map will prefix each renamed class with the resource prefix and use a
      // MinimalSubstitutionMap for computing the obfuscated name.
      SubstitutionMap prefixingSubstitutionMap =
          new PrefixingSubstitutionMap(
              new MinimalSubstitutionMap(), obfuscationPrefix + resourcePrefix + "-");
      for (ExecutableElement method :
          MoreElements.getLocalAndInheritedMethods(cssResource, types, elements)) {
        String name = method.getSimpleName().toString();
        if (ignoredMethods.contains(name)) {
          continue;
        }
        String styleClass = getClassName(method);
        if (replacementsForSharedMethods.containsKey(method)) {
          // A previously-processed resource already fixed this shared method's name: reuse it.
          replacements.put(styleClass, replacementsForSharedMethods.get(method));
        } else {
          String obfuscatedClassName = prefixingSubstitutionMap.get(styleClass);
          String replacement =
              obfuscationStyle.getPrettyName(styleClass, cssResource, obfuscatedClassName);
          if (hasSharedAnnotation(method)) {
            // We always use the base type for obfuscation if this is a shared method
            replacement =
                obfuscationStyle.getPrettyName(
                    styleClass, (TypeElement) method.getEnclosingElement(), obfuscatedClassName);
            replacementsForSharedMethods.put(method, replacement);
          }
          replacements.put(styleClass, replacement);
        }
      }
    }
    return replacements;
  }
private String getClassName(ExecutableElement method) {
String name = method.getSimpleName().toString();
ClassName classNameOverride = method.getAnnotation(ClassName.class);
if (classNameOverride != null) {
name = classNameOverride.value();
}
return name;
}
private boolean hasSharedAnnotation(ExecutableElement method) {
TypeElement enclosingType = (TypeElement) method.getEnclosingElement();
Shared shared = enclosingType.getAnnotation(Shared.class);
return shared != null;
}
  /**
   * Writes an implementation for every method of the generated interface. The well-known
   * {@code getText}/{@code ensureInjected}/{@code getName} methods get fixed implementations;
   * everything else is treated as a user-defined class-name or constant accessor.
   *
   * @return map from accessor method to the (obfuscated) class name it returns
   * @throws UnableToCompleteException if any user method cannot be matched to a class or constant
   */
  private Map<ExecutableElement, String> writeMethods(
      TreeLogger logger,
      ResourceContext context,
      ExecutableElement method,
      SourceWriter sw,
      ConstantDefinitions constantDefinitions,
      Map<String, String> originalConstantNameMapping,
      Map<String, String> substitutionMap)
      throws UnableToCompleteException {
    Types types = context.getGeneratorContext().getAptContext().types;
    Elements elements = context.getGeneratorContext().getAptContext().elements;
    TypeElement gssResource = (TypeElement) MoreTypes.asElement(method.getReturnType());
    assert gssResource.getKind().equals(ElementKind.INTERFACE);
    // All user-method failures are accumulated so the developer sees every problem at once.
    boolean success = true;
    Map<ExecutableElement, String> methodToClassName = new LinkedHashMap<>();
    for (ExecutableElement toImplement :
        MoreElements.getLocalAndInheritedMethods(gssResource, types, elements)) {
      String simpleName = toImplement.getSimpleName().toString();
      if (simpleName.equals("getText")) {
        writeGetText(logger, context, method, sw);
      } else if (simpleName.equals("ensureInjected")) {
        writeEnsureInjected(sw);
      } else if (simpleName.equals("getName")) {
        writeGetName(method, sw);
      } else {
        success &=
            writeUserMethod(
                logger,
                toImplement,
                sw,
                constantDefinitions,
                originalConstantNameMapping,
                substitutionMap,
                methodToClassName);
      }
    }
    if (!success) {
      throw new UnableToCompleteException();
    }
    return methodToClassName;
  }
/**
 * Writes one user-defined accessor: first tries to match it to an obfuscated
 * style class (String-returning method whose class name is in the
 * substitution map), then to an @def constant (by mapped, literal, or
 * upper-cased name). Logs an error and returns {@code false} (or throws)
 * when neither matches.
 */
private boolean writeUserMethod(
TreeLogger logger,
ExecutableElement userMethod,
SourceWriter sw,
ConstantDefinitions constantDefinitions,
Map<String, String> originalConstantNameMapping,
Map<String, String> substitutionMap,
Map<ExecutableElement, String> methodToClassName)
throws UnableToCompleteException {
String className = getClassName(userMethod);
// method to access style class ?
if (substitutionMap.containsKey(className) && isReturnTypeString(userMethod.getReturnType())) {
methodToClassName.put(userMethod, substitutionMap.get(className));
return writeClassMethod(logger, userMethod, substitutionMap, sw);
}
// method to access constant value ?
CssDefinitionNode definitionNode;
String methodName = userMethod.getSimpleName().toString();
if (originalConstantNameMapping.containsKey(methodName)) {
// method name maps a constant that has been renamed during the auto conversion
String constantName = originalConstantNameMapping.get(methodName);
definitionNode = constantDefinitions.getConstantDefinition(constantName);
} else {
definitionNode = constantDefinitions.getConstantDefinition(methodName);
if (definitionNode == null) {
// try with upper case
definitionNode = constantDefinitions.getConstantDefinition(toUpperCase(methodName));
}
}
if (definitionNode != null) {
return writeDefMethod(definitionNode, logger, userMethod, sw);
}
if (substitutionMap.containsKey(className)) {
// method matched a class name but not a constant and the return type is not a string
logger.log(
Type.ERROR,
"The return type of the method ["
+ userMethod.toString()
+ "] must "
+ "be java.lang.String.");
throw new UnableToCompleteException();
}
// the method doesn't match a style class nor a constant
logger.log(
Type.ERROR,
"The following method ["
+ userMethod.toString()
+ "] doesn't match a constant"
+ " nor a style class. You could fix that by adding ."
+ className
+ " {}");
return false;
}
/**
 * Writes a style-class accessor as a simple getter returning the obfuscated
 * class name. The method must be parameterless; a class name missing from
 * the substitution map is reported as an error (return {@code false}).
 */
private boolean writeClassMethod(
TreeLogger logger,
ExecutableElement userMethod,
Map<String, String> substitutionMap,
SourceWriter sw)
throws UnableToCompleteException {
if (userMethod.getParameters().size() > 0) {
logger.log(
Type.ERROR,
"The method [" + userMethod.toString() + "] shouldn't contain any " + "parameters");
throw new UnableToCompleteException();
}
String name = getClassName(userMethod);
String value = substitutionMap.get(name);
if (value == null) {
logger.log(
Type.ERROR,
"The following style class [" + name + "] is missing from the source" + " CSS file");
return false;
} else {
// Emit: return "<obfuscated-name>";
writeSimpleGetter(userMethod, "\"" + value + "\"", sw);
}
return true;
}
/** True when {@code classReturnType} is exactly {@code java.lang.String}. */
protected boolean isReturnTypeString(TypeMirror classReturnType) {
  if (classReturnType == null || classReturnType.getKind().isPrimitive()) {
    return false;
  }
  return "java.lang.String".equals(classReturnType.toString());
}
/**
 * Writes an @def constant accessor. A String-returning method may join any
 * number of values; a primitive-returning method requires exactly one
 * numeric value, rendered with the suffix/format matching its return kind.
 *
 * @return {@code false} on a recoverable mismatch (logged); throws for an
 *     unsupported primitive return type
 */
private boolean writeDefMethod(
CssDefinitionNode definitionNode,
TreeLogger logger,
ExecutableElement userMethod,
SourceWriter sw)
throws UnableToCompleteException {
String name = userMethod.toString();
TypeMirror classReturnType = userMethod.getReturnType();
List<CssValueNode> params = definitionNode.getParameters();
if (params.size() != 1 && !isReturnTypeString(classReturnType)) {
logger.log(
ERROR,
"@def rule " + name + " must define exactly one value or return type must be String");
return false;
}
// BUG FIX: the original asserted getKind().toString().equals("java.lang.String"),
// but TypeKind.toString() yields names like "DECLARED", never a class name, so
// the assert failed (with -ea) for every String-returning accessor.
assert classReturnType.getKind().isPrimitive() || isReturnTypeString(classReturnType);
String returnExpr;
if (isReturnTypeString(classReturnType)) {
// String accessor: join all values with spaces, escaped into a literal.
List<String> returnValues = new ArrayList<String>();
for (CssValueNode valueNode : params) {
returnValues.add(Generator.escape(valueNode.toString()));
}
returnExpr = "\"" + Joiner.on(" ").join(returnValues) + "\"";
} else {
TypeMirror returnType = userMethod.getReturnType();
if (returnType == null) {
logger.log(
ERROR, name + ": Return type must be primitive type " + "or String for @def accessors");
return false;
}
CssValueNode valueNode = params.get(0);
// when a constant refers to another constant, closure-stylesheet wrap the CssNumericNode in
// a CssCompositeValueNode. Unwrap it.
if (valueNode instanceof CssCompositeValueNode) {
CssCompositeValueNode toUnwrap = (CssCompositeValueNode) valueNode;
if (toUnwrap.getValues().size() == 1) {
valueNode = toUnwrap.getValues().get(0);
}
}
if (!(valueNode instanceof CssNumericNode)) {
logger.log(
ERROR, "The value of the constant defined by @" + name + " is not a" + " numeric");
return false;
}
String numericValue = ((CssNumericNode) valueNode).getNumericPart();
// Render the literal in the shape the return kind expects.
if (returnType.getKind() == TypeKind.INT || returnType.getKind() == TypeKind.LONG) {
returnExpr = "" + Long.parseLong(numericValue);
} else if (returnType.getKind() == TypeKind.FLOAT) {
returnExpr = numericValue + "F";
} else if (returnType.getKind() == TypeKind.DOUBLE) {
returnExpr = "" + numericValue;
} else {
logger.log(ERROR, returnType + " is not a valid primitive return type for @def accessors");
throw new UnableToCompleteException();
}
}
writeSimpleGetter(userMethod, returnExpr, sw);
return true;
}
/**
 * Transform a camel case string to upper case. Each word is separated by a '_'
 * (e.g. {@code fooBar} becomes {@code FOO_BAR}).
 *
 * @param camelCase the lowerCamelCase input
 * @return the UPPER_UNDERSCORE form of the input
 */
private String toUpperCase(String camelCase) {
return CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, camelCase);
}
/**
 * One-time generator initialization: records CssResource's own methods so
 * they are ignored as user accessors, then reads the css-resource
 * configuration properties (obfuscation style and prefix, allowed @-rules
 * and non-standard functions) and wires up the shared replacement caches.
 */
@Override
public void init(TreeLogger logger, ResourceContext context) throws UnableToCompleteException {
cssParsingResultMap = new IdentityHashMap<>();
errorManager = new LoggerErrorManager(logger);
PropertyOracle propertyOracle = context.getGeneratorContext().getPropertyOracle();
Types types = context.getGeneratorContext().getAptContext().types;
Elements elements = context.getGeneratorContext().getAptContext().elements;
// Methods declared on CssResource itself must not be treated as user accessors.
TypeElement superInterface = elements.getTypeElement(CssResource.class.getCanonicalName());
for (Element m : MoreElements.getLocalAndInheritedMethods(superInterface, types, elements)) {
if (m.getKind().equals(ElementKind.METHOD)) {
ignoredMethods.add(m.getSimpleName().toString());
}
}
allowedNonStandardFunctions = new HashSet<>();
allowedAtRules = Sets.newHashSet(ExternalClassesCollector.EXTERNAL_AT_RULE);
obfuscationStyle =
CssObfuscationStyle.getObfuscationStyle(
propertyOracle
.getConfigurationProperty(logger, KEY_CSS_RESOURCE_STYLE)
.asSingleValue());
obfuscationPrefix =
getObfuscationPrefix(
logger,
propertyOracle
.getConfigurationProperty(logger, KEY_CSS_RESOURCE_OBFUSCATION_PREFIX)
.asSingleValue(),
context);
allowedAtRules.addAll(
propertyOracle
.getConfigurationProperty(logger, KEY_CSS_RESOURCE_ALLOWED_AT_RULES)
.getValues());
allowedNonStandardFunctions.addAll(
propertyOracle
.getConfigurationProperty(logger, KEY_CSS_RESOURCE_ALLOWED_FUNCTIONS)
.getValues());
initReplacement(context);
}
/**
 * Interprets the configured obfuscation-prefix property: "empty" means no
 * prefix, "default" derives a checksum-based prefix, anything else is used
 * verbatim.
 */
private String getObfuscationPrefix(TreeLogger logger, String prefix, ResourceContext context) {
  if ("empty".equalsIgnoreCase(prefix)) {
    return "";
  }
  if ("default".equalsIgnoreCase(prefix)) {
    return getDefaultObfuscationPrefix(logger, context);
  }
  return prefix;
}
/**
 * Returns the default obfuscation prefix, computing it once per
 * ResourceContext and caching it under {@code KEY_CLASS_PREFIX}.
 */
private String getDefaultObfuscationPrefix(TreeLogger logger, ResourceContext context) {
  String cached = context.getCachedData(KEY_CLASS_PREFIX, String.class);
  if (cached != null) {
    return cached;
  }
  String prefix = computeDefaultPrefix(logger, context);
  context.putCachedData(KEY_CLASS_PREFIX, prefix);
  return prefix;
}
/**
 * Computes a deterministic obfuscation prefix from an Adler-32 checksum of
 * all operable CssResource type names, so the prefix is stable for the same
 * set of resource types.
 */
private String computeDefaultPrefix(TreeLogger logger, ResourceContext context) {
SortedSet<TypeElement> gssResources = computeOperableTypes(logger, context);
Adler32 checksum = new Adler32();
for (TypeElement type : gssResources) {
checksum.update(Util.getBytes(type.toString()));
}
int seed = Math.abs((int) checksum.getValue());
return encode(seed) + "-";
}
/**
 * Encodes a non-negative id with the BASE32_CHARS alphabet. Only the low
 * 4 bits feed the first character (per the inline note, those table entries
 * are alphabetic, yielding a valid identifier start); the remaining bits are
 * consumed 5 at a time.
 */
private static String encode(long id) {
assert id >= 0;
StringBuilder b = new StringBuilder();
// Use only guaranteed-alpha characters for the first character
b.append(BASE32_CHARS[(int) (id & 0xf)]);
id >>= 4;
while (id != 0) {
b.append(BASE32_CHARS[(int) (id & 0x1f)]);
id >>= 5;
}
return b.toString();
}
/**
 * Collects every CssResourceBase subtype reachable from the client bundle:
 * return types of bundle methods plus all their parent interfaces, and any
 * nested interfaces. The sorted set gives a deterministic order for stable
 * prefix computation.
 */
private SortedSet<TypeElement> computeOperableTypes(TreeLogger logger, ResourceContext context) {
TypeElement bundle = context.getClientBundleType();
Types types = context.getGeneratorContext().getAptContext().types;
Elements elements = context.getGeneratorContext().getAptContext().elements;
logger = logger.branch(TreeLogger.DEBUG, "Finding operable CssResource subtypes");
TypeElement baseInterface = elements.getTypeElement(CssResourceBase.class.getCanonicalName());
SortedSet<TypeElement> toReturn =
new TreeSet<>(new CssResourceGenerator.JClassOrderComparator());
for (Element elm : bundle.getEnclosedElements()) {
if (elm.getKind().equals(ElementKind.METHOD)) {
ExecutableElement method = (ExecutableElement) elm;
if (types.isSubtype(method.getReturnType(), baseInterface.asType())) {
if (logger.isLoggable(TreeLogger.SPAM)) {
logger.log(TreeLogger.SPAM, "Added " + method);
}
toReturn.add(MoreTypes.asTypeElement(method.getReturnType()));
// Parent interfaces also contribute to the operable set.
ResourceGeneratorUtil.getAllParents(MoreTypes.asTypeElement(method.getReturnType()))
.forEach(p -> toReturn.add(MoreTypes.asTypeElement(p)));
} else {
if (logger.isLoggable(TreeLogger.SPAM)) {
logger.log(TreeLogger.SPAM, "Ignored " + method);
}
}
} else if (elm.getKind().equals(ElementKind.INTERFACE)) {
toReturn.add((TypeElement) elm);
}
}
return toReturn;
}
/**
 * Lazily installs the generator-wide caches (shared-method and per-class
 * replacement maps) in the ResourceContext on first use, then binds the
 * local fields to the cached instances so all runs share one state.
 */
@SuppressWarnings("unchecked")
private void initReplacement(ResourceContext context) {
// First run for this context: seed the caches.
if (context.getCachedData(KEY_HAS_CACHED_DATA, Boolean.class) != Boolean.TRUE) {
context.putCachedData(KEY_SHARED_METHODS, new IdentityHashMap<ExecutableElement, String>());
context.putCachedData(
KEY_BY_CLASS_AND_METHOD, new IdentityHashMap<TypeElement, Map<String, String>>());
context.putCachedData(KEY_HAS_CACHED_DATA, Boolean.TRUE);
}
replacementsByClassAndMethod = context.getCachedData(KEY_BY_CLASS_AND_METHOD, Map.class);
replacementsForSharedMethods = context.getCachedData(KEY_SHARED_METHODS, Map.class);
}
/** Different conversion modes from css to gss. */
public enum AutoConversionMode {
// Conversion fails on any construct that cannot be translated.
STRICT,
// Conversion tolerates and works around non-translatable constructs.
LENIENT,
// No automatic conversion is performed.
OFF
}
/** Predicate implementation used during the conversion to GSS. */
private static class ConfigurationPropertyMatcher implements Predicate<String> {
private final TreeLogger logger;
private final ResourceContext context;
// Latches to true when a referenced property is not a single-valued boolean.
private boolean error;
ConfigurationPropertyMatcher(ResourceContext context, TreeLogger logger) {
this.logger = logger;
this.context = context;
}
/**
 * Evaluates a boolean condition referenced from the stylesheet: strips a
 * leading '!' before lookup, reads the value from system properties, and
 * validates it as a single boolean value.
 */
@Override
public boolean apply(String booleanCondition) {
// if the condition is negated, the string parameter contains the ! operator if this method
// is called during the conversion to GSS
if (booleanCondition.startsWith("!")) {
booleanCondition = booleanCondition.substring(1);
}
String value = System.getProperty(booleanCondition);
if (value == null) {
logger.log(Type.WARN, "No such property " + booleanCondition);
return false;
}
DefaultConfigurationProperty configurationProperty =
new DefaultConfigurationProperty(booleanCondition, Collections.singletonList(value));
boolean valid = checkPropertyIsSingleValueAndBoolean(configurationProperty, logger);
error |= !valid;
return valid;
}
}
/**
 * {@link ErrorManager} used to log the errors and warning messages produced by the different
 * {@link CssCompilerPass}.
 */
public static class LoggerErrorManager implements ErrorManager {
private final TreeLogger logger;
// Latches to true on the first reported error; never reset.
private boolean hasErrors;
public LoggerErrorManager(TreeLogger logger) {
this.logger = logger;
}
@Override
public void generateReport() {
// do nothing
}
@Override
public boolean hasErrors() {
return hasErrors;
}
/** Logs a GSS error with file and line/column context when available. */
@Override
public void report(GssError error) {
String fileName = "";
String location = "";
SourceCodeLocation codeLocation = error.getLocation();
if (codeLocation != null) {
fileName = codeLocation.getSourceCode().getFileName();
location =
"[line: "
+ codeLocation.getBeginLineNumber()
+ " column: "
+ codeLocation.getBeginIndexInLine()
+ "]";
}
logger.log(Type.ERROR, "Error in " + fileName + location + ": " + error.getMessage());
hasErrors = true;
}
@Override
public void reportWarning(GssError warning) {
logger.log(Type.WARN, warning.getMessage());
}
}
/** Result of a CSS-to-GSS auto conversion. */
private static class ConversionResult {
// The converted GSS source text.
final String gss;
// Mapping of @def constant names affected by the conversion
// (old vs. new — confirm direction at call sites).
final Map<String, String> defNameMapping;
private ConversionResult(String gss, Map<String, String> defNameMapping) {
this.gss = gss;
this.defNameMapping = defNameMapping;
}
}
/** Result of class-name renaming: the substitution map plus external-class candidates. */
private static class RenamingResult {
// Style class name -> obfuscated replacement.
final Map<String, String> mapping;
// Class names that may need to be treated as @external.
final Set<String> externalClassCandidate;
private RenamingResult(Map<String, String> mapping, Set<String> externalClassCandidate) {
this.mapping = mapping;
this.externalClassCandidate = externalClassCandidate;
}
}
/** Bundle of everything produced by parsing one stylesheet. */
private static class CssParsingResult {
final CssTree tree;
final List<String> permutationAxes; // TODO remove
// Maps accessor method names to constants renamed during auto conversion.
final Map<String, String> originalConstantNameMapping;
final Set<String> trueConditions;
// NOTE: constructor parameter order differs from field declaration order —
// keep call sites in sync.
private CssParsingResult(
CssTree tree,
List<String> permutationAxis,
Set<String> trueConditions,
Map<String, String> originalConstantNameMapping) {
this.tree = tree;
this.permutationAxes = permutationAxis;
this.originalConstantNameMapping = originalConstantNameMapping;
this.trueConditions = trueConditions;
}
}
/**
 * GssOptions contains the values of all configuration properties that can be used with
 * GssResource.
 */
public static class GssOptions {
// Whether GSS support is enabled at all.
private final boolean enabled;
// How .css files are auto-converted to .gss (STRICT, LENIENT or OFF).
private final AutoConversionMode autoConversionMode;
// Whether UiBinder templates default to GSS.
private final boolean gssDefaultInUiBinder;
public GssOptions(
boolean enabled, AutoConversionMode autoConversionMode, boolean gssDefaultInUiBinder) {
this.enabled = enabled;
this.autoConversionMode = autoConversionMode;
this.gssDefaultInUiBinder = gssDefaultInUiBinder;
}
public boolean isEnabled() {
return enabled;
}
public boolean isGssDefaultInUiBinder() {
return gssDefaultInUiBinder;
}
public boolean isAutoConversionOff() {
return autoConversionMode == AutoConversionMode.OFF;
}
public boolean isLenientConversion() {
return autoConversionMode == AutoConversionMode.LENIENT;
}
}
}
|
import random

# Build a list of 20 [name, age] records with random ages in [18, 70].
# (random.randint is called once per index, same as the loop form.)
data = [['Name ' + str(i), random.randint(18, 70)] for i in range(20)]

# Show the generated records.
print(data)
# Example output:
# [['Name 0', 64],
# ['Name 1', 18],
# ...
# ['Name 19', 41]]
// Discord bot wiring: a discord.js client plus the discord-anti-spam module,
// which warns/bans members who exceed the message thresholds below.
const Discord = require("discord.js");
const client = new Discord.Client();
const DiscordAntiSpam = require("discord-anti-spam");
const AntiSpam = new DiscordAntiSpam({
warnThreshold: 3, // Amount of messages sent in a row that will cause a warning.
banThreshold: 7, // Amount of messages sent in a row that will cause a ban
maxInterval: 2000, // Amount of time (in ms) in which messages are considered spam.
warnMessage: "{@user}, Please stop spamming.", // Message will be sent in chat upon warning.
banMessage: "**{user_tag}** has been banned for spamming.", // Message will be sent in chat upon banning.
maxDuplicatesWarning: 7, // Amount of same messages sent that will be considered as duplicates that will cause a warning.
maxDuplicatesBan: 15, // Amount of same messages sent that will be considered as duplicates that will cause a ban.
deleteMessagesAfterBanForPastDays: 1, // Amount of days in which old messages will be deleted. (1-7)
exemptPermissions: ["MANAGE_MESSAGES", "ADMINISTRATOR", "MANAGE_GUILD", "BAN_MEMBERS"], // Bypass users with at least one of these permissions
ignoreBots: true, // Ignore bot messages
verbose: false, // Extended Logs from module
ignoredUsers: [], // Array of string user IDs that are ignored
ignoredGuilds: [], // Array of string Guild IDs that are ignored
ignoredChannels: [] // Array of string channels IDs that are ignored
});
// Lifecycle logging for each anti-spam action.
// NOTE(review): kick events are logged but no kickThreshold is configured
// above — confirm whether kicks are expected to fire.
AntiSpam.on("warnEmit", (member) => console.log(`Attempt to warn ${member.user.tag}.`));
AntiSpam.on("warnAdd", (member) => console.log(`${member.user.tag} has been warned.`));
AntiSpam.on("kickEmit", (member) => console.log(`Attempt to kick ${member.user.tag}.`));
AntiSpam.on("kickAdd", (member) => console.log(`${member.user.tag} has been kicked.`));
AntiSpam.on("banEmit", (member) => console.log(`Attempt to ban ${member.user.tag}.`));
AntiSpam.on("banAdd", (member) => console.log(`${member.user.tag} has been banned.`));
AntiSpam.on("dataReset", () => console.log("Module cache has been cleared."));
client.on("ready", () => console.log(`Logged in as ${client.user.tag}.`));
// Feed every incoming message through the spam checker.
client.on("message", (msg) => {
AntiSpam.message(msg);
});
client.login(process.env.BOT_TOKEN);
<filename>src/js/sharedMethods.js
const shared = (() => {
  // Conditions that all map to the "smog" icon, lower-cased for comparison.
  const OTHER_CONDITIONS = ['mist', 'smoke', 'haze', 'dust', 'fog', 'sand', 'ash', 'squall'];

  /**
   * Maps an OpenWeather `main`/`description` pair to a Font Awesome icon
   * class; an `iconId` containing 'd' selects the daytime variant.
   *
   * Fixes over the previous version:
   *  - the unconditional `main === 'Clouds'` / `main === 'Rain'` checks ran
   *    AFTER the description-specific ones and clobbered their result
   *    ('few clouds' always became 'fas fa-cloud', 'shower rain' became
   *    'fas fa-cloud-rain');
   *  - the "other conditions" lookup compared the capitalized `main`
   *    (e.g. 'Mist') against a lower-case list, so it never matched.
   *
   * @returns {string|undefined} icon class, or undefined for unknown input
   *   (matching the old fall-through behavior).
   */
  const setIcon = (main, description, iconId) => {
    const day = iconId.includes('d');
    if (main === 'Clear' && description === 'clear sky') {
      return day ? 'fas fa-sun' : 'fas fa-moon';
    }
    if (main === 'Clouds') {
      if (description === 'few clouds' || description === 'scattered clouds') {
        return day ? 'fas fa-cloud-sun' : 'fas fa-cloud-moon';
      }
      return 'fas fa-cloud';
    }
    if (main === 'Rain') {
      if (description === 'shower rain') {
        return day ? 'fas fa-cloud-sun-rain' : 'fas fa-cloud-moon-rain';
      }
      return 'fas fa-cloud-rain';
    }
    if (main === 'Thunderstorm') { return 'fas fa-bolt'; }
    if (main === 'Snow') { return 'fas fa-snowflake'; }
    if (main === 'Tornado') { return 'fas fa-exclamation-circle'; }
    if (OTHER_CONDITIONS.includes(main.toLowerCase())) { return 'fas fa-smog'; }
    return undefined;
  };

  /** Converts Fahrenheit to Celsius, rounded, returned as a string. */
  const fahToCel = (current) => String(Math.round((parseInt(current, 10) - 32) * (5 / 9)));

  /** Converts Celsius to Fahrenheit, rounded, returned as a string. */
  const celToFah = (current) => String(Math.round((parseInt(current, 10) * (9 / 5)) + 32));

  /**
   * Toggles a wind-speed reading between km/h and mph: readings containing a
   * 'k' (km/h) are converted to mph, everything else from mph to km/h.
   *
   * Fix over the previous version: the number was parsed from a single
   * character (`joinNum[0]`) after stripping the unit with fragile splice
   * offsets, so multi-digit speeds were mangled; a regex now extracts the
   * full numeric value from the string.
   */
  const convertWind = (currentTodayWind) => {
    const match = String(currentTodayWind).match(/\d+(\.\d+)?/);
    const value = match ? parseFloat(match[0]) : NaN;
    if (currentTodayWind.includes('k')) {
      return Math.round(value / 1.609344); // km/h -> mph
    }
    return Math.round(value * 1.609344); // mph -> km/h
  };

  return {
    setIcon, fahToCel, celToFah, convertWind,
  };
})();
export default shared;
<gh_stars>1-10
package main
import (
"encoding/json"
"fmt"
"os"
"gstack.io/concourse/keyval-resource/models"
)
// main implements the Concourse resource "check" action: it decodes a
// CheckRequest from stdin and writes the available versions to stdout —
// echoing back the supplied version when one is present, otherwise an
// empty list.
func main() {
var request models.CheckRequest
err := json.NewDecoder(os.Stdin).Decode(&request)
if err != nil {
// Malformed input is fatal; Concourse treats a non-zero exit as failure.
fmt.Fprintln(os.Stderr, "parse error:", err.Error())
os.Exit(1)
}
response := models.CheckResponse{}
if len(request.Version) > 0 {
// Report the current version as the only available version.
response = models.CheckResponse{request.Version}
}
// NOTE(review): the Encode error is ignored — confirm stdout failures are acceptable here.
json.NewEncoder(os.Stdout).Encode(response)
}
|
<gh_stars>0
// Add this line to your host manifest.js for Sprockets 4
//= link_directory ../javascripts .js
//= link_directory ../stylesheets .css
//= link dfm_web/manifest.js
|
import * as React from 'react';
import glamorous , {ThemeProvider, /* Div */} from 'glamorous';
import * as Palettes from '../globals/palette';
import {items, farItems} from '../globals/header';
import { loadTheme } from 'office-ui-fabric-react/lib/Styling';
import { Nav, INavProps } from 'office-ui-fabric-react/lib/Nav';
import { CommandBar, ICommandBar } from 'office-ui-fabric-react/lib/CommandBar';
/* import { Icon } from 'office-ui-fabric-react/lib/Icon'; */
/* import { Layer, LayerHost } from 'office-ui-fabric-react/lib/Layer'; */
/* import { autobind } from 'office-ui-fabric-react/lib/Utilities'; */
/* import { DirectionalHint } from 'office-ui-fabric-react/lib/common/DirectionalHint'; */
/* import {css} from 'glamor'; */
import { mergeStyles } from '@uifabric/merge-styles';
//Has to be made dynamic theme resolver
const currentPalette = Palettes.azurePalette;
// Apply the selected Fabric palette globally.
loadTheme({
palette: currentPalette
});
// CSS-grid page shell: a 40px header row above sidebar + content columns.
const MyGrid = glamorous.div('ms-font-xl', {
margin: 'auto',
display: 'grid',
width: '100vw',
height: '100vh',
gridTemplateRows: '[row1-start] 40px [row1-end row2-start] auto [row2-end]',
gridTemplateColumns: '200px auto',
gridTemplateAreas: `
"header header header"
"sidebar content content"
`
});
// Base padded container reused by the grid areas below.
const Box = glamorous.div({
padding: 10
});
/* const Header = glamorous(Box)({
display: 'flex',
alignItems:'center',
gridArea: 'header'
},({theme}) => ({
backgroundColor: currentPalette.neutralPrimary,
color: currentPalette.neutralLighterAlt
})); */
// Left navigation column, themed with the primary palette color.
const Sidebar = glamorous(Box)({
gridArea: 'sidebar',
padding: 0
},({theme}) => ({
backgroundColor: currentPalette.themePrimary,
color: currentPalette.neutralLighterAlt
}))
// Main content area.
const Content = glamorous(Box)({
gridArea: 'content'
},({theme}) => ({
backgroundColor: currentPalette.neutralLighterAlt,
color: currentPalette.black
}));
/* const commandBarStyle = css({
backgroundColor: currentPalette.neutralPrimary,
color: currentPalette.neutralLighterAlt,
gridArea: 'header'
}); */
// Header styling applied to the CommandBar via mergeStyles (replaces the
// commented-out glamor css approach above).
const testClass = mergeStyles({
backgroundColor: currentPalette.neutralPrimary,
color: currentPalette.neutralLighterAlt,
gridArea: 'header'
});
/**
 * Application shell: a themed CSS-grid layout with a CommandBar header,
 * a Nav sidebar, and a content area hosting the routed children.
 */
export default class App extends React.Component<any, any> {
// NOTE(review): the class is typed Component<any, any> but the constructor
// declares INavProps — confirm which props type is intended.
constructor(props: INavProps) {
super(props);
this._onClickHandler = this._onClickHandler.bind(this);
}
// Lazily loads mobx-react-devtools outside production builds.
renderDevTool() {
if (process.env.NODE_ENV !== 'production') {
const DevTools = require('mobx-react-devtools').default;
return (<DevTools />);
}
return null;
};
// Placeholder handler for the sidebar "Edit" link.
private _onClickHandler(e: React.MouseEvent<HTMLElement>) {
alert('test');
return false;
}
render() {
return (
<ThemeProvider theme={currentPalette}>
<MyGrid>
{/* <Header> */}
{/* <Div css={{display:'flex',flexFlow:'column',justifyContent:'flex-start'}}>
<Icon iconName='CollapseMenu' className='ms-IconExample' />
</Div>
<Div className='brand-div'>
<span>Coglite</span>
</Div> */}
<CommandBar
isSearchBoxVisible={ false }
items={ items }
farItems={ farItems }
className={testClass}
componentRef= {(component: ICommandBar ) => {
console.log("COMPONENT REF WORKING");
console.log(component);
}}
/>
{/* </Header> */}
<Sidebar>
<div>
<Nav
groups={
[
{
links:
[
{
name: 'Home',
url: '/second',
links: [{
name: 'Activity',
url: '/second',
key: 'key1'
},
{
name: 'News',
url: '/second',
key: 'key2'
}],
isExpanded: true
},
{ name: 'Documents', url: '/second', key: 'key3', isExpanded: true },
{ name: 'Pages', url: '/second', key: 'key4' },
{ name: 'Notebook', url: '/second', key: 'key5' },
{ name: 'Long Name Test for ellipse', url: '/second', key: 'key6' },
{
name: 'Edit',
url: '/second',
onClick: this._onClickHandler,
icon: 'Edit',
key: 'key8'
}
]
}
]
}
className={'my-nav-class'}
initialSelectedKey={ 'key1' }
/>
</div>
</Sidebar>
<Content>
{this.props.children}
{/* {this.renderDevTool()} */}
</Content>
</MyGrid>
</ThemeProvider>
);
}
};
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Widecoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check the test suite naming conventions
export LC_ALL=C
EXIT_CODE=0
# A suite is compliant when the BOOST_FIXTURE_TEST_SUITE name equals the
# .cpp basename; the -vE back-reference (\1) filters out compliant lines,
# leaving only violations.
NAMING_INCONSISTENCIES=$(git grep -E '^BOOST_FIXTURE_TEST_SUITE\(' -- \
"src/test/**.cpp" "src/wallet/test/**.cpp" | \
grep -vE '/(.*?)\.cpp:BOOST_FIXTURE_TEST_SUITE\(\1, .*\)$')
if [[ ${NAMING_INCONSISTENCIES} != "" ]]; then
echo "The test suite in file src/test/foo_tests.cpp should be named"
echo "\"foo_tests\". Please make sure the following test suites follow"
echo "that convention:"
echo
echo "${NAMING_INCONSISTENCIES}"
EXIT_CODE=1
fi
# Extract each suite name (text between '(' and ',') and report duplicates.
TEST_SUITE_NAME_COLLISSIONS=$(git grep -E '^BOOST_FIXTURE_TEST_SUITE\(' -- \
"src/test/**.cpp" "src/wallet/test/**.cpp" | cut -f2 -d'(' | cut -f1 -d, | \
sort | uniq -d)
if [[ ${TEST_SUITE_NAME_COLLISSIONS} != "" ]]; then
echo "Test suite names must be unique. The following test suite names"
echo "appear to be used more than once:"
echo
echo "${TEST_SUITE_NAME_COLLISSIONS}"
EXIT_CODE=1
fi
exit ${EXIT_CODE}
package com.acxiom.pipeline.streaming
import com.acxiom.pipeline.utils.StreamingUtils
import com.acxiom.pipeline.{Constants, PipelineContext, PipelineException}
import org.apache.log4j.Logger
import org.apache.spark.sql.streaming.StreamingQuery
import java.text.SimpleDateFormat
import java.util.Date
/**
 * Background-thread contract for monitoring a Spark StreamingQuery.
 * Concrete monitors decide when processing stops and what global values to
 * publish back to the pipeline.
 */
trait StreamingQueryMonitor extends Thread {
protected val logger: Logger = Logger.getLogger(getClass)
// The streaming query being monitored.
def query: StreamingQuery
// Pipeline context supplying configuration globals.
def pipelineContext: PipelineContext
// Global values to expose after the monitor finishes; empty by default.
def getGlobalUpdates: Map[String, Any] = Map()
// Whether another streaming batch should be started after shutdown.
def continue: Boolean = false
}
// Default no-op monitor: never requests continuation and publishes no globals.
class BaseStreamingQueryMonitor(override val query: StreamingQuery, override val pipelineContext: PipelineContext)
extends StreamingQueryMonitor
/**
 * Base monitor that polls a streaming query until a batch threshold is met
 * (elapsed duration or approximate row count), then invokes the subclass's
 * manageQueryShutdown() and stops the query.
 */
abstract class BatchWriteStreamingQueryMonitor(override val query: StreamingQuery, override val pipelineContext: PipelineContext)
extends BaseStreamingQueryMonitor(query, pipelineContext) {
// Threshold mode; defaults to "duration".
protected val monitorType: String = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_MONITOR_TYPE").getOrElse("duration").toLowerCase
// Duration threshold; defaults to 60,000 (ONE_THOUSAND * SIXTY).
protected val duration: Int = pipelineContext.getGlobalAs[Int]("STREAMING_BATCH_MONITOR_DURATION").getOrElse(Constants.ONE_THOUSAND * Constants.SIXTY)
// Row-count threshold; defaults to 0.
protected val approximateRows: Int = pipelineContext.getGlobalAs[Int]("STREAMING_BATCH_MONITOR_COUNT").getOrElse(Constants.ZERO)
// Milliseconds the duration mode allows before stopping; 0 for other modes.
protected val sleepDuration: Long =
if (monitorType == "duration") {
// Set the sleep for the duration specified
val durationType = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_MONITOR_DURATION_TYPE").getOrElse("milliseconds")
StreamingUtils.getDuration(Some(durationType), Some(duration.toString)).milliseconds
} else {
0L
}
protected var startTime: Long = System.currentTimeMillis()
// Rows observed since the monitor started, summed from query progress events.
protected var rowCount: Long = 0L
protected var currentDuration: Long = 0L
// Identifier of the last progress event already counted, to avoid double counting.
protected var lastStatusId: String = _
protected var globals: Map[String, Any] = Map()
protected var continueProcessing = false
override def getGlobalUpdates: Map[String, Any] = globals
override def continue: Boolean = continueProcessing
protected var processing = true
def keepProcessing: Boolean = processing
// FIXME(review): the rowCount clause below is evaluated regardless of
// monitorType; with the default approximateRows of 0 it is true immediately
// (0 >= 0), so a duration monitor stops after the first poll. Likely the
// count check should apply only when monitorType is count-based — confirm.
def checkCurrentStatus(): Unit = {
// See if we have reached the specified duration
processing = if ((monitorType == "duration" && currentDuration >= sleepDuration) ||
(rowCount >= approximateRows)) {
logger.info("Streaming threshold met")
// startTime = System.currentTimeMillis()
// currentDuration = 0L
// rowCount = 0
false
} else {
true
}
}
// Subclass hook: set `globals` and `continueProcessing` before the query stops.
def manageQueryShutdown(): Unit
override def run(): Unit = {
logger.info("Starting streaming batch monitor")
while (keepProcessing) {
// Do the sleep at the beginning assuming that we want to process some data before we check the status
Thread.sleep(Constants.ONE_HUNDRED)
// Capture the current run length
currentDuration = System.currentTimeMillis() - startTime
// Update the stats - The array should be oldest to most recent
val recentProgress = query.recentProgress.toList
val index = recentProgress.indexWhere(p => s"${p.id}::${p.runId}::${p.batchId}::${p.timestamp}" == lastStatusId)
val progressList = if (index != -1) {
recentProgress.slice(index + 1, query.recentProgress.toList.size)
} else {
recentProgress
}
progressList.foreach(p => {
rowCount += p.numInputRows
lastStatusId = s"${p.id}::${p.runId}::${p.batchId}::${p.timestamp}"
})
// Call the functions to determine if we need to stop or keep going
checkCurrentStatus()
}
// Invoke the function that allows us to create the globals and set the continue flag
manageQueryShutdown()
// Stop the query once we are no longer processing
logger.info("Streaming query being stopped")
query.stop()
}
}
/**
 * Monitor that, between batches, publishes a partition value (counter or
 * formatted date) under a configurable global key and always requests that
 * processing continue.
 */
class BatchPartitionedStreamingQueryMonitor(override val query: StreamingQuery, override val pipelineContext: PipelineContext)
extends BatchWriteStreamingQueryMonitor(query, pipelineContext) {
logger.info("Created BatchPartitionedStreamingQueryMonitor")
// NOTE(review): pattern "yyyy-dd-MM" puts day before month — confirm this
// ordering is intentional (vs. the common yyyy-MM-dd).
private val dateFormat =new SimpleDateFormat("yyyy-dd-MM HH:mm:ssZ")
override def manageQueryShutdown(): Unit = {
val counter = pipelineContext.getGlobalAs[Int]("STREAMING_BATCH_PARTITION_COUNTER").getOrElse(0) + 1
val globalKey = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_PARTITION_GLOBAL").getOrElse("PARTITION_VALUE")
val template = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_PARTITION_TEMPLATE").getOrElse("counter").toLowerCase
// Partition value: formatted timestamp for the "date" template, else the counter.
val temp = if (template == "date") {
dateFormat.format(new Date())
} else {
counter.toString
}
logger.info(s"Setting $globalKey to $temp")
logger.info(s"Setting STREAMING_BATCH_OUTPUT_COUNTER to $counter")
globals = Map[String, Any]("STREAMING_BATCH_OUTPUT_COUNTER" -> counter, globalKey -> temp)
continueProcessing = true
}
}
/**
 * Monitor that rewrites the configured output path between batches by
 * appending a date or counter suffix to the path key, so each batch writes
 * to a fresh location. Always requests that processing continue.
 */
class BatchFileStreamingQueryMonitor(override val query: StreamingQuery, override val pipelineContext: PipelineContext)
extends BatchWriteStreamingQueryMonitor(query, pipelineContext) {
logger.info("Created BatchFileStreamingQueryMonitor")
override def manageQueryShutdown(): Unit = {
validate()
val globalDestinationKey = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_OUTPUT_GLOBAL").get
val destinationKey = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_OUTPUT_PATH_KEY").get
val template = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_OUTPUT_TEMPLATE").getOrElse("DATE").toLowerCase
val counter = pipelineContext.getGlobalAs[Int]("STREAMING_BATCH_OUTPUT_COUNTER").getOrElse(0) + 1
// Suffix to append: a formatted timestamp or the incrementing counter.
val temp = if (template == "date") {
Constants.FILE_APPEND_DATE_FORMAT.format(new Date())
} else {
counter.toString
}
// Grab the path to be modified
val globalDestination = pipelineContext.getGlobalAs[String](globalDestinationKey).get
// See if we have already stored the original path
val originalPath = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_OUTPUT_GLOBAL_ORIGINAL_PATH")
if (originalPath.isEmpty) {
// Set the original path for use later and use the existing destination
globals = Map[String, Any]("STREAMING_BATCH_OUTPUT_COUNTER" -> counter,
"STREAMING_BATCH_OUTPUT_GLOBAL_ORIGINAL_PATH" -> globalDestination,
globalDestinationKey -> updatePath(globalDestination, destinationKey, temp, globalDestinationKey))
} else {
// Use the stored original path so we don't stack the increment values
globals = Map[String, Any]("STREAMING_BATCH_OUTPUT_COUNTER" -> counter,
globalDestinationKey -> updatePath(originalPath.get, destinationKey, temp, globalDestinationKey))
}
logger.info(s"Setting STREAMING_BATCH_OUTPUT_COUNTER to $counter")
continueProcessing = true
}
// Replaces destinationKey within the path with destinationKey_incrementVal.
private def updatePath(path: String, destinationKey: String, incrementVal: String, globalDestinationKey: String): String = {
// Update the original path with the type parameter
val newPath = path.replaceAll(destinationKey, s"${destinationKey}_$incrementVal")
logger.info(s"Setting $globalDestinationKey to $newPath")
newPath
}
// Fails fast when the required output globals are missing.
private def validate(): Unit = {
val globals = pipelineContext.globals.get
if (!globals.contains("STREAMING_BATCH_OUTPUT_GLOBAL")) {
logger.error("The STREAMING_BATCH_OUTPUT_GLOBAL value must be set!")
throw PipelineException(message = Some("The STREAMING_BATCH_OUTPUT_GLOBAL value must be set!"),
pipelineProgress = Some(pipelineContext.getPipelineExecutionInfo))
}
if (!globals.contains("STREAMING_BATCH_OUTPUT_PATH_KEY")) {
logger.error("The STREAMING_BATCH_OUTPUT_PATH_KEY value must be set!")
throw PipelineException(message = Some("The STREAMING_BATCH_OUTPUT_PATH_KEY value must be set!"),
pipelineProgress = Some(pipelineContext.getPipelineExecutionInfo))
}
// NOTE(review): interpolating $destinationKey below renders the Option
// (e.g. "None") rather than the key name — confirm the intended message.
val destinationKey = pipelineContext.getGlobalAs[String]("STREAMING_BATCH_OUTPUT_PATH_KEY")
if (destinationKey.isEmpty) {
logger.error(s"The $destinationKey is required!")
throw PipelineException(message = Some(s"The $destinationKey is required!"),
pipelineProgress = Some(pipelineContext.getPipelineExecutionInfo))
}
}
}
|
<reponame>padunlap/homework<filename>Develop/script.js
// Assignment Code
var generateBtn = document.querySelector("#generate");
/**
 * Returns true only for strings whose entire content parses as a number.
 * Non-string input (numbers included) is rejected outright; whitespace-only
 * strings fail because parseFloat yields NaN for them.
 * @param {*} str candidate value
 * @returns {boolean}
 */
function isNumeric(str) {
  if (typeof str !== "string") {
    return false;
  }
  const coerced = Number(str);       // NaN unless the whole string is numeric
  const parsed = Number.parseFloat(str); // NaN for empty/whitespace strings
  return !Number.isNaN(coerced) && !Number.isNaN(parsed);
}
// Write password to the #password input
/**
 * Builds a random password of `pwlength` characters drawn from letters and
 * digits. Uses the Web Crypto API when available - Math.random() is not
 * cryptographically secure, which matters for a password generator - and
 * falls back to Math.random() on very old environments.
 * @param {number} pwlength desired password length
 * @returns {string} the generated password
 */
function generatePassword(pwlength) {
  var charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  var retVal = "";
  for (var i = 0; i < pwlength; ++i) {
    var index;
    if (typeof crypto !== "undefined" && typeof crypto.getRandomValues === "function") {
      // NOTE: modulo introduces a tiny bias (2^32 % 62 !== 0); negligible here.
      index = crypto.getRandomValues(new Uint32Array(1))[0] % charset.length;
    } else {
      index = Math.floor(Math.random() * charset.length);
    }
    retVal += charset.charAt(index);
  }
  return retVal;
}
/**
 * Prompts the user for a password length (8-128 inclusive), generates a
 * password and writes it into the #password input.
 *
 * Fixes: the original looped forever when the user cancelled the prompt
 * (window.prompt returns null, which isNumeric rejects, so validation could
 * never succeed). Cancelling now aborts quietly. The validated length is
 * also converted to a Number explicitly instead of relying on coercion in
 * the comparison operators.
 */
function writePassword() {
  var pwCrit = null;
  while (pwCrit === null) {
    var answer = window.prompt("Password Length? Between 8 - 128 characters");
    if (answer === null) {
      return; // user cancelled - abort instead of re-prompting forever
    }
    console.log(isNumeric(answer));
    if (isNumeric(answer)) {
      var n = Number(answer);
      if (n >= 8 && n <= 128) {
        pwCrit = n;
      }
    }
  }
  // NOTE(review): this answer is collected and logged but never used by
  // generatePassword - presumably intended to select character classes.
  // Confirm against the assignment requirements.
  var cwCrit = window.prompt("lowercase, uppercase, numeric, and/or special characters");
  console.log(cwCrit);
  var password = generatePassword(pwCrit);
  console.log(password);
  var passwordText = document.querySelector("#password");
  passwordText.value = password;
}
// Add event listener to generate button
generateBtn.addEventListener("click", writePassword);
|
const { whitespaceNormalize, replaceDisallowedChars } = require('../index')

// Runs of mixed whitespace collapse to single spaces and the ends are trimmed.
test('whitespaceNormalize', () => {
  expect(whitespaceNormalize('\n\r\t a \t\n\rb\r\t\n')).toBe('a b')
})

test('replaceDisallowedChars', () => {
  // chars(from, to): a string containing every code point in [from, to].
  const chars = (from, to) => {
    function * codepoints (start, stop) {
      while (start <= stop) yield start++
    }
    return String.fromCodePoint(...codepoints(from, to))
  }
  // strip(str): replaceDisallowedChars with stripping enabled (second arg true).
  const strip = (str) => replaceDisallowedChars(str, true)
  // Of the C0 range + space, only TAB, LF, FF and space survive stripping.
  expect(strip(chars(0, 0x20))).toBe(String.fromCodePoint(0x09, 0x0A, 0x0C, 0x20))
  // DEL and the C1 controls are stripped; '~' and NBSP survive.
  expect(strip(chars(0x7E, 0xA0))).toBe('~\xA0')
  // Surrogate code points are stripped; the adjacent BMP boundaries survive.
  expect(strip(chars(0xD7FF, 0xE000))).toBe('\uD7FF\uE000')
  /*
  TODO
  // -1FFFD / %x20000-2FFFD / ... / %x100000-
  for (let i=2; i<=16; i++) {
    let code = i*0x10000
    console.log(chars(code-3,code))
    expect(strip(chars(code-3,code))).toBe(String.fromCodePoint(code-3,code))
  }
  // %x10000-10FFFD
  expect(strip(chars(0x10FFFD,0x10FFFF))).toBe(String.fromCodePoint(0x10FFFD))
  */
})
|
const { isTagNode } = require("../../knife/tag_utils");
const knife = require("../../knife");
module.exports = {
name: "img-req-alt",
on: ["dom"],
need: "dom",
validateConfig(option) {
if (typeof option === "string" && option !== "allownull") {
throw new Error(`Configuration for rule "${this.name}" is invalid: Only "allownull" is accepted as string value`);
}
if (typeof option !== "boolean" && typeof option !== "string") {
throw new Error(`Configuration for rule "${this.name}" is invalid: Expected boolean got ${typeof option}`);
}
return option;
}
};
/**
 * Reports error E013 for every <img> element whose "alt" attribute is
 * missing or empty. An empty value is tolerated when the rule option is
 * "allownull".
 */
module.exports.lint = function(element, opts, { report }) {
  // Only actual <img> tag nodes are of interest.
  if (isTagNode(element) === false) {
    return;
  }
  if (element.name !== "img") {
    return;
  }
  const opt = opts[this.name];
  const allowNull = opt === "allownull";
  if (knife.hasNonEmptyAttr(element, "alt", allowNull) === false) {
    report({
      code: "E013",
      position: element.openLineCol
    });
  }
};
|
#!/bin/sh
# Build the FEZ Lynx static libraries: compile each source directory under
# base/src and archive the objects into base/lib/lib<name>.a.
set -v -x

export BLDBASE="`pwd`"
export INC_DIR="$BLDBASE/base/include"
# Intentionally unquoted where expanded below: the flags must word-split.
export FEZLYNX_INCLUDE_PATH="-I $INC_DIR -I $INC_DIR/Mainboards -I $INC_DIR/Core -I $INC_DIR/Modules"

# jdo <src-subdir> <lib-name>: compile base/src/<src-subdir>/*.cpp and
# archive the resulting objects into base/lib/lib<lib-name>.a.
jdo() {
    # Guard the cd: compiling in the wrong directory would archive stale
    # objects silently.
    cd "$BLDBASE/base/src/$1" || exit 1
    #-m32 -march=i486
    g++ -g -O1 -c $FEZLYNX_INCLUDE_PATH "$BLDBASE/base/src/$1"/*.cpp
    ar -rc "$BLDBASE/base/lib/lib$2.a" "$BLDBASE/base/src/$1"/*.o
    rm "$BLDBASE/base/src/$1"/*.o
    echo "$1"
}

jdo Mainboards mainboards
jdo Core core
jdo Modules modules
|
export * from 'src/javascript/ContentEditorExtensions/SelectorTypes/StringWidenPicker/components/ViewerJsx/ReferenceCard';
|
/**
* Spring Data JPA Repositories
*/
package com.test.sampleapp.repository; |
<reponame>klaasg/medgen-mysql<gh_stars>1-10
call log('create_index.sql','NCBI PubTator');

-- mutation2pubtator: index the PubMed id and the annotation components.
call log('mutation2pubtator', 'create index');
call create_index('mutation2pubtator','PMID');
call create_index('mutation2pubtator','Components');
-- #################################################################
-- gene2pubtator: single-column indexes plus a composite (PMID, GeneID).
call log('gene2pubtator', 'index');
--
call create_index('gene2pubtator','PMID');
call create_index('gene2pubtator','GeneID');
call create_index('gene2pubtator','PMID, GeneID');
--
call log('gene2pubtator', 'done');
-- #################################################################
-- The disease/species/chemical sections below are intentionally disabled:
-- '#' is MySQL's line-comment character.
#call log('disease2pubtator', 'create index');
--
#call create_index('disease2pubtator','PMID');
#call create_index('disease2pubtator','PMID,MeshID');
#call create_index('disease2pubtator','MeshID');
--
#call log('disease2pubtator', 'done');
-- #################################################################
#call log('species2pubtator', 'create index');
--
#call create_index('species2pubtator','PMID');
#call create_index('species2pubtator','PMID,TaxID');
--
#call log('species2pubtator', 'done');
-- #################################################################
#call log('chemical2pubtator', 'create index');
--
#call create_index('chemical2pubtator','PMID');
#call create_index('chemical2pubtator','PMID,MeshID');
#call create_index('chemical2pubtator','MeshID');
--
#call log('chemical2pubtator', 'done');
-- #################################################################
call log('create_index.sql','done');
|
widget.SplitView = function () {
var currentSplitView = null;
function getSplitView(target) {
var container = Dom.findParentWithProperty(target, "_splitView");
if (!container) return null;
return container._splitView;
}
function handleMouseDown(event) {
Dom.cancelEvent(event);
var target = Dom.getTarget(event);
currentSplitView = getSplitView(target);
if (!currentSplitView) return;
currentSplitView.ox = event.screenX;
currentSplitView.originalSplitViewX = currentSplitView.splitViewPos;
currentSplitView.moved = false;
Dom.addClass(currentSplitView.container, "SplitViewHeld");
}
function handleMouseMove(event) {
var target = Dom.getTarget(event);
if (!currentSplitView) return;
Dom.cancelEvent(event);
var x = event.screenX;
var p = currentSplitView.originalSplitViewX + x - currentSplitView.ox;
var W = Dom.getOffsetWidth(currentSplitView.container);
var margin = Math.round(W / 10);
p = Math.min(Math.max(p, margin), W - margin);
currentSplitView.setSplitViewPosition(p);
currentSplitView.moved = true;
}
function handleMouseUp(event) {
Dom.cancelEvent(event);
if (!currentSplitView) return;
if (!currentSplitView.moved) return;
currentSplitView.moved = false;
var r = (currentSplitView.splitViewPos / Dom.getOffsetWidth(currentSplitView.container));
currentSplitView.ratio = r;
currentSplitView.updateView();
Dom.removeClass(currentSplitView.container, "SplitViewHeld");
currentSplitView = null;
}
function SplitView(container, options) {
this.container = widget.get(container);
this.container._splitView = this;
this.options = options || {};
if (!this.options.initialRatio) this.options.initialRatio = 0.5;
this.ratio = this.options.initialRatio;
if (!this.options.initialMode) this.options.initialMode = SplitView.MODE_BOTH;
this.mode = this.options.initialMode;
for (var i = 0; i < this.container.childNodes.length; i ++) {
var node = this.container.childNodes[i];
if (!node.nodeName || node.nodeName.toLowerCase() != "div") continue;
if (node.getAttribute("role") == "splitter") {
this.splitter = node;
} else {
if (!this.splitter) {
this.left = node;
} else {
this.right = node;
}
}
}
Dom.registerEvent(this.splitter, "mousedown", handleMouseDown);
Dom.registerEvent(document, "mousemove", handleMouseMove);
Dom.registerEvent(document, "mouseup", handleMouseUp);
Dom.addClass(this.splitter, "SplitViewSplitter");
this.splitter.innerHTML = "<div></div>";
this.container.style.position = "relative";
this.splitter.style.position = "absolute";
this.splitter.style.zIndex = "3";
this.splitter.style.top = "0px";
this.splitter.style.bottom = "0px";
this.splitter.style.overflow = "hidden";
this.splitter.style.width = SplitView.HANDLE_WIDTH + "px";
this.left.style.position = "absolute";
this.left.style.left = "0px";
this.left.style.top = "0px";
this.left.style.bottom = "0px";
this.right.style.position = "absolute";
this.right.style.top = "0px";
this.right.style.bottom = "0px";
this.updateView();
}
SplitView.MODE_LEFT = "LEFT";
SplitView.MODE_RIGHT = "RIGHT";
SplitView.MODE_BOTH = "BOTH";
SplitView.HANDLE_WIDTH = 20;
SplitView.prototype.setMode = function (mode) {
this.mode = mode;
this.updateView();
};
SplitView.prototype.setRatio = function (ratio) {
this.ratio = ratio;
this.updateView();
};
SplitView.prototype.updateView = function () {
this.container.className = "SplitView";
Dom.addClass(this.container, "SplitView" + this.mode);
var w = Dom.getOffsetWidth(this.container);
if (this.mode == SplitView.MODE_LEFT) {
this.left.style.left = "0px";
this.left.style.right = "0px";
this.left.style.width = w + "px";
this.left.style.display = "block";
this.splitter.style.display = "none";
this.right.style.display = "none";
this.right.style.width = "0px";
} else if (this.mode == SplitView.MODE_RIGHT) {
this.right.style.left = "0px";
this.right.style.right = "0px";
this.right.style.width = w + "px";
this.right.style.display = "block";
this.splitter.style.display = "none";
this.left.style.display = "none";
this.left.style.width = "0px";
} else if (this.mode == SplitView.MODE_BOTH) {
var lw = Math.round(w * this.ratio);
var rw = w - lw;
this.left.style.left = "0px";
this.left.style.right = rw + "px";
this.left.style.width = lw + "px";
this.right.style.left = lw + "px";
this.right.style.right = "0px";
this.right.style.width = rw + "px";
this.setSplitViewPosition(lw);
this.left.style.display = "block";
this.splitter.style.display = "block";
this.right.style.display = "block";
if (this.listener) {
this.listener(lw, rw);
}
}
}
SplitView.prototype.setSplitViewPosition = function (pos) {
this.splitter.style.left = (pos - SplitView.HANDLE_WIDTH / 2) + "px";
this.splitViewPos = pos;
};
SplitView.prototype.setOnResizeListener = function(listener) {
this.listener = listener;
return this;
};
return SplitView;
}();
|
#!/bin/bash
#========================================================
# Filename: install.sh
#
# Description:
# Install script for the Reverseproxy module.
#
# NOTE(review): relies on $UTILS, $LOGGING and $BACKTITLE
# being exported by the calling framework - confirm.
#========================================================
# add installed file and inform user
modulename="Reverseproxy"
#let user add domains
$UTILS/domain_managment.sh $modulename
# Directories for vhost configuration and TLS certificates
# (presumably mounted by the docker compose setup - verify).
mkdir -p /srv/docker-reverseproxy/conf.d
mkdir -p /srv/docker-reverseproxy/certs
$LOGGING -i "Installed module $modulename"
dialog --backtitle "$BACKTITLE" --msgbox "$modulename will be installed in next docker compose" 0 0
# Marker file recording that this module has been installed.
touch installed
#!/bin/sh
hostname
|
<reponame>spadaal/p5-asteroids
class Asteroid {
  /**
   * @param {p5.Vector} [pos] spawn position; random on-canvas point when falsy
   * @param {p5.Vector} [vel] velocity; random in [-2.5, 2.5) per axis when falsy
   * @param {number} r radius
   */
  constructor(pos, vel, r) {
    this.pos = pos || createVector(random(width), random(height));
    this.r = r;
    this.vel = vel || createVector(random(5) - 2.5, random(5) - 2.5);
    // Per-vertex radius jitter in [-r/6, r/6) gives the rock a lumpy outline.
    this.offsets = [];
    for (let i = 0; i < 16; i++) {
      this.offsets.push(random(this.r / 3) - this.r / 6);
    }
  }

  // Advance one frame and wrap around the canvas edges.
  update() {
    this.pos = this.pos.add(this.vel);
    if (this.pos.x + this.r < 0) this.pos.x = width;
    if (this.pos.x - this.r > width) this.pos.x = 0;
    if (this.pos.y + this.r < 0) this.pos.y = height;
    if (this.pos.y - this.r > height) this.pos.y = 0;
  }

  // Render: tiny fragments as circles, larger rocks as jittered polygons.
  draw() {
    push();
    translate(this.pos);
    noFill();
    if (this.r < 3) {
      ellipse(0, 0, this.r * 2, this.r * 2);
    } else {
      beginShape();
      const count = this.offsets.length;
      for (let i = 0; i < count; i++) {
        const angle = map(i, 0, count, 0, PI * 2);
        const radius = this.r + this.offsets[i];
        vertex(radius * cos(angle), radius * sin(angle));
      }
      endShape(CLOSE);
    }
    pop();
  }
}
from tensorflow.keras.layers import *
from tensorflow.keras.models import *
modelnames = {"3ds":"3D_SingleStream", "3dm":"3D_MultiStream", "3dmorig":"3D_MultiStreamOriginal", "3ddropout":"3D_UsingDropout" }
def readProperModel(model_name, img_size):
    """Build the model registered under ``model_name`` with a cubic input.

    Args:
        model_name: model key; currently handled: ``"3dm"`` and
            ``"3ddropout"``.
        img_size: edge length of the cubic input volume.

    Returns:
        A Keras ``Model`` instance.

    Raises:
        ValueError: for unsupported names. (The original fell through and
            crashed with ``UnboundLocalError`` on ``return model`` instead.)
    """
    dims = [img_size, img_size, img_size]
    if model_name == "3dm":
        return getModel_3D_Multi(dims, 'sigmoid')
    if model_name == "3ddropout":
        return getModel_3D_Multi_Dropout(dims, 'sigmoid')
    raise ValueError(f"Unsupported model name: {model_name!r}")
def _encoder_branch_3d(w, h, d, filterFactor):
    """Build one 3-level conv/pool encoder branch for a (w, h, d, 1) input.

    Returns (inputs, conv1, conv2, conv3, pool3): the branch input tensor,
    the three pre-pooling activations (reused as skip connections by the
    decoder) and the final pooled output.
    """
    inputs = Input((w, h, d, 1))
    conv1 = Conv3D(8 * filterFactor, (3, 3, 3), activation='relu', padding='same')(inputs)
    conv1 = Conv3D(16 * filterFactor, (3, 3, 3), activation='relu', padding='same')(conv1)
    pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv1)
    conv2 = Conv3D(16 * filterFactor, (3, 3, 3), activation='relu', padding='same')(pool1)
    conv2 = Conv3D(32 * filterFactor, (3, 3, 3), activation='relu', padding='same')(conv2)
    pool2 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)
    conv3 = Conv3D(32 * filterFactor, (3, 3, 3), activation='relu', padding='same')(pool2)
    conv3 = Conv3D(64 * filterFactor, (3, 3, 3), activation='relu', padding='same')(conv3)
    pool3 = MaxPooling3D(pool_size=(2, 2, 2))(conv3)
    return inputs, conv1, conv2, conv3, pool3


def _decoder_3d(branches, last_layer, filterFactor, dropout_rate=0.0):
    """Merge the three branch outputs and decode back to full resolution.

    Args:
        branches: three (inputs, conv1, conv2, conv3, pool3) tuples from
            ``_encoder_branch_3d``, ordered [tra, cor, sag].
        last_layer: activation of the final 1x1x1 Conv3D output layer.
        filterFactor: channel-width multiplier.
        dropout_rate: when > 0 a Dropout layer is inserted after every
            BatchNormalization; at 0 no Dropout layers are created, so the
            graph matches the original dropout-free architecture exactly.

    Returns:
        The assembled Keras ``Model``.
    """
    def _maybe_dropout(x):
        return Dropout(rate=dropout_rate)(x) if dropout_rate > 0 else x

    (inputs_tra, conv1_tra, conv2_tra, conv3_tra, pool3_tra) = branches[0]
    (inputs_cor, conv1_cor, conv2_cor, conv3_cor, pool3_cor) = branches[1]
    (inputs_sag, conv1_sag, conv2_sag, conv3_sag, pool3_sag) = branches[2]
    merge = concatenate([pool3_tra, pool3_cor, pool3_sag])
    x = Conv3D(128 * filterFactor, (3, 3, 3), activation='relu', padding='same')(merge)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3D(128 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3DTranspose(128, (2, 2, 2), strides=(2, 2, 2), activation='relu', padding='same')(x)
    x = concatenate([x, conv3_tra, conv3_cor, conv3_sag])
    x = Conv3D(64 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3D(64 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3DTranspose(64, (2, 2, 2), strides=(2, 2, 2), activation='relu', padding='same')(x)
    x = concatenate([x, conv2_tra, conv2_cor, conv2_sag])
    x = Conv3D(32 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3D(32 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3DTranspose(32, (2, 2, 2), strides=(2, 2, 2), activation='relu', padding='same')(x)
    x = concatenate([x, conv1_tra, conv1_cor, conv1_sag])
    x = Conv3D(16 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3D(16 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    out = Conv3D(1, (1, 1, 1), activation=last_layer)(x)
    # NOTE: historical input order is [tra, sag, cor] (sag before cor even
    # though the branches concatenate tra, cor, sag); preserved for
    # compatibility with existing training/inference callers.
    return Model(inputs=[inputs_tra, inputs_sag, inputs_cor], outputs=[out])


def getModel_3D_Multi(imgs_dims, last_layer='sigmoid'):
    """Three-stream 3D U-Net-style model (tra/cor/sag branches, shared decoder).

    The three encoder branches were previously written out by hand three
    times; they are identical, so each is now built by ``_encoder_branch_3d``.

    Args:
        imgs_dims: [w, h, d] of each single-channel input volume.
        last_layer: activation of the final output layer.

    Returns:
        Keras ``Model`` with inputs [tra, sag, cor] and one volume output.
    """
    filterFactor = 1
    [w, h, d] = imgs_dims
    # Order matters: branches are created tra, cor, sag, matching the
    # original layer-construction order.
    branches = [_encoder_branch_3d(w, h, d, filterFactor) for _ in range(3)]
    return _decoder_3d(branches, last_layer, filterFactor, dropout_rate=0.0)
def _encoder_branch_3d(w, h, d, filterFactor):
    """Build one 3-level conv/pool encoder branch for a (w, h, d, 1) input.

    Returns (inputs, conv1, conv2, conv3, pool3): the branch input tensor,
    the three pre-pooling activations (reused as skip connections by the
    decoder) and the final pooled output.
    """
    inputs = Input((w, h, d, 1))
    conv1 = Conv3D(8 * filterFactor, (3, 3, 3), activation='relu', padding='same')(inputs)
    conv1 = Conv3D(16 * filterFactor, (3, 3, 3), activation='relu', padding='same')(conv1)
    pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv1)
    conv2 = Conv3D(16 * filterFactor, (3, 3, 3), activation='relu', padding='same')(pool1)
    conv2 = Conv3D(32 * filterFactor, (3, 3, 3), activation='relu', padding='same')(conv2)
    pool2 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)
    conv3 = Conv3D(32 * filterFactor, (3, 3, 3), activation='relu', padding='same')(pool2)
    conv3 = Conv3D(64 * filterFactor, (3, 3, 3), activation='relu', padding='same')(conv3)
    pool3 = MaxPooling3D(pool_size=(2, 2, 2))(conv3)
    return inputs, conv1, conv2, conv3, pool3


def _decoder_3d(branches, last_layer, filterFactor, dropout_rate=0.0):
    """Merge the three branch outputs and decode back to full resolution.

    Args:
        branches: three (inputs, conv1, conv2, conv3, pool3) tuples from
            ``_encoder_branch_3d``, ordered [tra, cor, sag].
        last_layer: activation of the final 1x1x1 Conv3D output layer.
        filterFactor: channel-width multiplier.
        dropout_rate: when > 0 a Dropout layer is inserted after every
            BatchNormalization; at 0 no Dropout layers are created, so the
            graph matches the original dropout-free architecture exactly.

    Returns:
        The assembled Keras ``Model``.
    """
    def _maybe_dropout(x):
        return Dropout(rate=dropout_rate)(x) if dropout_rate > 0 else x

    (inputs_tra, conv1_tra, conv2_tra, conv3_tra, pool3_tra) = branches[0]
    (inputs_cor, conv1_cor, conv2_cor, conv3_cor, pool3_cor) = branches[1]
    (inputs_sag, conv1_sag, conv2_sag, conv3_sag, pool3_sag) = branches[2]
    merge = concatenate([pool3_tra, pool3_cor, pool3_sag])
    x = Conv3D(128 * filterFactor, (3, 3, 3), activation='relu', padding='same')(merge)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3D(128 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3DTranspose(128, (2, 2, 2), strides=(2, 2, 2), activation='relu', padding='same')(x)
    x = concatenate([x, conv3_tra, conv3_cor, conv3_sag])
    x = Conv3D(64 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3D(64 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3DTranspose(64, (2, 2, 2), strides=(2, 2, 2), activation='relu', padding='same')(x)
    x = concatenate([x, conv2_tra, conv2_cor, conv2_sag])
    x = Conv3D(32 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3D(32 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3DTranspose(32, (2, 2, 2), strides=(2, 2, 2), activation='relu', padding='same')(x)
    x = concatenate([x, conv1_tra, conv1_cor, conv1_sag])
    x = Conv3D(16 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    x = Conv3D(16 * filterFactor, (3, 3, 3), activation='relu', padding='same')(x)
    x = _maybe_dropout(BatchNormalization(axis=4)(x))
    out = Conv3D(1, (1, 1, 1), activation=last_layer)(x)
    # NOTE: historical input order is [tra, sag, cor] (sag before cor even
    # though the branches concatenate tra, cor, sag); preserved for
    # compatibility with existing training/inference callers.
    return Model(inputs=[inputs_tra, inputs_sag, inputs_cor], outputs=[out])


def getModel_3D_Multi_Dropout(imgs_dims, last_layer='sigmoid'):
    """Dropout variant of ``getModel_3D_Multi``: identical three-stream
    encoder, with Dropout(0.2) after every decoder BatchNormalization.

    Args:
        imgs_dims: [w, h, d] of each single-channel input volume.
        last_layer: activation of the final output layer.

    Returns:
        Keras ``Model`` with inputs [tra, sag, cor] and one volume output.
    """
    filterFactor = 1
    [w, h, d] = imgs_dims
    # Order matters: branches are created tra, cor, sag, matching the
    # original layer-construction order.
    branches = [_encoder_branch_3d(w, h, d, filterFactor) for _ in range(3)]
    return _decoder_3d(branches, last_layer, filterFactor, dropout_rate=0.2)
|
# Flag indicating if we've previously jumped to the last directory
typeset -g ZSH_LAST_WORKING_DIRECTORY

# Record the current directory in the cache file on every directory change.
chpwd_functions+=(chpwd_last_working_dir)
chpwd_last_working_dir() {
  local cache_file="$ZSH_CACHE_DIR/last-working-dir"
  # >| overrides the noclobber option so the cache is always rewritten.
  pwd >| "$cache_file"
}

# lwd: change to the most recently recorded working directory.
lwd() {
  local cache_file="$ZSH_CACHE_DIR/last-working-dir"
  [[ -r "$cache_file" ]] && cd "$(cat "$cache_file")"
}

# Jump to the last directory automatically, but only when:
# - this is the first time the plugin is loaded (flag still unset), and
# - the shell started in $HOME (don't override an explicit start directory).
[[ -n "$ZSH_LAST_WORKING_DIRECTORY" ]] && return
[[ "$PWD" != "$HOME" ]] && return
lwd 2>/dev/null && ZSH_LAST_WORKING_DIRECTORY=1 || true
|
#!/bin/bash
snap install bitwarden |
#!/bin/bash
#PBS -l nodes=4:ppn=16
#PBS -l walltime=40:00:00

# Fixed-problem-size (strong) scaling study: run parallelJacobi2 on the
# 3200.nc input across a range of MPI process counts, repeated three times.
# Per repetition r, program output goes to r/log-fix-size-prog.txt and the
# wall-clock time of each run to r/log-fix-size-time.txt.
# The 42 hand-copied invocation lines are replaced by equivalent nested
# loops (identical commands, identical order).

module load gcc/5.3.1
module load openmpi/1.10.1
module load netcdf/4.4.1

#rm log-fix-size-prog.txt || true
#rm log-fix-size-time.txt || true

# Abort if the working directory is unavailable - otherwise every run would
# log relative to the wrong directory.
cd /storage/home/w/wuh20/github/LinearSystemSolvers/scripts/final/profile3 || exit 1

solver=../../../output/bin/parallelJacobi2
input=../../../data/ncdf4/3200.nc

for run in 1 2 3; do
    for np in 1 2 4 8 16 20 24 28 32 38 44 50 55 64; do
        { time mpirun -np "$np" "$solver" "$input" 9000000 3 1 >> "$run/log-fix-size-prog.txt"; } 2>> "$run/log-fix-size-time.txt"
    done
done
|
#!/bin/bash
# Copyright (c) 2015, 2016, 2017, 2018, 2019, 2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY LOG OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Resolve the repository scripts directory relative to this file's location.
scripts_dir=$(dirname "${BASH_SOURCE[0]}")/..
# Run pylint's Python-3 porting checks over the geopmpy package, using
# $PYTHON when set (defaulting to python3).
# NOTE(review): the --py3k checker was removed in pylint 2.x - confirm the
# environment pins a pylint version that still supports it.
"${PYTHON-python3}" -m pylint --py3k "$scripts_dir/geopmpy"
|
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.core5.benchmark;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import org.apache.hc.core5.http.HttpVersion;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Test;
public class ResultFormatterTest {

    /**
     * Formats a fixed {@link org.apache.hc.core5.benchmark.Results} fixture
     * with {@code ResultFormatter.print} and verifies the complete report
     * text. CRLF line endings are normalized to LF so the comparison is
     * platform independent.
     */
    @Test
    public void testBasics() throws Exception {
        // Fixture describing a finished benchmark run: a 2924-byte document
        // served by TestServer/1.1, 5 concurrent clients, 20000 requests
        // completed in 3399 ms (rendered below as 3.399000 seconds).
        final Results results = new Results(
                "TestServer/1.1",
                HttpVersion.HTTP_1_1,
                "localhost",
                8080,
                "/index.html",
                2924,
                5,
                3399,
                20000,
                0,
                20000,
                62640000,
                0,
                50000000);
        // Render the report into an in-memory stream using US-ASCII.
        final ByteArrayOutputStream buf = new ByteArrayOutputStream();
        ResultFormatter.print(new PrintStream(buf, true, StandardCharsets.US_ASCII.name()), results);
        Assert.assertThat(new String(buf.toByteArray(), StandardCharsets.US_ASCII).replace("\r\n", "\n"),
                CoreMatchers.equalTo(
                        "Server Software:\t\tTestServer/1.1\n" +
                        "Protocol version:\t\tHTTP/1.1\n" +
                        "Server Hostname:\t\tlocalhost\n" +
                        "Server Port:\t\t\t8080\n" +
                        "Document Path:\t\t\t/index.html\n" +
                        "Document Length:\t\t2924 bytes\n" +
                        "\n" +
                        "Concurrency Level:\t\t5\n" +
                        "Time taken for tests:\t3.399000 seconds\n" +
                        "Complete requests:\t\t20000\n" +
                        "Failed requests:\t\t0\n" +
                        "Kept alive:\t\t\t\t20000\n" +
                        "Total transferred:\t\t62640000 bytes\n" +
                        "Content transferred:\t50000000 bytes\n" +
                        "Requests per second:\t5,884.08 [#/sec] (mean)\n" +
                        "Time per request:\t\t0.850 [ms] (mean)\n" +
                        "Time per request:\t\t0.170 [ms] (mean, across all concurrent requests)\n" +
                        "Transfer rate:\t\t\t17,997.02 [Kbytes/sec] received\n"
                ));
    }
}
|
#!/bin/bash
# Interactive production deployment menu.
# Pulls environment variables (.env), console formatting helpers
# (.formatting) and shared shell functions from the user's ~/.deploy dir.
source $HOME/.deploy/.env
source $HOME/.deploy/.formatting
source $HOME/.deploy/functions.sh
# Resolve MySQL connection parameters (function defined in functions.sh).
setMysqlParams
# Empty marker variable -- presumably toggles prod behaviour elsewhere;
# TODO confirm against functions.sh.
prod=""
clear
# header / maestroInstall / menuProd are provided by functions.sh.
header
maestroInstall
menuProd
#!/bin/bash
# Build script: packages the Aedge daemon + Electron GUI into a signed
# (and optionally notarized) macOS arm64 DMG installer.
set -euo pipefail
pip install setuptools_scm
# The environment variable AEDGE_INSTALLER_VERSION needs to be defined.
# If the env variable NOTARIZE and the username and password variables are
# set, this will attempt to Notarize the signed DMG.
AEDGE_INSTALLER_VERSION=$(python installer-version.py)
if [ ! "$AEDGE_INSTALLER_VERSION" ]; then
  echo "WARNING: No environment variable AEDGE_INSTALLER_VERSION set. Using 0.0.0."
  AEDGE_INSTALLER_VERSION="0.0.0"
fi
echo "Aedge Installer Version is: $AEDGE_INSTALLER_VERSION"
echo "Installing npm and electron packagers"
npm install electron-installer-dmg -g
# Pinning electron-packager and electron-osx-sign to known working versions
# Current packager uses an old version of osx-sign, so if we install the newer sign package
# things break
npm install electron-packager@15.4.0 -g
npm install electron-osx-sign@v0.5.0 -g
npm install notarize-cli -g
echo "Create dist/"
sudo rm -rf dist
mkdir dist
echo "Install pyinstaller and build bootloaders for M1"
pip install pyinstaller==4.5
echo "Create executables with pyinstaller"
# Resolve the .spec file path from the installed aedge package.
SPEC_FILE=$(python -c 'import aedge; print(aedge.PYINSTALLER_SPEC_PATH)')
pyinstaller --log-level=INFO "$SPEC_FILE"
# NOTE(review): under "set -e" the script already exits when pyinstaller
# fails, so this check (and the similar ones below) can never trigger --
# confirm before relying on these error messages.
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "pyinstaller failed!"
  exit $LAST_EXIT_CODE
fi
# Ship the pyinstaller daemon output alongside the GUI sources.
cp -r dist/daemon ../aedge-blockchain-gui
cd .. || exit
cd aedge-blockchain-gui || exit
echo "npm build"
npm install
npm audit fix
npm run build
# NOTE(review): dead code under "set -e" (see note above).
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "npm run build failed!"
  exit $LAST_EXIT_CODE
fi
# sets the version for aedge-blockchain in package.json
brew install jq
cp package.json package.json.orig
jq --arg VER "$AEDGE_INSTALLER_VERSION" '.version=$VER' package.json > temp.json && mv temp.json package.json
electron-packager . Aedge --asar.unpack="**/daemon/**" --platform=darwin \
  --icon=src/assets/img/Aedge.icns --overwrite --app-bundle-id=net.aedge.blockchain \
  --appVersion=$AEDGE_INSTALLER_VERSION
LAST_EXIT_CODE=$?
# reset the package.json to the original
mv package.json.orig package.json
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "electron-packager failed!"
  exit $LAST_EXIT_CODE
fi
# Code-sign the app bundle, but only when NOTARIZE is set.
if [ "$NOTARIZE" ]; then
  electron-osx-sign Aedge-darwin-arm64/Aedge.app --platform=darwin \
    --hardened-runtime=true --provisioning-profile=aedgeblockchain.provisionprofile \
    --entitlements=entitlements.mac.plist --entitlements-inherit=entitlements.mac.plist \
    --no-gatekeeper-assess
fi
# NOTE(review): this captures the exit status of the "if" statement above,
# not of electron-osx-sign itself, and "set -e" would already have aborted
# on a signing failure -- the check below is effectively dead code; confirm.
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "electron-osx-sign failed!"
  exit $LAST_EXIT_CODE
fi
# Stage the packaged app next to the other build artifacts and build the DMG.
mv Aedge-darwin-arm64 ../build_scripts/dist/
cd ../build_scripts || exit
DMG_NAME="Aedge-$AEDGE_INSTALLER_VERSION-arm64.dmg"
echo "Create $DMG_NAME"
mkdir final_installer
electron-installer-dmg dist/Aedge-darwin-arm64/Aedge.app Aedge-$AEDGE_INSTALLER_VERSION-arm64 \
  --overwrite --out final_installer
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "electron-installer-dmg failed!"
  exit $LAST_EXIT_CODE
fi
ls -lh final_installer
# Upload the DMG to Apple's notarization service when credentials exist.
if [ "$NOTARIZE" ]; then
  echo "Notarize $DMG_NAME on ci"
  cd final_installer || exit
  notarize-cli --file=$DMG_NAME --bundle-id net.aedge.blockchain \
    --username "$APPLE_NOTARIZE_USERNAME" --password "$APPLE_NOTARIZE_PASSWORD"
  echo "Notarization step complete"
else
  echo "Not on ci or no secrets so skipping Notarize"
fi
# Notes on how to manually notarize
#
# Ask for username and password. password should be an app specific password.
# Generate app specific password https://support.apple.com/en-us/HT204397
# xcrun altool --notarize-app -f Aedge-0.1.X.dmg --primary-bundle-id net.aedge.blockchain -u username -p password
# xcrun altool --notarize-app; -should return REQUEST-ID, use it in next command
#
# Wait until the following command returns a success message.
# watch -n 20 'xcrun altool --notarization-info {REQUEST-ID} -u username -p password'.
# It can take a while, run it every few minutes.
#
# Once that is successful, execute the following command:
# xcrun stapler staple Aedge-0.1.X.dmg
#
# Validate DMG:
# xcrun stapler validate Aedge-0.1.X.dmg
|
# This hook applies patches from "patches" directory.

# Apply a single patch file to $wrksrc.
#
# $1 - path to the patch file.
#
# Patch arguments are resolved in order of preference:
#   1. $PATCHESDIR/<patch>.args (per-patch override)
#   2. $patch_args (template-wide override)
#   3. "-Np0" (default)
_process_patch() {
	local _args= _patch= i=$1
	_args="-Np0"
	_patch=${i##*/}
	if [ -f $PATCHESDIR/${_patch}.args ]; then
		_args=$(cat $PATCHESDIR/${_patch}.args)
	elif [ -n "$patch_args" ]; then
		_args=$patch_args
	fi
	cp -f $i "$wrksrc"
	# Try to guess if its a compressed patch.
	# BUG FIX: the original tested the caller's global "$f" here instead of
	# this function's own argument "$i", silently coupling it to the
	# caller's loop variable name.
	if [[ $i =~ .gz$ ]]; then
		gunzip "$wrksrc/${_patch}"
		_patch=${_patch%%.gz}
	elif [[ $i =~ .bz2$ ]]; then
		bunzip2 "$wrksrc/${_patch}"
		_patch=${_patch%%.bz2}
	elif [[ $i =~ .diff$ ]]; then
		:
	elif [[ $i =~ .patch$ ]]; then
		:
	else
		msg_warn "$pkgver: unknown patch type: $i.\n"
		return 0
	fi
	cd "$wrksrc"
	msg_normal "$pkgver: patching: ${_patch}.\n"
	patch -sl ${_args} -i ${_patch} 2>/dev/null
}
# Entry point: apply all patches to $wrksrc.
# Uses $PATCHESDIR/series for ordering when present, otherwise every
# regular file in $PATCHESDIR (skipping *.args argument files).
hook() {
	if [ ! -d "$wrksrc" ]; then
		return 0
	fi
	if [ -r $PATCHESDIR/series ]; then
		cat $PATCHESDIR/series | while read f; do
			_process_patch "$PATCHESDIR/$f"
		done
	else
		for f in $PATCHESDIR/*; do
			[ ! -f $f ] && continue
			# BUG FIX: was `if $(echo $f|grep -Eq '^.*.args$')` -- running
			# grep's empty output as a command and matching an unescaped
			# dot (so e.g. "fooXargs" was also skipped).
			if echo "$f" | grep -Eq '\.args$'; then
				continue
			fi
			_process_patch $f
		done
	fi
}
|
// 1. Creating Arrays
let firstArray = [ "a", "b", "c" ];
let secondArray = [ "d", "e", "f" ];
// 2. Access an Array Item
console.log( firstArray[ 0 ] ); // Results: "a"
// 3. Loop over an Array
firstArray.forEach( (item, index, array) => {
	console.log( item, index );
} );
// Results:
// a 0
// b 1
// c 2
// 4. Add new item to END of array
secondArray.push( 'g' );
console.log( secondArray );
// Results: ["d","e","f", "g"]
// 5. Remove item from END of array
secondArray.pop();
console.log( secondArray );
// Results: ["d","e","f"]
// 6. Remove item from FRONT of array
secondArray.shift();
console.log( secondArray );
// Results: ["e","f"]
// 7. Add item to FRONT of array
secondArray.unshift( "d" );
console.log( secondArray );
// Results: ["d","e","f"]
// 8. Find INDEX of an item in array
let position = secondArray.indexOf( 'f' );
// Results: 2
// 9. Remove Item by Index Position
secondArray.splice( position, 1 );
console.log( secondArray );
// Note, the second argument, in this case "1",
// represent the number of array elements to be removed
// Results: ["d","e"]
// 10. Copy an Array
let shallowCopy = secondArray.slice();
console.log( secondArray );
console.log( shallowCopy );
// Results: ShallowCopy === ["d","e"]
// 11. JavaScript properties that BEGIN with a digit MUST be accessed using bracket notation
// BUG FIX: the lines below previously ran as live code against an
// undefined "renderer" object, crashing the script with a ReferenceError.
// They are illustrative only, so they are shown as comments. Note the
// broken form is DOT access on a digit-leading property name:
// renderer.3d.setTexture( model, 'character.png' );      // a syntax error
// renderer[ '3d' ].setTexture( model, 'character.png' ); // works properly
// 12. Combine two Arrays
let thirdArray = firstArray.concat( secondArray );
console.log( thirdArray );
// ["a","b","c", "d", "e"];
// 13. Combine all Array elements into a string
console.log( thirdArray.join() ); // Results: a,b,c,d,e
console.log( thirdArray.join( '' ) ); // Results: abcde
console.log( thirdArray.join( '-' ) ); // Results: a-b-c-d-e
// 14. Reversing an Array (in place, i.e. destructive)
console.log( thirdArray.reverse() ); // ["e", "d", "c", "b", "a"]
// 15. sort
let unsortedArray = [ "Alphabet", "Zoo", "Products", "Computer Science", "Computer" ];
console.log( unsortedArray.sort() );
// Results: ["Alphabet", "Computer", "Computer Science", "Products", "Zoo" ]
|
<gh_stars>0
"""
speakstatus.py - 2016 <NAME>
This is a script designed to speak "status" info such as the
time/public transport/appointments/etc etc. It is intended to be
periodically run from cron or similarly triggered such as at login and
uses espeak in a subprocess to do the actual speaking.
CONFIGURATION:
There are a set of "Sources" which generate output
to be spoken. These are put in a global called SOURCES and the
script says their output sleeping a bit between each.
TODO:
- command line config
- config file config
- more sources
- PTV API
- Google calendar
- Proper rastatodo integration
- more natural time phrasing
"""
from time import sleep, strftime
from subprocess import check_output, check_call
DEFAULT_SLEEP_TIME=1
class BaseSource(object):
    """Abstract base for all speakable sources.

    prefix -- optional fixed string to be said before the main content.
    """

    def __init__(self, prefix=None):
        self.prefix = prefix

    def speech(self):
        """Return the text to be spoken; concrete sources must override."""
        raise NotImplementedError
class TimeSource(BaseSource):
    """Speak the current wall-clock time, rather mechanically, as "HH MM"."""

    def speech(self):
        hour_and_minute = strftime('%H %M')
        return hour_and_minute
class SubProcSource(BaseSource):
    """
    Wraps a subprocess and speaks its standard output.

    command_arg_list -- argv list handed to subprocess.check_output().
    prefix -- optional fixed string said before the output.
    """

    def __init__(self, command_arg_list, prefix=None):
        # FIX: delegate prefix handling to BaseSource instead of
        # duplicating the assignment here.
        super(SubProcSource, self).__init__(prefix=prefix)
        self.command_arg_list = command_arg_list

    def get_stdout(self):
        # NOTE: May throw CalledProcessError or OSErrors.
        return check_output(self.command_arg_list)

    def speech(self):
        return self.get_stdout()
class CountSubProcSource(SubProcSource):
    """
    Says the number of lines output by the subprocess.
    """

    def speech(self):
        output = self.get_stdout()
        # BUG FIX: check_output() returns bytes on Python 3, and
        # bytes.count('\n') with a str argument raises TypeError.
        # Decode first so the newline count works on both Python 2 and 3.
        if isinstance(output, bytes):
            output = output.decode('utf-8', 'replace')
        return str(output.count('\n'))
# EXAMPLE OUTPUT CONFIG ===================================
# Ordered list of sources spoken on each run (consumed by the main loop).
SOURCES = [
    # Speaks the current time, e.g. "Time: 14 05".
    TimeSource(prefix="Time:"),
    # Says how many items due today in rastodo
    # (counts output lines of the external "rastodo" command).
    CountSubProcSource(
        ['rastodo', '--days=0', '--ex-types=w'],
        prefix="To Do Today:"
    )
]
# ESPEAK CLASS ============================================
class ESpeak(object):
    """
    Says strings through a subprocess running espeak.
    """

    def __init__(self):
        # BUG FIX: this was misspelled "__init___" (three trailing
        # underscores), so it never ran as the constructor.
        pass  # TODO: don't be lazy and keep spawning new ones, use -stdin arg

    def say(self, string):
        # Blocks until espeak finishes speaking the given string.
        return check_output(['espeak', string])
# MAIN ====================================================
if __name__ == "__main__":
    # Speak each configured source in order: optional prefix first, then
    # the source's content, pausing briefly between sources.
    espeak = ESpeak()
    for src in SOURCES:
        if src.prefix:
            espeak.say(src.prefix)
        espeak.say(src.speech())
        sleep(DEFAULT_SLEEP_TIME)
|
// ESLint configuration: delegate all lint rules to BigCommerce's shared preset.
module.exports = {
  extends: ['@bigcommerce/eslint-config'],
};
|
<filename>misc/impl/service/im_member_service.go
/*
* Copyright © 2019 <NAME>.
*/
package service
import (
"context"
"github.com/hedzr/voxr-api/api/v10"
"github.com/hedzr/voxr-lite/internal/exception"
)
type ImMemberService struct {
}
// AddMember validates the request and returns an acknowledgement reply.
// Only parameter validation is implemented; the reply always carries
// Err_INVALID_PARAMS.
//
// BUG FIX: the original dereferenced req.Seq while building the reply
// BEFORE its own req == nil check, so a nil request panicked instead of
// returning an InvalidParams error.
func (s *ImMemberService) AddMember(ctx context.Context, req *v10.AddMemberReq) (res *v10.AddMemberReply, err error) {
	res = &v10.AddMemberReply{ProtoOp: v10.Op_TopicsAllAck, ErrorCode: v10.Err_INVALID_PARAMS}
	if req == nil {
		err = exception.New(exception.InvalidParams)
		return
	}
	// Echo the request sequence number now that req is known non-nil.
	res.Seq = req.Seq
	if req.ProtoOp != v10.Op_TopicsAll && req.ProtoOp != v10.Op_OrgsAll {
		err = exception.New(exception.InvalidParams)
		return
	}
	return
}
// RemoveMember validates the request and returns an acknowledgement reply.
//
// BUG FIX: req.Seq was dereferenced before the req == nil check, panicking
// on a nil request (see AddMember).
func (s *ImMemberService) RemoveMember(ctx context.Context, req *v10.RemoveMemberReq) (res *v10.RemoveMemberReply, err error) {
	res = &v10.RemoveMemberReply{ProtoOp: v10.Op_TopicsAllAck, ErrorCode: v10.Err_INVALID_PARAMS}
	if req == nil {
		err = exception.New(exception.InvalidParams)
		return
	}
	res.Seq = req.Seq
	if req.ProtoOp != v10.Op_TopicsAll && req.ProtoOp != v10.Op_OrgsAll {
		err = exception.New(exception.InvalidParams)
		return
	}
	return
}
// InviteMember validates the request and returns an acknowledgement reply.
//
// BUG FIX: req.Seq was dereferenced before the req == nil check, panicking
// on a nil request (see AddMember).
func (s *ImMemberService) InviteMember(ctx context.Context, req *v10.InviteMemberReq) (res *v10.InviteMemberReply, err error) {
	res = &v10.InviteMemberReply{ProtoOp: v10.Op_TopicsAllAck, ErrorCode: v10.Err_INVALID_PARAMS}
	if req == nil {
		err = exception.New(exception.InvalidParams)
		return
	}
	res.Seq = req.Seq
	if req.ProtoOp != v10.Op_TopicsAll && req.ProtoOp != v10.Op_OrgsAll {
		err = exception.New(exception.InvalidParams)
		return
	}
	return
}
// UpdateMember validates the request and returns an acknowledgement reply.
//
// BUG FIX: req.Seq was dereferenced before the req == nil check, panicking
// on a nil request (see AddMember).
func (s *ImMemberService) UpdateMember(ctx context.Context, req *v10.UpdateMemberReq) (res *v10.UpdateMemberReply, err error) {
	res = &v10.UpdateMemberReply{ProtoOp: v10.Op_TopicsAllAck, ErrorCode: v10.Err_INVALID_PARAMS}
	if req == nil {
		err = exception.New(exception.InvalidParams)
		return
	}
	res.Seq = req.Seq
	if req.ProtoOp != v10.Op_TopicsAll && req.ProtoOp != v10.Op_OrgsAll {
		err = exception.New(exception.InvalidParams)
		return
	}
	return
}
// ListMembers validates the request and returns an acknowledgement reply.
//
// BUG FIX: req.Seq was dereferenced before the req == nil check, panicking
// on a nil request (see AddMember).
func (s *ImMemberService) ListMembers(ctx context.Context, req *v10.ListMembersReq) (res *v10.ListMembersReply, err error) {
	res = &v10.ListMembersReply{ProtoOp: v10.Op_TopicsAllAck, ErrorCode: v10.Err_INVALID_PARAMS}
	if req == nil {
		err = exception.New(exception.InvalidParams)
		return
	}
	res.Seq = req.Seq
	if req.ProtoOp != v10.Op_TopicsAll && req.ProtoOp != v10.Op_OrgsAll {
		err = exception.New(exception.InvalidParams)
		return
	}
	return
}
|
# (User's) Problem
# We Have:
# int
# We Need:
# from 1 to int as string
# We Must:
# function name (unclear)
# str output
#
# Solution (Product)
# use a recursive function to print the numbers 1..n (NOT a factorial)
def recursive_factorial(n):
    """Print the integers 1..n concatenated, e.g. n=5 prints "12345".

    NOTE(review): despite the name, this does NOT compute a factorial;
    it recursively prints the sequence 1..n with no separators and
    returns None. The name is kept to preserve the external interface.
    """
    n = int(n)
    # BUG FIX: the original recursed forever for n < 1 (RecursionError);
    # print nothing for such inputs instead.
    if n < 1:
        return None
    # base case
    if n == 1:
        return print(n, end="")
    # Print 1..n-1 first, then append n.
    recursive_factorial(n - 1)
    return print(n, end="")
# what form is this? (crazy hacker rank...)
# Standard competitive-programming entry point: read one integer from
# stdin and print the sequence 1..n (no trailing newline).
if __name__ == "__main__":
    n = int(input())
    recursive_factorial(n)
|
echo "******************************************************************************"
echo "Check if this is the first run." `date`
echo "******************************************************************************"
# A marker file in the home directory records that this container has
# already completed first-run initialization (ORDS configuration below).
FIRST_RUN="false"
if [ ! -f ~/CONTAINER_ALREADY_STARTED_FLAG ]; then
  echo "First run."
  FIRST_RUN="true"
  touch ~/CONTAINER_ALREADY_STARTED_FLAG
else
  echo "Not first run."
fi
echo "******************************************************************************"
echo "Handle shutdowns." `date`
echo "docker stop --time=30 {container}" `date`
echo "******************************************************************************"
# Ask Tomcat to shut down cleanly when the container is stopped.
function gracefulshutdown {
  ${CATALINA_HOME}/bin/shutdown.sh
}
trap gracefulshutdown SIGINT
trap gracefulshutdown SIGTERM
# BUG FIX: the original also registered "trap gracefulshutdown SIGKILL".
# SIGKILL can never be caught or handled, so that trap was a no-op and
# has been removed.
echo "******************************************************************************"
echo "Check DB is available." `date`
echo "******************************************************************************"
export PATH=${PATH}:${JAVA_HOME}/bin
# check_db <connect-string>
# Runs a trivial query through SQLcl and sets DB_OK=0 when the database
# answers "Alive", DB_OK=1 otherwise. The heredoc body must stay at
# column 0 -- it is sent verbatim to SQLcl.
function check_db {
  CONNECTION=$1
  RETVAL=`/u01/sqlcl/bin/sql -silent ${CONNECTION} <<EOF
SET PAGESIZE 0 FEEDBACK OFF VERIFY OFF HEADING OFF ECHO OFF TAB OFF
SELECT 'Alive' FROM dual;
EXIT;
EOF`
  # Strip tabs, carriage returns and newlines from the SQLcl output.
  RETVAL="${RETVAL//[$'\t\r\n']}"
  if [ "${RETVAL}" = "Alive" ]; then
    DB_OK=0
  else
    DB_OK=1
  fi
}
CONNECTION="APEX_PUBLIC_USER/${APEX_PUBLIC_USER_PASSWORD}@//${DB_HOSTNAME}:${DB_PORT}/${DB_SERVICE}"
# Block here until the database responds, polling every 30 seconds.
check_db ${CONNECTION}
while [ ${DB_OK} -eq 1 ]
do
  echo "DB not available yet. Waiting for 30 seconds."
  sleep 30
  check_db ${CONNECTION}
done
if [ ! -d ${CATALINA_BASE}/conf ]; then
  echo "******************************************************************************"
  echo "New CATALINA_BASE location." `date`
  echo "******************************************************************************"
  # Fresh CATALINA_BASE volume: seed it from the CATALINA_HOME install.
  cp -r ${CATALINA_HOME}/conf ${CATALINA_BASE}
  cp -r ${CATALINA_HOME}/logs ${CATALINA_BASE}
  cp -r ${CATALINA_HOME}/temp ${CATALINA_BASE}
  cp -r ${CATALINA_HOME}/webapps ${CATALINA_BASE}
  cp -r ${CATALINA_HOME}/work ${CATALINA_BASE}
fi
if [ ! -d ${CATALINA_BASE}/webapps/i ]; then
  echo "******************************************************************************"
  echo "First time APEX images." `date`
  echo "******************************************************************************"
  # Publish the APEX static images under the conventional /i/ context.
  mkdir -p ${CATALINA_BASE}/webapps/i/
  cp -R ${SOFTWARE_DIR}/images/* ${CATALINA_BASE}/webapps/i/
  # Images were just copied, so skip the refresh step below.
  APEX_IMAGES_REFRESH="false"
fi
if [ "${APEX_IMAGES_REFRESH}" == "true" ]; then
  echo "******************************************************************************"
  echo "Overwrite APEX images." `date`
  echo "******************************************************************************"
  cp -R ${SOFTWARE_DIR}/images/* ${CATALINA_BASE}/webapps/i/
fi
if [ "${FIRST_RUN}" == "true" ]; then
  echo "******************************************************************************"
  echo "Prepare the ORDS parameter file." `date`
  echo "******************************************************************************"
  # Everything between <<EOF and EOF is written verbatim (with variables
  # expanded) to ords_params.properties -- do not indent or comment it.
  cat > ${ORDS_HOME}/params/ords_params.properties <<EOF
db.hostname=${DB_HOSTNAME}
db.port=${DB_PORT}
db.servicename=${DB_SERVICE}
#db.sid=
db.username=APEX_PUBLIC_USER
db.password=${APEX_PUBLIC_USER_PASSWORD}
migrate.apex.rest=false
plsql.gateway.add=true
rest.services.apex.add=true
rest.services.ords.add=true
schema.tablespace.default=${APEX_TABLESPACE}
schema.tablespace.temp=${TEMP_TABLESPACE}
standalone.mode=false
#standalone.use.https=true
#standalone.http.port=8080
#standalone.static.images=/home/oracle/apex/images
user.apex.listener.password=${APEX_LISTENER_PASSWORD}
user.apex.restpublic.password=${APEX_REST_PASSWORD}
user.public.password=${PUBLIC_PASSWORD}
user.tablespace.default=${APEX_TABLESPACE}
user.tablespace.temp=${TEMP_TABLESPACE}
sys.user=SYS
sys.password=${SYS_PASSWORD}
restEnabledSql.active=true
feature.sdw=true
database.api.enabled=true
EOF
  echo "******************************************************************************"
  echo "Configure ORDS." `date`
  echo "******************************************************************************"
  cd ${ORDS_HOME}
  $JAVA_HOME/bin/java -jar ords.war configdir ${ORDS_CONF}
  $JAVA_HOME/bin/java -jar ords.war
  echo "******************************************************************************"
  echo "Install ORDS. Safe to run on DB with existing config." `date`
  echo "******************************************************************************"
  # Deploy ORDS into Tomcat.
  cp ords.war ${CATALINA_BASE}/webapps/
fi
if [ ! -f ${KEYSTORE_DIR}/keystore.jks ]; then
  echo "******************************************************************************"
  echo "Configure HTTPS." `date`
  echo "******************************************************************************"
  # Generate a self-signed certificate and bake the keystore/AJP/proxy
  # settings into the Tomcat server.xml template via placeholder tokens.
  mkdir -p ${KEYSTORE_DIR}
  cd ${KEYSTORE_DIR}
  ${JAVA_HOME}/bin/keytool -genkey -keyalg RSA -alias selfsigned -keystore keystore.jks \
    -dname "CN=${HOSTNAME}, OU=My Department, O=My Company, L=Birmingham, ST=West Midlands, C=GB" \
    -storepass ${KEYSTORE_PASSWORD} -validity 3600 -keysize 2048 -keypass ${KEYSTORE_PASSWORD}
  sed -i -e "s|###KEYSTORE_DIR###|${KEYSTORE_DIR}|g" ${SCRIPTS_DIR}/server.xml
  sed -i -e "s|###KEYSTORE_PASSWORD###|${KEYSTORE_PASSWORD}|g" ${SCRIPTS_DIR}/server.xml
  sed -i -e "s|###AJP_SECRET###|${AJP_SECRET}|g" ${SCRIPTS_DIR}/server.xml
  sed -i -e "s|###AJP_ADDRESS###|${AJP_ADDRESS}|g" ${SCRIPTS_DIR}/server.xml
  sed -i -e "s|###PROXY_IPS###|${PROXY_IPS}|g" ${SCRIPTS_DIR}/server.xml
  cp ${SCRIPTS_DIR}/server.xml ${CATALINA_BASE}/conf
  cp ${SCRIPTS_DIR}/web.xml ${CATALINA_BASE}/conf
fi;
echo "******************************************************************************"
echo "Start Tomcat." `date`
echo "******************************************************************************"
${CATALINA_HOME}/bin/startup.sh
echo "******************************************************************************"
echo "Tail the catalina.out file as a background process" `date`
echo "and wait on the process so script never ends." `date`
echo "******************************************************************************"
# Keep the container's main process alive by waiting on a background tail.
tail -f ${CATALINA_BASE}/logs/catalina.out &
bgPID=$!
wait $bgPID
|
"""
Creating a Database in MySQL to Store Information About Employees
"""
-- NOTE(review): the triple-quoted header above is Python string syntax,
-- not valid SQL; a SQL client will reject this file as-is -- confirm how
-- it is consumed.
-- Create the schema and make it the active database.
CREATE DATABASE employee_db;
USE employee_db;
-- One row per employee; employee_id is a synthetic auto-increment key.
CREATE TABLE employees (
    employee_id INT AUTO_INCREMENT PRIMARY KEY,
    first_name VARCHAR(50),
    last_name VARCHAR(50),
    job_title VARCHAR(50),
    date_of_birth DATE,
    date_of_hire DATE
);
-- Salary periods per employee; a NULL end_date presumably marks the
-- current salary -- TODO confirm.
CREATE TABLE salary_history (
    salary_history_id INT AUTO_INCREMENT PRIMARY KEY,
    employee_id INT,
    salary DECIMAL(10,2) NOT NULL,
    start_date DATE,
    end_date DATE,
    FOREIGN KEY (employee_id) REFERENCES employees(employee_id)
);
# ocr.py
from PIL import Image, ImageEnhance
def process_image(input_image_path, output_image_path):
    """Boost contrast and sharpness of an image and write the result.

    input_image_path -- path of the source image to read.
    output_image_path -- path where the enhanced image is written.
    """
    source = Image.open(input_image_path)
    # Same fixed enhancement factors as before: contrast x1.5 first,
    # then sharpness x2.0.
    boosted = ImageEnhance.Contrast(source).enhance(1.5)
    boosted = ImageEnhance.Sharpness(boosted).enhance(2.0)
    boosted.save(output_image_path)
# Example usage
# NOTE(review): this runs at import time and expects input_image.jpg to
# exist in the working directory -- consider a __main__ guard.
input_path = "input_image.jpg"
output_path = "processed_image.jpg"
process_image(input_path, output_path)
/**
 * PAGE USER AGENT
 *
 * Fallback User-Agent strings used when a caller does not supply one.
 */
export const DEFAULT_DESKTOP_USER_AGENT =
  'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:22.0) Gecko/20100101 Firefox/22.0';
export const DEFAULT_MOBILE_USER_AGENT =
  'Mozilla/5.0 (Linux; U; Android 2.2; en-us; DROID2 GLOBAL Build/S273) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1';
/**
 * Page VIEWPORT size
 *
 * Default viewport dimensions in CSS pixels (mobile values are portrait).
 */
export const DEFAULT_DESKTOP_PAGE_VIEWPORT_WIDTH = 1366;
export const DEFAULT_DESKTOP_PAGE_VIEWPORT_HEIGHT = 768;
export const DEFAULT_MOBILE_PAGE_VIEWPORT_WIDTH = 1080;
export const DEFAULT_MOBILE_PAGE_VIEWPORT_HEIGHT = 1920;
|
#!/bin/bash
# Container bootstrap: configure Hadoop/Spark, then start HDFS and YARN.
: ${HADOOP_PREFIX:=/usr/local/hadoop}
# NOTE(review): this EXECUTES hadoop-env.sh in a child shell, so variables
# it exports do not reach this script; it may have been meant to be
# sourced (". $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh") -- confirm.
$HADOOP_PREFIX/etc/hadoop/hadoop-env.sh
rm /tmp/*.pid
# installing libraries if any - (resource urls added comma separated to the ACP system variable)
cd $HADOOP_PREFIX/share/hadoop/common ; for cp in ${ACP//,/ }; do echo == $cp; curl -LO $cp ; done; cd -
# altering the core-site configuration
sed s/HOSTNAME/$HOSTNAME/ /usr/local/hadoop/etc/hadoop/core-site.xml.template > /usr/local/hadoop/etc/hadoop/core-site.xml
export YARN_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
export PATH=$PATH:$SPARK_HOME/bin:$HADOOP_PREFIX/bin
# setting spark defaults
echo spark.yarn.jar hdfs:///spark/spark-assembly-1.6.1-hadoop2.6.0.jar > $SPARK_HOME/conf/spark-defaults.conf
cp $SPARK_HOME/conf/metrics.properties.template $SPARK_HOME/conf/metrics.properties
service ssh start
$HADOOP_PREFIX/sbin/start-dfs.sh
$HADOOP_PREFIX/sbin/start-yarn.sh
# No argument defaults to "exit 0"; "-d" runs a debug sshd in the
# foreground; anything else is run through bash.
CMD=${1:-"exit 0"}
if [[ "$CMD" == "-d" ]];
then
  # NOTE(review): stops "sshd" after starting "ssh" above -- the service
  # name differs between distros; confirm both spellings are intentional.
  service sshd stop
  /usr/sbin/sshd -D -d
else
  /bin/bash -c "$*"
fi
# Reached only after the selected branch returns; keeps the container up.
supervisord -n
|
<reponame>creative-gestalt/niklib-backend
import { randomUUID } from 'crypto';
import * as path from 'path';
import { v4 as uuidv4 } from 'uuid';
// Accept only reader-compatible uploads: documents (pdf/epub) and
// images (png/jpg). Follows the multer fileFilter callback convention:
// callback(error, acceptFile).
export const readerFileFilter = (req, file, callback) => {
  const allowedExtensions = /\.(pdf|epub|png|jpg)$/;
  if (allowedExtensions.test(file.originalname)) {
    callback(null, true);
  } else {
    callback(new Error('Only images and readable files are allowed!'), false);
  }
};
// Build a collision-resistant stored filename: "<base>-<uuid><ext>".
// Uses Node's built-in crypto.randomUUID() instead of the third-party
// "uuid" package -- same RFC 4122 v4 format, one dependency fewer.
export const editFileName = (req, file, callback) => {
  const baseName = file.originalname.split('.')[0];
  const fileExtName = path.extname(file.originalname);
  callback(null, `${baseName}-${randomUUID()}${fileExtName}`);
};
|
#!/bin/sh
# Fully reset this node's kubeadm/Kubernetes networking state, working
# around the "no IP addresses available in network: podnet" CNI issue:
# https://github.com/kubernetes/kubernetes/issues/39557
# "Failed to setup network for pod \ using network plugins \"cni\": no IP addresses available in network: podnet; Skipping pod"
kubeadm reset -f
systemctl stop kubelet
systemctl stop docker
# Wipe CNI and kubelet state so networking is rebuilt from scratch.
rm -rf /var/lib/cni/
rm -rf /var/lib/kubelet/*
rm -rf /etc/cni/
# Tear down the bridge/overlay interfaces left behind by flannel/Docker.
ifconfig cni0 down
ifconfig flannel.1 down
ifconfig docker0 down
ip link delete cni0
ip link delete flannel.1
# Only docker is restarted here; kubelet is presumably restarted by a
# subsequent "kubeadm init/join" -- TODO confirm.
systemctl start docker
|
// Merge two sorted singly linked lists into one sorted list.
// Input: 1->2->4, 1->3->4
// Output: 1->1->2->3->4->4
function solution(l1, l2) {
  // Sentinel node simplifies appending; its value (1) is never read.
  const sentinel = new ListNode(1);
  let tail = sentinel;
  // Repeatedly attach the smaller of the two current head nodes.
  while (l1 !== null && l2 !== null) {
    if (l1.val <= l2.val) {
      tail.next = l1;
      l1 = l1.next;
    } else {
      tail.next = l2;
      l2 = l2.next;
    }
    tail = tail.next;
  }
  // At most one list still has nodes; splice the remainder on the end.
  tail.next = l1 !== null ? l1 : l2;
  return sentinel.next;
}
|
def get_first_element(result, default=''):
    """Return the first element of ``result``, or ``default`` when empty.

    result -- any sequence; falsy values (None, [], '') count as empty.
    default -- value returned for empty input. Defaults to '' to preserve
        the original behavior; callers can now choose their own sentinel.
    """
    if result:
        return result[0]
    return default
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.