text stringlengths 1 1.05M |
|---|
<gh_stars>1-10
import PropTypes from "prop-types"
import React, { Component } from "react"
import {window} from 'browser-monads'
import logo from "../images/logo.svg"
class Header extends Component {
constructor(props) {
super(props)
this.state = {
prevScrollpos: window.pageYOffset,
scrolling: false
}
}
handleScroll = () => {
const currentScrollPos = window.pageYOffset
const visible = this.state.prevScrollpos > currentScrollPos
this.setState({
prevScrollpos: currentScrollPos,
scrolling: visible,
})
}
componentDidMount() {
window.addEventListener("scroll", this.handleScroll)
console.log("mount")
}
componentWillUnmount() {
window.removeEventListener("scroll", this.handleScroll)
console.log("unmount")
}
render() {
const change1 = this.props.change1
const toggleNav = this.props.toggleNav
return (
<header id="page-header" className={`nav-container sticky-top page-header ${!this.state.scrolling || this.state.prevScrollpos === 0 ? "" : "nav-down"} ${change1 ? "no-box" : ""}`}>
<nav className="navbar navbar-expand-md top-nav">
<a className="navbar-brand" href="#home">
<img src={logo} alt="logo"></img>
</a>
<button className={`menu-button navbar-toggler ${change1 ? "change1" : ""}`} onClick={toggleNav}>
<div className="menu-top"></div>
<div className="menu-middle"></div>
<div className="menu-bottom"></div>
</button>
<div className={`collapse navbar-collapse justify-content-end`}>
<ul className="navbar-nav navbar-right">
<li className="nav-item">
<a className="nav-link" href="#about">
about
</a>
</li>
<li className="nav-item">
<a className="nav-link" href="#work">
work
</a>
</li>
<li className="nav-item">
<a className="nav-link" href="#contact">
contact
</a>
</li>
<li className="nav-item">
<a className="nav-link" href="https://www.richardawestmoreland.com/resume.pdf" target="_blank"
rel="noopener noreferrer">
resume
</a>
</li>
</ul>
</div>
</nav>
<div className={`mobile-nav ${change1 ? "expanded-nav" : ""}`}>
<div className="mobile-nav-wrapper">
<nav className="mobile-nav-links">
<ul>
<li><a onClick={toggleNav} href="#about">about</a></li>
<li><a onClick={toggleNav} href="#work">work</a></li>
<li><a onClick={toggleNav} href="#contact">contact</a></li>
<li><a onClick={toggleNav} href="https://www.richardawestmoreland.com/resume.pdf" target="_blank"
rel="noopener noreferrer">resume</a></li>
</ul>
</nav>
</div>
</div>
</header>
)
}
}
// Prop-type contract; siteTitle is accepted but unused by this component.
Header.propTypes = {
  siteTitle: PropTypes.string
}
Header.defaultProps = {
  siteTitle: ``
}
export default Header
|
#!/usr/bin/env bash
# Copyright (c) 2020 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Initialize the DPDK framework on the DUT node. Bind interfaces to driver.
set -exuo pipefail

# Assumptions:
# + There is a directory holding CSIT code to use (this script is there).
# + At least one of the following is true:
# ++ JOB_NAME environment variable is set,
# ++ or this entry script has access to arguments.
# Consequences (and specific assumptions) are multiple,
# examine tree of functions for current description.
# FIXME: Define API contract (as opposed to just help) for bootstrap.

# "set -eu" handles failures from the following two lines.
# Fix: the inner $(readlink ...) is quoted so paths containing spaces
# do not undergo word splitting (shellcheck SC2046).
BASH_ENTRY_DIR="$(dirname "$(readlink -e "${BASH_SOURCE[0]}")")"
BASH_FUNCTION_DIR="$(readlink -e "${BASH_ENTRY_DIR}/../function")"
source "${BASH_FUNCTION_DIR}/common.sh" || {
    echo "Source failed." >&2
    exit 1
}
source "${BASH_FUNCTION_DIR}/dpdk.sh" || die "Source failed."
common_dirs || die
dpdk_bind "${@}" || die
|
<gh_stars>0
module NotificationHub
  module Envelope
    # Envelope subtype identified by the 'multichannel' strategy.
    class Multichannel < Base
      # Strategy name used to select this envelope type.
      def strategy
        'multichannel'
      end
    end
  end
end
|
#!/bin/bash
# Prompt for a number and report its parity.
echo "Please enter an integer:"
read num
if [ $((num%2)) -eq 0 ]; then
    echo "It's even."
else
    # Previously odd input produced no output at all.
    echo "It's odd."
fi
# Equivalent one-liner for the even case:
#[ $((num%2)) -eq 0 ] && echo "It's even."
|
<filename>src/main/scala/scalation/analytics/ExpRegression.scala<gh_stars>1-10
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author <NAME>, <NAME>
* @version 1.3
* @date Sun Jan 11 19:05:20 EST 2015
* @see LICENSE (MIT style license file).
*/
// U N D E R D E V E L O P M E N T
// FIX: needs improved optimization
package scalation.analytics
import scala.math.{exp, pow}
import scalation.linalgebra.{MatrixD, VectoD, VectorD}
import scalation.minima.QuasiNewton
import scalation.plot.Plot
import scalation.util.Error
import RegTechnique._
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ExpRegression` class supports exponential regression. In this case,
* 'x' is multi-dimensional [1, x_1, ... x_k]. Fit the parameter vector 'b' in the
* exponential regression equation
* <p>
* log (mu (x)) = b dot x = b_0 + b_1 * x_1 + ... b_k * x_k
* <p>
* @see www.stat.uni-muenchen.de/~leiten/Lehre/Material/GLM_0708/chapterGLM.pdf
* @param x the data/design matrix
* @param nonneg whether to check that responses are nonnegative
* @param y the response vector
*/
class ExpRegression (x: MatrixD, nonneg: Boolean, y: VectorD)
      extends Predictor with Error
{
    if (x.dim1 != y.dim) flaw ("constructor", "dimensions of x and y are incompatible")
    if (nonneg && ! y.isNonnegative) flaw ("constructor", "response vector y must be nonnegative")

    private val k        = x.dim2 - 1           // number of variables (k = n-1)
    private val m        = x.dim1.toDouble      // number of data points (rows)
    private val r_df     = (m-1.0) / (m-k-1.0)  // ratio of degrees of freedom
    private var rSquared = -1.0                 // coefficient of determination (quality of fit)
    private var rBarSq   = -1.0                 // Adjusted R-squared
    private var fStat    = -1.0                 // F statistic (quality of fit)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** For a given parameter vector b, compute '-2 * Log-Likelihood' (-2LL).
     *  '-2LL' is the standard measure that follows a Chi-Square distribution.
     *  @see www.stat.cmu.edu/~cshalizi/350/lectures/26/lecture-26.pdf
     *  @see www.statisticalhorizons.com/wp-content/uploads/Allison.StatComp.pdf
     *  @param b  the parameters to fit
     */
    def ll (b: VectorD): Double =
    {
        var sum = 0.0
        for (i <- 0 until y.dim) {
            val bx = b dot x(i)
            sum += -bx - y(i) / exp { bx }
        } // for
        -2.0 * sum
    } // ll

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** For a given parameter vector b, compute '-2 * Log-Likelihood' (-2LL) for
     *  the null model (the one that does not consider the effects of x(i)).
     *  @param b  the parameters to fit
     */
    def ll_null (b: VectorD): Double =
    {
        var sum = 0.0
        for (i <- 0 until y.dim) {
            val bx = b(0)                       // only use intercept
            sum += -bx - y(i) / exp { bx }
        } // for
        -2.0 * sum
    } // ll_null

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Train the predictor by fitting the parameter vector (b-vector) in the
     *  exponential regression equation, by minimizing -2LL with BFGS.
     */
    def train ()
    {
        val b0   = new VectorD (x.dim2)         // use b_0 = 0 for starting guess for parameters
        val bfgs = new QuasiNewton (ll)         // minimizer for -2LL

        b = bfgs.solve (b0)                     // find optimal solution for parameters
        // NOTE(review): the residual below uses division, y / (x * b); for
        // exponential regression one would expect y - (x * b).map (exp) —
        // confirm intended (file is marked UNDER DEVELOPMENT above).
        val e    = y / (x * b)                  // residual/error vector
        val sse  = e dot e                      // residual/error sum of squares
        val sst  = (y dot y) - pow (y.sum, 2) / m  // total sum of squares
        val ssr  = sst - sse                    // regression sum of squares
        rSquared = ssr / sst                    // coefficient of determination (R-squared)
        rBarSq   = 1.0 - (1.0-rSquared) * r_df  // R-bar-squared (adjusted R-squared)
        fStat    = ssr * (m-k-1.0) / (sse * k)  // F statistic (msr / mse)
    } // train

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the quality of fit including 'rSquared' (R^2, adjusted R^2, F).
     */
    def fit: VectorD = VectorD (rSquared, rBarSq, fStat)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Predict the value of y = f(z) by evaluating the formula y = b dot z,
     *  e.g., (b_0, b_1, b_2) dot (1, z_1, z_2).
     *  @param z  the new vector to predict
     */
    def predict (z: VectoD): Double = exp (b dot z)

} // ExpRegression class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ExpRegressionTest` object tests `ExpRegression` class using the following
* exponential regression problem.
*/
object ExpRegressionTest extends App
{
    // Small hand-made dataset: intercept column plus two regressors.
    val x = new MatrixD ((5, 3), 1.0, 36.0, 66.0,  // 5-by-3 matrix
                                 1.0, 37.0, 68.0,
                                 1.0, 47.0, 64.0,
                                 1.0, 32.0, 53.0,
                                 1.0,  1.0, 101.0)
    val y = VectorD (745.0, 895.0, 442.0, 440.0, 1598.0)
    val z = VectorD (1.0, 20.0, 80.0)

    println ("x = " + x)
    println ("y = " + y)

    // Fit with the nonnegativity check enabled, then predict at z.
    val erg = new ExpRegression (x, true, y)
    erg.train ()
    println ("fit = " + erg.fit)

    val yp = erg.predict (z)
    println ("predict (" + z + ") = " + yp)

} // ExpRegressionTest object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ExpRegressionTest2` object has a basic test for the `ExpRegression` class.
*/
object ExpRegressionTest2 extends App
{
    import scalation.random.{Uniform, Exponential, Random}

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Test `ExpRegression` by simulating 'n'-many observations.
     *  @param n  number of observations
     *  @param k  number of variables
     *  @return  (n, k, actual coefficients, estimated coefficients, R^2)
     */
    def test (n: Int = 10000, k: Int = 5): Tuple5 [Int, Int, VectorD, VectoD, Double] =
    {
        val u = new Uniform (0, 1)              // uniform random
        val e = new Exponential (1)             // exponential error
        val r = new Random ()

        val x = new MatrixD (n, k)              // data matrix
        val y = new VectorD (x.dim1)            // response vector
        val b = new VectorD (k)                 // known coefficients

        for (i <- 0 until b.dim) b(i) = 1 + r.gen * 6
        for (i <- 0 until x.dim1; j <- 0 until x.dim2) x(i, j) = u.gen
        for (i <- 0 until y.dim) y(i) = exp (x(i) dot b) * e.gen

        val erg = new ExpRegression (x, true, y)
        erg.train ()

        (n, k, b, erg.coefficient, erg.fit(0))
    } // test

    val tests = Array.ofDim [Tuple5 [Int, Int, VectorD, VectoD, Double]] (10)
    for (k <- 0 until tests.size) tests(k) = test(1000, k + 1)

    tests.foreach {
        case (n, k, actual, fit, rSquared) => {
            actual.setFormat ("%.4f, ")
            fit.setFormat ("%.4f, ")
            // Fixed label typo in the report line: "fir" -> "fit".
            println ("nobs = %d, regressors = %d, R^2 = %f\nactual = %s\nfit = %s\n".format(n, k, rSquared, actual, fit))
        } // case
    } // foreach

} // ExpRegressionTest2
|
import os
class LogHandler:
    """Reads and prints the log file referenced by the ADAGUC_LOGFILE env var."""

    def readLogFile(self, filename):
        """Return the full contents of ``filename``, or "" if it does not exist."""
        try:
            with open(filename, 'r') as log_file:
                return log_file.read()
        except FileNotFoundError:
            return ""

    def printLogFile(self):
        """Print the configured log file's contents (or a diagnostic message)."""
        try:
            logfile_path = os.environ.get('ADAGUC_LOGFILE')
            if not logfile_path:
                print("Log file not found")
                return
            contents = self.readLogFile(logfile_path)
            if contents:
                print(contents)
            else:
                print("Log file is empty")
        except Exception as e:
            print("Error occurred while printing log file:", e)
# Example usage: dump the configured log file to stdout at import time.
log_handler = LogHandler()
log_handler.printLogFile()
import { ApolloServer } from 'apollo-server-express';
// Models
import {
ProcedureModel,
UserModel,
DeputyModel,
NamedPollModel,
HistoryModel,
ConferenceWeekDetailModel,
PlenaryMinuteModel,
} from '@democracy-deutschland/bundestagio-common';
import CONFIG from '../../config';
import typeDefs from '../../graphql/schemas';
import resolvers from '../../graphql/resolvers';
import schemaDirectives from '../../graphql/schemaDirectives';
// Apollo GraphQL server wired with the project's schemas, resolvers and
// schema directives; every request gets the mongoose models in context.
export const graphql = new ApolloServer({
  // Report metrics to Apollo Engine only when an API key is configured.
  engine: CONFIG.ENGINE_API_KEY
    ? {
        apiKey: CONFIG.ENGINE_API_KEY,
        // Send params and headers to engine only in debug mode
        // (both flags are the negation of ENGINE_DEBUG_MODE).
        privateVariables: !CONFIG.ENGINE_DEBUG_MODE,
        privateHeaders: !CONFIG.ENGINE_DEBUG_MODE,
      }
    : false,
  typeDefs,
  resolvers,
  schemaDirectives,
  // Introspection and playground are unconditionally enabled here.
  introspection: true,
  playground: true,
  // Per-request context: raw connection, authenticated user and models.
  context: ({ req, res }) => ({
    // Connection
    req,
    res,
    // user (populated by upstream auth middleware — set elsewhere)
    user: (req as any).user,
    // Models
    ProcedureModel,
    UserModel,
    DeputyModel,
    NamedPollModel,
    HistoryModel,
    ConferenceWeekDetailModel,
    PlenaryMinuteModel,
  }),
});
|
<gh_stars>0
package io.cucumber.core.api;
import org.apiguardian.api.API;
import java.util.Locale;
/**
* The type registry configurer allows to configure a new type registry and the
* locale.
*
* @deprecated Please use annotation based configuration. See <a href=
* "https://github.com/cucumber/cucumber-jvm/blob/master/examples/java-calculator/src/test/java/io/cucumber/examples/java/ShoppingSteps.java">Annotation
* based example</a> See <a href=
* "https://github.com/cucumber/cucumber-jvm/blob/master/examples/java8-calculator/src/test/java/io/cucumber/examples/java8/ShoppingSteps.java">Lambda
* based example</a>
*/
@API(status = API.Status.STABLE)
@Deprecated
public interface TypeRegistryConfigurer {

    /**
     * @return The locale to use, or null when language from feature file should
     * be used.
     */
    default Locale locale() {
        return null;
    }

    /**
     * Configures the type registry.
     *
     * @param typeRegistry The new type registry.
     */
    // NOTE(review): TypeRegistry is not imported in this file — presumably it
    // lives in the same package; verify.
    void configureTypeRegistry(TypeRegistry typeRegistry);
}
|
import { initLast } from './initLast.ts';
// Yield every element of `iter` except the last one, delegating to
// initLast's first tuple component.
export async function* init<T>(iter: AsyncIterable<T>) {
  const initAndLast = await initLast(iter);
  yield* initAndLast[0];
}
|
#!/bin/bash
# -------------------------------------------------------
# List Linux Permissions Cheatsheet
# -------------------------------------------------------
# _ _
# __ _ __| | ___ _ __ | | _____ _ __ ___
# / _` | / _` |/ _ \ '_ \| |/ / _ \ '__/ __|
# | (_| || (_| | __/ | | | < __/ | \__ \
# \__,_(_)__,_|\___|_| |_|_|\_\___|_| |___/
# adge.denkers@gmail.com | https://github.com/adgedenkers
# -------------------------------------------------------
# file name: linux-permissions.sh
# location: /Users/adge/bin/afd/settings/reference/
# date: 2018-02-15
# -------------------------------------------------------
# Pretty-print the comma-separated cheatsheet file as four aligned columns.
awk -F, '{printf "%-7s %-7s %-7s %-7s\n",$1,$2,$3,$4}' /Users/adge/bin/afd/settings/reference/linux-permissions.txt
|
#!/bin/bash
# NOTE: You need the "playerctl" package in order for this to work!!!
# Suppress playerctl's stderr (e.g. when no player is running).
exec 2>/dev/null
if [ "$(playerctl status)" = "Playing" ]; then
    # $(...) instead of backticks; the former `exec` inside the
    # substitution was a no-op and has been dropped.
    title=$(playerctl metadata xesam:title)
    artist=$(playerctl metadata xesam:artist)
    echo "[$artist] $title"
else
    echo "No song currently playing"
fi
|
import pandas as pd
from sklearn.cluster import KMeans

# NOTE(review): `df` is assumed to be a pandas DataFrame defined elsewhere
# (e.g. an earlier notebook cell) — this snippet raises NameError standalone.
X = df.to_numpy()
# Cluster rows into 3 groups. KMeans uses random initialization, so labels
# vary between runs unless random_state is fixed — TODO confirm acceptable.
kmeans = KMeans(n_clusters=3).fit(X)
# Attach the cluster assignment of each row as a new column.
df['labels'] = kmeans.labels_
#!/bin/bash
# Compile and install exim to collect code coverage
# Stop on the first failed command; unset WORKDIR is now a hard error.
set -euo pipefail
cd "${WORKDIR}"
git clone https://github.com/Exim/exim exim-gcov
cd exim-gcov
git checkout 38903fb
# -p keeps the build re-runnable when Local already exists.
cd src; mkdir -p Local; cp src/EDITME Local/Makefile
cd Local; patch -p1 < "${WORKDIR}/exim.patch"; cd ..
make CFLAGS="-fprofile-arcs -ftest-coverage" LDFLAGS="-fprofile-arcs -ftest-coverage" LFLAGS+="-lgcov --coverage" clean all install
# Configure exim
#cd /usr/exim
#patch -p1 < ${WORKDIR}/exim.configure.patch
#chmod 1777 /var/mail
import {ActivateableMixin, DraggableMixin, HelpableMixin, HoverableMixin, IconableMixin, MenuableMixin} from './mixin/'
import {attr, controller} from '@github/catalyst'
import {html, render} from '@github/jtml'
import {FlaggableMixin} from './mixin/flaggable'
import {SerializedMeasurementGateType} from '@qni/common'
import measurementGateIcon from '../icon/measurement-gate.svg'
// Custom element for a measurement gate; the mixin chain layers
// menu/help/flag/drag/icon/activate/hover behaviors onto HTMLElement.
export class MeasurementGateElement extends MenuableMixin(
  HelpableMixin(FlaggableMixin(DraggableMixin(IconableMixin(ActivateableMixin(HoverableMixin(HTMLElement))))))
) {
  // Catalyst-reflected attribute holding the gate's displayed value.
  @attr value = ''

  get operationType(): typeof SerializedMeasurementGateType {
    return SerializedMeasurementGateType
  }

  connectedCallback(): void {
    // Initialize only once: a reconnect already has a shadow root.
    if (this.shadowRoot !== null) return
    this.attachShadow({mode: 'open'})
    this.update()
  }

  update(): void {
    // Render the gate icon plus the value slot into the shadow root.
    render(
      html`${this.iconHtml(measurementGateIcon)}
        <div id="value" part="value"></div>`,
      this.shadowRoot!
    )
  }

  toJson(): string {
    // Serialize as "type" or "type>flag" when a flag is attached
    // (flag comes from FlaggableMixin).
    if (this.flag === '') {
      return `"${SerializedMeasurementGateType}"`
    } else {
      return `"${SerializedMeasurementGateType}>${this.flag}"`
    }
  }
}
// Register MeasurementGateElement via Catalyst's controller() helper.
controller(MeasurementGateElement)
|
// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//
// MIT License
// Copyright (c) 2021 Colin
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#include <RaftCore/log.h>
#include <RaftCore/util.h>
#include <spdlog/spdlog.h>
#include <algorithm>
// RaftLog manage the log entries, its struct look like:
//
// snapshot/first.....applied....committed....stabled.....last
// --------|------------------------------------------------|
// log entries
//
// for simplify the RaftLog implement should manage all log entries
// that not truncated
namespace eraft {
// newLog returns log using the given storage. It recovers the log
// to the state that it just commits and applies the latest snapshot.
RaftLog::RaftLog(StorageInterface &st) {
  // Load every entry currently held by stable storage: [first, last].
  uint64_t lo = st.FirstIndex();
  uint64_t hi = st.LastIndex();
  std::vector<eraftpb::Entry> entries = st.Entries(lo, hi + 1);
  this->storage_ = &st;
  this->entries_ = entries;
  // Everything below storage's first index is considered applied.
  this->applied_ = lo - 1;
  // Entries already present in storage are stable.
  this->stabled_ = hi;
  this->firstIndex_ = lo;
  // NOTE(review): commited_ is logged below but never assigned in this
  // constructor — confirm it is default-initialized in the class definition.
  SPDLOG_INFO("init raft log with firstIndex " +
              std::to_string(this->firstIndex_) + " applied " +
              std::to_string(this->applied_) + " stabled " +
              std::to_string(this->stabled_) + " commited " +
              std::to_string(this->commited_));
}
RaftLog::~RaftLog() {}

// We need to compact the log entries in some point of time like
// storage compact stabled log entries prevent the log entries
// grow unlimitedly in memory
void RaftLog::MaybeCompact() {
  // Storage may have compacted past our cached first index; drop the
  // in-memory prefix that storage no longer holds.
  uint64_t first = this->storage_->FirstIndex();
  if (first > this->firstIndex_) {
    if (this->entries_.size() > 0) {
      this->entries_.erase(this->entries_.begin(),
                           this->entries_.begin() + this->ToSliceIndex(first));
    }
    this->firstIndex_ = first;
  }
}
// Returns the in-memory entries after stabled_, i.e. those not yet
// persisted to stable storage.
std::vector<eraftpb::Entry> RaftLog::UnstableEntries() {
  if (this->entries_.size() > 0) {
    return std::vector<eraftpb::Entry>(
        this->entries_.begin() + (this->stabled_ - this->firstIndex_ + 1),
        this->entries_.end());
  }
  return std::vector<eraftpb::Entry>{};
}
// Returns the committed-but-not-yet-applied entries, i.e. the in-memory
// range (applied_, commited_].
std::vector<eraftpb::Entry> RaftLog::NextEnts() {
  SPDLOG_INFO("raftlog next ents l.applied: " + std::to_string(this->applied_) +
              " l.firstIndex: " + std::to_string(this->firstIndex_) +
              " l.commited: " + std::to_string(this->commited_));
  if (this->entries_.size() > 0) {
    return std::vector<eraftpb::Entry>(
        this->entries_.begin() + (this->applied_ - this->firstIndex_ + 1),
        this->entries_.begin() + this->commited_ - this->firstIndex_ + 1);
  }
  return std::vector<eraftpb::Entry>{};
}
// Maps a raft log index onto an offset into entries_.
// Fix: both operands are unsigned, so the original "idx < 0" test after
// the subtraction could never be true (underflow wraps around instead).
// Guard before subtracting.
uint64_t RaftLog::ToSliceIndex(uint64_t i) {
  if (i < this->firstIndex_) {
    return 0;
  }
  return i - this->firstIndex_;
}
// Inverse of ToSliceIndex: slice offset back to raft log index.
uint64_t RaftLog::ToEntryIndex(uint64_t i) { return i + this->firstIndex_; }

// Last raft index: taken from in-memory entries when present, otherwise
// from stable storage.
uint64_t RaftLog::LastIndex() {
  uint64_t index = 0;
  // if (!IsEmptySnap(pendingSnapshot_)) {
  //   index = pendingSnapshot_.metadata().index();
  // }
  if (this->entries_.size() > 0) {
    return std::max(this->entries_[this->entries_.size() - 1].index(), index);
  }
  uint64_t i = this->storage_->LastIndex();
  return std::max(i, index);
}
// Returns (term, found) for log index i: served from the in-memory
// entries when i is covered by them, otherwise from stable storage, with
// the pending snapshot's metadata as a last resort.
std::pair<uint64_t, bool> RaftLog::Term(uint64_t i) {
  if (this->entries_.size() > 0 && i >= this->firstIndex_) {
    return std::make_pair<uint64_t, bool>(
        this->entries_[i - this->firstIndex_].term(), true);
  }
  // i <= firstIndex
  std::pair<uint64_t, bool> pair = this->storage_->Term(i);
  if (!pair.second) {
    return std::make_pair<uint64_t, bool>(0, false);
  }
  uint64_t term_ = pair.first;
  if (term_ == 0 && !IsEmptySnap(pendingSnapshot_)) {
    if (i == pendingSnapshot_.metadata().index()) {
      // Fix: return the snapshot's *term* here; the original returned
      // metadata().index(), i.e. the wrong metadata field.
      term_ = static_cast<uint64_t>(pendingSnapshot_.metadata().term());
    }
  }
  return std::make_pair<uint64_t, bool>(static_cast<uint64_t>(term_), true);
}
} // namespace eraft
|
<gh_stars>0
#include "image_io/base/message_writer.h"
#include <cstring>
#include <sstream>
#include <string>
namespace photos_editing_formats {
namespace image_io {
using std::string;
using std::stringstream;
// Formats a message as "[CATEGORY:][description:]text"; plain status
// messages carry no category prefix.
string MessageWriter::GetFormattedMessage(const Message& message) const {
  stringstream message_stream;
  auto type = message.GetType();
  if (type != Message::kStatus) {
    message_stream << GetTypeCategory(type) << ":";
  }
  // Only these two types carry an extra human-readable description.
  if (type == Message::kInternalError || type == Message::kStdLibError) {
    message_stream << GetTypeDescription(type, message.GetSystemErrno()) << ":";
  }
  message_stream << message.GetText();
  return message_stream.str();
}
// Maps a message type onto its broad category label; every error kind
// collapses to "ERROR".
string MessageWriter::GetTypeCategory(Message::Type type) const {
  switch (type) {
    case Message::kStatus:
      return "STATUS";
    case Message::kWarning:
      return "WARNING";
    case Message::kStdLibError:
    case Message::kPrematureEndOfDataError:
    case Message::kStringNotFoundError:
    case Message::kDecodingError:
    case Message::kSyntaxError:
    case Message::kValueError:
    case Message::kInternalError:
      return "ERROR";
  }
  // Unreachable for valid enum values; mirrors the original's "" fallback.
  return string();
}
// Human-readable description for a message type; only error kinds have
// one, and kStdLibError resolves system_errno via strerror.
string MessageWriter::GetTypeDescription(Message::Type type,
                                         int system_errno) const {
  string description;
  switch (type) {
    case Message::kStatus:
      break;
    case Message::kWarning:
      break;
    case Message::kStdLibError:
      description = system_errno > 0 ? std::strerror(system_errno) : "Unknown";
      break;
    case Message::kPrematureEndOfDataError:
      description = "Premature end of data";
      break;
    case Message::kStringNotFoundError:
      description = "String not found";
      break;
    case Message::kDecodingError:
      description = "Decoding error";
      break;
    case Message::kSyntaxError:
      description = "Syntax error";
      break;
    case Message::kValueError:
      description = "Value error";
      break;
    case Message::kInternalError:
      description = "Internal error";
      break;
  }
  return description;
}
} // namespace image_io
} // namespace photos_editing_formats
|
<filename>app/src/main/java/com/telenav/osv/item/network/AuthData.java
package com.telenav.osv.item.network;
/**
* Created by kalmanb on 8/3/17.
*/
/**
 * Value object holding the fields of an authentication response:
 * access token, identity and display information.
 */
public class AuthData extends ApiResponse {

    private String mAccessToken = "";

    private String mId = "";

    private String mUsername = "";

    private String mDisplayName = "";

    // Numeric user type/role code; defaults to 0.
    private int mUserType = 0;

    // NOTE(review): unlike the String fields above, mLoginType has no ""
    // default, so getLoginType() may return null — confirm callers handle it.
    private String mLoginType;

    public String getAccessToken() {
        return mAccessToken;
    }

    public void setAccessToken(String accessToken) {
        this.mAccessToken = accessToken;
    }

    public String getId() {
        return mId;
    }

    public void setId(String id) {
        this.mId = id;
    }

    public String getUsername() {
        return mUsername;
    }

    public void setUsername(String username) {
        this.mUsername = username;
    }

    public String getDisplayName() {
        return mDisplayName;
    }

    public void setDisplayName(String displayName) {
        this.mDisplayName = displayName;
    }

    public int getUserType() {
        return mUserType;
    }

    public void setUserType(int userType) {
        this.mUserType = userType;
    }

    public String getLoginType() {
        return mLoginType;
    }

    public void setLoginType(String loginType) {
        this.mLoginType = loginType;
    }
}
|
#!/bin/bash
# Patches translated strings (and a few pointers) directly into three
# decompressed game overlay binaries, then recompresses them with blz and
# writes the new compressed sizes back into the y9.bin overlay table.
./blz -d ./tmp/out/overlay/overlay_0011.bin > /dev/null
./blz -d ./tmp/out/overlay/overlay_0028.bin > /dev/null
./blz -d ./tmp/out/overlay/overlay_0035.bin > /dev/null
# Each printf below emits a NUL-padded ASCII string that dd writes at a
# fixed byte offset (seek) inside the decompressed overlay.
# "Apprentice"
printf '\x41\x70\x70\x72\x65\x6E\x74\x69\x63\x65\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52264 count=12 conv=notrunc 2> /dev/null
# "Rank 5"
printf '\x52\x61\x6E\x6B\x20\x35\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52276 count=8 conv=notrunc 2> /dev/null
# "Rank 4"
printf '\x52\x61\x6E\x6B\x20\x34\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52284 count=8 conv=notrunc 2> /dev/null
# "Rank 3"
printf '\x52\x61\x6E\x6B\x20\x33\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52292 count=8 conv=notrunc 2> /dev/null
# "Rank 2"
printf '\x52\x61\x6E\x6B\x20\x32\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52300 count=8 conv=notrunc 2> /dev/null
# "Rank 1"
printf '\x52\x61\x6E\x6B\x20\x31\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52308 count=8 conv=notrunc 2> /dev/null
# "Vice Squad Leader"
printf '\x56\x69\x63\x65\x20\x53\x71\x75\x61\x64\x20\x4C\x65\x61\x64\x65\x72\x00\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52316 count=20 conv=notrunc 2> /dev/null
# "Squad Leader" (the 4-byte write first updates the string pointer)
printf '\xB0\x70\x26\x02' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=29988 count=4 conv=notrunc 2> /dev/null
printf '\x53\x71\x75\x61\x64\x20\x4C\x65\x61\x64\x65\x72\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52336 count=14 conv=notrunc 2> /dev/null
# "Vice Officer"
printf '\xBE\x70\x26\x02' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=29992 count=4 conv=notrunc 2> /dev/null
printf '\x56\x69\x63\x65\x20\x4F\x66\x66\x69\x63\x65\x72\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52350 count=14 conv=notrunc 2> /dev/null
# "Officer"
printf '\xCC\x70\x26\x02' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=29996 count=4 conv=notrunc 2> /dev/null
printf '\x4F\x66\x66\x69\x63\x65\x72\x00\x00\x00' | dd of=./tmp/out/overlay/overlay_0011.bin bs=1 seek=52364 count=10 conv=notrunc 2> /dev/null
# "What's this?"
printf '\x57\x68\x61\x74\x27\x73\x20\x74\x68\x69\x73\x3f\x00\x00\x00\x00' | dd of=./tmp/out/overlay/overlay_0028.bin bs=1 seek=70232 count=16 conv=notrunc 2> /dev/null
# "Details"
printf '\x44\x65\x74\x61\x69\x6c\x73\x00\x00' | dd of=./tmp/out/overlay/overlay_0028.bin bs=1 seek=70248 count=8 conv=notrunc 2> /dev/null
# "More details"
# NOTE(review): this printf emits 19 bytes but dd copies only count=18 —
# verify the intended padded length.
printf '\x4d\x6f\x72\x65\x20\x64\x65\x74\x61\x69\x6c\x73\x00\x00\x00\x00\x00\x00\x00' | dd of=./tmp/out/overlay/overlay_0028.bin bs=1 seek=70260 count=18 conv=notrunc 2> /dev/null
# "Records"
printf '\x52\x65\x63\x6f\x72\x64\x73\x00' | dd of=./tmp/out/overlay/overlay_0028.bin bs=1 seek=70276 count=8 conv=notrunc 2> /dev/null
# "Back"
printf '\x42\x61\x63\x6b\x00\x00\x00\x00' | dd of=./tmp/out/overlay/overlay_0028.bin bs=1 seek=70284 count=8 conv=notrunc 2> /dev/null
# "*Click*!"
printf '\x2A\x43\x6C\x69\x63\x6B\x2A\x21\x00\x00\x00\x00' | dd of=./tmp/out/overlay/overlay_0035.bin bs=1 seek=71732 count=12 conv=notrunc 2> /dev/null
# Re-compress the patched overlays.
./blz -en ./tmp/out/overlay/overlay_0011.bin > /dev/null
./blz -en ./tmp/out/overlay/overlay_0028.bin > /dev/null
./blz -en ./tmp/out/overlay/overlay_0035.bin > /dev/null
# Write each overlay's new compressed size (little-endian, 2 bytes) into
# its slot in the y9.bin overlay table.
FILE_SIZE=$(stat --printf "%s" ./tmp/out/overlay/overlay_0011.bin)
FILE_SIZE=$(printf "%x\n" $FILE_SIZE)
echo -n -e "\\x"${FILE_SIZE:2:2}"\\x"${FILE_SIZE:0:2} | dd of=./tmp/out/y9.bin bs=1 seek=380 count=2 conv=notrunc 2> /dev/null
FILE_SIZE=$(stat --printf "%s" ./tmp/out/overlay/overlay_0028.bin)
FILE_SIZE=$(printf "%x\n" $FILE_SIZE)
echo -n -e "\\x"${FILE_SIZE:2:2}"\\x"${FILE_SIZE:0:2} | dd of=./tmp/out/y9.bin bs=1 seek=924 count=2 conv=notrunc 2> /dev/null
<filename>iris_snippets.py
# coding: utf-8
# In[1]:
import iris
from iris.unit import Unit
from iris.cube import CubeList
from iris.exceptions import CoordinateNotFoundError, CoordinateMultiDimError
# Opt in to forthcoming iris behaviors (netCDF variable promotion and
# datetime-aware cell comparisons).
iris.FUTURE.netcdf_promote = True
iris.FUTURE.cell_datetime_objects = True
def time_coord(cube):
    """Return the variable attached to time axis and rename it to time."""
    try:
        cube.coord(axis='T').rename('time')
    except CoordinateNotFoundError:
        # No T axis found; assume a coordinate literally named 'time' exists.
        pass
    timevar = cube.coord('time')
    return timevar


def z_coord(cube):
    """Heuristic way to return the
    dimensionless vertical coordinate."""
    try:
        z = cube.coord(axis='Z')
    except CoordinateNotFoundError:
        # Fall back to scanning all Z-axis coords, preferring a 1-D one.
        z = cube.coords(axis='Z')
        for coord in cube.coords(axis='Z'):
            if coord.ndim == 1:
                z = coord
    return z
def time_near(cube, datetime):
    """Return the nearest index to a `datetime` (-1 when lookup fails)."""
    timevar = time_coord(cube)
    try:
        # Convert the datetime into the cube's numeric time units first.
        time = timevar.units.date2num(datetime)
        idx = timevar.nearest_neighbour_index(time)
    except IndexError:
        idx = -1
    return idx
def time_slice(cube, start, stop=None):
    """Slice `cube` along time: a range [start, stop) or a single instant.

    TODO: Re-write to use `iris.FUTURE.cell_datetime_objects`."""
    istart = time_near(cube, start)
    if stop:
        istop = time_near(cube, stop)
        # Equal indices would produce an empty slice, so fail loudly.
        if istart == istop:
            raise ValueError('istart must be different from istop!'
                             'Got istart {!r} and '
                             ' istop {!r}'.format(istart, istop))
        return cube[istart:istop, ...]
    else:
        return cube[istart, ...]
def bbox_extract_2Dcoords(cube, bbox):
    """Extract a sub-set of a cube inside a lon, lat bounding box
    bbox=[lon_min, lat_min, lon_max, lat_max]
    (the original docstring listed the order as lon_min/lon_max/lat_min/
    lat_max, which contradicts the indexing below and the `bbox` used later
    in this file).
    NOTE: This is a work around to subset an iris cube that has
    2D lon, lat coords."""
    lons = cube.coord('longitude').points
    lats = cube.coord('latitude').points

    def minmax(v):
        return np.min(v), np.max(v)

    # Boolean mask of grid points strictly inside the box.
    inregion = np.logical_and(np.logical_and(lons > bbox[0],
                                             lons < bbox[2]),
                              np.logical_and(lats > bbox[1],
                                             lats < bbox[3]))
    region_inds = np.where(inregion)
    # Tight index bounds of the selected region along both grid dimensions.
    imin, imax = minmax(region_inds[0])
    jmin, jmax = minmax(region_inds[1])
    return cube[..., imin:imax+1, jmin:jmax+1]
def intersection(cube, bbox):
    """Sub sets cube with 1D or 2D lon, lat coords.
    Using `intersection` instead of `extract` we deal with 0-360
    longitudes automagically."""
    try:
        method = "Using iris `cube.intersection`"
        cube = cube.intersection(longitude=(bbox[0], bbox[2]),
                                 latitude=(bbox[1], bbox[3]))
    except CoordinateMultiDimError:
        # 2-D lon/lat grids are not supported by intersection();
        # fall back to the manual bounding-box extraction above.
        method = "Using iris `bbox_extract_2Dcoords`"
        cube = bbox_extract_2Dcoords(cube, bbox)
    print(method)
    return cube
def get_cube(url, name_list=None, bbox=None, time=None, units=None):
    """Load a single cube from `url` with optional filtering/sub-setting.

    Parameters
    ----------
    url : source readable by `iris.load_raw` (e.g. OPeNDAP URL).
    name_list : optional list of standard names to keep.
    bbox : optional [lon_min, lat_min, lon_max, lat_max].
    time : optional datetime, or (start, stop) tuple of datetimes.
    units : optional target units; the cube is converted when they differ.

    Raises
    ------
    ValueError : when `name_list` matches nothing, or `time` is malformed.
    """
    cubes = iris.load_raw(url)
    if name_list:
        # Comprehension instead of the original lambda bound to a name.
        cubes = CubeList([cube for cube in cubes
                          if cube.standard_name in name_list])
        if not cubes:
            raise ValueError('Cube does not contain {!r}'.format(name_list))
    # Always merge: the original only assigned `cube` inside the name_list
    # branch, so calling without name_list raised NameError below.
    cube = cubes.merge_cube()
    if bbox:
        cube = intersection(cube, bbox)
    if time:
        if isinstance(time, datetime):
            start, stop = time, None
        elif isinstance(time, tuple):
            start, stop = time[0], time[1]
        else:
            raise ValueError('Time must be start or (start, stop).'
                             ' Got {!r}'.format(time))
        cube = time_slice(cube, start, stop)
    if units:
        if not cube.units == units:
            cube.convert_units(units)
    return cube
# In[2]:
import time
import contextlib
@contextlib.contextmanager
def timeit(log=None):
    """Time the enclosed block, reporting elapsed HH:MM:SS to `log.info`
    when a logger is given, otherwise to stdout."""
    start = time.time()
    yield
    elapsed = time.strftime("%H:%M:%S", time.gmtime(time.time() - start))
    if log:
        log.info(elapsed)
    else:
        print(elapsed)
# In[3]:
# Enable inline matplotlib rendering inside the notebook.
get_ipython().magic('matplotlib inline')

import numpy as np
import numpy.ma as ma
import iris.quickplot as qplt
import matplotlib.pyplot as plt
def plot_surface(cube, model=''):
    """Plot the surface level of `cube`, titling with `model` and time."""
    z = z_coord(cube)
    positive = z.attributes.get('positive', None)
    # Pick the level closest to the surface: the maximum point when the
    # vertical axis increases upward, the minimum otherwise.
    if positive == 'up':
        idx = np.argmax(z.points)
    else:
        idx = np.argmin(z.points)
    c = cube[idx, ...]
    # Mask NaNs/Infs so pcolormesh leaves them blank.
    c.data = ma.masked_invalid(c.data)
    t = time_coord(cube)
    t = t.units.num2date(t.points)[0]
    qplt.pcolormesh(c)
    plt.title('{}: {}\nVariable: {} level: {}'.format(model, t, c.name(), idx))
# In[4]:
# Report the iris version/location in use (helps reproduce results).
print(iris.__version__)
print(iris.__file__)

# In[5]:
from datetime import datetime, timedelta

# Query window: the last 7 days, ending now.
start = datetime.utcnow() - timedelta(days=7)
stop = datetime.utcnow()
name_list = ['sea_water_potential_temperature', 'sea_water_temperature']
# Bounding box as [lon_min, lat_min, lon_max, lat_max]
# (matches the index order used by intersection()).
bbox = [-76.4751, 38.3890, -71.7432, 42.9397]
units = Unit('Kelvin')
# In[6]:

# Each cell below fetches the temperature cube for one model, timing only
# the (network-bound) load, then plots the surface level.

# ROMS hindcast served by Rutgers.
model = 'MARACOOS/ESPRESSO'
url = 'http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2009_da/his'
with timeit():
    cube = get_cube(url, name_list=name_list, bbox=bbox,
                    time=start, units=units)
plot_surface(cube, model)

# In[7]:

# COAWST forecast aggregation from WHOI.
model = 'USGS/COAWST'
url = 'http://geoport.whoi.edu/thredds/dodsC/coawst_4/use/fmrc/'
url += 'coawst_4_use_best.ncd'
with timeit():
    cube = get_cube(url, name_list=name_list, bbox=bbox,
                    time=start, units=units)
plot_surface(cube, model)

# In[8]:

# HYCOM regional aggregation from NOAA NCDDC.
model = 'HYCOM'
url = 'http://ecowatch.ncddc.noaa.gov/thredds/dodsC/hycom/hycom_reg1_agg/'
url += 'HYCOM_Region_1_Aggregation_best.ncd'
with timeit():
    cube = get_cube(url, name_list=name_list, bbox=bbox,
                    time=start, units=units)
plot_surface(cube, model)

# In[9]:

# NYHOPS forecast collection from Stevens Institute.
model = 'NYHOP'
url = 'http://colossus.dl.stevens-tech.edu/thredds/dodsC/fmrc/NYBight/'
url += 'NYHOPS_Forecast_Collection_for_the_New_York_Bight_best.ncd'
with timeit():
    cube = get_cube(url, name_list=name_list, bbox=bbox,
                    time=start, units=units)
plot_surface(cube, model)

# In[10]:

# ROMS Northwest Atlantic run from Rutgers ESM.
model = 'RUTGERS/NWA'
url = 'http://oceanus.esm.rutgers.edu:8090/thredds/dodsC/ROMS/NWA/Run03/Output'
with timeit():
    cube = get_cube(url, name_list=name_list, bbox=bbox,
                    time=start, units=units)
plot_surface(cube, model)
|
<filename>src/utils/database/defaultDatabase.ts<gh_stars>0
import { FireBase } from "./firebase/firebase";
/**
 * Builds the default database implementation (a Firebase-backed store) for
 * any transaction type that carries a string `id`.
 */
export const newDefaultDatabase = <Transaction extends { id: string }>(): FireBase<Transaction> =>
    new FireBase<Transaction>();
|
use std::fs;
use std::io;

/// Creates a symbolic link at `destination_file` pointing to `source_file`.
///
/// `std::fs` has no `symlink` function (the original code did not compile);
/// symlink creation is platform-specific, so the `std::os` variants are used.
fn create_symlink(source_file: &str, destination_file: &str) -> io::Result<()> {
    #[cfg(unix)]
    std::os::unix::fs::symlink(source_file, destination_file)?;
    // On Windows, file and directory links are distinct syscalls; assume a
    // file link here (NOTE(review): confirm callers never link directories).
    #[cfg(windows)]
    std::os::windows::fs::symlink_file(source_file, destination_file)?;
    Ok(())
}
require 'nokogiri'

# Single-quoted so the double quotes inside the HTML attributes need no
# escaping -- the original double-quoted literal was a syntax error.
html_string = '<p>This is a <a href="https://example.com">link</a> and this is <a href="http://another-example.com">another link</a></p>'

doc = Nokogiri::HTML(html_string)

# Collect the href attribute of every anchor element.
links = doc.css('a').map { |link| link.attribute('href').to_s }

puts links
# Output: ["https://example.com", "http://another-example.com"]
<reponame>wolfchinaliu/gameCenter<filename>jwx/src/main/java/weixin/liuliangbao/jsonbean/WeixinShareAccess.java
package weixin.liuliangbao.jsonbean;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.*;
import java.io.Serializable;
import java.util.Date;
/**
 * JPA entity mapped to {@code weixin_share_access}: one row per viewer
 * access to a shared WeChat article, tracking the data-traffic ("flow")
 * reward state for that share.
 *
 * Created by GuoLiang on 2016/7/13 10:59.
 */
@Entity
@Table(name = "weixin_share_access")
public class WeixinShareAccess implements Serializable {

    private String id;              // UUID primary key
    private String sharerOpenId;    // openId of the user who shared the article
    private String viewerOpenId;    // openId of the user who viewed it
    private String accountId;       // merchant (account) id
    private String shareId;         // id of the shared article
    private Date createTime;        // creation time / time of the share
    private Date updateTime;        // last update time
    private Double flowValue;       // traffic-coin value awarded
    private String flowType;        // traffic type
    private Integer status;         // traffic claim status
    private Long accessTimes = 1L;  // times this viewer opened the share page

    // Hibernate-generated UUID primary key (36-char string form).
    @Id
    @GeneratedValue(generator = "shareAccesstableGenerator")
    @GenericGenerator(name = "shareAccesstableGenerator", strategy = "uuid")
    @Column(name = "ID", nullable = false, length = 36)
    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    @Column(name = "sharer_open_id", nullable = false, length = 36)
    public String getSharerOpenId() {
        return sharerOpenId;
    }

    public void setSharerOpenId(String sharerOpenId) {
        this.sharerOpenId = sharerOpenId;
    }

    @Column(name = "viewer_open_id", nullable = false, length = 36)
    public String getViewerOpenId() {
        return viewerOpenId;
    }

    public void setViewerOpenId(String viewerOpenId) {
        this.viewerOpenId = viewerOpenId;
    }

    @Column(name = "account_id", nullable = false, length = 36)
    public String getAccountId() {
        return accountId;
    }

    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    @Column(name = "share_id", nullable = false, length = 36)
    public String getShareId() {
        return shareId;
    }

    public void setShareId(String shareId) {
        this.shareId = shareId;
    }

    @Column(name = "create_time")
    @Temporal(TemporalType.TIMESTAMP)
    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    @Column(name = "flow_value")
    public Double getFlowValue() {
        return flowValue;
    }

    public void setFlowValue(Double flowValue) {
        this.flowValue = flowValue;
    }

    @Column(name = "flow_type", length = 64)
    public String getFlowType() {
        return flowType;
    }

    public void setFlowType(String flowType) {
        this.flowType = flowType;
    }

    @Column(name = "status", length = 1)
    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    @Column(name = "update_time")
    @Temporal(TemporalType.TIMESTAMP)
    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    @Column(name = "access_times", length = 20)
    public Long getAccessTimes() {
        return accessTimes;
    }

    public void setAccessTimes(Long accessTimes) {
        this.accessTimes = accessTimes;
    }
}
|
#!/bin/sh
#
# This is a wrapper for xz to compress the kernel image using appropriate
# compression options depending on the architecture.
#
# Author: Lasse Collin <lasse.collin@tukaani.org>
#
# This file has been put into the public domain.
# You can do whatever you want with this file.
#

BCJ=
LZMA2OPTS=

# Pick the branch/call/jump (BCJ) filter matching the target architecture;
# ia64 additionally needs pb=4 because its instructions are 16-byte aligned.
case $SRCARCH in
	x86)		BCJ=--x86 ;;
	powerpc)	BCJ=--powerpc ;;
	ia64)		BCJ=--ia64; LZMA2OPTS=pb=4 ;;
	arm)		BCJ=--arm ;;
	sparc)		BCJ=--sparc ;;
esac

# CRC32 keeps the in-kernel decompressor small; a leading comma in the
# --lzma2 value (when LZMA2OPTS is empty) is accepted by xz.
exec xz --check=crc32 $BCJ --lzma2=$LZMA2OPTS,dict=32MiB
|
/*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webauthn4j.appattest.validator;
import com.webauthn4j.appattest.authenticator.DCAppleDevice;
import com.webauthn4j.appattest.authenticator.DCAppleDeviceImpl;
import com.webauthn4j.authenticator.CoreAuthenticator;
import com.webauthn4j.data.CoreAuthenticationData;
import com.webauthn4j.data.CoreAuthenticationParameters;
import com.webauthn4j.data.attestation.authenticator.AuthenticatorData;
import com.webauthn4j.data.extension.authenticator.AuthenticationExtensionAuthenticatorOutput;
import com.webauthn4j.server.CoreServerProperty;
import com.webauthn4j.util.AssertUtil;
import com.webauthn4j.validator.CoreAuthenticationDataValidator;
import com.webauthn4j.validator.CoreAuthenticationObject;
import com.webauthn4j.validator.CustomCoreAuthenticationValidator;
import org.checkerframework.checker.nullness.qual.NonNull;
import java.util.List;
/**
 * Validates App Attest assertion data by adapting the generic WebAuthn core
 * authentication validation to Apple's DeviceCheck attestation model.
 */
public class DCAssertionDataValidator extends CoreAuthenticationDataValidator {

    public DCAssertionDataValidator(List<CustomCoreAuthenticationValidator> customAuthenticationValidators) {
        super(customAuthenticationValidators, new DCAssertionSignatureValidator());
    }

    /**
     * Builds the authentication object, wrapping the stored authenticator in a
     * {@link DCAppleDevice} so downstream validators see an Apple device.
     */
    @Override
    protected @NonNull CoreAuthenticationObject createCoreAuthenticationObject(@NonNull CoreAuthenticationData authenticationData, @NonNull CoreAuthenticationParameters authenticationParameters) {
        AssertUtil.notNull(authenticationData, "authenticationData must not be null");
        // Fixed: the original checked authenticationData twice, leaving
        // authenticationParameters unguarded despite the message text.
        AssertUtil.notNull(authenticationParameters, "authenticationParameters must not be null");
        byte[] credentialId = authenticationData.getCredentialId();
        AuthenticatorData<AuthenticationExtensionAuthenticatorOutput> authenticatorData = authenticationData.getAuthenticatorData();
        byte[] authenticatorDataBytes = authenticationData.getAuthenticatorDataBytes();
        byte[] clientDataHash = authenticationData.getClientDataHash();
        CoreServerProperty serverProperty = authenticationParameters.getServerProperty();
        CoreAuthenticator authenticator = authenticationParameters.getAuthenticator();
        DCAppleDevice dcAppleDevice = new DCAppleDeviceImpl(
                authenticator.getAttestedCredentialData(),
                authenticator.getAttestationStatement(),
                authenticator.getCounter(),
                authenticator.getAuthenticatorExtensions());
        //noinspection ConstantConditions null check is already done in caller
        return new DCAuthenticationObject(
                credentialId, authenticatorData, authenticatorDataBytes, clientDataHash, serverProperty, dcAppleDevice
        );
    }
}
|
def find_shortest_route(matrix, start, end):
    """Breadth-first search for a shortest path from `start` to `end`.

    Parameters
    ----------
    matrix : mapping
        Adjacency mapping: node -> iterable of neighbour nodes.
    start, end : hashable
        Endpoint nodes.

    Returns
    -------
    list
        Shortest path including both endpoints, or [] when unreachable.
    """
    # Local import keeps the function self-contained; the original used
    # `collections.deque` without a visible import in this file.
    from collections import deque

    queue = deque([[start]])
    # Mark nodes as visited when enqueued; the original never marked
    # `start`, so a cycle back to it would re-enqueue redundant paths.
    visited = {start}
    while queue:
        path = queue.popleft()
        node = path[-1]
        if node == end:
            return path
        for next_node in matrix[node]:
            if next_node not in visited:
                visited.add(next_node)
                queue.append(path + [next_node])
    return []
<filename>src/commands/pony/user/update.ts
import {flags, FlagsConfig} from '@salesforce/command';
import {sfdx} from '../../..';
import PonyCommand from '../../../lib/PonyCommand';
// @ts-ignore
export default class UserUpdateCommand extends PonyCommand {

    public static readonly description: string = `update target user`;

    public static readonly requiresUsername: boolean = true;
    public static readonly requiresProject: boolean = true;

    public static readonly flagsConfig: FlagsConfig = {
        values: flags.string({
            char: 'v',
            description: 'a list of <fieldName>=<value> pairs to search for',
            required: true,
        })
    };

    /**
     * Resolves the target user's record Id via `force:user:display`, then
     * applies the supplied field/value pairs with `force:data:record:update`.
     */
    public async run(): Promise<void> {
        const username = this.org?.getUsername();
        const devhubOrg = await this.org?.getDevHubOrg();
        const userDisplay = await sfdx.force.user.display({
            targetusername: username,
            targetdevhubusername: devhubOrg?.getUsername()
        });
        await sfdx.force.data.record.update({
            targetusername: this.flags.targetusername,
            sobjecttype: 'User',
            sobjectid: userDisplay.id,
            values: this.flags.values
        });
    }
}
|
#!/bin/bash
# Messing around with some shell functions

# Save the original stdout on fd 5 so log output can still reach the
# terminal after later redirections.
exec 5>&1

# plog: append a message (arguments, or stdin when called with none) to the
# per-run log file, opening fd 6 lazily on first use.
plog() {
    if [ -z "${per_run_log:-}" ] ; then
        per_run_log="log_test.out"
        # In SPEW mode, log to the terminal too
        if [ "${SPEW:-0}" != 0 ] ; then
            # fd 6 feeds a tee that appends to the log and echoes to fd 5.
            exec 6> >(tee -a "$per_run_log" >&5)
        else
            exec 6>>"$per_run_log"
        fi
    fi
    # No args: copy stdin to fd 6; otherwise write the args. If either write
    # fails, fall back to `log`.
    # NOTE(review): `log` is not defined in this file -- presumably provided
    # by the sourcing environment; confirm.
    if ! { [ $# = 0 ] && cat >&6 || echo "$@" >&6 ; } ; then
        log '!!'" Failed to write to $per_run_log"
        log "$@"
    fi
}

plog "Hello world"
date | plog
##
# Show the whole log (with a header) to demonstrate both write paths worked.
tail -v -n+0 "$per_run_log"
|
<filename>opentaps/opentaps-common/src/common/org/opentaps/common/autocomplete/UtilAutoComplete.java
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.common.autocomplete;
import java.sql.ResultSet;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.condition.EntityCondition;
import org.ofbiz.entity.condition.EntityOperator;
import org.ofbiz.entity.util.EntityFindOptions;
import org.ofbiz.entity.util.EntityUtil;
import org.opentaps.base.constants.RoleTypeConstants;
import org.opentaps.base.constants.StatusItemConstants;
import org.opentaps.common.event.AjaxEvents;
import org.opentaps.common.util.UtilMessage;
/**
 * Auto Complete constants and utility methods.
 */
public final class UtilAutoComplete {

    private UtilAutoComplete() { }

    /** Common EntityFindOptions for distinct search. */
    public static final EntityFindOptions AC_FIND_OPTIONS = new EntityFindOptions(true, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, true);

    /** How many results to show in the autocomplete list. */
    public static final int AC_DEFAULT_RESULTS_SIZE = 10;

    /** The fields to retrieve to build party names autocompleted items. */
    public static final List<String> AC_PARTY_NAME_FIELDS = Arrays.asList("partyId", "groupName", "firstName", "lastName");

    /** The party autocompleters sort. */
    public static final List<String> AP_PARTY_ORDER_BY = Arrays.asList("groupName", "firstName", "lastName");

    /** The fields to retrieve to build GL accounts autocompleted items. */
    public static final List<String> AC_ACCOUNT_FIELDS = Arrays.asList("glAccountId", "accountCode", "accountName");

    /** The GL account autocompleters sort. */
    public static final List<String> AC_ACCOUNT_ORDER_BY = Arrays.asList("accountCode", "accountName");

    /** The fields to retrieve to build products autocompleted items. */
    public static final List<String> AC_PRODUCT_FIELDS = Arrays.asList("productId", "internalName");

    /** The product autocompleters sort. */
    public static final List<String> AC_PRODUCT_ORDER_BY = Arrays.asList("productId");

    /** Some role conditions used by the autocompleters. */
    public static final EntityCondition ac_accountRoleCondition, ac_contactRoleCondition, ac_prospectRoleCondition, ac_clientRoleCondition, ac_crmPartyRoleCondition, ac_accountOrProspectRoleCondition, ac_activePartyCondition;

    // Pre-built, immutable conditions shared by all autocompleter lookups.
    // A party is "active" when its status is not DISABLED or is unset.
    static {
        ac_activePartyCondition = EntityCondition.makeCondition(EntityOperator.OR,
                     EntityCondition.makeCondition("statusId", EntityOperator.NOT_EQUAL, StatusItemConstants.PartyStatus.PARTY_DISABLED),
                     EntityCondition.makeCondition("statusId", EntityOperator.EQUALS, null));
        ac_accountRoleCondition = EntityCondition.makeCondition("roleTypeIdFrom", EntityOperator.EQUALS, RoleTypeConstants.ACCOUNT);
        ac_contactRoleCondition = EntityCondition.makeCondition("roleTypeIdFrom", EntityOperator.EQUALS, RoleTypeConstants.CONTACT);
        ac_prospectRoleCondition = EntityCondition.makeCondition("roleTypeIdFrom", EntityOperator.EQUALS, RoleTypeConstants.PROSPECT);
        ac_clientRoleCondition = EntityCondition.makeCondition("roleTypeIdFrom", EntityOperator.IN, UtilMisc.toList(RoleTypeConstants.ACCOUNT, RoleTypeConstants.CONTACT, RoleTypeConstants.PROSPECT));
        ac_crmPartyRoleCondition = EntityCondition.makeCondition("roleTypeIdFrom", EntityOperator.IN, UtilMisc.toList(RoleTypeConstants.ACCOUNT, RoleTypeConstants.CONTACT, RoleTypeConstants.PROSPECT, RoleTypeConstants.PARTNER));
        ac_accountOrProspectRoleCondition = EntityCondition.makeCondition("roleTypeIdFrom", EntityOperator.IN, UtilMisc.toList(RoleTypeConstants.ACCOUNT, RoleTypeConstants.PROSPECT));
    }

    /**
     * Builds the condition for currently-active relationships toward the given
     * party, combined with the caller's extra conditions.
     * @param party the party on the "to" side of the relationship
     * @param otherConditions extra conditions ANDed in
     * @return an <code>EntityCondition</code> value
     */
    public static EntityCondition getActiveRelationshipCondition(GenericValue party, EntityCondition otherConditions) {
        return EntityCondition.makeCondition(EntityOperator.AND,
                EntityCondition.makeCondition("partyIdTo", party.get("partyId")),
                EntityUtil.getFilterByDateExpr(),
                otherConditions);
    }

    /**
     * Make an autocomplete selection list in JSON. The idea is we pass in the key field to be used for the option value (objectKey)
     * and a SectionBuilder for constructing a map representing each of the elements. The map must return a name field containing
     * the description of the option and a value field. TODO: maybe an object is a better way to do this.
     * @param response a <code>HttpServletResponse</code> value
     * @param collection a <code>Collection</code> value
     * @param objectKey a <code>String</code> value
     * @param builder a <code>SelectionBuilder</code> value
     * @param locale a <code>Locale</code> value used to localize the "no match" message
     * @return a <code>String</code> value
     */
    public static String makeSelectionJSONResponse(HttpServletResponse response, Collection collection, String objectKey, SelectionBuilder builder, Locale locale) {
        JSONArray jsonArray = new JSONArray();
        if (collection != null) {
            for (Object element : collection) {
                Map<String, Object> map = builder.buildRow(element);
                JSONObject jsonObject = new JSONObject();
                for (String key : map.keySet()) {
                    jsonObject.put(key, map.get(key));
                }
                jsonArray.element(jsonObject.toString());
            }
        }
        // Empty result: emit a single localized "no match" placeholder row.
        if (collection == null || collection.isEmpty()) {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("name", UtilMessage.expandLabel("OpentapsAutocompletionNoMatch", locale));
            jsonObject.put(objectKey, "");
            jsonArray.element(jsonObject.toString());
        }
        Map<String, Object> retval = new HashMap<String, Object>();
        retval.put("items", jsonArray);
        retval.put("identifier", objectKey);
        return AjaxEvents.doJSONResponse(response, JSONObject.fromObject(retval));
    }
}
|
#!/bin/sh -i
#
# For SYS_TYPE blueos_3_ppc64le_ib
EXE_NAME=pf3dtest-mpi
export EXE_NAME
echo "Using xlc to build for rzansel"
# Per-platform output directory keyed on the LC SYS_TYPE variable.
mkdir -p $SYS_TYPE
# issue a make clean so that optimization does not become mixed up
# when switching between reference and optimized builds.
make -f Makefile-mpi clean
# Build with IBM XL C targeting POWER9, OpenMP enabled, statically linking
# the site-installed FFTW3.
make -f Makefile-mpi CODE_NAME=$EXE_NAME CC="mpixlc -g8 -std=c99 -qsmp=omp -DCOMPLEXMACRO -DUSE_FFTW -DUSE_MPI" CFLAGS="" COPTIMIZE="-O3 -qtune=pwr9 -qarch=pwr9 -qreport" LDFLAGS="-std=c99 -qreport /usr/tcetmp/packages/fftw/fftw-3.3.7/lib/libfftw3.a -lm"
|
"""
Classify a given text as spam or ham
"""
import nltk
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline
def classify_text(text):
"""
This function classifies a given text as spam or ham.
"""
# Preprocess the text
words = nltk.word_tokenize(text)
# Create the model
pipeline = Pipeline([
('svc', SVC())
])
# train the model
pipeline.fit(X, Y)
# predict using the model
prediction = pipeline.predict(words)
if __name__ == "__main__":
classification = classify_text("A sample text")
print(classification) |
<gh_stars>10-100
import { SimpleCommand } from '../../executor/simple-command';
/**
 * Command that prepares a proxy domain for the given instance/port pair.
 * NOTE(review): `instanceHash` is typed `any`; tighten to a concrete type
 * once its shape is known.
 */
export class PrepareProxyDomainCommand extends SimpleCommand {
    constructor(readonly instanceHash: any, readonly portId: string) {
        super();
    }
}
|
<filename>test-data/comp-changes/client/src/mainclient/classLessAccessible/ClassLessAccessiblePub2PackPrivExt.java
package mainclient.classLessAccessible;
import main.classLessAccessible.ClassLessAccessiblePub2PackPriv;
/**
 * Test fixture (comp-changes test data): client subclass exercising a
 * superclass whose accessibility changes from public to package-private.
 * The method bodies are intentionally minimal and must not be altered.
 */
public class ClassLessAccessiblePub2PackPrivExt extends ClassLessAccessiblePub2PackPriv {

    // Instantiates both the base and the subclass type.
    public void instantiatePub2PackPriv() {
        ClassLessAccessiblePub2PackPriv c1 = new ClassLessAccessiblePub2PackPriv();
        ClassLessAccessiblePub2PackPriv c2 = new ClassLessAccessiblePub2PackPrivExt();
    }

    // Reads an inherited public field through super.
    public int accessPublicField() {
        return super.publicField;
    }

    // Invokes an inherited public method through super.
    public int invokePublicMethod() {
        return super.publicMethod();
    }
}
|
package ipfix_test
import (
"fmt"
"os"
"github.com/calmh/ipfix"
)
// ExampleSession demonstrates parsing raw IPFIX messages from stdin and
// printing each data record. It loops forever, one message at a time.
func ExampleSession() {
	s := ipfix.NewSession()

	for {
		// ParseReader will block until a full message is available.
		msg, err := s.ParseReader(os.Stdin)
		if err != nil {
			panic(err)
		}

		for _, record := range msg.DataRecords {
			// record contains raw enterpriseId, fieldId => []byte information
			fmt.Println(record)
		}
	}
}
// ExampleInterpreter demonstrates decoding data records into typed,
// named fields. The field slice is reused across records to avoid
// per-record allocations.
func ExampleInterpreter() {
	s := ipfix.NewSession()
	i := ipfix.NewInterpreter(s)

	for {
		// ParseReader will block until a full message is available.
		msg, err := s.ParseReader(os.Stdin)
		if err != nil {
			panic(err)
		}

		var fieldList []ipfix.InterpretedField
		for _, record := range msg.DataRecords {
			fieldList = i.InterpretInto(record, fieldList)
			fmt.Println(fieldList)
		}
	}
}
// ExampleInterpreter_AddDictionaryEntry demonstrates registering a
// vendor-specific field so the interpreter can name and type it.
func ExampleInterpreter_AddDictionaryEntry() {
	s := ipfix.NewSession()
	i := ipfix.NewInterpreter(s)

	entry := ipfix.DictionaryEntry{
		Name:         "someVendorField",
		FieldID:      42,
		EnterpriseID: 123456,
		Type:         ipfix.Int32,
	}
	i.AddDictionaryEntry(entry)

	// Now use i.Interpret() etc as usual.
}
|
from importlib.util import spec_from_file_location, module_from_spec
import os


class Configuration:
    """Loads a Python credentials module from a file path and exposes
    accessors for the settings it defines.

    The loaded file must define a class named ``Configuration`` with
    ``DATABASE``, ``API_KEY`` and ``DEBUG_MODE`` attributes.
    """

    def __init__(self, file_path):
        self._config = self._load_config(file_path)

    def _load_config(self, file_path):
        """Import `file_path` as a module named 'credentials'.

        Relative paths are resolved against this file's directory.
        Raises ImportError when the file cannot be loaded.
        """
        if not os.path.isabs(file_path):
            file_path = os.path.join(os.path.dirname(__file__), file_path)
        spec = spec_from_file_location("credentials", file_path)
        if spec is None or spec.loader is None:
            # Fail with a clear error instead of letting exec_module
            # crash on a None spec/loader for a missing or odd path.
            raise ImportError(
                'Cannot load configuration from {!r}'.format(file_path))
        module = module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    def get_database_config(self):
        """Return the DATABASE setting from the loaded module."""
        return self._config.Configuration.DATABASE

    def get_api_key(self):
        """Return the API_KEY setting from the loaded module."""
        return self._config.Configuration.API_KEY

    def is_debug_mode(self):
        """Return the DEBUG_MODE flag from the loaded module."""
        return self._config.Configuration.DEBUG_MODE
<gh_stars>0
package bd.edu.daffodilvarsity.classmanager.notification;
import android.content.Context;
import androidx.room.Database;
import androidx.room.Room;
import androidx.room.RoomDatabase;
/**
 * Room database holding student notifications. Exposed as a process-wide
 * singleton via {@link #getInstance(Context)}.
 */
@Database(entities = {NotificationObjStudent.class} , version = 1 , exportSchema = false)
public abstract class NotificationStudentDatabase extends RoomDatabase {

    // Singleton instance; guarded by the synchronized accessor below.
    private static NotificationStudentDatabase sInstance;

    public abstract NotificationStudentDao notificationStudentDao();

    // Lazily builds the database on first use. fallbackToDestructiveMigration
    // means a schema version bump WIPES stored notifications instead of
    // migrating them -- acceptable here since notifications are transient.
    public static synchronized NotificationStudentDatabase getInstance(Context context) {
        if(sInstance==null) {
            sInstance = Room.databaseBuilder(context.getApplicationContext() , NotificationStudentDatabase.class,"student_notification")
                    .fallbackToDestructiveMigration()
                    .build();
        }
        return sInstance;
    }
}
|
class Person:
    """A named person with an age, able to produce a simple string id."""

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def generate_id(self):
        """Return an identifier of the form '<name>_<age>img'."""
        return f'{self.name}_{self.age}img'


# Example usage
person1 = Person("Alice", 25)
print(person1.generate_id())  # Output: Alice_25img

person2 = Person("Bob", 30)
print(person2.generate_id())  # Output: Bob_30img
ssh -i "heimdall.pem" ubuntu@ec2-35-171-201-74.compute-1.amazonaws.com
|
#!/bin/bash
# Boots the robot service stack inside a detached tmux session named
# "services", one window per ROS service. roscore starts first; every
# later launch uses --wait so it blocks until the master is up. The
# sleeps stagger the launches to avoid thundering-herd startup.
/usr/bin/tmux new-session -d -s services
/usr/bin/tmux new-window -t services:1 -n roscore 'bash -c "roscore"'
sleep 2
/usr/bin/tmux new-window -t services:2 -n speech_features 'bash -c "roslaunch --wait speech_features server.launch"'
sleep 2
/usr/bin/tmux new-window -t services:3 -n emorec 'bash -c "roslaunch --wait emorec server.launch"'
sleep 2
/usr/bin/tmux new-window -t services:4 -n objectrec 'bash -c "roslaunch --wait objectrec server.launch"'
sleep 2
/usr/bin/tmux new-window -t services:5 -n concept_space 'bash -c "roslaunch --wait concept_net concept_space_server.launch"'
sleep 2
/usr/bin/tmux new-window -t services:6 -n semantic_fusion 'bash -c "roslaunch --wait concept_net fusion_server.launch"'
sleep 2
/usr/bin/tmux new-window -t services:7 -n semantic_similarity 'bash -c "roslaunch --wait concept_net semantic_similarity_server.launch"'
sleep 2
# Non-ROS HTTP API, started last once the stack is up.
/usr/bin/tmux new-window -t services:8 -n flask_api 'bash -c "python /babyrobot-integration/babyrobot/api/v1.py"'
|
<reponame>kkoogqw/OpenItem
package access
import (
"encoding/json"
"fmt"
"github.com/beego/beego/v2/server/web/context"
"log"
"review/logger"
"review/utils"
)
// FilterResponse is the JSON envelope written by request filters when a
// request is rejected before reaching a controller.
type FilterResponse struct {
	Status string      `json:"status"`
	Msg    string      `json:"msg"`
	Data   interface{} `json:"data"`
}
// denyRequest writes a 403 response carrying `message` in the standard
// filter error envelope; a failed write is logged but not propagated.
func denyRequest(ctx *context.Context, message string) {
	writer := ctx.ResponseWriter
	writer.WriteHeader(403)
	// resp := &controllers.Response{Status: "error", Msg: "Unauthorized operation"}
	payload := &FilterResponse{Status: "error", Msg: message}
	if _, err := writer.Write([]byte(utils.StructToJson(payload))); err != nil {
		logger.Recorder.Error("filter error:" + err.Error())
	}
}
// TokenFilter rejects POST requests whose JSON body has no "token" field.
// Non-POST requests pass through untouched.
func TokenFilter(ctx *context.Context) {
	if ctx.Request.Method != "POST" {
		return
	}
	var requestBody map[string]interface{}
	// The original discarded the Unmarshal error; log it. A malformed body
	// leaves requestBody nil, whose "token" lookup is nil, so it is still
	// denied with the same "no token" message as before.
	if err := json.Unmarshal(ctx.Input.RequestBody, &requestBody); err != nil {
		logger.Recorder.Error("filter error: invalid JSON body: " + err.Error())
	}
	logger.Recorder.Info(fmt.Sprintf("%s", requestBody))
	if requestBody["token"] == nil {
		log.Println("deny")
		denyRequest(ctx, "no token")
	}
}
|
#!/usr/bin/env bats
# Bats test suite for bin/prips.sh: covers usage/help output, option
# validation (-n count, -i increment, -f format), IP-address validation,
# and the generated ranges in dot/dec/hex output formats.
# Expected strings must match the script byte-for-byte -- do not reword.

# --- usage, help and version ---
@test "prips.sh" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "usage: prips.sh [options] <start> <end>" ]
}

@test "prips.sh -h" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -h
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "usage: prips.sh [options] <start> <end>" ]
}

@test "prips.sh -v" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -v
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "prips.sh: v0.1.0" ]
}

# --- option validation ---
@test "prips.sh -n0" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -n0
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "prips.sh: count must be a positive integer" ]
}

@test "prips.sh -n11 192.168.0.0 192.168.0.10" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -n11 192.168.0.0 192.168.0.10
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "usage: prips.sh [options] <start> <end>" ]
}

@test "prips.sh -i0" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -i0
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "prips.sh: increment must be a positive integer" ]
}

@test "prips.sh -fx" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -fx
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "prips.sh: invalid format 'x'" ]
}

# --- address validation ---
@test "prips.sh 192.168.0.0" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh 192.168.0.0
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "usage: prips.sh [options] <start> <end>" ]
}

@test "prips.sh 192.168.0.x 192.168.0.10" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh 192.168.0.x 192.168.0.10
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "prips.sh: bad IP address" ]
}

@test "prips.sh 192.168.0.0 192.168.0.x" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh 192.168.0.0 192.168.0.x
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "prips.sh: bad IP address" ]
}

@test "prips.sh 192.168.0.x 192.168.0.x" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh 192.168.0.x 192.168.0.x
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "prips.sh: bad IP address" ]
}

@test "prips.sh 192.168.0.10 192.168.0.0" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh 192.168.0.10 192.168.0.0
  [ "$status" -eq 1 ]
  [ "${lines[0]}" = "prips.sh: start address must be smaller than end address" ]
}

# --- range generation ---
@test "prips.sh 192.168.0.0 192.168.0.10" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh 192.168.0.0 192.168.0.10
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "192.168.0.0" ]
  [ "${lines[10]}" = "192.168.0.10" ]
}

@test "prips.sh -i2 192.168.0.0 192.168.0.10" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -i2 192.168.0.0 192.168.0.10
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "192.168.0.0" ]
  [ "${lines[5]}" = "192.168.0.10" ]
}

@test "prips.sh -n11 192.168.0.0" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -n11 192.168.0.0
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "192.168.0.0" ]
  [ "${lines[10]}" = "192.168.0.10" ]
}

@test "prips.sh -i2 -n11 192.168.0.0" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -i2 -n11 192.168.0.0
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "192.168.0.0" ]
  [ "${lines[10]}" = "192.168.0.20" ]
}

# --- output formats ---
@test "prips.sh -fdot 192.168.0.0 192.168.0.10" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -fdot 192.168.0.0 192.168.0.10
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "192.168.0.0" ]
  [ "${lines[10]}" = "192.168.0.10" ]
}

@test "prips.sh -fdec 192.168.0.0 192.168.0.10" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -fdec 192.168.0.0 192.168.0.10
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "3232235520" ]
  [ "${lines[10]}" = "3232235530" ]
}

@test "prips.sh -fhex 192.168.0.0 192.168.0.10" {
  run $BATS_TEST_DIRNAME/../bin/prips.sh -fhex 192.168.0.0 192.168.0.10
  [ "$status" -eq 0 ]
  [ "${lines[0]}" = "C0A80000" ]
  [ "${lines[10]}" = "C0A8000A" ]
}
|
import { Component, OnInit, Input, Output, EventEmitter } from '@angular/core';
@Component({
  selector: 'app-child',
  templateUrl: './child.component.html',
  styleUrls: ['./child.component.css']
})
export class ChildComponent implements OnInit {

  /** Value pushed down from the parent component. */
  @Input() public messageFromParent;

  /** Event stream the parent subscribes to for messages from this child. */
  @Output() public messageToParent = new EventEmitter();

  constructor() { }

  ngOnInit(): void {
  }

  /** Emits a greeting up to the parent component. */
  myPostman(): void {
    this.messageToParent.emit('Hello parent bro');
  }
}
|
package org.hisp.dhis.databrowser.util;
/*
* Copyright (c) 2004-2011, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import org.amplecode.quick.StatementHolder;
import org.amplecode.quick.StatementManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.common.Grid;
import org.hisp.dhis.common.GridHeader;
import org.hisp.dhis.databrowser.MetaValue;
/**
* @author <NAME>
* @version $Id$
*/
public class DataBrowserUtils
{ protected static final Log log = LogFactory.getLog( DataBrowserUtils.class );
public static void setMetaStructure( Grid grid, StringBuffer sqlsb, List<Integer> metaIds,
StatementManager statementManager )
{
final StatementHolder holder = statementManager.getHolder();
try
{
Integer metaId = null;
String metaName = null;
ResultSet resultSet = getScrollableResult( sqlsb.toString(), holder );
while ( resultSet.next() )
{
metaId = resultSet.getInt( 1 );
metaName = resultSet.getString( 2 );
metaIds.add( metaId );
grid.addRow().addValue( new MetaValue( metaId, metaName ) );
}
}
catch ( SQLException e )
{ log.error( "Failed to add meta value\n" + sqlsb.toString());
throw new RuntimeException( "Failed to add meta value\n" , e );
}
catch ( Exception e )
{
throw new RuntimeException( "Oops. Something else went wrong in setMetaStructure()", e );
}
finally
{
holder.close();
}
}
public static void setHeaderStructure( Grid grid, ResultSet resultSet, List<Integer> headerIds,
boolean isZeroAdded, StatementManager statementManager )
{
try
{
Integer headerId = null;
String headerName = null;
while ( resultSet.next() )
{
headerId = resultSet.getInt( 4 );
headerName = resultSet.getString( 5 );
GridHeader header = new GridHeader( headerName, headerId + "", String.class.getName(), false, false );
if ( !headerIds.contains( headerId ) )
{
headerIds.add( headerId );
grid.addHeader( header );
for ( List<Object> row : grid.getRows() )
{
row.add( isZeroAdded ? "0" : "" );
}
}
}
}
catch ( SQLException e )
{
throw new RuntimeException( "Failed to add header\n", e );
}
catch ( Exception e )
{
throw new RuntimeException( "Oops. Something else went wrong in setHeaderStructure()", e );
}
}
public static int fillUpDataBasic( Grid grid, StringBuffer sqlsb, boolean isZeroAdded,
StatementManager statementManager )
{
final StatementHolder holder = statementManager.getHolder();
int countRows = 0;
try
{
ResultSet resultSet = getScrollableResult( sqlsb.toString(), holder );
while ( resultSet.next() )
{
MetaValue metaValue = new MetaValue( resultSet.getInt( 1 ), resultSet.getString( 2 ) );
grid.addRow().addValue( metaValue ).addValue( checkValue( resultSet.getString( 3 ), isZeroAdded ) );
}
}
catch ( SQLException e )
{
log.error( "Error executing" + sqlsb.toString());
throw new RuntimeException( "Failed to get aggregated data value\n" , e );
}
catch ( Exception e )
{
throw new RuntimeException( "Oops. Something else went wrong", e );
}
finally
{
holder.close();
}
return countRows;
}
/**
 * Fills the grid with cross-tabulated data values: for every result row the
 * value in column 3 is written into the cell addressed by the meta id
 * (column 1, row index via metaIds) and the header id (column 4, column index
 * via the headers discovered by setHeaderStructure, offset by the grid width
 * before the new headers were added).
 *
 * @param grid the grid to fill; expected to already contain one row per meta id
 * @param sqlsb the query to execute
 * @param metaIds the meta ids, in grid-row order
 * @param isZeroAdded whether zero values should be kept rather than blanked
 * @param statementManager provides the JDBC statement holder
 * @return the number of data cells written
 */
public static int fillUpDataAdvance( Grid grid, StringBuffer sqlsb, List<Integer> metaIds, boolean isZeroAdded,
    StatementManager statementManager )
{
    final StatementHolder holder = statementManager.getHolder();

    int countRows = 0;
    int rowIndex = -1;
    int columnIndex = -1;
    int oldWidth = grid.getWidth();

    try
    {
        ResultSet rs = getScrollableResult( sqlsb.toString(), holder );

        List<Integer> headerIds = new ArrayList<Integer>();
        setHeaderStructure( grid, rs, headerIds, isZeroAdded, statementManager );

        // Empty result set: nothing to fill in. (Idiom fix: was "rs.first() != true".)
        if ( !rs.first() )
        {
            return countRows;
        }

        // setHeaderStructure() exhausted the cursor; rewind before re-reading.
        rs.beforeFirst();

        while ( rs.next() )
        {
            rowIndex = metaIds.indexOf( rs.getInt( 1 ) );
            columnIndex = headerIds.indexOf( rs.getInt( 4 ) ) + oldWidth;

            grid.getRow( rowIndex ).set( columnIndex, checkValue( rs.getString( 3 ), isZeroAdded ) );
            countRows++;
        }
    }
    catch ( SQLException e )
    {
        log.error( "Error executing" + sqlsb.toString() );
        throw new RuntimeException( "Failed to get aggregated data value\n", e );
    }
    catch ( Exception e )
    {
        // FIX: corrected typo "Somthing" in the error message.
        throw new RuntimeException( "Oops. Something else went wrong", e );
    }
    finally
    {
        holder.close();
    }

    return countRows;
}
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
/**
 * Obtains a scroll-able, read-only ResultSet for the given query, using the
 * connection held by the supplied StatementHolder.
 *
 * @param sql the query to execute
 * @param holder the StatementHolder supplying the connection
 * @return the ResultSet produced by the statement, or null
 * @throws SQLException on any JDBC failure
 */
private static ResultSet getScrollableResult( String sql, StatementHolder holder )
    throws SQLException
{
    final Connection connection = holder.getConnection();
    final Statement statement = connection.createStatement( ResultSet.TYPE_SCROLL_INSENSITIVE,
        ResultSet.CONCUR_READ_ONLY );

    statement.execute( sql );
    log.debug( sql );

    return statement.getResultSet();
}
/**
 * Normalises a raw data value for display in the grid: SQL NULL becomes the
 * literal string "null", and "0" is blanked out unless zeros were explicitly
 * requested via isZeroAdded.
 *
 * @param value the raw value, possibly null
 * @param isZeroAdded whether "0" should be kept as-is
 * @return the normalised value
 */
private static String checkValue( String value, boolean isZeroAdded )
{
    if ( value == null )
    {
        return "null";
    }

    if ( "0".equals( value ) && !isZeroAdded )
    {
        return "";
    }

    return value;
}
}
|
#!/usr/bin/env bats
load test_helpers
# Build an isolated asdf installation with the dummy plugin and a scratch
# project directory; every test in this file runs from inside $PROJECT_DIR.
setup() {
  setup_asdf_dir
  install_dummy_plugin

  PROJECT_DIR=$HOME/project
  mkdir -p $PROJECT_DIR
  cd $PROJECT_DIR

  # asdf lib needed to run generated shims
  cp -rf $BATS_TEST_DIRNAME/../{bin,lib} $ASDF_DIR/
}

# Discard the temporary asdf installation created by setup().
teardown() {
  clean_asdf_dir
}
@test "asdf exec without argument should display help" {
run asdf exec
[ "$status" -eq 1 ]
echo "$output" | grep "usage: asdf exec <command>"
}
@test "asdf exec should pass all arguments to executable" {
echo "dummy 1.0" > $PROJECT_DIR/.tool-versions
run asdf install
run asdf exec dummy world hello
[ "$output" == "This is Dummy 1.0! hello world" ]
[ "$status" -eq 0 ]
}
@test "asdf exec should pass all arguments to executable even if shim is not in PATH" {
echo "dummy 1.0" > $PROJECT_DIR/.tool-versions
run asdf install
path=$(echo "$PATH" | sed -e "s|$(asdf_data_dir)/shims||g; s|::|:|g")
run env PATH=$path which dummy
[ "$output" == "" ]
[ "$status" -eq 1 ]
run env PATH=$path asdf exec dummy world hello
[ "$output" == "This is Dummy 1.0! hello world" ]
[ "$status" -eq 0 ]
}
@test "shim exec should pass all arguments to executable" {
echo "dummy 1.0" > $PROJECT_DIR/.tool-versions
run asdf install
run $ASDF_DIR/shims/dummy world hello
[ "$output" == "This is Dummy 1.0! hello world" ]
[ "$status" -eq 0 ]
}
@test "shim exec should pass stdin to executable" {
echo "dummy 1.0" > $PROJECT_DIR/.tool-versions
run asdf install
echo "tr [:lower:] [:upper:]" > $ASDF_DIR/installs/dummy/1.0/bin/upper
chmod +x $ASDF_DIR/installs/dummy/1.0/bin/upper
run asdf reshim dummy 1.0
run echo $(echo hello | $ASDF_DIR/shims/upper)
[ "$output" == "HELLO" ]
[ "$status" -eq 0 ]
}
@test "shim exec should fail if no version is selected" {
run asdf install dummy 1.0
touch $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy world hello
[ "$status" -eq 126 ]
echo "$output" | grep -q "No version set for command dummy" 2>/dev/null
}
@test "shim exec should suggest which plugin to use when no version is selected" {
run asdf install dummy 1.0
run asdf install dummy 2.0.0
touch $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy world hello
[ "$status" -eq 126 ]
echo "$output" | grep -q "No version set for command dummy" 2>/dev/null
echo "$output" | grep -q "Consider adding one of the following versions in your config file at $PROJECT_DIR/.tool-versions" 2>/dev/null
echo "$output" | grep -q "dummy 1.0" 2>/dev/null
echo "$output" | grep -q "dummy 2.0.0" 2>/dev/null
}
@test "shim exec should suggest different plugins providing same tool when no version is selected" {
# Another fake plugin with 'dummy' executable
cp -rf $ASDF_DIR/plugins/dummy $ASDF_DIR/plugins/mummy
run asdf install dummy 1.0
run asdf install mummy 3.0
touch $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy world hello
[ "$status" -eq 126 ]
echo "$output" | grep -q "No version set for command dummy" 2>/dev/null
echo "$output" | grep -q "Consider adding one of the following versions in your config file at $PROJECT_DIR/.tool-versions" 2>/dev/null
echo "$output" | grep -q "dummy 1.0" 2>/dev/null
echo "$output" | grep -q "mummy 3.0" 2>/dev/null
}
@test "shim exec should suggest to install missing version" {
run asdf install dummy 1.0
echo "dummy 2.0.0 1.3" > $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy world hello
[ "$status" -eq 126 ]
echo "$output" | grep -q "No preset version installed for command dummy" 2>/dev/null
echo "$output" | grep -q "Please install a version by running one of the following:" 2>/dev/null
echo "$output" | grep -q "asdf install dummy 2.0.0" 2>/dev/null
echo "$output" | grep -q "asdf install dummy 1.3" 2>/dev/null
echo "$output" | grep -q "or add one of the following versions in your config file at $PROJECT_DIR/.tool-versions" 2>/dev/null
echo "$output" | grep -q "dummy 1.0" 2>/dev/null
}
@test "shim exec should execute first plugin that is installed and set" {
run asdf install dummy 2.0.0
run asdf install dummy 3.0
echo "dummy 1.0 3.0 2.0.0" > $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy world hello
[ "$status" -eq 0 ]
echo "$output" | grep -q "This is Dummy 3.0! hello world" 2>/dev/null
}
@test "shim exec should only use the first version found for a plugin" {
run asdf install dummy 3.0
echo "dummy 3.0" > $PROJECT_DIR/.tool-versions
echo "dummy 1.0" >> $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy world hello
[ "$status" -eq 0 ]
echo "$output" | grep -q "This is Dummy 3.0! hello world" 2>/dev/null
}
@test "shim exec should determine correct executable on two projects using different plugins that provide the same tool" {
# Another fake plugin with 'dummy' executable
cp -rf $ASDF_DIR/plugins/dummy $ASDF_DIR/plugins/mummy
sed -i -e 's/Dummy/Mummy/' $ASDF_DIR/plugins/mummy/bin/install
run asdf install mummy 3.0
run asdf install dummy 1.0
mkdir $PROJECT_DIR/{A,B}
echo "dummy 1.0" > $PROJECT_DIR/A/.tool-versions
echo "mummy 3.0" > $PROJECT_DIR/B/.tool-versions
cd $PROJECT_DIR/A
run $ASDF_DIR/shims/dummy world hello
[ "$output" == "This is Dummy 1.0! hello world" ]
[ "$status" -eq 0 ]
cd $PROJECT_DIR/B
run $ASDF_DIR/shims/dummy world hello
[ "$output" == "This is Mummy 3.0! hello world" ]
[ "$status" -eq 0 ]
}
@test "shim exec should determine correct executable on a project with two plugins set that provide the same tool" {
# Another fake plugin with 'dummy' executable
cp -rf $ASDF_DIR/plugins/dummy $ASDF_DIR/plugins/mummy
sed -i -e 's/Dummy/Mummy/' $ASDF_DIR/plugins/mummy/bin/install
run asdf install dummy 1.0
run asdf install mummy 3.0
echo "dummy 2.0.0" > $PROJECT_DIR/.tool-versions
echo "mummy 3.0" >> $PROJECT_DIR/.tool-versions
echo "dummy 1.0" >> $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy world hello
[ "$output" == "This is Mummy 3.0! hello world" ]
[ "$status" -eq 0 ]
}
@test "shim exec should fallback to system executable when specified version is system" {
run asdf install dummy 1.0
echo "dummy system" > $PROJECT_DIR/.tool-versions
mkdir $PROJECT_DIR/foo/
echo "echo System" > $PROJECT_DIR/foo/dummy
chmod +x $PROJECT_DIR/foo/dummy
run env PATH=$PATH:$PROJECT_DIR/foo $ASDF_DIR/shims/dummy hello
[ "$output" == "System" ]
}
@test "shim exec should use path executable when specified version path:<path>" {
run asdf install dummy 1.0
CUSTOM_DUMMY_PATH=$PROJECT_DIR/foo
CUSTOM_DUMMY_BIN_PATH=$CUSTOM_DUMMY_PATH/bin
mkdir -p $CUSTOM_DUMMY_BIN_PATH
echo "echo System" > $CUSTOM_DUMMY_BIN_PATH/dummy
chmod +x $CUSTOM_DUMMY_BIN_PATH/dummy
echo "dummy path:$CUSTOM_DUMMY_PATH" > $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy hello
[ "$output" == "System" ]
}
@test "shim exec should execute system if set first" {
run asdf install dummy 2.0.0
echo "dummy system" > $PROJECT_DIR/.tool-versions
echo "dummy 2.0.0" >> $PROJECT_DIR/.tool-versions
mkdir $PROJECT_DIR/foo/
echo "echo System" > $PROJECT_DIR/foo/dummy
chmod +x $PROJECT_DIR/foo/dummy
run env PATH=$PATH:$PROJECT_DIR/foo $ASDF_DIR/shims/dummy hello
[ "$output" == "System" ]
}
@test "shim exec should use custom exec-env for tool" {
run asdf install dummy 2.0.0
echo "export FOO=sourced" > $ASDF_DIR/plugins/dummy/bin/exec-env
mkdir $ASDF_DIR/plugins/dummy/shims
echo 'echo $FOO custom' > $ASDF_DIR/plugins/dummy/shims/foo
chmod +x $ASDF_DIR/plugins/dummy/shims/foo
run asdf reshim dummy 2.0.0
echo "dummy 2.0.0" > $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/foo
[ "$output" == "sourced custom" ]
}
@test "shim exec with custom exec-env using ASDF_INSTALL_PATH" {
run asdf install dummy 2.0.0
echo 'export FOO=$ASDF_INSTALL_PATH/foo' > $ASDF_DIR/plugins/dummy/bin/exec-env
mkdir $ASDF_DIR/plugins/dummy/shims
echo 'echo $FOO custom' > $ASDF_DIR/plugins/dummy/shims/foo
chmod +x $ASDF_DIR/plugins/dummy/shims/foo
run asdf reshim dummy 2.0.0
echo "dummy 2.0.0" > $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/foo
[ "$output" == "$ASDF_DIR/installs/dummy/2.0.0/foo custom" ]
}
@test "shim exec doest not use custom exec-env for system version" {
run asdf install dummy 2.0.0
echo "export FOO=sourced" > $ASDF_DIR/plugins/dummy/bin/exec-env
mkdir $ASDF_DIR/plugins/dummy/shims
echo 'echo $FOO custom' > $ASDF_DIR/plugins/dummy/shims/foo
chmod +x $ASDF_DIR/plugins/dummy/shims/foo
run asdf reshim dummy 2.0.0
echo "dummy system" > $PROJECT_DIR/.tool-versions
mkdir $PROJECT_DIR/sys/
echo 'echo x$FOO System' > $PROJECT_DIR/sys/foo
chmod +x $PROJECT_DIR/sys/foo
run env PATH=$PATH:$PROJECT_DIR/sys $ASDF_DIR/shims/foo
[ "$output" == "x System" ]
}
@test "shim exec should prepend the plugin paths on execution" {
run asdf install dummy 2.0.0
mkdir $ASDF_DIR/plugins/dummy/shims
echo 'which dummy' > $ASDF_DIR/plugins/dummy/shims/foo
chmod +x $ASDF_DIR/plugins/dummy/shims/foo
run asdf reshim dummy 2.0.0
echo "dummy 2.0.0" > $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/foo
[ "$output" == "$ASDF_DIR/installs/dummy/2.0.0/bin/dummy" ]
}
@test "shim exec should be able to find other shims in path" {
cp -rf $ASDF_DIR/plugins/dummy $ASDF_DIR/plugins/gummy
echo "dummy 2.0.0" > $PROJECT_DIR/.tool-versions
echo "gummy 2.0.0" >> $PROJECT_DIR/.tool-versions
run asdf install
mkdir $ASDF_DIR/plugins/{dummy,gummy}/shims
echo 'which dummy' > $ASDF_DIR/plugins/dummy/shims/foo
chmod +x $ASDF_DIR/plugins/dummy/shims/foo
echo 'which gummy' > $ASDF_DIR/plugins/dummy/shims/bar
chmod +x $ASDF_DIR/plugins/dummy/shims/bar
touch $ASDF_DIR/plugins/gummy/shims/gummy
chmod +x $ASDF_DIR/plugins/gummy/shims/gummy
run asdf reshim
run $ASDF_DIR/shims/foo
[ "$output" == "$ASDF_DIR/installs/dummy/2.0.0/bin/dummy" ]
run $ASDF_DIR/shims/bar
[ "$output" == "$ASDF_DIR/shims/gummy" ]
}
@test "shim exec should remove shim_path from path on system version execution" {
run asdf install dummy 2.0.0
echo "dummy system" > $PROJECT_DIR/.tool-versions
mkdir $PROJECT_DIR/sys/
echo 'which dummy' > $PROJECT_DIR/sys/dummy
chmod +x $PROJECT_DIR/sys/dummy
run env PATH=$PATH:$PROJECT_DIR/sys $ASDF_DIR/shims/dummy
echo $status $output
[ "$output" == "$ASDF_DIR/shims/dummy" ]
}
@test "shim exec can take version from legacy file if configured" {
run asdf install dummy 2.0.0
echo "legacy_version_file = yes" > $HOME/.asdfrc
echo "2.0.0" > $PROJECT_DIR/.dummy-version
run $ASDF_DIR/shims/dummy world hello
[ "$output" == "This is Dummy 2.0.0! hello world" ]
}
@test "shim exec can take version from environment variable" {
run asdf install dummy 2.0.0
run env ASDF_DUMMY_VERSION=2.0.0 $ASDF_DIR/shims/dummy world hello
[ "$output" == "This is Dummy 2.0.0! hello world" ]
}
@test "shim exec uses plugin list-bin-paths" {
exec_path="$ASDF_DIR/plugins/dummy/bin/list-bin-paths"
custom_path="$ASDF_DIR/installs/dummy/1.0/custom"
echo "echo bin custom" > $exec_path
chmod +x $exec_path
run asdf install dummy 1.0
echo "dummy 1.0" > $PROJECT_DIR/.tool-versions
mkdir $custom_path
echo "echo CUSTOM" > $custom_path/foo
chmod +x $custom_path/foo
run asdf reshim dummy 1.0
run $ASDF_DIR/shims/foo
[ "$output" == "CUSTOM" ]
}
@test "shim exec uses plugin custom exec-path hook" {
run asdf install dummy 1.0
exec_path="$ASDF_DIR/plugins/dummy/bin/exec-path"
custom_dummy="$ASDF_DIR/installs/dummy/1.0/custom/dummy"
echo "echo custom/dummy" > $exec_path
chmod +x $exec_path
mkdir $(dirname $custom_dummy)
echo "echo CUSTOM" > $custom_dummy
chmod +x $custom_dummy
echo "dummy 1.0" > $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy
[ "$output" == "CUSTOM" ]
}
@test "shim exec uses plugin custom exec-path hook that defaults" {
run asdf install dummy 1.0
exec_path="$ASDF_DIR/plugins/dummy/bin/exec-path"
echo 'echo $3 # always same path' > $exec_path
chmod +x $exec_path
echo "dummy 1.0" > $PROJECT_DIR/.tool-versions
run $ASDF_DIR/shims/dummy
[ "$output" == "This is Dummy 1.0!" ]
}
@test "shim exec executes configured pre-hook" {
run asdf install dummy 1.0
echo dummy 1.0 > $PROJECT_DIR/.tool-versions
cat > $HOME/.asdfrc <<-'EOM'
pre_dummy_dummy = echo PRE $version $1 $2
EOM
run $ASDF_DIR/shims/dummy hello world
[ "$status" -eq 0 ]
echo "$output" | grep "PRE 1.0 hello world"
echo "$output" | grep "This is Dummy 1.0! world hello"
}
@test "shim exec doesnt execute command if pre-hook failed" {
run asdf install dummy 1.0
echo dummy 1.0 > $PROJECT_DIR/.tool-versions
mkdir $HOME/hook
pre_cmd="$HOME/hook/pre"
echo 'echo $* && false' > "$pre_cmd"
chmod +x "$pre_cmd"
cat > $HOME/.asdfrc <<'EOM'
pre_dummy_dummy = pre $1 no $plugin_name $2
EOM
run env PATH=$PATH:$HOME/hook $ASDF_DIR/shims/dummy hello world
[ "$output" == "hello no dummy world" ]
[ "$status" -eq 1 ]
}
|
# frozen_string_literal: true
require_relative '../config/game/g_18_chesapeake'
require_relative 'base'
module Engine
  module Game
    # 18Chesapeake game title. All game data (map, trains, companies, ...)
    # lives in the JSON config; this class only feeds it to the shared Base
    # game engine.
    class G18Chesapeake < Base
      load_from_json(Config::Game::G18Chesapeake::JSON)
    end
  end
end
|
# -----------------------------------------------------------------------------
#
# Package : fliplog
# Version : 0.3.13
# Source repo : https://github.com/fliphub/fliplog
# Tested on : RHEL 8.3
# Script License: Apache License, Version 2 or later
# Maintainer : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
PACKAGE_NAME=fliplog
PACKAGE_VERSION=0.3.13
PACKAGE_URL=https://github.com/fliphub/fliplog

# Install build tooling plus the node and python toolchains.
yum -y update && yum install -y yum-utils nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git gcc gcc-c++ libffi libffi-devel ncurses git jq make cmake

yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/appstream/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/baseos/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/7Server/ppc64le/optional/

yum install -y firefox liberation-fonts xdg-utils && npm install n -g && n latest && npm install -g npm@latest && export PATH="$PATH" && npm install --global yarn grunt-bump xo testem acorn

# Pretty OS name for the report lines below.
OS_NAME=`python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])"`
HOME_DIR=`pwd`

if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
	echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
	exit 0
fi

cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION
PACKAGE_VERSION=$(jq -r ".version" package.json)

# run the test command from test.sh
# FIX: group the install pipeline in a subshell. The original
# `if ! npm install && npm audit fix && ...` negated ONLY `npm install`,
# so the audit steps ran only when the install FAILED, and the failure
# branch was taken only if those audit steps then succeeded.
if ! (npm install && npm audit fix && npm audit fix --force); then
	echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
	exit 0
fi

cd $HOME_DIR/$PACKAGE_NAME
if ! npm test; then
	echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
	exit 0
else
	echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
	exit 0
fi
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-VB/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-VB/13-1024+0+512-N-IP-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function replace_all_but_nouns_first_two_thirds_sixth --eval_function penultimate_sixth_eval |
<reponame>cristiklein/cli<filename>cmd/sks_nodepool_delete.go
package cmd
import (
"errors"
"fmt"
apiv2 "github.com/exoscale/egoscale/api/v2"
"github.com/spf13/cobra"
)
// sksNodepoolDeleteCmd implements "sks nodepool delete": it removes a
// Nodepool, identified by name or ID, from an SKS cluster.
var sksNodepoolDeleteCmd = &cobra.Command{
	Use:     "delete <cluster name | ID> <Nodepool name | ID>",
	Short:   "Delete a SKS cluster Nodepool",
	Aliases: gRemoveAlias,
	PreRunE: func(cmd *cobra.Command, args []string) error {
		// Exactly two positional arguments: the cluster and the nodepool.
		if len(args) != 2 {
			cmdExitOnUsageError(cmd, "invalid arguments")
		}

		// Fall back to the account's default zone, then require one.
		cmdSetZoneFlagFromDefault(cmd)

		return cmdCheckRequiredFlags(cmd, []string{"zone"})
	},
	RunE: func(cmd *cobra.Command, args []string) error {
		var (
			c  = args[0]
			np = args[1]
		)

		zone, err := cmd.Flags().GetString("zone")
		if err != nil {
			return err
		}

		force, err := cmd.Flags().GetBool("force")
		if err != nil {
			return err
		}

		// Interactive confirmation unless --force was given.
		if !force {
			if !askQuestion(fmt.Sprintf("Do you really want to delete Nodepool %q?", args[1])) {
				return nil
			}
		}

		ctx := apiv2.WithEndpoint(gContext, apiv2.NewReqEndpoint(gCurrentAccount.Environment, zone))
		cluster, err := lookupSKSCluster(ctx, zone, c)
		if err != nil {
			return err
		}

		// Match the requested nodepool by either ID or name.
		for _, n := range cluster.Nodepools {
			if n.ID == np || n.Name == np {
				n := n // rebind the loop variable for capture by the closure below
				decorateAsyncOperation(fmt.Sprintf("Deleting Nodepool %q...", np), func() {
					err = cluster.DeleteNodepool(ctx, n)
				})
				if err != nil {
					return err
				}

				return nil
			}
		}

		return errors.New("Nodepool not found") // nolint:golint
	},
}
// init registers the delete command and its flags on the parent
// "sks nodepool" command.
func init() {
	sksNodepoolDeleteCmd.Flags().BoolP("force", "f", false, "Attempt to delete without prompting for confirmation")
	sksNodepoolDeleteCmd.Flags().StringP("zone", "z", "", "SKS cluster zone")
	sksNodepoolCmd.AddCommand(sksNodepoolDeleteCmd)
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.common.config.keys.loader.openssh;
import java.security.KeyPair;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.util.Collection;
import java.util.List;
import org.apache.sshd.common.config.keys.BuiltinIdentities;
import org.apache.sshd.common.config.keys.KeyUtils;
import org.apache.sshd.common.config.keys.PrivateKeyEntryDecoder;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.util.test.JUnit4ClassRunnerWithParametersFactory;
import org.apache.sshd.util.test.NoIoTestCase;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.MethodSorters;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;
/**
* @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@RunWith(Parameterized.class) // see https://github.com/junit-team/junit/wiki/Parameterized-tests
@UseParametersRunnerFactory(JUnit4ClassRunnerWithParametersFactory.class)
@Category({ NoIoTestCase.class })
public class OpenSSHKeyPairResourceParserDecodingTest extends OpenSSHKeyPairResourceParserTestSupport {
    public OpenSSHKeyPairResourceParserDecodingTest(BuiltinIdentities identity) {
        super(identity);
    }

    // One parameterized test instance per built-in identity value.
    @Parameters(name = "type={0}")
    public static List<Object[]> parameters() {
        return parameterize(BuiltinIdentities.VALUES);
    }

    @Test
    public void testLoadUnencryptedKeyPairs() throws Exception {
        testLoadKeyPairs(false, null);
    }

    @Test
    public void testLoadEncryptedKeyPairs() throws Exception {
        testLoadKeyPairs(true, DEFAULT_PASSWORD_PROVIDER);
    }

    /**
     * Verifies each loaded pair: the key classes match the identity under
     * test, the public key equals the expected entry, private and public keys
     * report the same key-type name, that name is one the identity supports,
     * the pair can sign, and (where the decoder supports it) the public key
     * can be recovered from the private one.
     */
    @Override
    protected void testLoadKeyPairs(
            boolean encrypted, String resourceKey, Collection<KeyPair> pairs, PublicKey pubEntry)
                throws Exception {
        assertEquals("Mismatched pairs count", 1, GenericUtils.size(pairs));

        Class<? extends PublicKey> pubType = identity.getPublicKeyType();
        Class<? extends PrivateKey> prvType = identity.getPrivateKeyType();
        Collection<String> supportedTypeNames = identity.getSupportedKeyTypes();
        for (KeyPair kp : pairs) {
            PublicKey pubKey = kp.getPublic();
            assertObjectInstanceOf("Mismatched public key type", pubType, pubKey);
            assertKeyEquals("Mismatched identity public key", pubEntry, pubKey);

            PrivateKey prvKey = kp.getPrivate();
            assertObjectInstanceOf("Mismatched private key type", prvType, prvKey);

            // Both halves must report the same SSH key-type name, and it must
            // be one of the identity's supported names.
            String pubName = KeyUtils.getKeyType(pubKey);
            String prvName = KeyUtils.getKeyType(prvKey);
            assertEquals("Mismatched reported key type names", pubName, prvName);

            if (!supportedTypeNames.contains(pubName)) {
                fail("Unsupported key type name (" + pubName + "): " + supportedTypeNames);
            }

            validateKeyPairSignable(pubName, kp);

            @SuppressWarnings("rawtypes")
            PrivateKeyEntryDecoder decoder = OpenSSHKeyPairResourceParser.getPrivateKeyEntryDecoder(prvKey);
            assertNotNull("No private key decoder", decoder);

            // Round-trip check: recover the public key from the private one
            // when the decoder supports it.
            if (decoder.isPublicKeyRecoverySupported()) {
                @SuppressWarnings("unchecked")
                PublicKey recKey = decoder.recoverPublicKey(prvKey);
                assertKeyEquals("Mismatched recovered public key", pubKey, recKey);
            }
        }
    }
}
|
#!/bin/bash
red='\033[0;31m'
green='\033[0;32m'
yellow='\033[0;33m'
plain='\033[0m'
version="v1.0.0"
# check root
[[ $EUID -ne 0 ]] && echo -e "${red}错误: ${plain} 必须使用root用户运行此脚本!\n" && exit 1
# check os
if [[ -f /etc/redhat-release ]]; then
release="centos"
elif cat /etc/issue | grep -Eqi "debian"; then
release="debian"
elif cat /etc/issue | grep -Eqi "ubuntu"; then
release="ubuntu"
elif cat /etc/issue | grep -Eqi "centos|red hat|redhat"; then
release="centos"
elif cat /proc/version | grep -Eqi "debian"; then
release="debian"
elif cat /proc/version | grep -Eqi "ubuntu"; then
release="ubuntu"
elif cat /proc/version | grep -Eqi "centos|red hat|redhat"; then
release="centos"
else
echo -e "${red}未检测到系统版本,请联系脚本作者!${plain}\n" && exit 1
fi
os_version=""
# os version
if [[ -f /etc/os-release ]]; then
os_version=$(awk -F'[= ."]' '/VERSION_ID/{print $3}' /etc/os-release)
fi
if [[ -z "$os_version" && -f /etc/lsb-release ]]; then
os_version=$(awk -F'[= ."]+' '/DISTRIB_RELEASE/{print $2}' /etc/lsb-release)
fi
if [[ x"${release}" == x"centos" ]]; then
if [[ ${os_version} -le 6 ]]; then
echo -e "${red}请使用 CentOS 7 或更高版本的系统!${plain}\n" && exit 1
fi
elif [[ x"${release}" == x"ubuntu" ]]; then
if [[ ${os_version} -lt 16 ]]; then
echo -e "${red}请使用 Ubuntu 16 或更高版本的系统!${plain}\n" && exit 1
fi
elif [[ x"${release}" == x"debian" ]]; then
if [[ ${os_version} -lt 8 ]]; then
echo -e "${red}请使用 Debian 8 或更高版本的系统!${plain}\n" && exit 1
fi
fi
# confirm <prompt> [default]
# Ask a yes/no question. With a second argument, an empty answer falls back
# to that default. Returns 0 for y/Y, 1 for anything else.
confirm() {
    if [[ $# > 1 ]]; then
        echo && read -p "$1 [默认$2]: " temp
        # Empty input takes the supplied default.
        if [[ x"${temp}" == x"" ]]; then
            temp=$2
        fi
    else
        read -p "$1 [y/n]: " temp
    fi
    if [[ x"${temp}" == x"y" || x"${temp}" == x"Y" ]]; then
        return 0
    else
        return 1
    fi
}
confirm_restart() {
confirm "是否重启XrayR" "y"
if [[ $? == 0 ]]; then
restart
else
show_menu
fi
}
before_show_menu() {
echo && echo -n -e "${yellow}按回车返回主菜单: ${plain}" && read temp
show_menu
}
install() {
bash <(curl -Ls https://raw.githubusercontent.com/nrsxcloud/XrayR-V2Board/master/install.sh)
if [[ $? == 0 ]]; then
if [[ $# == 0 ]]; then
start
else
start 0
fi
fi
}
update() {
if [[ $# == 0 ]]; then
echo && echo -n -e "输入指定版本(默认最新版): " && read version
else
version=$2
fi
# confirm "本功能会强制重装当前最新版,数据不会丢失,是否继续?" "n"
# if [[ $? != 0 ]]; then
# echo -e "${red}已取消${plain}"
# if [[ $1 != 0 ]]; then
# before_show_menu
# fi
# return 0
# fi
bash <(curl -Ls https://raw.githubusercontent.com/nrsxcloud/XrayR-V2Board/master/install.sh) $version
if [[ $? == 0 ]]; then
echo -e "${green}更新完成,已自动重启 XrayR,请使用 XrayR log 查看运行日志${plain}"
exit
fi
if [[ $# == 0 ]]; then
before_show_menu
fi
}
config() {
echo "XrayR在修改配置后会自动尝试重启"
vi /etc/XrayR/config.yml
sleep 2
check_status
case $? in
0)
echo -e "XrayR状态: ${green}已运行${plain}"
;;
1)
echo -e "检测到您未启动XrayR或XrayR自动重启失败,是否查看日志?[Y/n]" && echo
read -e -p "(默认: y):" yn
[[ -z ${yn} ]] && yn="y"
if [[ ${yn} == [Yy] ]]; then
show_log
fi
;;
2)
echo -e "XrayR状态: ${red}未安装${plain}"
esac
}
uninstall() {
confirm "确定要卸载 XrayR 吗?" "n"
if [[ $? != 0 ]]; then
if [[ $# == 0 ]]; then
show_menu
fi
return 0
fi
systemctl stop XrayR
systemctl disable XrayR
rm /etc/systemd/system/XrayR.service -f
systemctl daemon-reload
systemctl reset-failed
rm /etc/XrayR/ -rf
rm /usr/local/XrayR/ -rf
echo ""
echo -e "卸载成功,如果你想删除此脚本,则退出脚本后运行 ${green}rm /usr/bin/XrayR -f${plain} 进行删除"
echo ""
if [[ $# == 0 ]]; then
before_show_menu
fi
}
start() {
check_status
if [[ $? == 0 ]]; then
echo ""
echo -e "${green}XrayR已运行,无需再次启动,如需重启请选择重启${plain}"
else
systemctl start XrayR
sleep 2
check_status
if [[ $? == 0 ]]; then
echo -e "${green}XrayR 启动成功,请使用 XrayR log 查看运行日志${plain}"
else
echo -e "${red}XrayR可能启动失败,请稍后使用 XrayR log 查看日志信息${plain}"
fi
fi
if [[ $# == 0 ]]; then
before_show_menu
fi
}
stop() {
systemctl stop XrayR
sleep 2
check_status
if [[ $? == 1 ]]; then
echo -e "${green}XrayR 停止成功${plain}"
else
echo -e "${red}XrayR停止失败,可能是因为停止时间超过了两秒,请稍后查看日志信息${plain}"
fi
if [[ $# == 0 ]]; then
before_show_menu
fi
}
restart() {
systemctl restart XrayR
sleep 2
check_status
if [[ $? == 0 ]]; then
echo -e "${green}XrayR 重启成功,请使用 XrayR log 查看运行日志${plain}"
else
echo -e "${red}XrayR可能启动失败,请稍后使用 XrayR log 查看日志信息${plain}"
fi
if [[ $# == 0 ]]; then
before_show_menu
fi
}
status() {
systemctl status XrayR --no-pager -l
if [[ $# == 0 ]]; then
before_show_menu
fi
}
enable() {
systemctl enable XrayR
if [[ $? == 0 ]]; then
echo -e "${green}XrayR 设置开机自启成功${plain}"
else
echo -e "${red}XrayR 设置开机自启失败${plain}"
fi
if [[ $# == 0 ]]; then
before_show_menu
fi
}
disable() {
systemctl disable XrayR
if [[ $? == 0 ]]; then
echo -e "${green}XrayR 取消开机自启成功${plain}"
else
echo -e "${red}XrayR 取消开机自启失败${plain}"
fi
if [[ $# == 0 ]]; then
before_show_menu
fi
}
show_log() {
journalctl -u XrayR.service -e --no-pager -f
if [[ $# == 0 ]]; then
before_show_menu
fi
}
install_bbr() {
bash <(curl -L -s https://raw.githubusercontent.com/chiakge/Linux-NetSpeed/master/tcp.sh)
#if [[ $? == 0 ]]; then
# echo ""
# echo -e "${green}安装 bbr 成功,请重启服务器${plain}"
#else
# echo ""
# echo -e "${red}下载 bbr 安装脚本失败,请检查本机能否连接 Github${plain}"
#fi
#before_show_menu
}
update_shell() {
wget -O /usr/bin/XrayR -N --no-check-certificate https://raw.githubusercontent.com/nrsxcloud/XrayR-V2Board/master/XrayR.sh
if [[ $? != 0 ]]; then
echo ""
echo -e "${red}下载脚本失败,请检查本机能否连接 Github${plain}"
before_show_menu
else
chmod +x /usr/bin/XrayR
echo -e "${green}升级脚本成功,请重新运行脚本${plain}" && exit 0
fi
}
# 0: running, 1: not running, 2: not installed
check_status() {
    # No systemd unit file -> XrayR is not installed at all.
    if [[ ! -f /etc/systemd/system/XrayR.service ]]; then
        return 2
    fi
    # Extract the state word from the "Active: active (running)" line.
    temp=$(systemctl status XrayR | grep Active | awk '{print $3}' | cut -d "(" -f2 | cut -d ")" -f1)
    if [[ x"${temp}" == x"running" ]]; then
        return 0
    else
        return 1
    fi
}
check_enabled() {
temp=$(systemctl is-enabled XrayR)
if [[ x"${temp}" == x"enabled" ]]; then
return 0
else
return 1;
fi
}
check_uninstall() {
check_status
if [[ $? != 2 ]]; then
echo ""
echo -e "${red}XrayR已安装,请不要重复安装${plain}"
if [[ $# == 0 ]]; then
before_show_menu
fi
return 1
else
return 0
fi
}
check_install() {
check_status
if [[ $? == 2 ]]; then
echo ""
echo -e "${red}请先安装XrayR${plain}"
if [[ $# == 0 ]]; then
before_show_menu
fi
return 1
else
return 0
fi
}
show_status() {
check_status
case $? in
0)
echo -e "XrayR状态: ${green}已运行${plain}"
show_enable_status
;;
1)
echo -e "XrayR状态: ${yellow}未运行${plain}"
show_enable_status
;;
2)
echo -e "XrayR状态: ${red}未安装${plain}"
esac
}
show_enable_status() {
check_enabled
if [[ $? == 0 ]]; then
echo -e "是否开机自启: ${green}是${plain}"
else
echo -e "是否开机自启: ${red}否${plain}"
fi
}
show_XrayR_version() {
echo -n "XrayR 版本:"
/usr/local/XrayR/XrayR -version
echo ""
if [[ $# == 0 ]]; then
before_show_menu
fi
}
show_usage() {
echo "XrayR 管理脚本使用方法: "
echo "------------------------------------------"
echo "XrayR - 显示管理菜单 (功能更多)"
echo "XrayR start - 启动 XrayR"
echo "XrayR stop - 停止 XrayR"
echo "XrayR restart - 重启 XrayR"
echo "XrayR status - 查看 XrayR 状态"
echo "XrayR enable - 设置 XrayR 开机自启"
echo "XrayR disable - 取消 XrayR 开机自启"
echo "XrayR log - 查看 XrayR 日志"
echo "XrayR update - 更新 XrayR"
echo "XrayR update x.x.x - 更新 XrayR 指定版本"
echo "XrayR install - 安装 XrayR"
echo "XrayR uninstall - 卸载 XrayR"
echo "XrayR version - 查看 XrayR 版本"
echo "------------------------------------------"
}
# Interactive main menu: print the options and current service status, read a
# choice, and dispatch to the management functions above.
show_menu() {
    echo -e "
${green}XrayR 后端管理脚本,${plain}${red}不适用于docker${plain}
--- https://github.com/nrsxcloud/XrayR-V2Board ---
${green}0.${plain} 修改配置
————————————————
${green}1.${plain} 安装 XrayR
${green}2.${plain} 更新 XrayR
${green}3.${plain} 卸载 XrayR
————————————————
${green}4.${plain} 启动 XrayR
${green}5.${plain} 停止 XrayR
${green}6.${plain} 重启 XrayR
${green}7.${plain} 查看 XrayR 状态
${green}8.${plain} 查看 XrayR 日志
————————————————
${green}9.${plain} 设置 XrayR 开机自启
${green}10.${plain} 取消 XrayR 开机自启
————————————————
${green}11.${plain} 一键安装 bbr (最新内核)
${green}12.${plain} 查看 XrayR 版本
${green}13.${plain} 升级维护脚本
"
    #后续更新可加入上方字符串中
    show_status
    echo && read -p "请输入选择 [0-13]: " num
    case "${num}" in
        0) config
        ;;
        1) check_uninstall && install
        ;;
        2) check_install && update
        ;;
        3) check_install && uninstall
        ;;
        4) check_install && start
        ;;
        5) check_install && stop
        ;;
        6) check_install && restart
        ;;
        7) check_install && status
        ;;
        8) check_install && show_log
        ;;
        9) check_install && enable
        ;;
        10) check_install && disable
        ;;
        11) install_bbr
        ;;
        12) check_install && show_XrayR_version
        ;;
        13) update_shell
        ;;
        # FIX: the error hint said [0-12] while the menu (and the prompt
        # above) go up to 13.
        *) echo -e "${red}请输入正确的数字 [0-13]${plain}"
        ;;
    esac
}
# CLI entry point: with arguments, act as a non-interactive wrapper around the
# functions above; with none, drop into the interactive menu. The literal "0"
# passed to each function suppresses the "return to menu" prompt.
if [[ $# > 0 ]]; then
    case $1 in
        "start") check_install 0 && start 0
        ;;
        "stop") check_install 0 && stop 0
        ;;
        "restart") check_install 0 && restart 0
        ;;
        "status") check_install 0 && status 0
        ;;
        "enable") check_install 0 && enable 0
        ;;
        "disable") check_install 0 && disable 0
        ;;
        "log") check_install 0 && show_log 0
        ;;
        "update") check_install 0 && update 0 $2
        ;;
        "config") config $*
        ;;
        "install") check_uninstall 0 && install 0
        ;;
        "uninstall") check_install 0 && uninstall 0
        ;;
        "version") check_install 0 && show_XrayR_version 0
        ;;
        "update_shell") update_shell
        ;;
        *) show_usage
    esac
else
    show_menu
fi
|
import React from 'react'
const TeamStyle3 = ({title, designation, memberImg, fbLink, twitterLink, linkedInLink}) => {
return (
<div className="team-member-3">
<div className="team-member-img">
<img className="img-fluid" src={memberImg} alt="theshahriyar"/>
</div>
<div className="team-info">
<span className="team-name">{title}</span>
<span className="team-designation">{designation}</span>
</div>
<div className="social-icon">
<ul className="icon">
<li>
<a href={fbLink}><i className="icon-facebook"></i></a>
</li>
<li>
<a href={twitterLink}><i className="icon-twitter"></i></a>
</li>
<li>
<a href={linkedInLink}><i className="icon-linkedin"></i></a>
</li>
</ul>
</div>
</div>
)
}
export default TeamStyle3
|
var Q = require('q');
var rp = require('request-promise');
var _ = require('lodash');
var events = require('events');
// Default in-process cache store: a plain object map exposed through a
// promise-based get/set interface, so it is interchangeable with async
// stores supplied via WebCache's `options.store`.
var WebCacheMem = function () {
  this.client = {};
};

// Resolve with the value stored under `key` (undefined when absent).
// The value is read synchronously and only the resolution is deferred.
WebCacheMem.prototype.get = function (key) {
  var stored = this.client[key];
  return Q.fcall(function () { return stored; });
};

// Store `val` under `key`; resolves with 'OK' (redis-style reply —
// presumably to match alternative redis-backed stores; confirm).
WebCacheMem.prototype.set = function (key, val) {
  this.client[key] = val;
  return Q.fcall(function () { return 'OK'; });
};
// Web page cache / prefetcher.
//   options.store           - alternative backing store (defaults to WebCacheMem)
//   options.refreshInterval - when numeric, re-fetch all seeded URLs every
//                             that many milliseconds
// Emits 'seeded' / 'refreshed' events (see refresh below).
var WebCache = function (options) {
  var opts = options || {};
  this.urls = [];
  this.options = opts;
  this.cache = opts.store || new WebCacheMem();
  this.isReady = false;
  events.EventEmitter.call(this);
  var interval = opts.refreshInterval;
  if (_.isNumber(interval)) {
    var self = this;
    // Periodic refresh only once something has been seeded.
    setInterval(function () {
      if (self.urls.length > 0) self.refresh();
    }, interval);
  }
};
// Inherit EventEmitter. Object.setPrototypeOf is the standard, supported
// equivalent of the deprecated direct `__proto__` assignment.
Object.setPrototypeOf(WebCache.prototype, events.EventEmitter.prototype);
// Record the initial URL list and immediately populate the cache from it.
WebCache.prototype.seed = function (initialUrls) {
  this.urls = initialUrls;
  this.refresh();
};
// Fetch every registered URL and write the response bodies into the backing
// store. Emits 'seeded' after the first pass and 'refreshed' on subsequent
// passes. Individual request failures are logged and skipped, never fatal.
WebCache.prototype.refresh = function () {
  var self = this;
  var start = Date.now();
  var requests = this.urls.map(function (url) {
    var options = {
      uri: url,
      headers: {
        'cache-control': 'no-cache'
      },
      // Pair each body with its URL so the settle handler knows where to store it.
      transform: function (body, response) {
        console.log(url);
        return {
          url: url,
          data: body
        };
      },
      timeout: 300000
    };
    console.log('cache getting: %s', url);
    // Swallow per-request errors; a failed fetch fulfils with undefined.
    return rp(options).catch(function (e) { console.log(e); });
  });
  console.log('cache started and seeding %s items.', requests.length);
  return Q.allSettled(requests).then(function (results) {
    results = _.groupBy(results, 'state');
    results.fulfilled = results.fulfilled || [];
    results.rejected = results.rejected || [];
    // Fix: requests swallowed by the .catch above fulfil with an undefined
    // value; filter them out instead of crashing on r.value.url.
    var successes = results.fulfilled.filter(function (r) { return r.value; });
    var sets = successes.map(function (r) { return self.cache.set(r.value.url, r.value.data); });
    return Q.allSettled(sets).then(function () {
      var duration = Date.now() - start;
      // Fix: was `self.isready` (lowercase), which never updated the
      // `isReady` flag initialised in the constructor.
      if (!self.isReady) {
        console.log('cache seeded with %s successes and %s timeouts in %s milliseconds', successes.length, results.rejected.length, duration);
        self.isReady = true;
        self.emit('seeded');
      } else {
        console.log('cache refreshed with %s successes and %s timeouts in %s milliseconds', successes.length, results.rejected.length, duration);
        self.emit('refreshed');
      }
    });
  }, function (e) { console.log('error:' + e); });
};
// Read-through accessor: resolves with the cached body for `url`
// (undefined/absent when the URL has not been fetched yet).
WebCache.prototype.get = function (url) {
  return this.cache.get(url);
};

// Register `url` for future refreshes (deduplicated) and, when `data` is
// supplied, store it immediately without fetching.
WebCache.prototype.cacheUrl = function (url, data) {
  if (this.urls.indexOf(url) === -1) {
    this.urls.push(url);
  }
  if (data) {
    this.cache.set(url, data);
  }
};
module.exports = WebCache; |
<filename>src/main/java/com/common/system/CommonServletInitializer.java
package com.common.system;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.support.SpringBootServletInitializer;
/**
* Created by Mr.Yangxiufeng on 2017/6/15.
* Time:15:44
* ProjectName:Common-admin
*/
/**
 * Hooks the Spring Boot application into an external servlet container
 * (WAR deployment): the container invokes {@link #configure} in place of a
 * standalone {@code main} method.
 */
public class CommonServletInitializer extends SpringBootServletInitializer {

    /**
     * Registers the application's primary configuration class so the
     * container-managed startup builds the same context as a standalone run.
     */
    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder builder) {
        return builder.sources(CommonAdminApplication.class);
    }
}
|
#!/bin/bash
# Make sure the path below is correctly configured before you run this script.
data_path=<YOUR-PATH>/mulmon_datasets/GQN   # dataset root (placeholder: edit before running)
repo_path=.
log_path=${repo_path}/logs
data_type=gqn_jaco
seed=2020 # random seed
allow_obs=5 # (important) how many observations are available
epoch=3000  # checkpoint epoch passed to --resume_epoch below
which_gpu=0 # GPU index to run on
# Launch MulMON evaluation (testing mode) on the GQN-Jaco data with the
# settings above; outputs are written under ${log_path}.
python demo.py --arch jaco_MulMON --datatype ${data_type} --work_mode testing --gpu ${which_gpu} \
 --input_dir ${data_path} --output_dir ${log_path} --output_name ev${epoch}_obs${allow_obs} \
 --resume_epoch ${epoch} --batch_size 1 --test_batch 5 --vis_batch 5 --analyse_batch 0 --seed ${seed} \
 --num_slots 7 --pixel_sigma 0.1 --latent_dim 32 --view_dim 7 --min_sample_views 1 --max_sample_views 6 --num_vq_show ${allow_obs} --num_mc_samples 20 \
 --query_nll 1.0 --exp_nll 1.0 --exp_attention 1.0 --kl_latent 1.0 --kl_spatial 1.0 \
 --use_bg --traverse_z --traverse_v
|
<reponame>blrhc/apps<filename>packages/ui-params/src/Param/Tuple.tsx
// Copyright 2017-2019 @polkadot/ui-app authors & contributors
// This software may be modified and distributed under the terms
// of the Apache-2.0 license. See the LICENSE file for details.
import { TypeDef } from '@polkadot/types';
import { Props, RawParam } from '../types';
import React from 'react';
import { isUndefined } from '@polkadot/util';
import Bare from './Bare';
import findComponent from './findComponent';
// Local state of the Tuple editor: parallel arrays with one entry per
// tuple element, all derived from the incoming `type` prop.
type State = {
  Components: Array<React.ComponentType<Props>>, // editor component per element
  sub: Array<string>,                            // display label per element (type name)
  subTypes: Array<TypeDef>,                      // full type definition per element
  type?: string,                                 // last seen tuple type, used to detect prop changes
  values: Array<RawParam>                        // current value per element
};
// Parameter input for a tuple type: renders one sub-editor per tuple
// element and reassembles the edited element values into a single RawParam
// reported through props.onChange.
export default class Tuple extends React.PureComponent<Props, State> {
  state: State = {
    Components: [],
    sub: [],
    subTypes: [],
    values: []
  };

  // Rebuild the per-element editors whenever the tuple's type string
  // changes; returns null (no state change) while the type is unchanged.
  static getDerivedStateFromProps ({ defaultValue: { value }, type: { sub, type } }: Props, prevState: State): Partial<State> | null {
    if (type === prevState.type) {
      return null;
    }

    const subTypes = sub && Array.isArray(sub)
      ? sub
      : [];
    // Normalise incoming values: anything without an isValid flag is a raw
    // value and gets wrapped; undefined entries are marked invalid.
    const values = (value as Array<any>).map((value) =>
      isUndefined(value) || isUndefined(value.isValid)
        ? {
          isValid: !isUndefined(value),
          value
        }
        : value
    );

    return {
      Components: subTypes.map((type) => findComponent(type)),
      sub: subTypes.map(({ type }) => type),
      subTypes,
      type,
      values
    };
  }

  render () {
    const { className, isDisabled, style, withLabel } = this.props;
    const { Components, sub, subTypes, values } = this.state;

    return (
      <Bare
        className={className}
        style={style}
      >
        {Components.map((Component, index) => (
          <Component
            defaultValue={values[index] || {}}
            isDisabled={isDisabled}
            key={index}
            label={sub[index]}
            onChange={this.onChange(index)}
            type={subTypes[index]}
            withLabel={withLabel}
          />
        ))}
      </Bare>
    );
  }

  // Change-handler factory: returns a callback that replaces the value at
  // `index`, then reports the whole tuple upward — valid only when every
  // element value is valid.
  private onChange = (index: number) => {
    return (value: RawParam): void => {
      this.setState(
        ({ values }: State) => ({
          values: values.map((svalue, sindex) =>
            (sindex === index)
              ? value
              : svalue
          )}),
        () => {
          const { values } = this.state;
          const { onChange } = this.props;

          onChange && onChange({
            isValid: values.reduce((result: boolean, { isValid }) => result && isValid, true),
            value: values.map(({ value }) => value)
          });
        }
      );
    };
  }
}
|
import { getNodeIdxClassName } from './block';
// Root element of the visual editor's edit mode (null when not mounted).
export const getEditorRoot = () =>
  document.getElementById('VisualEditorEditMode');

// NOTE(review): the trailing `!` asserts the shadow root exists even though
// `?.` admits a missing editor root — callers crash on an unmounted editor;
// confirm this is only called while the editor is mounted.
export const getShadowRoot = () => getEditorRoot()?.shadowRoot!;

// All email-block elements, searched inside the editor's shadow DOM by
// default; pass false to search the light DOM instead.
export const getBlockNodes = (inShadowDom: boolean = true) =>
  Array.from(
    (inShadowDom ? getShadowRoot() : document).querySelectorAll('.email-block')
  );
/**
 * Find the block element carrying the idx-derived class name.
 *
 * @param idx - block index token; a falsy idx short-circuits to null
 * @param inShadowDom - search the editor's shadow DOM (default) or the light DOM
 * @returns the matching element, or null when idx is empty or nothing matches
 */
export const findBlockNodeByIdx = (
  idx: string,
  inShadowDom = true
): HTMLElement | null => {
  if (!idx) return null;
  const idxClassName = getNodeIdxClassName(idx);
  const node = getBlockNodes(inShadowDom).find((item) =>
    item.classList?.contains(idxClassName)
  ) as HTMLElement | undefined;
  // Fix: Array.find yields undefined on a miss; coalesce to null so the
  // declared `HTMLElement | null` contract actually holds.
  return node ?? null;
};
|
<reponame>michelleN/cert-manager
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package acme
import (
"context"
"crypto/rand"
"crypto/x509"
"crypto/x509/pkix"
"encoding/base64"
"encoding/json"
"encoding/pem"
"fmt"
"io/ioutil"
"math/big"
"net/http"
"net/http/httptest"
"reflect"
"strings"
"testing"
"time"
)
// decodeJWSRequest decodes a JWS-encoded request: it reads the JSON
// envelope from the request body, base64url-decodes its payload, and
// unmarshals the payload JSON into v. Any failure aborts the test.
func decodeJWSRequest(t *testing.T, v interface{}, r *http.Request) {
	var env struct{ Payload string }
	if err := json.NewDecoder(r.Body).Decode(&env); err != nil {
		t.Fatal(err)
	}
	raw, err := base64.RawURLEncoding.DecodeString(env.Payload)
	if err != nil {
		t.Fatal(err)
	}
	if err := json.Unmarshal(raw, v); err != nil {
		t.Fatal(err)
	}
}
type jwsHead struct {
Alg string
Nonce string
JWK map[string]string `json:"jwk"`
}
func decodeJWSHead(r *http.Request) (*jwsHead, error) {
var req struct{ Protected string }
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
return nil, err
}
b, err := base64.RawURLEncoding.DecodeString(req.Protected)
if err != nil {
return nil, err
}
var head jwsHead
if err := json.Unmarshal(b, &head); err != nil {
return nil, err
}
return &head, nil
}
// TestDiscover verifies that Client.Discover maps every endpoint of the
// directory JSON onto the Directory struct.
func TestDiscover(t *testing.T) {
	const (
		keyChange  = "https://example.com/acme/key-change"
		newAccount = "https://example.com/acme/new-account"
		newNonce   = "https://example.com/acme/new-nonce"
		newOrder   = "https://example.com/acme/new-order"
		revokeCert = "https://example.com/acme/revoke-cert"
		terms      = "https://example.com/acme/terms"
	)
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		fmt.Fprintf(w, `{
			"keyChange": %q,
			"newAccount": %q,
			"newNonce": %q,
			"newOrder": %q,
			"revokeCert": %q,
			"meta": {
				"termsOfService": %q
			}
		}`, keyChange, newAccount, newNonce, newOrder, revokeCert, terms)
	}))
	defer ts.Close()
	c := Client{DirectoryURL: ts.URL}
	dir, err := c.Discover(context.Background())
	if err != nil {
		t.Fatal(err)
	}
	if dir.KeyChangeURL != keyChange {
		t.Errorf("dir.KeyChangeURL = %q; want %q", dir.KeyChangeURL, keyChange)
	}
	if dir.NewAccountURL != newAccount {
		t.Errorf("dir.NewAccountURL = %q; want %q", dir.NewAccountURL, newAccount)
	}
	if dir.NewNonceURL != newNonce {
		t.Errorf("dir.NewNonceURL = %q; want %q", dir.NewNonceURL, newNonce)
	}
	// Fix: newOrder was declared and served but never asserted.
	if dir.NewOrderURL != newOrder {
		t.Errorf("dir.NewOrderURL = %q; want %q", dir.NewOrderURL, newOrder)
	}
	if dir.RevokeCertURL != revokeCert {
		t.Errorf("dir.RevokeCertURL = %q; want %q", dir.RevokeCertURL, revokeCert)
	}
	if dir.Terms != terms {
		t.Errorf("dir.Terms = %q; want %q", dir.Terms, terms)
	}
}
// TestCreateAccount verifies that registration POSTs the contacts and the
// ToS agreement as a JWS body, and that the Location header and response
// body are mapped back onto the returned Account.
func TestCreateAccount(t *testing.T) {
	contacts := []string{"mailto:<EMAIL>"}
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "test-nonce")
			return
		}
		if r.Method != "POST" {
			t.Errorf("r.Method = %q; want POST", r.Method)
		}
		var j struct {
			Contact              []string
			TermsOfServiceAgreed bool
		}
		decodeJWSRequest(t, &j, r)
		if !reflect.DeepEqual(j.Contact, contacts) {
			t.Errorf("j.Contact = %v; want %v", j.Contact, contacts)
		}
		if !j.TermsOfServiceAgreed {
			t.Error("j.TermsOfServiceAgreed = false; want true")
		}
		w.Header().Set("Location", "https://example.com/acme/account/1")
		w.WriteHeader(http.StatusCreated)
		b, _ := json.Marshal(contacts)
		fmt.Fprintf(w, `{"status":"valid","orders":"https://example.com/acme/orders","contact":%s}`, b)
	}))
	defer ts.Close()
	c := Client{Key: testKeyEC, dir: &Directory{NewAccountURL: ts.URL, NewNonceURL: ts.URL}}
	a := &Account{Contact: contacts, TermsAgreed: true}
	var err error
	if a, err = c.CreateAccount(context.Background(), a); err != nil {
		t.Fatal(err)
	}
	// Account URL comes from the Location header, not the body.
	if a.URL != "https://example.com/acme/account/1" {
		t.Errorf("a.URL = %q; want https://example.com/acme/account/1", a.URL)
	}
	if a.OrdersURL != "https://example.com/acme/orders" {
		t.Errorf("a.OrdersURL = %q; want https://example.com/acme/orders", a.OrdersURL)
	}
	if a.Status != StatusValid {
		t.Errorf("a.Status = %q; want valid", a.Status)
	}
	if !reflect.DeepEqual(a.Contact, contacts) {
		t.Errorf("a.Contact = %v; want %v", a.Contact, contacts)
	}
}
// TestUpdateAccount verifies that an account update POSTs the new contacts
// to the account URL and keeps that URL on the returned Account.
func TestUpdateAccount(t *testing.T) {
	contacts := []string{"mailto:<EMAIL>"}
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "test-nonce")
			return
		}
		if r.Method != "POST" {
			t.Errorf("r.Method = %q; want POST", r.Method)
		}
		var j struct {
			Contact []string
		}
		decodeJWSRequest(t, &j, r)
		if !reflect.DeepEqual(j.Contact, contacts) {
			t.Errorf("j.Contact = %v; want %v", j.Contact, contacts)
		}
		b, _ := json.Marshal(contacts)
		fmt.Fprintf(w, `{"status":"valid","orders":"https://example.com/acme/orders","contact":%s}`, b)
	}))
	defer ts.Close()
	c := Client{Key: testKeyEC, dir: &Directory{NewNonceURL: ts.URL}}
	a := &Account{URL: ts.URL, Contact: contacts}
	var err error
	if a, err = c.UpdateAccount(context.Background(), a); err != nil {
		t.Fatal(err)
	}
	if a.OrdersURL != "https://example.com/acme/orders" {
		t.Errorf("a.OrdersURL = %q; want https://example.com/acme/orders", a.OrdersURL)
	}
	if a.Status != StatusValid {
		t.Errorf("a.Status = %q; want valid", a.Status)
	}
	if !reflect.DeepEqual(a.Contact, contacts) {
		t.Errorf("a.Contact = %v; want %v", a.Contact, contacts)
	}
	if a.URL != ts.URL {
		t.Errorf("a.URL = %q; want %q", a.URL, ts.URL)
	}
}
// TestGetAccount verifies the lookup flow: the client first POSTs with
// onlyReturnExisting to resolve the account URL (answered via Location),
// then fetches the account body from that URL.
func TestGetAccount(t *testing.T) {
	contacts := []string{"mailto:<EMAIL>"}
	var ts *httptest.Server
	ts = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "test-nonce")
			return
		}
		if r.Method != "POST" {
			t.Errorf("r.Method = %q; want POST", r.Method)
		}
		var req struct {
			Existing bool `json:"onlyReturnExisting"`
		}
		decodeJWSRequest(t, &req, r)
		if req.Existing {
			// First round-trip: answer only with the account's URL.
			w.Header().Set("Location", ts.URL)
			w.WriteHeader(http.StatusOK)
			return
		}
		b, _ := json.Marshal(contacts)
		fmt.Fprintf(w, `{"status":"valid","orders":"https://example.com/acme/orders","contact":%s}`, b)
	}))
	defer ts.Close()
	c := Client{Key: testKeyEC, dir: &Directory{NewNonceURL: ts.URL, NewAccountURL: ts.URL}}
	a, err := c.GetAccount(context.Background())
	if err != nil {
		t.Fatal(err)
	}
	if a.OrdersURL != "https://example.com/acme/orders" {
		t.Errorf("a.OrdersURL = %q; want https://example.com/acme/orders", a.OrdersURL)
	}
	if a.Status != StatusValid {
		t.Errorf("a.Status = %q; want valid", a.Status)
	}
	if !reflect.DeepEqual(a.Contact, contacts) {
		t.Errorf("a.Contact = %v; want %v", a.Contact, contacts)
	}
	if a.URL != ts.URL {
		t.Errorf("a.URL = %q; want %q", a.URL, ts.URL)
	}
}
// TestCreateOrder verifies that a new order POSTs the dns identifier as a
// JWS body and that the response (plus Location header) is mapped onto the
// returned Order.
func TestCreateOrder(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "test-nonce")
			return
		}
		if r.Method != "POST" {
			t.Errorf("r.Method = %q; want POST", r.Method)
		}
		var j struct {
			Identifiers []struct {
				Type  string
				Value string
			}
		}
		decodeJWSRequest(t, &j, r)
		// Test request
		if len(j.Identifiers) != 1 {
			t.Errorf("len(j.Identifiers) = %d; want 1", len(j.Identifiers))
		}
		if j.Identifiers[0].Type != "dns" {
			t.Errorf("j.Identifier.Type = %q; want dns", j.Identifiers[0].Type)
		}
		if j.Identifiers[0].Value != "example.com" {
			t.Errorf("j.Identifier.Value = %q; want example.com", j.Identifiers[0].Value)
		}
		w.Header().Set("Location", "https://example.com/acme/order/1")
		w.WriteHeader(http.StatusCreated)
		fmt.Fprintf(w, `{
			"identifiers": [{"type":"dns","value":"example.com"}],
			"status":"pending",
			"authorizations":["https://example.com/acme/order/1/1"],
			"finalize":"https://example.com/acme/order/1/finalize"
		}`)
	}))
	defer ts.Close()
	cl := Client{Key: testKeyEC, accountURL: "https://example.com/acme/account", dir: &Directory{NewOrderURL: ts.URL, NewNonceURL: ts.URL}}
	o, err := cl.CreateOrder(context.Background(), NewOrder("example.com"))
	if err != nil {
		t.Fatal(err)
	}
	// Order URL comes from the Location header.
	if o.URL != "https://example.com/acme/order/1" {
		t.Errorf("URL = %q; want https://example.com/acme/order/1", o.URL)
	}
	if o.Status != "pending" {
		t.Errorf("Status = %q; want pending", o.Status)
	}
	if o.FinalizeURL != "https://example.com/acme/order/1/finalize" {
		t.Errorf("FinalizeURL = %q; want https://example.com/acme/order/1/finalize", o.FinalizeURL)
	}
	if n := len(o.Identifiers); n != 1 {
		t.Fatalf("len(o.Identifiers) = %d; want 1", n)
	}
	if o.Identifiers[0].Type != "dns" {
		t.Errorf("Identifiers[0].Type = %q; want dns", o.Identifiers[0].Type)
	}
	if o.Identifiers[0].Value != "example.com" {
		t.Errorf("Identifiers[0].Value = %q; want example.com", o.Identifiers[0].Value)
	}
	if n := len(o.Authorizations); n != 1 {
		t.Fatalf("len(o.Authorizations) = %d; want 1", n)
	}
	if o.Authorizations[0] != "https://example.com/acme/order/1/1" {
		t.Errorf("o.Authorizations[0] = %q; https://example.com/acme/order/1/1", o.Authorizations[0])
	}
}
// TestGetAuthorization verifies that an authorization GET maps the
// identifier and both challenges onto the returned Authorization.
// Fix: the served challenge tokens were corrupted placeholders; they must
// be token1/token2 to match the assertions below (the second Token check
// already expected "token2" and could never pass).
func TestGetAuthorization(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Method != "GET" {
			t.Errorf("r.Method = %q; want GET", r.Method)
		}
		w.WriteHeader(http.StatusOK)
		fmt.Fprintf(w, `{
			"identifier": {"type":"dns","value":"example.com"},
			"status":"pending",
			"challenges":[
				{
					"type":"http-01",
					"status":"pending",
					"url":"https://example.com/acme/challenge/publickey/id1",
					"token":"token1"
				},
				{
					"type":"tls-sni-02",
					"status":"pending",
					"url":"https://example.com/acme/challenge/publickey/id2",
					"token":"token2"
				}
			]
		}`)
	}))
	defer ts.Close()
	cl := Client{Key: testKeyEC, dir: &Directory{NewNonceURL: ts.URL}}
	auth, err := cl.GetAuthorization(context.Background(), ts.URL)
	if err != nil {
		t.Fatal(err)
	}
	if auth.Status != "pending" {
		t.Errorf("Status = %q; want pending", auth.Status)
	}
	if auth.Identifier.Type != "dns" {
		t.Errorf("Identifier.Type = %q; want dns", auth.Identifier.Type)
	}
	if auth.Identifier.Value != "example.com" {
		t.Errorf("Identifier.Value = %q; want example.com", auth.Identifier.Value)
	}
	if n := len(auth.Challenges); n != 2 {
		t.Fatalf("len(set.Challenges) = %d; want 2", n)
	}
	c := auth.Challenges[0]
	if c.Type != "http-01" {
		t.Errorf("c.Type = %q; want http-01", c.Type)
	}
	if c.URL != "https://example.com/acme/challenge/publickey/id1" {
		t.Errorf("c.URL = %q; want https://example.com/acme/challenge/publickey/id1", c.URL)
	}
	if c.Token != "token1" {
		t.Errorf("c.Token = %q; want token1", c.Token)
	}
	c = auth.Challenges[1]
	if c.Type != "tls-sni-02" {
		t.Errorf("c.Type = %q; want tls-sni-02", c.Type)
	}
	if c.URL != "https://example.com/acme/challenge/publickey/id2" {
		t.Errorf("c.URL = %q; want https://example.com/acme/challenge/publickey/id2", c.URL)
	}
	if c.Token != "token2" {
		t.Errorf("c.Token = %q; want token2", c.Token)
	}
}
// TestWaitAuthorization verifies that WaitAuthorization polls (honouring
// Retry-After) until the authorization turns valid, and returns it.
func TestWaitAuthorization(t *testing.T) {
	var count int
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		count++
		// Retry-After: 0 keeps the polling loop fast.
		w.Header().Set("Retry-After", "0")
		if count > 1 {
			fmt.Fprintf(w, `{"status":"valid"}`)
			return
		}
		fmt.Fprintf(w, `{"status":"pending"}`)
	}))
	defer ts.Close()
	type res struct {
		authz *Authorization
		err   error
	}
	done := make(chan res)
	defer close(done)
	go func() {
		var client Client
		a, err := client.WaitAuthorization(context.Background(), ts.URL)
		done <- res{a, err}
	}()
	// Guard against WaitAuthorization never returning.
	select {
	case <-time.After(5 * time.Second):
		t.Fatal("WaitAuthz took too long to return")
	case res := <-done:
		if res.err != nil {
			t.Fatalf("res.err = %v", res.err)
		}
		if res.authz == nil {
			t.Fatal("res.authz is nil")
		}
	}
}
// TestWaitAuthorizationInvalid verifies that an "invalid" authorization
// makes WaitAuthorization return an AuthorizationError rather than polling
// forever.
func TestWaitAuthorizationInvalid(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, `{"status":"invalid"}`)
	}))
	defer ts.Close()
	res := make(chan error)
	defer close(res)
	go func() {
		var client Client
		_, err := client.WaitAuthorization(context.Background(), ts.URL)
		res <- err
	}()
	select {
	case <-time.After(3 * time.Second):
		t.Fatal("WaitAuthz took too long to return")
	case err := <-res:
		if err == nil {
			t.Error("err is nil")
		}
		// NOTE(review): asserts the value type AuthorizationError while the
		// message says *AuthorizationError — confirm which one the client
		// actually returns and align the two.
		if _, ok := err.(AuthorizationError); !ok {
			t.Errorf("err is %T; want *AuthorizationError", err)
		}
	}
}
// TestWaitAuthorizationClientError verifies that a 4xx response surfaces as
// an *Error carrying the HTTP status code instead of being retried.
func TestWaitAuthorizationClientError(t *testing.T) {
	const code = http.StatusBadRequest
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(code)
	}))
	defer ts.Close()
	ch := make(chan error, 1)
	go func() {
		var client Client
		_, err := client.WaitAuthorization(context.Background(), ts.URL)
		ch <- err
	}()
	select {
	case <-time.After(3 * time.Second):
		t.Fatal("WaitAuthz took too long to return")
	case err := <-ch:
		res, ok := err.(*Error)
		if !ok {
			t.Fatalf("err is %v (%T); want a non-nil *Error", err, err)
		}
		if res.StatusCode != code {
			t.Errorf("res.StatusCode = %d; want %d", res.StatusCode, code)
		}
	}
}
// TestWaitAuthorizationCancel verifies that context cancellation aborts the
// wait: the server always answers "pending" with a 60s Retry-After, so only
// the context timeout can make WaitAuthorization return.
func TestWaitAuthorizationCancel(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Retry-After", "60")
		fmt.Fprintf(w, `{"status":"pending"}`)
	}))
	defer ts.Close()
	res := make(chan error)
	defer close(res)
	go func() {
		var client Client
		ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
		defer cancel()
		_, err := client.WaitAuthorization(ctx, ts.URL)
		res <- err
	}()
	select {
	case <-time.After(time.Second):
		t.Fatal("WaitAuthz took too long to return")
	case err := <-res:
		if err == nil {
			t.Error("err is nil")
		}
	}
}
// TestDeactivateAuthorization verifies that deactivation POSTs
// {"status":"deactivated"} (path /1) and that a server error (path /2)
// surfaces as a non-nil error; /account serves the account-URL lookup.
func TestDeactivateAuthorization(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "nonce")
			return
		}
		switch r.URL.Path {
		case "/1":
			var req struct {
				Status string
			}
			decodeJWSRequest(t, &req, r)
			if req.Status != "deactivated" {
				t.Errorf("req.Status = %q; want deactivated", req.Status)
			}
		case "/2":
			w.WriteHeader(http.StatusInternalServerError)
		case "/account":
			w.Header().Set("Location", "https://example.com/acme/account/0")
			w.Write([]byte("{}"))
		}
	}))
	defer ts.Close()
	client := &Client{Key: testKey, dir: &Directory{NewNonceURL: ts.URL, NewAccountURL: ts.URL + "/account"}}
	ctx := context.Background()
	if err := client.DeactivateAuthorization(ctx, ts.URL+"/1"); err != nil {
		t.Errorf("err = %v", err)
	}
	if client.DeactivateAuthorization(ctx, ts.URL+"/2") == nil {
		t.Error("nil error")
	}
}
// TestGetChallenge verifies that a challenge GET maps status, type, URL,
// token, validation time, and the nested error (with subproblems) onto the
// returned Challenge.
// Fix: two failure messages lied about the expectation — the Detail check
// for "invalid" said "want rejected", and the Identifier.Value check was
// labelled Identifier.Type.
func TestGetChallenge(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Method != "GET" {
			t.Errorf("r.Method = %q; want GET", r.Method)
		}
		w.WriteHeader(http.StatusOK)
		fmt.Fprintf(w, `{
			"type":"http-01",
			"status":"pending",
			"url":"https://example.com/acme/challenge/publickey/id1",
			"validated": "2014-12-01T12:05:00Z",
			"error": {
				"type": "urn:ietf:params:acme:error:malformed",
				"detail": "rejected",
				"subproblems": [
					{
						"type": "urn:ietf:params:acme:error:unknown",
						"detail": "invalid",
						"identifier": {
							"type": "dns",
							"value": "_example.com"
						}
					}
				]
			},
			"token":"token1"}`)
	}))
	defer ts.Close()
	cl := Client{Key: testKeyEC}
	chall, err := cl.GetChallenge(context.Background(), ts.URL)
	if err != nil {
		t.Fatal(err)
	}
	if chall.Status != "pending" {
		t.Errorf("Status = %q; want pending", chall.Status)
	}
	if chall.Type != "http-01" {
		t.Errorf("c.Type = %q; want http-01", chall.Type)
	}
	if chall.URL != "https://example.com/acme/challenge/publickey/id1" {
		t.Errorf("c.URL = %q; want https://example.com/acme/challenge/publickey/id1", chall.URL)
	}
	if chall.Token != "token1" {
		t.Errorf("c.Token = %q; want token1", chall.Token)
	}
	vt, _ := time.Parse(time.RFC3339, "2014-12-01T12:05:00Z")
	if !chall.Validated.Equal(vt) {
		t.Errorf("c.Validated = %v; want %v", chall.Validated, vt)
	}
	e := chall.Error
	if e.Type != "urn:ietf:params:acme:error:malformed" {
		t.Fatalf("e.Type = %q; want urn:ietf:params:acme:error:malformed", e.Type)
	}
	if e.Detail != "rejected" {
		t.Fatalf("e.Detail = %q; want rejected", e.Detail)
	}
	if l := len(e.Subproblems); l != 1 {
		t.Fatalf("len(e.Subproblems) = %d; want 1", l)
	}
	p := e.Subproblems[0]
	if p.Type != "urn:ietf:params:acme:error:unknown" {
		t.Fatalf("p.Type = %q; want urn:ietf:params:acme:error:unknown", p.Type)
	}
	if p.Detail != "invalid" {
		t.Fatalf("p.Detail = %q; want invalid", p.Detail)
	}
	if p.Identifier.Type != "dns" {
		t.Fatalf("p.Identifier.Type = %q; want dns", p.Identifier.Type)
	}
	if p.Identifier.Value != "_example.com" {
		t.Fatalf("p.Identifier.Value = %q; want _example.com", p.Identifier.Value)
	}
}
// TestAcceptChallenge verifies that accepting a challenge POSTs the key
// authorization (token + account-key thumbprint) and maps the response
// onto the returned Challenge.
func TestAcceptChallenge(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "test-nonce")
			return
		}
		if r.Method != "POST" {
			t.Errorf("r.Method = %q; want POST", r.Method)
		}
		var j struct {
			Auth string `json:"keyAuthorization"`
		}
		decodeJWSRequest(t, &j, r)
		// keyAuthorization must be "<token>.<thumbprint of account key>".
		keyAuth := "token1." + testKeyECThumbprint
		if j.Auth != keyAuth {
			t.Errorf(`keyAuthorization = %q; want %q`, j.Auth, keyAuth)
		}
		// Respond to request
		w.WriteHeader(http.StatusOK)
		fmt.Fprintf(w, `{
			"type":"http-01",
			"status":"pending",
			"url":"https://example.com/acme/challenge/publickey/id1",
			"token":"token1",
			"keyAuthorization":%q
		}`, keyAuth)
	}))
	defer ts.Close()
	cl := Client{Key: testKeyEC, accountURL: "https://example.com/acme/account", dir: &Directory{NewNonceURL: ts.URL}}
	c, err := cl.AcceptChallenge(context.Background(), &Challenge{
		URL:   ts.URL,
		Token: "token1",
	})
	if err != nil {
		t.Fatal(err)
	}
	if c.Type != "http-01" {
		t.Errorf("c.Type = %q; want http-01", c.Type)
	}
	if c.URL != "https://example.com/acme/challenge/publickey/id1" {
		t.Errorf("c.URL = %q; want https://example.com/acme/challenge/publickey/id1", c.URL)
	}
	if c.Token != "token1" {
		t.Errorf("c.Token = %q; want token1", c.Token)
	}
}
// TestFinalizeOrder drives the full finalize flow: POST the CSR, poll the
// order at /order until it turns "valid", then download the certificate
// from /cert.
// NOTE(review): this overrides the package-level timeNow hook without
// restoring it, which can leak into later tests — consider a deferred
// restore. Also, t.Fatalf inside the handler runs on the server goroutine,
// where Fatal is not allowed by the testing package; t.Errorf+return would
// be correct.
func TestFinalizeOrder(t *testing.T) {
	notBefore := time.Now()
	notAfter := notBefore.AddDate(0, 2, 0)
	timeNow = func() time.Time { return notBefore }
	var sampleCert []byte
	var ts *httptest.Server
	var orderGets int
	ts = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "test-nonce")
			return
		}
		if r.URL.Path == "/cert" && r.Method == "GET" {
			pem.Encode(w, &pem.Block{Type: "CERTIFICATE", Bytes: sampleCert})
			return
		}
		if r.URL.Path == "/order" {
			// First poll sees "processing", subsequent polls see "valid".
			status := "processing"
			if orderGets > 0 {
				status = "valid"
			}
			fmt.Fprintf(w, `{
				"identifiers": [{"type":"dns","value":"example.com"}],
				"status":%q,
				"authorizations":["https://example.com/acme/order/1/1"],
				"finalize":"https://example.com/acme/order/1/finalize",
				"certificate":%q
			}`, status, ts.URL+"/cert")
			orderGets++
			return
		}
		if r.Method != "POST" {
			t.Errorf("r.Method = %q; want POST", r.Method)
		}
		var j struct {
			CSR string `json:"csr"`
		}
		decodeJWSRequest(t, &j, r)
		// Issue a self-signed certificate for the finalize response.
		template := x509.Certificate{
			SerialNumber: big.NewInt(int64(1)),
			Subject: pkix.Name{
				Organization: []string{"goacme"},
			},
			NotBefore: notBefore,
			NotAfter:  notAfter,
			KeyUsage:    x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
			ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
			BasicConstraintsValid: true,
		}
		var err error
		sampleCert, err = x509.CreateCertificate(rand.Reader, &template, &template, &testKeyEC.PublicKey, testKeyEC)
		if err != nil {
			t.Fatalf("Error creating certificate: %v", err)
		}
		w.Header().Set("Location", "/order")
		fmt.Fprintf(w, `{
			"identifiers": [{"type":"dns","value":"example.com"}],
			"status":"processing",
			"authorizations":["https://example.com/acme/order/1/1"],
			"finalize":"https://example.com/acme/order/1/finalize"
		}`)
	}))
	defer ts.Close()
	csr := x509.CertificateRequest{
		Version: 0,
		Subject: pkix.Name{
			CommonName:   "example.com",
			Organization: []string{"goacme"},
		},
	}
	csrb, err := x509.CreateCertificateRequest(rand.Reader, &csr, testKeyEC)
	if err != nil {
		t.Fatal(err)
	}
	c := Client{Key: testKeyEC, accountURL: "https://example.com/acme/account", dir: &Directory{NewNonceURL: ts.URL}}
	cert, err := c.FinalizeOrder(context.Background(), ts.URL, csrb)
	if err != nil {
		t.Fatal(err)
	}
	if cert == nil {
		t.Errorf("cert is nil")
	}
}
// TestWaitOrderInvalid verifies the two terminal-failure paths of
// WaitOrder: a pending order yields OrderPendingError, an invalid order
// yields OrderInvalidError, and both carry the fetched Order.
func TestWaitOrderInvalid(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "nonce")
			return
		}
		const order = `{"status":%q}`
		if r.URL.Path == "/invalid" {
			fmt.Fprintf(w, order, "invalid")
		}
		if r.URL.Path == "/pending" {
			fmt.Fprintf(w, order, "pending")
		}
	}))
	defer ts.Close()
	var client Client
	_, err := client.WaitOrder(context.Background(), ts.URL+"/pending")
	if e, ok := err.(OrderPendingError); ok {
		if e.Order == nil {
			t.Error("order is nil")
		}
		if e.Order.Status != "pending" {
			t.Errorf("status = %q; want pending", e.Order.Status)
		}
	} else if err != nil {
		t.Error(err)
	}
	_, err = client.WaitOrder(context.Background(), ts.URL+"/invalid")
	if e, ok := err.(OrderInvalidError); ok {
		if e.Order == nil {
			t.Error("order is nil")
		}
		if e.Order.Status != "invalid" {
			t.Errorf("status = %q; want invalid", e.Order.Status)
		}
	} else if err != nil {
		t.Error(err)
	}
}
// TestGetOrder verifies that fetching an order maps every field of the
// response onto the returned Order and records the fetch URL.
// Fix: the CertificateURL failure message was mislabelled "FinalizeURL".
func TestGetOrder(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, `{
			"identifiers": [{"type":"dns","value":"example.com"}],
			"status":"valid",
			"authorizations":["https://example.com/acme/order/1/1"],
			"finalize":"https://example.com/acme/order/1/finalize",
			"certificate":"https://example.com/acme/cert"
		}`)
	}))
	defer ts.Close()
	var client Client
	o, err := client.GetOrder(context.Background(), ts.URL)
	if err != nil {
		t.Fatal(err)
	}
	if o.URL != ts.URL {
		t.Errorf("URL = %q; want %s", o.URL, ts.URL)
	}
	if o.Status != "valid" {
		t.Errorf("Status = %q; want valid", o.Status)
	}
	if l := len(o.Authorizations); l != 1 {
		t.Errorf("len(Authorizations) = %d; want 1", l)
	}
	if v := o.Authorizations[0]; v != "https://example.com/acme/order/1/1" {
		t.Errorf("Authorizations[0] = %q; want https://example.com/acme/order/1/1", v)
	}
	if l := len(o.Identifiers); l != 1 {
		t.Errorf("len(Identifiers) = %d; want 1", l)
	}
	if v := o.Identifiers[0].Type; v != "dns" {
		t.Errorf("Identifiers[0].Type = %q; want dns", v)
	}
	if v := o.Identifiers[0].Value; v != "example.com" {
		t.Errorf("Identifiers[0].Value = %q; want example.com", v)
	}
	if o.FinalizeURL != "https://example.com/acme/order/1/finalize" {
		t.Errorf("FinalizeURL = %q; want https://example.com/acme/order/1/finalize", o.FinalizeURL)
	}
	if o.CertificateURL != "https://example.com/acme/cert" {
		t.Errorf("CertificateURL = %q; want https://example.com/acme/cert", o.CertificateURL)
	}
}
// TestRevokeCert verifies that revocation POSTs the base64url-encoded DER
// certificate together with the CRL reason code.
func TestRevokeCert(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// HEAD is the client fetching a fresh anti-replay nonce.
		if r.Method == "HEAD" {
			w.Header().Set("Replay-Nonce", "nonce")
			return
		}
		var req struct {
			Certificate string
			Reason      int
		}
		decodeJWSRequest(t, &req, r)
		// 1 == CRLReasonKeyCompromise.
		if req.Reason != 1 {
			t.Errorf("req.Reason = %d; want 1", req.Reason)
		}
		// echo -n cert | base64 | tr -d '=' | tr '/+' '_-'
		cert := "Y2VydA"
		if req.Certificate != cert {
			t.Errorf("req.Certificate = %q; want %q", req.Certificate, cert)
		}
	}))
	defer ts.Close()
	client := &Client{Key: testKeyEC, accountURL: "https://example.com/acme/account", dir: &Directory{RevokeCertURL: ts.URL, NewNonceURL: ts.URL}}
	ctx := context.Background()
	if err := client.RevokeCert(ctx, nil, []byte("cert"), CRLReasonKeyCompromise); err != nil {
		t.Fatal(err)
	}
}
// TestNonce_add verifies that addNonce stores a Replay-Nonce header value,
// ignores headers without one, and deduplicates repeats.
func TestNonce_add(t *testing.T) {
	var c Client
	c.addNonce(http.Header{"Replay-Nonce": {"nonce"}})
	c.addNonce(http.Header{"Replay-Nonce": {}})
	c.addNonce(http.Header{"Replay-Nonce": {"nonce"}})
	nonces := map[string]struct{}{"nonce": {}}
	if !reflect.DeepEqual(c.nonces, nonces) {
		t.Errorf("c.nonces = %q; want %q", c.nonces, nonces)
	}
}
// TestNonce_addMax verifies that the nonce pool is capped: once maxNonces
// entries exist, addNonce does not grow the pool further.
func TestNonce_addMax(t *testing.T) {
	c := &Client{nonces: make(map[string]struct{})}
	for i := 0; i < maxNonces; i++ {
		c.nonces[fmt.Sprintf("%d", i)] = struct{}{}
	}
	c.addNonce(http.Header{"Replay-Nonce": {"nonce"}})
	if n := len(c.nonces); n != maxNonces {
		t.Errorf("len(c.nonces) = %d; want %d", n, maxNonces)
	}
}
// TestNonce_fetch table-tests fetchNonce: a nonce must come back regardless
// of status code, and an empty Replay-Nonce header must yield an error.
// The handler and the loop share the index i; requests are sequential, so
// the handler always sees the current case.
func TestNonce_fetch(t *testing.T) {
	tests := []struct {
		code  int
		nonce string
	}{
		{http.StatusOK, "nonce1"},
		{http.StatusBadRequest, "nonce2"},
		{http.StatusOK, ""},
	}
	var i int
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Nonces are fetched with HEAD only.
		if r.Method != "HEAD" {
			t.Errorf("%d: r.Method = %q; want HEAD", i, r.Method)
		}
		w.Header().Set("Replay-Nonce", tests[i].nonce)
		w.WriteHeader(tests[i].code)
	}))
	defer ts.Close()
	for ; i < len(tests); i++ {
		test := tests[i]
		c := &Client{dir: &Directory{NewNonceURL: ts.URL}}
		n, err := c.fetchNonce(context.Background())
		if n != test.nonce {
			t.Errorf("%d: n=%q; want %q", i, n, test.nonce)
		}
		switch {
		case err == nil && test.nonce == "":
			t.Errorf("%d: n=%q, err=%v; want non-nil error", i, n, err)
		case err != nil && test.nonce != "":
			t.Errorf("%d: n=%q, err=%v; want %q", i, n, err, test.nonce)
		}
	}
}
// TestNonce_fetchError verifies that a non-2xx nonce response surfaces
// as an *Error carrying the HTTP status code.
func TestNonce_fetchError(t *testing.T) {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusTooManyRequests)
	}))
	defer srv.Close()
	client := &Client{dir: &Directory{NewNonceURL: srv.URL}}
	_, err := client.fetchNonce(context.Background())
	acmeErr, ok := err.(*Error)
	if !ok {
		t.Fatalf("err is %T; want *Error", err)
	}
	if acmeErr.StatusCode != http.StatusTooManyRequests {
		t.Errorf("e.StatusCode = %d; want %d", acmeErr.StatusCode, http.StatusTooManyRequests)
	}
}
// TestNonce_postJWS verifies nonce bookkeeping around JWS-signed POSTs:
// exactly one initial HEAD fetch, every POST carries a fresh (unused)
// nonce, and nonces returned in responses are pooled for later calls.
func TestNonce_postJWS(t *testing.T) {
	var count int
	seen := make(map[string]bool)
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		count++
		w.Header().Set("Replay-Nonce", fmt.Sprintf("nonce%d", count))
		if r.Method == "HEAD" {
			// We expect the client do a HEAD request
			// but only to fetch the first nonce.
			return
		}
		// Make client.CreateOrder happy; we're not testing its result.
		defer func() {
			w.Header().Set("Location", "https://example.com/acme/order/1")
			w.WriteHeader(http.StatusCreated)
			w.Write([]byte(`{"status":"valid"}`))
		}()
		head, err := decodeJWSHead(r)
		if err != nil {
			t.Errorf("decodeJWSHead: %v", err)
			return
		}
		if head.Nonce == "" {
			t.Error("head.Nonce is empty")
			return
		}
		// Each nonce may be consumed at most once.
		if seen[head.Nonce] {
			t.Errorf("nonce is already used: %q", head.Nonce)
		}
		seen[head.Nonce] = true
	}))
	defer ts.Close()
	client := Client{Key: testKey, accountURL: "https://example.com/acme/account", dir: &Directory{NewOrderURL: ts.URL, NewNonceURL: ts.URL}}
	if _, err := client.CreateOrder(context.Background(), NewOrder("example.com")); err != nil {
		t.Errorf("client.CreateOrder 1: %v", err)
	}
	// The second call should not generate another extra HEAD request.
	if _, err := client.CreateOrder(context.Background(), NewOrder("example.com")); err != nil {
		t.Errorf("client.CreateOrder 2: %v", err)
	}
	// 1 HEAD + 2 POSTs.
	if count != 3 {
		t.Errorf("total requests count: %d; want 3", count)
	}
	// Only the nonce from the final response should remain pooled.
	if n := len(client.nonces); n != 1 {
		t.Errorf("len(client.nonces) = %d; want 1", n)
	}
	// Consumed nonces must have been removed from the pool.
	for k := range seen {
		if _, exist := client.nonces[k]; exist {
			t.Errorf("used nonce %q in client.nonces", k)
		}
	}
}
// TestRetryPostJWS verifies that a POST rejected with the ACME badNonce
// error is retried transparently with a freshly fetched nonce.
// Expected traffic: HEAD (first nonce), POST (rejected with badNonce),
// HEAD (replacement nonce), POST (succeeds) — four requests total.
func TestRetryPostJWS(t *testing.T) {
	var count int
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		count++
		w.Header().Set("Replay-Nonce", fmt.Sprintf("nonce%d", count))
		if r.Method == "HEAD" {
			// We expect the client to do 2 head requests to fetch
			// nonces, one to start and another after getting badNonce
			return
		}
		head, err := decodeJWSHead(r)
		if err != nil {
			t.Errorf("decodeJWSHead: %v", err)
		} else if head.Nonce == "" {
			t.Error("head.Nonce is empty")
		} else if head.Nonce == "nonce1" {
			// return a badNonce error to force the call to retry
			w.WriteHeader(http.StatusBadRequest)
			w.Write([]byte(`{"type":"urn:ietf:params:acme:error:badNonce"}`))
			return
		}
		// Make client.CreateOrder happy; we're not testing its result.
		w.Header().Set("Location", "https://example.com/acme/order/1")
		w.WriteHeader(http.StatusCreated)
		w.Write([]byte(`{"status":"valid"}`))
	}))
	defer ts.Close()
	client := Client{Key: testKey, accountURL: "https://example.com/acme/account", dir: &Directory{NewOrderURL: ts.URL, NewNonceURL: ts.URL}}
	// This call will fail with badNonce, causing a retry
	if _, err := client.CreateOrder(context.Background(), NewOrder("example.com")); err != nil {
		t.Errorf("client.CreateOrder 1: %v", err)
	}
	if count != 4 {
		t.Errorf("total requests count: %d; want 4", count)
	}
}
// TestErrorResponse verifies that responseError decodes an ACME problem
// document into *Error, copying status code, type, detail and headers.
func TestErrorResponse(t *testing.T) {
	const body = `{
"status": 400,
"type": "urn:acme:error:xxx",
"detail": "text"
}`
	resp := &http.Response{
		StatusCode: 400,
		Status:     "400 Bad Request",
		Body:       ioutil.NopCloser(strings.NewReader(body)),
		Header:     http.Header{"X-Foo": {"bar"}},
	}
	err := responseError(resp)
	acmeErr, ok := err.(*Error)
	if !ok {
		t.Fatalf("err = %+v (%T); want *Error type", err, err)
	}
	if acmeErr.StatusCode != 400 {
		t.Errorf("v.StatusCode = %v; want 400", acmeErr.StatusCode)
	}
	if acmeErr.Type != "urn:acme:error:xxx" {
		t.Errorf("v.Type = %q; want urn:acme:error:xxx", acmeErr.Type)
	}
	if acmeErr.Detail != "text" {
		t.Errorf("v.Detail = %q; want text", acmeErr.Detail)
	}
	if !reflect.DeepEqual(acmeErr.Header, resp.Header) {
		t.Errorf("v.Header = %+v; want %+v", acmeErr.Header, resp.Header)
	}
}
// TestHTTP01Challenge checks the key-authorization value and the
// well-known URL path derived from an http-01 challenge token.
func TestHTTP01Challenge(t *testing.T) {
	const token = "xxx"
	// thumbprint is precomputed for testKeyEC in jws_test.go
	const wantValue = token + "." + testKeyECThumbprint
	const wantPath = "/.well-known/acme-challenge/" + token
	client := &Client{Key: testKeyEC}
	got, err := client.HTTP01ChallengeResponse(token)
	if err != nil {
		t.Fatal(err)
	}
	if got != wantValue {
		t.Errorf("val = %q; want %q", got, wantValue)
	}
	if gotPath := client.HTTP01ChallengePath(token); gotPath != wantPath {
		t.Errorf("path = %q; want %q", gotPath, wantPath)
	}
}
// TestDNS01ChallengeRecord checks the TXT record value derived from a
// dns-01 challenge token.
func TestDNS01ChallengeRecord(t *testing.T) {
	// echo -n xxx.<testKeyECThumbprint> | \
	//    openssl dgst -binary -sha256 | \
	//    base64 | tr -d '=' | tr '/+' '_-'
	const want = "8DERMexQ5VcdJ_prpPiA0mVdp7imgbCgjsG4SqqNMIo"
	client := &Client{Key: testKeyEC}
	got, err := client.DNS01ChallengeRecord("xxx")
	if err != nil {
		t.Fatal(err)
	}
	if got != want {
		t.Errorf("val = %q; want %q", got, want)
	}
}
// TestBackoff checks the exponential backoff schedule: retry n yields a
// duration in [2^n s, 2^n s + 1s), a negative n behaves like n = 0, and
// the result never exceeds the supplied bound.
func TestBackoff(t *testing.T) {
	cases := []struct{ min, max time.Duration }{
		{time.Second, 2 * time.Second},
		{2 * time.Second, 3 * time.Second},
		{4 * time.Second, 5 * time.Second},
		{8 * time.Second, 9 * time.Second},
	}
	for n, tc := range cases {
		if d := backoff(n, time.Minute); d < tc.min || tc.max < d {
			t.Errorf("%d: d = %v; want between %v and %v", n, d, tc.min, tc.max)
		}
	}
	lo, hi := time.Second, 2*time.Second
	if d := backoff(-1, time.Minute); d < lo || hi < d {
		t.Errorf("d = %v; want between %v and %v", d, lo, hi)
	}
	limit := 10 * time.Second
	if d := backoff(100, limit); d != limit {
		t.Errorf("d = %v; want %v", d, limit)
	}
}
|
package IA.Electrica;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import aima.search.framework.Successor;
import aima.search.framework.SuccessorFunction;
//import IA.probTSP.ProbTSPHeuristicFunction;
/**
 * Successor function for the electrical-supply assignment problem.
 * For every consumer without an assigned package, it scans all consumers
 * that do have one and generates a successor state in which that package
 * is reassigned to the unassigned consumer, keeping only moves that
 * satisfy the problem restrictions.
 */
public class ElectricaSuccessorFunction6 implements SuccessorFunction {
    //DEFINITIU
    public List getSuccessors(Object aState) {
        ArrayList retVal = new ArrayList();
        Electrica board = (Electrica) aState;
        int nc = board.obteNdemandes();
        // For each consumer i without a package, try moving the package of
        // every assigned consumer j to i.
        for (int i = 0; i < nc; i++) {
            if (board.obteUnaDemanda(i).consultaNumPaquet() != -1) {
                continue; // i already has a package
            }
            for (int j = 0; j < nc; j++) {
                if (i == j || board.obteUnaDemanda(j).consultaNumPaquet() == -1) {
                    continue; // nothing to steal from j
                }
                Electrica newboard = new Electrica(board);
                int nump = newboard.obteUnaDemanda(j).consultaNumPaquet();
                newboard.desassignarPaquet(j);
                // Only emit the successor when the reassignment respects
                // the problem's restrictions.
                if (newboard.restriccions(nump, i)) {
                    newboard.assignaPaquet(nump, i);
                    retVal.add(new Successor(newboard.toString(), newboard));
                }
            }
        }
        return retVal;
    }
}
|
import UIKit
import ContactsUI
/// Lets the user pick contacts via the system picker and (eventually)
/// send them a message.
class SelectContactsViewController: UIViewController {
    /// Contacts chosen by the user in the picker.
    var contacts: [CNContact] = []

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    /// Presents the system contact picker.
    @IBAction func onSelectContactsTapped(_ sender: Any) {
        let contactPicker = CNContactPickerViewController()
        contactPicker.delegate = self
        present(contactPicker, animated: true)
    }

    @IBAction func onSendMessageTapped(_ sender: Any) {
        // TODO: Send message to selected contacts
    }
}
// BUG FIX: the original wrote `Extension` (capitalized, not a Swift
// keyword) and was missing the class's closing brace, so the file did
// not compile. `extension` must be lowercase and live at file scope.
extension SelectContactsViewController: CNContactPickerDelegate {
    /// Stores the user's selection and dismisses the picker.
    func contactPicker(_ picker: CNContactPickerViewController, didSelect contacts: [CNContact]) {
        self.contacts = contacts
        dismiss(animated: true)
    }
}
package org.jooby;
import java.nio.charset.StandardCharsets;
import org.jooby.test.ServerFeature;
import org.junit.Test;
// Integration test: a raw text body read back via req.body(byte[].class)
// must round-trip unchanged.
public class ByteBodyFeature extends ServerFeature {
  // Route definition block: echo the request body decoded as UTF-8.
  {
    post("/bytes", req -> {
      return new String(req.body(byte[].class), StandardCharsets.UTF_8);
    });
  }

  @Test
  public void shouldReadBodyAsByteArray() throws Exception {
    // POST "foo" as text/plain and expect the exact same bytes back.
    request()
        .post("/bytes")
        .body("foo", "text/plain")
        .expect("foo");
  }
}
|
#!/bin/bash
#
# CIS Debian 7/8 Hardening
#
#
# 7.5.1 Disable DCCP (Not Scored)
#
set -e # One error, it's over
set -u # One variable unset, it's over

# Called when the script runs in enabled/audit mode: report only.
audit () {
    info "Not implemented yet"
}

# Called when the script runs in enabled (apply) mode.
apply () {
    info "Not implemented yet"
}

# Check config parameters required by this check (none yet).
check_config() {
    :
}

# Source Root Dir Parameter. All expansions are quoted: under `set -u`/-e an
# unquoted empty or space-containing value would break the test expressions.
if [ ! -r /etc/default/cis-hardening ]; then
    echo "There is no /etc/default/cis-hardening file, cannot source CIS_ROOT_DIR variable, aborting"
    exit 128
else
    . /etc/default/cis-hardening
    if [ -z "${CIS_ROOT_DIR:-}" ]; then
        echo "No CIS_ROOT_DIR variable, aborting"
        exit 128
    fi
fi

# Main function, will call the proper functions given the configuration (audit, enabled, disabled)
if [ -r "$CIS_ROOT_DIR/lib/main.sh" ]; then
    . "$CIS_ROOT_DIR/lib/main.sh"
else
    echo "Cannot find main.sh, have you correctly defined your root directory? Current value is $CIS_ROOT_DIR in /etc/default/cis-hardening"
    exit 128
fi
|
# Keep a backup copy of /etc/passwd and refresh it whenever its MD5 no
# longer matches the live file. All expansions are quoted so paths with
# spaces (e.g. in $HOME) cannot word-split.
FILE="$HOME/tmp/passwd.backup"
ORIGINAL_FILE=/etc/passwd

if [ -f "$FILE" ]; then
    echo "$FILE exists."
    md5_first=$(md5sum "$FILE" | awk '{ print $1 }')
    md5_second=$(md5sum "$ORIGINAL_FILE" | awk '{ print $1 }')
    if [ "$md5_first" = "$md5_second" ]; then
        echo "MD5 OK"
    else
        # Backup is stale: refresh it from the live file.
        echo "MD5 WARNING"
        cp "$ORIGINAL_FILE" "$FILE"
    fi
    echo "$md5_first"
    echo "$md5_second"
else
    # First run: no backup yet, create one.
    cp "$ORIGINAL_FILE" "$FILE"
fi
|
static int cubeSum(int a, int b)
{
return (a*a*a) + (b*b*b);
} |
<!DOCTYPE html>
<html>
<head>
<title>Contact List</title>
</head>
<body>
<h1>Contact List</h1>
<!-- Renders one list item per contact; `contacts` is supplied by the
     EJS render call (presumably the route handler — confirm caller). -->
<ul>
<% for (let contact of contacts) { %>
<li><strong>Username:</strong> <%= contact.username %>, <strong>Age:</strong> <%= contact.age %></li>
<% } %>
</ul>
</body>
</html>
#!/bin/bash
# Transpile every .js file under INPUT_DIRECTORY with Babel, writing each
# result as <name>.<typename>.<ext> next to the source (or into
# INPUT_OUTPUT when set), then clean up the workspace's node_modules.

npm install --save-dev --no-package-lock @babel/cli @babel/core @babel/preset-env @babel/preset-react @babel/plugin-proposal-class-properties

# Default the action inputs when the workflow does not provide them.
if [ -z "$INPUT_TYPENAME" ]
then
    INPUT_TYPENAME="release"
fi
if [ -z "$INPUT_DIRECTORY" ]
then
    INPUT_DIRECTORY="./"
fi

workspace="/github/workspace";
babel_workspace="$workspace/node_modules/@babel";

# Transpile one file, unless its name shows it is already a transpiled
# artifact (contains ".<typename>.").
transpile_file(){
    directory=$1
    if [[ "$directory" != *"${INPUT_TYPENAME}."* ]];
    then
        basename=$(basename "$directory");
        extension="${basename##*.}"
        filename="${basename%.*}"
        if [ -z "$INPUT_OUTPUT" ];
        then
            output="${directory%/*}/"
        else
            mkdir -p "$INPUT_OUTPUT"
            output="$INPUT_OUTPUT";
        fi
        # BUG FIX: the original built the path with "$(unknown)" — a
        # command substitution of a nonexistent command — instead of the
        # computed ${filename}, so every output path was malformed.
        output_path="${output}${filename}.${INPUT_TYPENAME}.${extension}"
        # -f: do not error/noise when there is no previous artifact.
        rm -f "${output_path}"
        # Run babel directly; the original wrapped it in $(...), which
        # pointlessly tried to execute babel's stdout as a command.
        npx babel "${directory}" --out-file "${output_path}" --presets "$babel_workspace/preset-env","$babel_workspace/preset-react" --plugins "$babel_workspace/plugin-proposal-class-properties"
        echo "COMPILE ${directory} | OUTPUT ${output_path}"
    fi
}

# -r keeps backslashes in file names intact while reading.
find "$INPUT_DIRECTORY" -type f -iname '*.js' -not -path "*/node_modules/*" | while read -r fname
do
    transpile_file "$fname";
done

rm -r "$workspace/node_modules"
#!/bin/bash
# Build and install nginx 1.12.2 from source, then start it.
# Abort on the first failed step instead of installing a half-built nginx.
set -e

cd /usr/src
tar -xf nginx-1.12.2.tar.gz
cd nginx-1.12.2

# Create the unprivileged nginx user only if it does not already exist.
id nginx || useradd -r -s /sbin/nologin nginx

./configure --prefix=/usr/local/nginx \
    --user=nginx \
    --group=nginx \
    --with-http_stub_status_module \
    --with-http_ssl_module \
    --with-stream

make && make install

# Expose the binary on PATH and start the server.
ln -sf /usr/local/nginx/sbin/nginx /usr/bin/nginx
/usr/bin/nginx
<filename>examples/dns_certificate/index.ts<gh_stars>0
import * as pulumi from "@pulumi/pulumi";
import * as acme from "@pulumi/acme";
import * as tls from "@pulumi/tls";
const config = new pulumi.Config("acme")
const projectConfig = new pulumi.Config("project")
const ovhConfig = new pulumi.Config("ovh")
const emailAddress = projectConfig.require("email")
const domain = projectConfig.require("domain")
const dnsConfig = {
OVH_APPLICATION_KEY: ovhConfig.require("applicationKey"),
OVH_APPLICATION_SECRET: ovhConfig.require("applicationSecret"),
OVH_CONSUMER_KEY: ovhConfig.require("consumerKey"),
OVH_ENDPOINT: ovhConfig.require("endpoint"),
OVH_HTTP_TIMEOUT: "500"
}
const key = new tls.PrivateKey("my-private-key",{
algorithm: "RSA",
}
)
export const registration = new acme.Registration(
"registration", {
accountKeyPem: key.publicKeyPem,
emailAddress
}
)
/*
new acme.Certificate(
"certificate", {
accountKeyPem: registration.accountKeyPem,
commonName: `*.${domain}`,
subjectAlternativeNames: [
domain
],
dnsChallenges: [
{
config: dnsConfig,
provider: 'ovh'
}
]
}
)*/ |
<filename>servicetalk-concurrent-api/src/testFixtures/java/io/servicetalk/concurrent/api/LegacyTestSingle.java
/*
* Copyright © 2018, 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.concurrent.api;
import io.servicetalk.concurrent.Cancellable;
import io.servicetalk.concurrent.SingleSource;
import io.servicetalk.concurrent.SingleSource.Subscriber;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nullable;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Deprecated.
 *
 * @deprecated Use {@link TestSingle} instead.
 */
@Deprecated
public class LegacyTestSingle<T> extends Single<T> implements SingleSource.Subscriber<T> {
    // Sentinel standing in for a cached "success with null" result, because
    // null itself means "nothing cached yet".
    private static final Object NULL = new Object();
    // Total number of handleSubscribe invocations (see verifyListenCalled).
    private final AtomicInteger subscribeCount = new AtomicInteger();
    // Subscribers that have not yet received a terminal signal.
    private final Queue<Subscriber<? super T>> subscribers = new ConcurrentLinkedQueue<>();
    // Aggregates cancellables handed to onSubscribe so one cancel fans out.
    private final CancellableSet dynamicCancellable = new CancellableSet();
    // When true, a subscriber that cancelled still receives terminal signals.
    private final boolean invokeListenerPostCancel;
    // When true, the terminal result is replayed to late subscribers.
    private final boolean cacheResults;
    // Cached terminal signal: a T, a Throwable, or NULL; null while unset.
    @Nullable
    private Object cachedResult;

    /** Creates a single that stops notifying a subscriber once it cancels
     *  and caches its terminal result for late subscribers. */
    public LegacyTestSingle() {
        this(false);
    }

    /** Creates a single with result caching enabled. */
    public LegacyTestSingle(boolean invokeListenerPostCancel) {
        this(invokeListenerPostCancel, true);
    }

    /**
     * @param invokeListenerPostCancel deliver terminal signals even after cancel
     * @param cacheResults replay the terminal result to late subscribers
     */
    public LegacyTestSingle(boolean invokeListenerPostCancel, boolean cacheResults) {
        this.invokeListenerPostCancel = invokeListenerPostCancel;
        this.cacheResults = cacheResults;
    }

    /**
     * Registers the subscriber and immediately replays any cached terminal
     * result; otherwise the subscriber waits for onSuccess/onError.
     */
    @Override
    public synchronized void handleSubscribe(Subscriber<? super T> subscriber) {
        subscribeCount.incrementAndGet();
        subscribers.add(subscriber);
        subscriber.onSubscribe(() -> {
            // Cancellation: optionally detach the subscriber, then cancel
            // everything registered via onSubscribe(Cancellable).
            if (!invokeListenerPostCancel) {
                subscribers.remove(subscriber);
            }
            dynamicCancellable.cancel();
        });
        if (cachedResult != null) {
            // Replay the cached terminal signal to this late subscriber.
            subscribers.remove(subscriber);
            if (cachedResult instanceof Throwable) {
                subscriber.onError((Throwable) cachedResult);
            } else if (cachedResult == NULL) {
                subscriber.onSuccess(null);
            } else {
                @SuppressWarnings("unchecked")
                T t = (T) this.cachedResult;
                subscriber.onSuccess(t);
            }
        }
    }

    /** Tracks an upstream cancellable so isCancelled()/cancel() cover it. */
    @Override
    public void onSubscribe(Cancellable cancellable) {
        dynamicCancellable.add(cancellable);
    }

    /** Delivers a success to all current subscribers and optionally caches it. */
    @Override
    public synchronized void onSuccess(@Nullable T result) {
        // Snapshot + clear so re-entrant subscriptions are not terminated twice.
        List<Subscriber<? super T>> subs = new ArrayList<>(subscribers);
        subscribers.clear();
        for (Subscriber<? super T> sub : subs) {
            sub.onSuccess(result);
        }
        if (cacheResults) {
            cachedResult = result == null ? NULL : result;
        }
    }

    /** Delivers an error to all current subscribers and optionally caches it. */
    @Override
    public synchronized void onError(Throwable t) {
        List<Subscriber<? super T>> subs = new ArrayList<>(subscribers);
        subscribers.clear();
        for (Subscriber<? super T> sub : subs) {
            sub.onError(t);
        }
        if (cacheResults) {
            cachedResult = t;
        }
    }

    /** @return whether any subscriber has cancelled. */
    public boolean isCancelled() {
        return dynamicCancellable.isCancelled();
    }

    /** Asserts that at least one subscriber is currently registered. */
    public LegacyTestSingle<T> verifyListenCalled() {
        assertThat("Listen not called.", subscribers, hasSize(greaterThan(0)));
        return this;
    }

    /** Asserts the exact number of handleSubscribe invocations so far. */
    public LegacyTestSingle<T> verifyListenCalled(int times) {
        int count = subscribeCount.get();
        assertThat("Listen not called " + times + " but instead " + count, count, equalTo(times));
        return this;
    }

    /** Asserts that no subscriber is currently registered. */
    public LegacyTestSingle<T> verifyListenNotCalled() {
        assertThat("Listen called.", subscribers, hasSize(0));
        return this;
    }

    /** Asserts that some subscriber has cancelled. */
    public LegacyTestSingle<T> verifyCancelled() {
        assertTrue(isCancelled(), "Subscriber did not cancel.");
        return this;
    }

    /** Asserts that no subscriber has cancelled. */
    public LegacyTestSingle<T> verifyNotCancelled() {
        assertFalse(isCancelled(), "Subscriber cancelled.");
        return this;
    }
}
|
/**
 * Returns every pair of words from `strings` that are anagrams of each
 * other. Each word is normalised by sorting its characters; words sharing
 * a normalised form are anagrams. Pairs keep the original spellings and
 * appear in (i, j) index order with i < j.
 */
function findAnagrams(strings) {
  const signatures = strings.map((word) => [...word].sort().join(''));
  const pairs = [];
  signatures.forEach((sig, i) => {
    for (let j = i + 1; j < signatures.length; j++) {
      if (sig === signatures[j]) {
        pairs.push([strings[i], strings[j]]);
      }
    }
  });
  return pairs;
}
#!/usr/bin/env bash
# Resolve the directory containing this script so sibling scripts resolve
# regardless of the caller's working directory.
THISDIR="$(dirname "${BASH_SOURCE[0]}")"
# Block until the one-time install/setup step has completed.
"${THISDIR}/entrypoint-dev-wait-for-install.sh"
# Start the resque scheduler (BACKGROUND=true detaches it), then hand
# control to the container's CMD, if any.
BACKGROUND=true bundle exec rails resque:scheduler
exec "$@"
|
<reponame>melkishengue/cpachecker<filename>src/org/sosy_lab/cpachecker/cpa/interval/IntervalAnalysisState.java
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2017 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.interval;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.base.Splitter;
import com.google.common.collect.ComparisonChain;
import java.io.Serializable;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.sosy_lab.common.collect.PathCopyingPersistentTreeMap;
import org.sosy_lab.common.collect.PersistentMap;
import org.sosy_lab.cpachecker.cfa.model.FunctionExitNode;
import org.sosy_lab.cpachecker.core.defaults.LatticeAbstractState;
import org.sosy_lab.cpachecker.core.interfaces.AbstractQueryableState;
import org.sosy_lab.cpachecker.core.interfaces.FormulaReportingState;
import org.sosy_lab.cpachecker.core.interfaces.Graphable;
import org.sosy_lab.cpachecker.core.interfaces.PseudoPartitionable;
import org.sosy_lab.cpachecker.exceptions.InvalidQueryException;
import org.sosy_lab.cpachecker.util.CheckTypesOfStringsUtil;
import org.sosy_lab.cpachecker.util.predicates.smt.FormulaManagerView;
import org.sosy_lab.java_smt.api.BooleanFormula;
import org.sosy_lab.java_smt.api.IntegerFormulaManager;
import org.sosy_lab.java_smt.api.NumeralFormula;
public class IntervalAnalysisState
implements Serializable,
LatticeAbstractState<IntervalAnalysisState>,
AbstractQueryableState,
Graphable,
FormulaReportingState,
PseudoPartitionable {
private static final long serialVersionUID = -2030700797958100666L;
private static final Splitter propertySplitter = Splitter.on("<=").trimResults();
/**
* the intervals of the element
*/
private final PersistentMap<String, Interval> intervals;
/**
* the reference counts of the element
*/
private final PersistentMap<String, Integer> referenceCounts;
/**
* This method acts as the default constructor, which initializes the intervals and reference counts to empty maps and the previous element to null.
*/
public IntervalAnalysisState() {
intervals = PathCopyingPersistentTreeMap.of();
referenceCounts = PathCopyingPersistentTreeMap.of();
}
/**
* This method acts as constructor, which initializes the intervals, the reference counts and the previous element to the respective objects.
*
* @param intervals the intervals
* @param referencesMap the reference counts
*/
public IntervalAnalysisState(PersistentMap<String, Interval> intervals, PersistentMap<String, Integer> referencesMap) {
this.intervals = intervals;
this.referenceCounts = referencesMap;
}
/**
* This method returns the intervals of a given variable.
*
* @param variableName the name of the variable
* @return the intervals of the variable
*/
// see ExplicitState::getValueFor
public Interval getInterval(String variableName) {
return intervals.getOrDefault(variableName, Interval.UNBOUND);
}
/**
* This method returns the reference count for a given variable.
*
* @param variableName of the variable to query the reference count on
* @return the reference count of the variable, or 0 if the the variable is not yet referenced
*/
private Integer getReferenceCount(String variableName) {
return referenceCounts.getOrDefault(variableName, 0);
}
/**
* This method determines if this element contains an interval for a variable.
*
* @param variableName the name of the variable
* @return true, if this element contains an interval for the given variable
*/
public boolean contains(String variableName) {
return intervals.containsKey(variableName);
}
/**
* This method assigns an interval to a variable and puts it in the map.
*
* @param variableName name of the variable
* @param interval the interval to be assigned
* @param pThreshold threshold from property valueAnalysis.threshold
* @return this
*/
// see ExplicitState::assignConstant
public IntervalAnalysisState addInterval(String variableName, Interval interval, int pThreshold) {
if (interval.isUnbound()) {
return removeInterval(variableName);
}
// only add the interval if it is not already present
if (!intervals.containsKey(variableName) || !intervals.get(variableName).equals(interval)) {
int referenceCount = getReferenceCount(variableName);
if (pThreshold == -1 || referenceCount < pThreshold) {
return new IntervalAnalysisState(
intervals.putAndCopy(variableName, interval),
referenceCounts.putAndCopy(variableName, referenceCount + 1));
} else {
return removeInterval(variableName);
}
}
return this;
}
/**
 * This method removes the interval for a given variable.
 *
 * @param variableName the name of the variable whose interval should be removed
 * @return this
 */
// see ExplicitState::forget
public IntervalAnalysisState removeInterval(String variableName) {
    if (!intervals.containsKey(variableName)) {
        // Nothing tracked for this variable: the state is unchanged.
        return this;
    }
    return new IntervalAnalysisState(intervals.removeAndCopy(variableName), referenceCounts);
}
/** Drops every variable scoped to the given function (prefix "name::"). */
public IntervalAnalysisState dropFrame(String pCalledFunctionName) {
    final String scopePrefix = pCalledFunctionName + "::";
    IntervalAnalysisState result = this;
    for (String variableName : intervals.keySet()) {
        if (variableName.startsWith(scopePrefix)) {
            result = result.removeInterval(variableName);
        }
    }
    return result;
}
/**
* This element joins this element with a reached state.
*
* @param reachedState the reached state to join this element with
* @return a new state representing the join of this element and the reached state
*/
@Override
public IntervalAnalysisState join(IntervalAnalysisState reachedState) {
boolean changed = false;
PersistentMap<String, Interval> newIntervals = PathCopyingPersistentTreeMap.of();
PersistentMap<String, Integer> newReferences = referenceCounts;
for (String variableName : reachedState.intervals.keySet()) {
Integer otherRefCount = reachedState.getReferenceCount(variableName);
Interval otherInterval = reachedState.getInterval(variableName);
if (intervals.containsKey(variableName)) {
// update the interval
Interval mergedInterval = getInterval(variableName).union(otherInterval);
if (mergedInterval != otherInterval) {
changed = true;
}
if (!mergedInterval.isUnbound()) {
newIntervals = newIntervals.putAndCopy(variableName, mergedInterval);
}
// update the references
Integer thisRefCount = getReferenceCount(variableName);
if (mergedInterval != otherInterval && thisRefCount > otherRefCount) {
changed = true;
newReferences = newReferences.putAndCopy(variableName, thisRefCount);
} else {
newReferences = newReferences.putAndCopy(variableName, otherRefCount);
}
} else {
newReferences = newReferences.putAndCopy(variableName, otherRefCount);
changed = true;
}
}
if (changed) {
return new IntervalAnalysisState(newIntervals, newReferences);
} else {
return reachedState;
}
}
/**
* This method decides if this element is less or equal than the reached state, based on the order imposed by the lattice.
*
* @param reachedState the reached state
* @return true, if this element is less or equal than the reached state, based on the order imposed by the lattice
*/
@Override
public boolean isLessOrEqual(IntervalAnalysisState reachedState) {
if (intervals.equals(reachedState.intervals)) { return true; }
// this element is not less or equal than the reached state, if it contains less intervals
if (intervals.size() < reachedState.intervals.size()) {
return false;
}
// also, this element is not less or equal than the reached state, if any one interval of the reached state is not contained in this element,
// or if the interval of the reached state is not wider than the respective interval of this element
for (String variableName : reachedState.intervals.keySet()) {
if (!intervals.containsKey(variableName) || !reachedState.getInterval(variableName).contains(getInterval(variableName))) {
return false;
}
}
// else, this element < reached state on the lattice
return true;
}
/**
* @return the set of tracked variables by this state
*/
public Map<String,Interval> getIntervalMap() {
return intervals;
}
/** If there was a recursive function, we have wrong intervals for scoped variables in the returnState.
* This function rebuilds a new state with the correct intervals from the previous callState.
* We delete the wrong intervals and insert new intervals, if necessary. */
public IntervalAnalysisState rebuildStateAfterFunctionCall(final IntervalAnalysisState callState, final FunctionExitNode functionExit) {
// we build a new state from:
// - local variables from callState,
// - global variables from THIS,
// - the local return variable from THIS.
// we copy callState and override all global values and the return variable.
IntervalAnalysisState rebuildState = callState;
// first forget all global information
for (final String trackedVar : callState.intervals.keySet()) {
if (!trackedVar.contains("::")) { // global -> delete
rebuildState = rebuildState.removeInterval(trackedVar);
}
}
// second: learn new information
for (final String trackedVar : this.intervals.keySet()) {
if (!trackedVar.contains("::")) { // global -> override deleted value
rebuildState = rebuildState.addInterval(trackedVar, this.getInterval(trackedVar), -1);
} else if (functionExit.getEntryNode().getReturnVariable().isPresent() &&
functionExit.getEntryNode().getReturnVariable().get().getQualifiedName().equals(trackedVar)) {
assert (!rebuildState.contains(trackedVar)) :
"calling function should not contain return-variable of called function: " + trackedVar;
if (this.contains(trackedVar)) {
rebuildState = rebuildState.addInterval(trackedVar, this.getInterval(trackedVar), -1);
}
}
}
return rebuildState;
}
/* (non-Javadoc)
 * @see java.lang.Object#equals(java.lang.Object)
 */
@Override
public boolean equals(Object other) {
    if (this == other) {
        return true;
    }
    if (!(other instanceof IntervalAnalysisState)) {
        return false;
    }
    // Equality is defined by the tracked intervals only; reference counts
    // are deliberately ignored (mirrors hashCode()).
    return intervals.equals(((IntervalAnalysisState) other).intervals);
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return intervals.hashCode();
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("[\n");
for (Map.Entry<String, Interval> entry: intervals.entrySet()) {
sb.append(String.format(" < %s = %s :: %s >%n",
entry.getKey(), entry.getValue(), getReferenceCount(entry.getKey())));
}
return sb.append("] size -> ").append(intervals.size()).toString();
}
@Override
public String getCPAName() {
return "IntervalAnalysis";
}
@Override
public boolean checkProperty(String pProperty) throws InvalidQueryException {
List<String> parts = propertySplitter.splitToList(pProperty);
if (parts.size() == 2) {
// pProperty = value <= varName
if (CheckTypesOfStringsUtil.isLong(parts.get(0))) {
long value = Long.parseLong(parts.get(0));
Interval iv = getInterval(parts.get(1));
return (value <= iv.getLow());
}
// pProperty = varName <= value
else if (CheckTypesOfStringsUtil.isLong(parts.get(1))){
long value = Long.parseLong(parts.get(1));
Interval iv = getInterval(parts.get(0));
return (iv.getHigh() <= value);
}
// pProperty = varName1 <= varName2
else {
Interval iv1 = getInterval(parts.get(0));
Interval iv2 = getInterval(parts.get(1));
return (iv1.contains(iv2));
}
// pProperty = value1 <= varName <= value2
} else if (parts.size() == 3){
if ( CheckTypesOfStringsUtil.isLong(parts.get(0)) && CheckTypesOfStringsUtil.isLong(parts.get(2)) ) {
long value1 = Long.parseLong(parts.get(0));
long value2 = Long.parseLong(parts.get(2));
Interval iv = getInterval(parts.get(1));
return (value1 <= iv.getLow() && iv.getHigh() <= value2);
}
}
return false;
}
@Override
public String toDOTLabel() {
StringBuilder sb = new StringBuilder();
sb.append("{");
// create a string like: x = [low; high] (refCount)
for (Entry<String, Interval> entry : intervals.entrySet()) {
sb.append(String.format("%s = %s (%s), ",
entry.getKey(), entry.getValue(), getReferenceCount(entry.getKey())));
}
sb.append("}");
return sb.toString();
}
@Override
public boolean shouldBeHighlighted() {
return false;
}
/**
 * Encodes this state as a conjunction of "LOW <= X" and "X <= HIGH"
 * constraints over signed integers; an empty interval yields `false`.
 */
@Override
public BooleanFormula getFormulaApproximation(FormulaManagerView pMgr) {
    IntegerFormulaManager nfmgr = pMgr.getIntegerFormulaManager();
    List<BooleanFormula> result = new ArrayList<>();
    for (Entry<String, Interval> entry : intervals.entrySet()) {
        Interval interval = entry.getValue();
        if (interval.isEmpty()) {
            // one invalid interval disqualifies the whole state
            return pMgr.getBooleanFormulaManager().makeFalse();
        }
        // we assume that everything is an SIGNED INTEGER
        // and build "LOW <= X" and "X <= HIGH"
        NumeralFormula var = nfmgr.makeVariable(entry.getKey());
        Long low = interval.getLow();
        Long high = interval.getHigh();
        if (low != null && low != Long.MIN_VALUE) { // check for unbound interval
            result.add(pMgr.makeLessOrEqual(nfmgr.makeNumber(low), var, true));
        }
        // BUG FIX: an unbounded upper limit is Long.MAX_VALUE, not MIN_VALUE
        // (symmetric to the lower-bound check above). The original compared
        // against MIN_VALUE, so unbounded-high intervals wrongly emitted an
        // "X <= MAX" constraint, and a genuine high of MIN_VALUE was skipped.
        if (high != null && high != Long.MAX_VALUE) { // check for unbound interval
            result.add(pMgr.makeGreaterOrEqual(nfmgr.makeNumber(high), var, true));
        }
    }
    return pMgr.getBooleanFormulaManager().and(result);
}
@Override
public Comparable<?> getPseudoPartitionKey() {
// The size alone is not sufficient for pseudo-partitioning, if we want to use object-identity
// as hashcode. Thus we need a second measurement: the absolute distance of all intervals.
// -> if the distance is "smaller" than the other state, we know nothing and have to compare the states.
// -> if the distance is "equal", we can compare by "identity".
// -> if the distance is "greater", we are "greater" than the other state.
// We negate the absolute distance to match the "lessEquals"-specifiction.
// Be aware of overflows! -> we use BigInteger, and zero should be a sound value.
BigInteger absDistance = BigInteger.ZERO;
for (Interval i : intervals.values()) {
long high = i.getHigh() == null ? 0 : i.getHigh();
long low = i.getLow() == null ? 0 : i.getLow();
checkArgument(low <= high, "LOW greater than HIGH: %s", i);
absDistance = absDistance.add(BigInteger.valueOf(high).subtract(BigInteger.valueOf(low)));
}
return new IntervalPseudoPartitionKey(intervals.size(), absDistance.negate());
}
@Override
public Object getPseudoHashCode() {
    // Identity-based pseudo hash: pseudo-partitioning deliberately compares
    // states by object identity (see getPseudoPartitionKey), so the state
    // itself serves as its own key.
    return this;
}
/** Just a pair of values, can be compared alphabetically. */
private static final class IntervalPseudoPartitionKey
implements Comparable<IntervalPseudoPartitionKey> {
private final int size;
private final BigInteger absoluteDistance;
public IntervalPseudoPartitionKey(int pSize, BigInteger pAbsoluteDistance) {
size = pSize;
absoluteDistance = pAbsoluteDistance;
}
@Override
public boolean equals(Object pObj) {
if (this == pObj) {
return true;
}
if (!(pObj instanceof IntervalPseudoPartitionKey)) {
return false;
}
IntervalPseudoPartitionKey other = (IntervalPseudoPartitionKey) pObj;
return size == other.size && absoluteDistance.equals(other.absoluteDistance);
}
@Override
public int hashCode() {
return 137 * size + absoluteDistance.hashCode();
}
@Override
public String toString() {
return "[" + size + ", " + absoluteDistance + "]";
}
@Override
public int compareTo(IntervalPseudoPartitionKey other) {
return ComparisonChain.start()
.compare(size, other.size)
.compare(absoluteDistance, other.absoluteDistance)
.result();
}
}
}
|
// Copyright (c) 2018-2020 Splunk Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package spark
import (
"fmt"
"os"
)
// Naming templates and defaults for the Kubernetes objects created for Spark.
const (
	deploymentTemplateStr  = "splunk-%s-%s"    // identifier, instance type (ex: spark-worker, spark-master)
	statefulSetTemplateStr = "splunk-%s-%s"    // identifier, instance type (ex: spark-worker, spark-master)
	serviceTemplateStr     = "splunk-%s-%s-%s" // identifier, instance type (ex: spark-worker, spark-master), "headless" or "service"
	defaultSparkImage      = "splunk/spark"    // default docker image used for Spark instances
)
// GetSparkStatefulsetName uses a template to name a Kubernetes StatefulSet for Spark instances.
func GetSparkStatefulsetName(instanceType InstanceType, identifier string) string {
	name := fmt.Sprintf(statefulSetTemplateStr, identifier, instanceType)
	return name
}
// GetSparkDeploymentName uses a template to name a Kubernetes Deployment for Spark instances.
func GetSparkDeploymentName(instanceType InstanceType, identifier string) string {
	name := fmt.Sprintf(deploymentTemplateStr, identifier, instanceType)
	return name
}
// GetSparkServiceName uses a template to name a Kubernetes Service for Spark instances.
// Headless services get the "headless" suffix, regular services get "service".
func GetSparkServiceName(instanceType InstanceType, identifier string, isHeadless bool) string {
	suffix := "service"
	if isHeadless {
		suffix = "headless"
	}
	return fmt.Sprintf(serviceTemplateStr, identifier, instanceType, suffix)
}
// GetSparkImage returns the docker image to use for Spark instances.
// Precedence: explicit spec value, then the RELATED_IMAGE_SPLUNK_SPARK
// environment variable, then the built-in default.
func GetSparkImage(specImage string) string {
	if specImage != "" {
		return specImage
	}
	if name := os.Getenv("RELATED_IMAGE_SPLUNK_SPARK"); name != "" {
		return name
	}
	return defaultSparkImage
}
|
<filename>common-email/src/test/java/com/atjl/email/service/MailServiceImplTest.java
package com.atjl.email.service;
import com.atjl.email.api.MailConstant;
import com.atjl.email.api.MailService;
import org.junit.*;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import javax.annotation.Resource;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath:test-service.xml"})
public class MailServiceImplTest {

    // Mail service under test, injected from the Spring test context.
    @Resource(name = MailConstant.MAIL_SERVICE_USE_GENERAL_PLAT)
    MailService mailService;

    // Integration test: sends a real e-mail with an attachment through the service.
    // NOTE(review): relies on a live mail backend, a placeholder address and a
    // hard-coded Windows-only path (D:\a.pdf) — will not pass on CI as-is; confirm.
    @Test
    public void testSendMail() throws Exception {
        // SendMailDto send = new SendMailDto();
        String add = "<EMAIL>";
        // String add = "<EMAIL>";
        // send.setUserId(add);
        // send.setContent("content");
        // send.setSubject("test-title");
        String file = "D:\\a.pdf";
        mailService.sendMail(add, "test_title", "testcontent", file, "aaa.pdf");
    }

    // Empty lifecycle hooks kept from the test template; nothing to set up yet.
    @Before
    public void before() throws Exception {
    }

    @After
    public void after() throws Exception {
    }

    @BeforeClass
    public static void beforeClass() throws Exception {
    }

    // JUnit rule (must be public final); currently unused by any test method.
    @Rule
    public final ExpectedException expectedException = ExpectedException.none();
}
|
package org.springframework.beans.factory;

/**
 * Empty interface matching Spring's {@code InitializingBean} by package and
 * name — presumably a local compile/test stub (the real interface declares
 * {@code afterPropertiesSet()}); confirm against the actual Spring dependency.
 */
public interface InitializingBean {}
<filename>cmd/colf/main.go
package main
import (
"flag"
"io/ioutil"
"log"
"os"
"path"
"path/filepath"
"strings"
"github.com/pascaldekloe/colfer"
)
// ANSI escape codes for markup in the terminal help text.
const (
	bold      = "\x1b[1m"
	italic    = "\x1b[3m"
	underline = "\x1b[4m"
	clear     = "\x1b[0m"
)

// Command-line flags controlling code generation.
var (
	basedir = flag.String("b", ".", "Use a specific destination base `directory`.")
	prefix  = flag.String("p", "", "Adds a package `prefix`. Use slash as a separator when nesting.")
	format  = flag.Bool("f", false, "Normalizes the format of all input schemas on the fly.")
	verbose = flag.Bool("v", false, "Enables verbose reporting to "+italic+"standard error"+clear+".")

	sizeMax    = flag.String("s", "16 * 1024 * 1024", "Sets the default upper limit for serial byte sizes. The\n`expression` is applied to the target language under the name\nColferSizeMax.")
	listMax    = flag.String("l", "64 * 1024", "Sets the default upper limit for the number of elements in a\nlist. The `expression` is applied to the target language under\nthe name ColferListMax.")
	superClass = flag.String("x", "", "Makes all generated classes extend a super `class`. Use slash as\na package separator. Java only.")
)

// report is a verbose-mode logger; it discards output unless -v is set (see main).
var report = log.New(ioutil.Discard, "", 0)
// main parses the command line, resolves the schema file set, and generates
// code for the requested target language. Exits 2 on usage errors, 1 on
// compilation failure (via log.Fatal).
func main() {
	flag.Parse()

	log.SetFlags(0)

	if *verbose {
		report.SetOutput(os.Stderr)
	}

	// Resolve the schema operands: none -> usage error; only the language
	// argument -> current working directory; otherwise the remaining args.
	var files []string
	switch args := flag.Args(); len(args) {
	case 0:
		flag.Usage()
		os.Exit(2)
	case 1:
		files = []string{"."}
	default:
		files = args[1:]
	}

	// select language
	var gen func(string, colfer.Packages) error
	switch lang := flag.Arg(0); strings.ToLower(lang) {
	case "c":
		report.Println("Set up for C")
		gen = colfer.GenerateC
		if *superClass != "" {
			log.Fatal("colf: super class not supported with C")
		}

	case "go":
		report.Println("Set up for Go")
		gen = colfer.GenerateGo
		if *superClass != "" {
			log.Fatal("colf: super class not supported with Go")
		}

	case "java":
		report.Println("Set up for Java")
		gen = colfer.GenerateJava

	case "javascript", "js", "ecmascript":
		report.Println("Set up for ECMAScript")
		gen = colfer.GenerateECMA
		if *superClass != "" {
			log.Fatal("colf: super class not supported with ECMAScript")
		}

	default:
		log.Fatalf("colf: unsupported language %q", lang)
	}

	// Resolve a clean file set: expand directories into their *.colf files
	// (appended to the slice being iterated, so nested entries are picked up)
	// and drop duplicate paths via in-place compaction with writeIndex.
	var writeIndex int
	for i := 0; i < len(files); i++ {
		f := files[i]

		info, err := os.Stat(f)
		if err != nil {
			log.Fatal(err)
		}
		if info.IsDir() {
			colfFiles, err := filepath.Glob(filepath.Join(f, "*.colf"))
			if err != nil {
				log.Fatal(err)
			}
			files = append(files, colfFiles...)
			continue
		}

		f = filepath.Clean(f)
		for j := 0; ; j++ {
			if j == writeIndex {
				files[writeIndex] = f
				writeIndex++
				break
			}
			if files[j] == f {
				report.Println("Duplicate inclusion of", f, "ignored")
				break
			}
		}
	}
	files = files[:writeIndex]
	report.Println("Found schema files", strings.Join(files, ", "))

	packages, err := colfer.ParseFiles(files)
	if err != nil {
		log.Fatal(err)
	}

	if *format {
		for _, file := range files {
			changed, err := colfer.Format(file)
			if err != nil {
				log.Fatal(err)
			}
			if changed {
				log.Println("colf: formatted", file)
			}
		}
	}

	if len(packages) == 0 {
		// FIX: corrected user-visible typo "definitons" -> "definitions".
		log.Fatal("colf: no struct definitions found")
	}

	// Apply flag-driven settings to every parsed package before generation.
	for _, p := range packages {
		p.Name = path.Join(*prefix, p.Name)
		p.SizeMax = *sizeMax
		p.ListMax = *listMax
		p.SuperClass = *superClass
	}

	if err := gen(*basedir, packages); err != nil {
		log.Fatal(err)
	}
}
// init assembles the man-page-style usage text and installs it as flag.Usage.
func init() {
	cmd := os.Args[0]

	help := bold + "NAME\n\t" + cmd + clear + " \u2014 compile Colfer schemas\n\n"
	help += bold + "SYNOPSIS\n\t" + cmd + clear
	help += " [ " + underline + "options" + clear + " ] " + underline + "language" + clear
	help += " [ " + underline + "file" + clear + " " + underline + "..." + clear + " ]\n\n"
	help += bold + "DESCRIPTION\n\t" + clear
	help += "Generates source code for a " + underline + "language" + clear + ". The options are: "
	help += bold + "C" + clear + ", " + bold + "Go" + clear + ",\n"
	help += "\t" + bold + "Java" + clear + " and " + bold + "JavaScript" + clear + ".\n"
	help += "\tThe " + underline + "file" + clear + " operands specify schema input. Directories are scanned\n"
	help += "\tfor files with the colf extension. When no files are given, then\n"
	help += "\tthe current " + italic + "working directory" + clear + " is used.\n"
	help += "\tA package definition may be spread over several schema files.\n"
	help += "\tThe directory hierarchy of the input is not relevant for the\n"
	help += "\tgenerated code.\n\n"
	help += bold + "OPTIONS\n" + clear

	// FIX: corrected user-visible typo "succes" -> "success".
	tail := "\n" + bold + "EXIT STATUS" + clear + "\n"
	tail += "\tThe command exits 0 on success, 1 on compilation failure and 2\n"
	tail += "\twhen invoked without arguments.\n"
	tail += "\n" + bold + "EXAMPLES" + clear + "\n"
	tail += "\tCompile ./io.colf with compact limits as C:\n\n"
	tail += "\t\t" + cmd + " -b src -s 2048 -l 96 C io.colf\n\n"
	tail += "\tCompile ./api/*.colf in package com.example as Java:\n\n"
	tail += "\t\t" + cmd + " -p com/example -x com/example/Parent Java api\n"
	tail += "\n" + bold + "BUGS" + clear + "\n"
	tail += "\tReport bugs at <https://github.com/pascaldekloe/colfer/issues>.\n\n"
	tail += "\tText validation is not part of the marshalling and unmarshalling\n"
	tail += "\tprocess. C and Go just pass any malformed UTF-8 characters. Java\n"
	tail += "\tand JavaScript replace unmappable content with the '?' character\n"
	tail += "\t(ASCII 63).\n\n"
	tail += bold + "SEE ALSO\n\t" + clear + "protoc(1), flatc(1)\n"

	flag.Usage = func() {
		os.Stderr.WriteString(help)
		flag.PrintDefaults()
		os.Stderr.WriteString(tail)
	}
}
|
<filename>app/src/main/java/com/telenav/osv/data/sequence/model/details/reward/SequenceDetailsRewardBase.java<gh_stars>10-100
package com.telenav.osv.data.sequence.model.details.reward;
/**
 * Abstract class which holds information of the reward used in sequence. This is shown by {@link #value} field.
 * Instances are immutable: both fields are set once in the constructor.
 * @author horatiuf
 */
public abstract class SequenceDetailsRewardBase {

    /**
     * The reward value for the current sequence. This is based on the user type, such as:
     * <ul>
     * <li>normal users - will have points, if enabled</li>
     * </ul>
     */
    private final double value;

    /**
     * The unit symbol used for reward system.
     * <p>
     * This can be null for normal users if the points are not enabled.
     */
    private final String unit;

    /**
     * Default constructor for the current class.
     * @param value the reward value
     * @param unit the unit symbol (may be null)
     */
    public SequenceDetailsRewardBase(double value, String unit) {
        this.value = value;
        this.unit = unit;
    }

    public double getValue() {
        return value;
    }

    public String getUnit() {
        return unit;
    }
}
|
#!/bin/bash
# NOTE(review): shebang changed from /bin/sh to /bin/bash — this script relies
# on bash-only features (the `function` keyword, the RSYNC_PROTECT_TMP_FILES
# array, `[[ ]]`, and `set -o pipefail`) that a strict POSIX /bin/sh rejects.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path (or basename) of the .framework, resolved against BUILT_PRODUCTS_DIR.
# NOTE(review): if none of the -r probes below match, `source` stays unset and
# `set -u` aborts the script at first use — presumably intended fail-fast; confirm.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Resolve a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # Locate the actual executable inside the copied framework (or a bare dylib).
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to the .framework.dSYM bundle; copied into DERIVED_FILES_DIR, then
# either the stripped copy is synced to DWARF_DSYM_FOLDER_PATH or a placeholder
# is touched so Xcode's input/output tracking does not rerun this script.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# $1: path to sign. No-op unless an identity is expanded and signing is both
# required and allowed by the build settings. With parallel signing enabled,
# the codesign invocation is backgrounded (the trailing `wait` collects it).
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: binary to thin in place. Removes every architecture not in $ARCHS.
# Communicates its result through the global STRIP_BINARY_RETVAL:
#   1 = binary processed (possibly stripped), 0 = no overlap with $ARCHS (warned).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the HYDateKit framework for both Debug and Release configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/HYDateKit/HYDateKit.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/HYDateKit/HYDateKit.framework"
fi
# Collect any codesign jobs that were backgrounded by parallel signing.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<gh_stars>10-100
//#####################################################################
// Copyright 2009, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Grids_Uniform/GRID.h>
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_NODE.h>
#include <PhysBAM_Tools/Grids_Uniform_Arrays/ARRAYS_ND.h>
#include <PhysBAM_Tools/Grids_Uniform_Arrays/GRID_ARRAYS_POLICY_UNIFORM.h>
#include <PhysBAM_Geometry/Basic_Geometry/BOX.h>
#include <PhysBAM_Geometry/Grids_Uniform_Computations/BOX_SIGNED_DISTANCE.h>
#include <PhysBAM_Geometry/Grids_Uniform_Computations/SIGNED_DISTANCE.h>
namespace PhysBAM{
namespace SIGNED_DISTANCE{
// Explicit template instantiations of SIGNED_DISTANCE::Calculate for BOX over
// uniform grids, dimensions 1-3, for float and (when enabled) double.
#define BOX_SIGNED_DISTANCE_HELPER_d(T,d) \
template void Calculate(BOX<VECTOR<T,d> >&,const GRID<VECTOR<T,d> >&,GRID_ARRAYS_POLICY<GRID<VECTOR<T,d> > >::ARRAYS_SCALAR&,bool);
BOX_SIGNED_DISTANCE_HELPER_d(float,1);
BOX_SIGNED_DISTANCE_HELPER_d(float,2);
BOX_SIGNED_DISTANCE_HELPER_d(float,3);
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
BOX_SIGNED_DISTANCE_HELPER_d(double,1);
BOX_SIGNED_DISTANCE_HELPER_d(double,2);
BOX_SIGNED_DISTANCE_HELPER_d(double,3);
#endif
}
}
|
<filename>src/lifecycle/index.js
import { isEqualObj, cloneObj } from './utils';
// True iff `key` is an own property of `obj`. Uses the prototype method via
// .call so objects created with Object.create(null) (or shadowing
// "hasOwnProperty") are handled safely, and always returns a real boolean —
// the original returned undefined on a miss.
const hasOwnProp = (obj, key) => {
  return Object.prototype.hasOwnProperty.call(obj, key);
};
// Store instances keyed by store name.
export const storeMap = {};
// Per-store subscriber stacks: [targetPage, dataFn, oldData, getters] tuples.
export const notifyStackMap = {};
// Debounce window (ms) for coalescing update notifications.
const timeout = 5;
// Count of pending notifyUpdate calls; the flush runs when it drains to 0.
let batch = 0;
// Keys changed during the current batch (shared, cleared after each flush).
const batchKeysF = [];
// Schedules a debounced flush of store changes to all subscribers of
// `storeNameF`. Each call records the changed key; only the callback of the
// last call in a burst (when `batch` drains to 0) performs the flush.
export function notifyUpdate(storeNameF, keyName) {
  batch++;
  batchKeysF.push(keyName);
  setTimeout(
    // The trailing setTimeout arguments (timeout, storeNameF, batchKeysF)
    // are forwarded into the callback as (storeName, batchKeys).
    (storeName, batchKeys) => {
      if (--batch === 0) {
        // NOTE(review): assumes notifyStackMap[storeName] was registered
        // elsewhere; an unknown storeName would throw here — confirm.
        const notifyStack = notifyStackMap[storeName];
        const len = notifyStack.length;
        // Walk subscribers newest-first.
        for (let i = len - 1; i >= 0; i--) {
          const [targetPage, dataFn, oldData, getters] = notifyStack[i];
          let newData = dataFn(storeMap[storeName]);
          // Update getters first
          if (getters.length) {
            let patchData = {};
            getters.forEach((item) => {
              // NOTE(review): truthiness check skips falsy getter values (0, '', false) — confirm intended.
              if (newData[item]) {
                patchData[item] = newData[item];
              }
            });
            try {
              targetPage.setData(patchData);
            } catch (error) {
              console.log(error);
            }
          }
          // Patch-update only the keys recorded for this batch
          if (batchKeys.length) {
            let patchData = {};
            batchKeys.forEach((item) => {
              // Patch only keys present in both the target's old data and the new data
              if (hasOwnProp(oldData, item) && hasOwnProp(newData, item)) {
                patchData[item] = newData[item];
              }
            });
            try {
              targetPage.setData(patchData);
            } catch (error) {
              console.log(error);
            }
          } else {
            // No recorded keys: push the full new data snapshot.
            // if (!isEqualObj(oldData, newData)) {
            // notifyStack[i][2] = cloneObj(newData);
            try {
              console.log(Object.keys(newData).join(','));
              targetPage.setData(newData);
            } catch (error) {
              console.log(error);
            }
            // }
          }
        }
        // Clear the batched keys for the next burst
        batchKeysF.length = 0;
      }
    },
    timeout,
    storeNameF,
    batchKeysF
  );
}
|
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { Part } from './part.entity';
/**
 * Read-only access to {@link Part} entities.
 */
@Injectable()
export class PartService {
  constructor(
    @InjectRepository(Part)
    private readonly partRepository: Repository<Part>,
  ) {}

  /**
   * Returns every part, newest first (ordered by `created_at` descending).
   */
  async findAll(): Promise<Part[]> {
    return await this.partRepository.find({
      order: {
        created_at: 'DESC',
      },
    });
  }
}
|
<reponame>xfyre/tapestry-5<filename>tapestry-core/src/main/java/org/apache/tapestry5/internal/transform/RenderCommandWorker.java
// Copyright 2007, 2008, 2010, 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal.transform;
import org.apache.tapestry5.MarkupWriter;
import org.apache.tapestry5.internal.InternalComponentResources;
import org.apache.tapestry5.model.MutableComponentModel;
import org.apache.tapestry5.plastic.MethodDescription;
import org.apache.tapestry5.plastic.PlasticClass;
import org.apache.tapestry5.plastic.PlasticField;
import org.apache.tapestry5.plastic.PlasticUtils;
import org.apache.tapestry5.runtime.RenderCommand;
import org.apache.tapestry5.runtime.RenderQueue;
import org.apache.tapestry5.services.TransformConstants;
import org.apache.tapestry5.services.transform.ComponentClassTransformWorker2;
import org.apache.tapestry5.services.transform.TransformationSupport;
/**
 * Ensures that all components implement {@link RenderCommand} by delegating to
 * {@link InternalComponentResources#render(org.apache.tapestry5.MarkupWriter, org.apache.tapestry5.runtime.RenderQueue)}.
 * This is also responsible for invoking {@link org.apache.tapestry5.internal.InternalComponentResources#postRenderCleanup()}
 */
public class RenderCommandWorker implements ComponentClassTransformWorker2
{
    // The render(MarkupWriter, RenderQueue) description is immutable and the same
    // for every instance, so it is a shared class-level constant (was a needlessly
    // per-instance final field).
    private static final MethodDescription RENDER_DESCRIPTION =
            PlasticUtils.getMethodDescription(RenderCommand.class, "render", MarkupWriter.class, RenderQueue.class);

    public void transform(PlasticClass plasticClass, TransformationSupport support, MutableComponentModel model)
    {
        // Subclasses don't need to bother, they'll inherit from super-classes.
        if (!support.isRootTransformation())
        {
            return;
        }

        plasticClass.introduceInterface(RenderCommand.class);

        // Inject the component's resources and delegate both render() and
        // postRenderCleanup() straight to them.
        PlasticField resourcesField = plasticClass.introduceField(InternalComponentResources.class, "resources").injectFromInstanceContext();

        plasticClass.introduceMethod(RENDER_DESCRIPTION).delegateTo(resourcesField);
        plasticClass.introduceMethod(TransformConstants.POST_RENDER_CLEANUP_DESCRIPTION).delegateTo(resourcesField);
    }
}
|
<reponame>pdemirov/Currency-calculator
from django.apps import AppConfig
class CalcConfig(AppConfig):
    """Django application configuration for the ``Calc`` app."""
    name = 'Calc'
|
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tapestry5.jpa.test;
import javax.persistence.EntityManager;
import org.apache.tapestry5.ioc.Invokable;
import org.apache.tapestry5.jpa.EntityTransactionManager;
import org.apache.tapestry5.jpa.annotations.CommitAfter;
import org.apache.tapestry5.jpa.test.entities.ThingOne;
public class TopLevelServiceImpl implements TopLevelService
{
    // JPA entity manager for the current persistence context.
    private final EntityManager em;
    // Collaborator that creates ThingTwo entities (with its own transaction semantics).
    private final NestedService nestedService;
    // Coordinates nested transactions and after-commit callbacks.
    private final EntityTransactionManager entityTransactionManager;

    public TopLevelServiceImpl(EntityManager em, NestedService nestedService,
            EntityTransactionManager transactionalUnits)
    {
        this.em = em;
        this.nestedService = nestedService;
        this.entityTransactionManager = transactionalUnits;
    }

    /**
     * Persists ThingOne and creates ThingTwo within the same call; the whole
     * unit commits afterwards via {@link CommitAfter}.
     */
    @Override
    @CommitAfter
    public void createThingOneAndTwo(String nameOne, String nameTwo)
    {
        ThingOne thingOne = new ThingOne();
        thingOne.setName(nameOne);
        em.persist(thingOne);
        nestedService.createThingTwo(nameTwo);
    }

    /**
     * Persists ThingOne now and defers ThingTwo creation until after the
     * surrounding transaction commits (registered before the persist call).
     */
    @Override
    @CommitAfter
    public void createThingOneThenTwo(final String nameOne, final String nameTwo)
    {
        entityTransactionManager.invokeAfterCommit(null, new Invokable<Boolean>()
        {
            @Override
            public Boolean invoke()
            {
                nestedService.createThingTwo(nameTwo);
                return true;
            }
        });
        ThingOne thingOne = new ThingOne();
        thingOne.setName(nameOne);
        em.persist(thingOne);
    }

    /**
     * Same as {@code createThingOneThenTwo}, but the work runs inside an
     * explicit {@code runInTransaction} block nested under the
     * {@link CommitAfter} transaction.
     */
    @Override
    @CommitAfter
    public void createThingOneThenTwoWithNestedCommitAfter(final String nameOne,
            final String nameTwo)
    {
        entityTransactionManager.runInTransaction(null, new Runnable()
        {
            @Override
            public void run()
            {
                entityTransactionManager.invokeAfterCommit(null, new Invokable<Boolean>()
                {
                    @Override
                    public Boolean invoke()
                    {
                        nestedService.createThingTwo(nameTwo);
                        return true;
                    }
                });
                ThingOne thingOne = new ThingOne();
                thingOne.setName(nameOne);
                em.persist(thingOne);
            }
        });
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package hermes.chat.vue.menu;
import hermes.chat.controleur.Chatter;
import hermes.chat.vue.menu.listeners.Entrer;
import hermes.chat.vue.menu.listeners.Sortir;
import hermes.chat.vue.menu.listeners.Supprimer;
import hermes.client.channels.Channel;
import java.util.List;
import javax.swing.JMenuItem;
/**
*
* @author <NAME> (d120041) <<EMAIL>>
*/
public class MenuChannel extends Menu<Channel>{

    // Chat controller shared by all menu actions.
    private final Chatter chat;
    // Menu actions: join ("entrer"), leave ("sortir") and delete ("supprimer") a channel.
    private final Entrer entrer;
    private final Sortir sortir;
    private final Supprimer supprimer;

    public MenuChannel(Chatter chat) {
        this.chat = chat;
        entrer = new Entrer(chat);
        sortir = new Sortir(chat);
        supprimer = new Supprimer(chat);
        initItems();
    }

    // Registers the three actions as menu items (item labels are user-facing French strings).
    private void initItems() {
        ajouterItem("entrer", entrer);
        ajouterItem("sortir", sortir);
        ajouterItem("supprimer", supprimer);
    }

    /**
     * Returns the menu items applicable to the given channel, after pointing
     * every action at that channel.
     */
    @Override
    public List<JMenuItem> filtrerItemsPour(Channel model) {
        configurer(model);
        return getItems();
    }

    // Targets all three listeners at the given channel's name.
    private void configurer(Channel model) {
        entrer.setChannel(model.getNom());
        sortir.setChannel(model.getNom());
        supprimer.setChannel(model.getNom());
    }
}
|
// Demo: keep only the even values from a small list.
// FIX: `numbers` was assigned without a declaration (an accidental global,
// and a ReferenceError in strict mode / ES modules).
const numbers = [1, 2, 3, 4, 5, 6];
const evenNumbers = numbers.filter(num => num % 2 === 0);
console.log(evenNumbers);
<!-- Simple registration page: posts name/email/password to /register.
     NOTE(review): ensure this is only served over HTTPS, since a password
     field is submitted — confirm server configuration. -->
<html>
<head>
<title>Registration</title>
</head>
<body>
<form method="post" action="/register">
<label>Name:</label><input type="text" name="name" /><br/>
<label>Email:</label><input type="text" name="email" /><br/>
<label>Password:</label><input type="password" name="password" /><br/>
<input type="submit" value="Register" />
</form>
</body>
</html>
<reponame>egoist/babel-plugin-markdown
import fs from 'fs'
import path from 'path'
import * as babel from 'babel-core'
import plugin from '../'
// Snapshot test: a markdown tagged template with interpolation is compiled.
test('it works', () => {
  compare({
    input: 'markdown`# hi ${1+1}`'
  })
})

// Snapshot test: only the `markdown` tag is transformed; other tags (md) are left alone.
test('it only transforms markdown', () => {
  compare({
    input: 'const a = markdown`# hi`; const b = md`# bye`'
  })
})

// Snapshot test: plugin options (highlight callback, markdown-it plugins) are honored.
test('markdown-it options', () => {
  compare({
    input: fixture('opts.js').trim(),
    pluginOptions: {
      highlight() {
        return 'hahah'
      },
      plugins: ['task-lists']
    },
    babelOptions: {
      filename: __filename
    }
  })
})

// Snapshot test: markdown.require() inlines an external .md file (resolved
// relative to the babel `filename`).
test('external', () => {
  compare({
    input: 'markdown.require("./__fixtures__/external.md")',
    babelOptions: {
      filename: __filename
    }
  })
})
// Transforms `input` with the plugin (no .babelrc) and snapshots the output.
function compare({ input, pluginOptions, babelOptions }) {
  const transformOptions = {
    babelrc: false,
    plugins: [[plugin, pluginOptions]],
    ...babelOptions
  }
  const transformed = babel.transform(input, transformOptions)
  expect(transformed.code).toMatchSnapshot()
}
// Reads a UTF-8 fixture file from the adjacent __fixtures__ directory.
function fixture(...args) {
  const fixturePath = path.join(__dirname, '__fixtures__', ...args)
  return fs.readFileSync(fixturePath, 'utf8')
}
|
<reponame>snow-flake/gdax-api
package clients
import (
"net/url"
"testing"
"time"
)
// Test_live_GetProducts exercises GetProducts against the sandbox API and
// expects a non-empty product list (live network test).
func Test_live_GetProducts(t *testing.T) {
	client := NewSandboxClient()
	output, err := GetProducts(client)
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
	if len(output) == 0 {
		t.Fatalf("Output should have more than item, %v", output)
	}
}
// Test_live_GetTime fetches /time from the sandbox API directly through the
// client and only checks for transport/decoding errors (live network test).
func Test_live_GetTime(t *testing.T) {
	client := NewSandboxClient()
	output := &GdaxTimeResponse{}
	_, err := client.Get("/time", url.Values{}, output)
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
}
// Test_live_GetCurrencies expects the sandbox API to return at least one
// currency (live network test).
func Test_live_GetCurrencies(t *testing.T) {
	client := NewSandboxClient()
	output, err := GetCurrencies(client)
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
	if len(output) == 0 {
		t.Fatalf("Output should have >= 1 items, %v", output)
	}
}
// Test_live_GetProduct24HrStats fetches 24h stats for BTC-USD from the
// sandbox API (live network test).
func Test_live_GetProduct24HrStats(t *testing.T) {
	client := NewSandboxClient()
	output, err := GetProduct24HrStats(client, "BTC-USD")
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
}
// Test_live_GetProductHistoricRates fetches one day of BTC-USD candles at
// daily granularity from the sandbox API (live network test).
func Test_live_GetProductHistoricRates(t *testing.T) {
	client := NewSandboxClient()
	start := time.Now().UTC().Add(-1 * HistoricRateGranularity_1day * time.Second)
	end := time.Now().UTC()
	output, err := GetProductHistoricRates(client, "BTC-USD", &start, &end, HistoricRateGranularity_1day)
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
}
// Test_live_GetProductTrades expects the sandbox API to return at least one
// BTC-USD trade (live network test).
func Test_live_GetProductTrades(t *testing.T) {
	client := NewSandboxClient()
	output, err := GetProductTrades(client, "BTC-USD")
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
	if len(output) == 0 {
		t.Fatalf("Output should have >= 1 items, %v", output)
	}
}
// Test_live_GetProductTicker fetches the BTC-USD ticker from the sandbox API
// (live network test).
func Test_live_GetProductTicker(t *testing.T) {
	client := NewSandboxClient()
	output, err := GetProductTicker(client, "BTC-USD")
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
}
// Test_live_GetProductOrderBookLevel1 fetches the level-1 (best bid/ask)
// BTC-USD order book from the sandbox API (live network test).
func Test_live_GetProductOrderBookLevel1(t *testing.T) {
	client := NewSandboxClient()
	output, err := GetProductOrderBookLevel1(client, "BTC-USD")
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
}
// Test_live_GetProductOrderBookLevel2 fetches the aggregated (level-2)
// BTC-USD order book from the sandbox API (live network test).
func Test_live_GetProductOrderBookLevel2(t *testing.T) {
	client := NewSandboxClient()
	output, err := GetProductOrderBookLevel2(client, "BTC-USD")
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
}
// Test_live_GetProductOrderBookLevel3 fetches the full (level-3) BTC-USD
// order book from the sandbox API (live network test).
func Test_live_GetProductOrderBookLevel3(t *testing.T) {
	client := NewSandboxClient()
	output, err := GetProductOrderBookLevel3(client, "BTC-USD")
	if err != nil {
		t.Fatalf("Error should be nil, %v", err)
	}
	if output == nil {
		t.Fatalf("Output should not be nil, %v", output)
	}
}
//
//
//
|
<filename>node_modules/@buffetjs/icons/dist/components/Pencil/index.js
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports["default"] = void 0;

var _react = _interopRequireDefault(require("react"));
var _propTypes = _interopRequireDefault(require("prop-types"));

// Normalize a CommonJS/ES-module import so `.default` is always present.
function _interopRequireDefault(obj) {
  return obj && obj.__esModule ? obj : { "default": obj };
}

// 10x10 pencil glyph rendered as an inline SVG; only the fill colour is
// configurable through props.
var Pencil = function Pencil(_ref) {
  var fill = _ref.fill;
  var path = _react["default"].createElement("path", {
    d: "M2.396 9.155l.6-.6-1.55-1.552-.601.601v.706h.845v.845h.706zM5.848 3.03c0-.097-.048-.146-.145-.146a.153.153 0 0 0-.112.047L2.013 6.508a.153.153 0 0 0-.046.112c0 .097.048.146.145.146a.153.153 0 0 0 .112-.047l3.578-3.577a.153.153 0 0 0 .046-.112zm-.356-1.268l2.746 2.746L2.746 10H0V7.254l5.492-5.492zM10 2.396a.809.809 0 0 1-.244.594L8.66 4.086 5.914 1.34 7.01.25A.784.784 0 0 1 7.604 0a.82.82 0 0 1 .6.25l1.552 1.545a.845.845 0 0 1 .244.601z",
    fill: fill,
    fillRule: "nonzero"
  });
  return _react["default"].createElement("svg", {
    width: "10",
    height: "10",
    xmlns: "http://www.w3.org/2000/svg"
  }, path);
};

Pencil.defaultProps = {
  fill: '#007EFF'
};

Pencil.propTypes = {
  fill: _propTypes["default"].string
};

exports["default"] = Pencil;
// Grab the demo canvas and cache its 2D context and pixel dimensions.
let canvas = document.getElementById('canvas1');
let ctx = canvas.getContext('2d');
let width = canvas.width;
let height = canvas.height;
// Minimal 3-component vector. The *2 helpers deliberately ignore z and
// operate on the xy-plane only.
class Point3 {
    constructor(x, y, z) {
        this.x = x;
        this.y = y;
        this.z = z;
    }

    // Component-wise difference (this - p).
    sub(p) {
        const { x, y, z } = p;
        return new Point3(this.x - x, this.y - y, this.z - z);
    }

    // Component-wise sum.
    add(p) {
        const { x, y, z } = p;
        return new Point3(this.x + x, this.y + y, this.z + z);
    }

    // 2-D dot product (z ignored).
    dot2(p) {
        return this.x * p.x + this.y * p.y;
    }

    // 2-D cross product magnitude; the sign encodes winding order.
    cross2(p) {
        return this.x * p.y - this.y * p.x;
    }

    // Independent copy of this point.
    clone() {
        return new Point3(this.x, this.y, this.z);
    }
}
// Pairs a 3-D position with its per-vertex varying values (e.g. RGBA).
class Vertex {
    constructor(point, varyingArray) {
        this.point = point;
        this.varyingArray = varyingArray;
    }
}
// Three vertices; winding order matters for back-face culling downstream.
class Triangle {
    constructor(p1, p2, p3) {
        this.p1 = p1;
        this.p2 = p2;
        this.p3 = p3;
    }
}
// One shaded pixel: red, green, blue and alpha channels (0-255 each).
class Fragment {
    constructor(r, g, b, a) {
        this.r = r;
        this.g = g;
        this.b = b;
        this.a = a;
    }
}
// Software render target: an RGBA ImageData plus a (currently unused)
// per-pixel depth array initialized to zero.
class Buffer {
    constructor(w, h) {
        this.imageData = new ImageData(w, h);
        this.depth = new Array(w * h).fill(0);
    }
}
// Animation clock and the shared render target.
let val = 0;
let buffer = new Buffer(width, height);
// Trivial fragment shader: the four interpolated varyings ARE the RGBA output.
let fragShader = (varyings) => {
    return new Fragment(varyings[0], varyings[1], varyings[2], varyings[3]);
}
// Advances the animation clock and renders one frame: three vertices orbit
// (320, 240), coloured pure red/green/blue and Gouraud-interpolated between.
function mainLoop() {
    val += .01;
    // Vertex positions orbit the canvas centre at radius 100.
    let a = Math.sin(val - 1.5 - Math.sin(val / 1.3538)) * 100 + 320;
    let b = Math.cos(val - 1.5 - Math.sin(val / 1.3538)) * 100 + 240;
    let c = Math.sin(val + 2) * 100 + 320;
    let d = Math.cos(val + 2) * 100 + 240;
    let e = Math.sin(val + 1) * 100 + 320;
    let f = Math.cos(val + 1) * 100 + 240;
    let v1 = new Vertex(new Point3(a, b, 0), [255, 0, 0, 255]);
    let v2 = new Vertex(new Point3(c, d, 0), [0, 255, 0, 255]);
    let v3 = new Vertex(new Point3(e, f, 0), [0, 0, 255, 255]);
    let tri = new Triangle(v1, v2, v3);
    drawTriangle(buffer, tri, fragShader);
    ctx.putImageData(buffer.imageData, 0, 0);
    // Clear the software buffer for the next frame after presenting it.
    buffer.imageData.data.fill(0);
}
// Render once so the canvas is not blank before the user presses Start.
mainLoop();
let running = false;
let interval = null;
let button = document.getElementById('start1');
// Starts/stops the ~60 fps render loop and flips the button label to match.
function toggleRenderer1() {
    if (!running) {
        button.innerHTML = "Stop";
        interval = setInterval(mainLoop, 1000/60.0);
        running = true;
    } else {
        button.innerHTML = "Start";
        clearInterval(interval);
        running = false;
    }
}
// For each varying, computes its per-pixel rate of change (dx, dy) across
// the triangle's plane. Returns null for degenerate (zero-area) triangles.
function calculateVaryingSlope(t) {
    const a = t.p1, b = t.p2, c = t.p3;
    const w12 = b.point.x - a.point.x;
    const h12 = b.point.y - a.point.y;
    const w13 = c.point.x - a.point.x;
    const h13 = c.point.y - a.point.y;
    const quot = w13 * h12 - w12 * h13;
    if (quot === 0) {
        return null;
    }
    // Solve the plane equation per varying channel.
    return a.varyingArray.map((r1, i) => {
        const r2 = b.varyingArray[i];
        const r3 = c.varyingArray[i];
        return {
            dx: (h12 * (r3 - r1) + h13 * (r1 - r2)) / quot,
            dy: (w12 * (r3 - r1) + w13 * (r1 - r2)) / -quot,
        };
    });
}
// Rasterizes one triangle into `buffer`, interpolating the vertices'
// varying arrays and shading each pixel via `fragmentShader`. The triangle
// is split at its middle vertex into two y-monotone halves.
function drawTriangle(buffer, triangle, fragmentShader) {
    let p1 = triangle.p1.point;
    let p2 = triangle.p2.point;
    let p3 = triangle.p3.point;
    // Rotate the three points so p1 is the topmost (smallest y) vertex while
    // preserving the winding order.
    if (p2.y < p1.y && p2.y < p3.y) {
        let tmp = p1;
        p1 = p2;
        p2 = p3;
        p3 = tmp;
    }
    if (p3.y < p2.y && p3.y < p1.y) {
        let tmp = p1;
        p1 = p3;
        p3 = p2;
        p2 = tmp;
    }
    let vL = p2.sub(p1);
    let vR = p3.sub(p1);
    let cr = vL.cross2(vR);
    //keep Clockwise Faces
    //cull Counter Clockwise Faces
    if (cr < 0) {
        return;
    }
    let varyingSlopes = calculateVaryingSlope(triangle);
    if (!varyingSlopes) {
        // Degenerate (zero-area) triangle: nothing to draw.
        return;
    }
    // Upper half: from the top vertex down to the higher of the other two.
    let yScanStart = Math.ceil(p1.y);
    let yScanEnd = Math.ceil(Math.min(p2.y, p3.y));
    if (yScanEnd !== yScanStart) {
        let vec1 = p2.sub(p1);
        let vec2 = p3.sub(p1);
        doHalfTri(buffer, yScanStart, yScanEnd, p1.clone(), vec1.x/vec1.y, p1.clone(), vec2.x/vec2.y, triangle.p1, varyingSlopes, fragmentShader);
    }
    // Lower half: continue from the split line down to the bottom vertex,
    // picking edge start points/vectors based on which vertex is lowest.
    yScanStart = yScanEnd;
    let vec1, vec2, start1, start2;
    if (p2.y > p3.y) {
        yScanEnd = Math.ceil(p2.y);
        vec1 = p2.sub(p1);
        vec2 = p2.sub(p3);
        start1 = p1;
        start2 = p3;
    } else {
        yScanEnd = Math.ceil(p3.y);
        vec1 = p3.sub(p2);
        vec2 = p3.sub(p1);
        start1 = p2;
        start2 = p1;
    }
    if (yScanStart !== yScanEnd) {
        doHalfTri(buffer, yScanStart, yScanEnd, start1.clone(), vec1.x/vec1.y, start2.clone(), vec2.x/vec2.y, triangle.p1, varyingSlopes, fragmentShader);
    }
}
// Fills scanlines [scanStart, scanEnd) of one y-monotone half-triangle.
// p1/slope1 describe the right edge and p2/slope2 the left edge; varyings
// are derived once per scanline, then stepped incrementally across the span.
function doHalfTri(buffer, scanStart, scanEnd, p1, slope1, p2, slope2, baseVertex, varyingSlopes, fragmentShader) {
    //start right x pos
    let sx1 = p1.x + (scanStart - p1.y) * slope1;
    //start left x pos
    let sx2 = p2.x + (scanStart - p2.y) * slope2;
    //draw scan lines
    for (let i = scanStart; i < scanEnd; i++) {
        let low = Math.ceil(sx2);
        let high = Math.ceil(sx1);
        let varyingBase = calculateVaryingBase(baseVertex, varyingSlopes, low, i);
        for (let j = low; j < high; j++) {
            let frag = fragmentShader(varyingBase);
            setPixelAlphaBlend(buffer.imageData, j, i, frag.r, frag.g, frag.b, frag.a);
            incrementVaryingX(varyingBase, varyingSlopes);
        }
        sx1 += slope1;
        sx2 += slope2;
    }
}
// Evaluates the varyings at pixel (x, y) by offsetting the base vertex's
// values along the precomputed slopes.
function calculateVaryingBase(base, slopes, x, y) {
    const varyingBase = [...base.varyingArray];
    incrementVarying(varyingBase, slopes, x - base.point.x, y - base.point.y);
    return varyingBase;
}
// In-place: advance each varying by dxm steps along x and dym steps along y.
function incrementVarying(varying, slopes, dxm, dym) {
    for (let i = varying.length - 1; i >= 0; i--) {
        varying[i] += slopes[i].dx * dxm + slopes[i].dy * dym;
    }
}
// In-place: advance each varying by one pixel step along x.
function incrementVaryingX(varying, slopes) {
    for (let i = varying.length - 1; i >= 0; i--) {
        varying[i] += slopes[i].dx;
    }
}
// Writes one RGBA pixel into `data` (an ImageData-like object), alpha-
// blending over the existing pixel unless the source is fully opaque.
// Coordinates outside the image are silently ignored.
function setPixelAlphaBlend(data, x, y, r, g, b, a) {
    const inBounds = x >= 0 && y >= 0 && x < data.width && y < data.height;
    if (!inBounds) {
        return;
    }
    const idx = (x + y * data.width) * 4;
    if (a >= 255) {
        // Opaque source: overwrite the destination outright.
        data.data[idx] = r;
        data.data[idx + 1] = g;
        data.data[idx + 2] = b;
        data.data[idx + 3] = a;
        return;
    }
    // Classic "source over" blend with normalized alpha.
    const af = a / 255.0;
    const oma = 1 - af;
    data.data[idx] = data.data[idx] * oma + r * af;
    data.data[idx + 1] = data.data[idx + 1] * oma + g * af;
    data.data[idx + 2] = data.data[idx + 2] * oma + b * af;
    data.data[idx + 3] = data.data[idx + 3] + af * (255 - data.data[idx + 3]);
}
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 16 16:42:00 2022
@author: bpcos
"""
class Person(object):
    """A person with a name, an age and a validated gender code."""

    def __init__(self, name: str, age: int, gender: str = 'ND') -> None:
        # Only the four supported gender codes are accepted.
        assert gender in ('M', 'F', 'ND', 'B'), "Gender Type Not Valid"
        self.name: str = name
        self.age: int = age
        self.gender: str = gender

    def __str__(self) -> str:
        # Same rendering as the original f-string version.
        return "Person: Name:{} Age:{} Gender:{}".format(self.name, self.age, self.gender)
class BankAccount(object):
    """A single account owned by one Person, holding a float balance."""

    def __init__(self, holder: Person) -> None:
        self.holder = holder
        # Current balance; accounts start empty.
        self.account = 0.0

    def deposit(self) -> None:
        """Read an amount from stdin and add it to the balance."""
        self.account += float(input("Digitare quantità: "))
        self.show_balance()

    def withdraw(self) -> None:
        """Read an amount from stdin and subtract it.

        Raises:
            ValueError: if the requested amount exceeds the balance.
        """
        amount = float(input("Preleva: "))
        if self.account < amount:
            raise ValueError("The given amount is over your balance amount")
        self.account -= amount
        self.show_balance()

    def show_balance(self) -> None:
        """Print the current balance."""
        print(self.account)

    def get_holder_name(self) -> str:
        return self.holder.name

    def get_holder_age(self) -> int:
        # Fixed: was annotated "-> str" although it returns the holder's int age.
        return self.holder.age

    def get_holder_gender(self) -> str:
        return self.holder.gender
class Bank(object):
    """Tracks the set of accounts registered with this bank."""

    def __init__(self) -> None:
        # A set, so registering the same account twice is a no-op.
        self.accounts = set()

    def add_account(self, account: BankAccount) -> None:
        """Register an account with the bank."""
        self.accounts.add(account)
# People
bruno = Person("Bruno", 16, 'M')
michele = Person("Michele", 20, 'B')
giorgia = Person("Giorgia", 17, 'F')

# Banks
banca = Bank()

# Accounts — one per person, all registered with the bank.
b1 = BankAccount(bruno)
m1 = BankAccount(michele)
g1 = BankAccount(giorgia)
banca.add_account(b1)
banca.add_account(m1)
banca.add_account(g1)

# Fixed: the last line carried dataset-separator junk ("|") that broke the
# script's syntax; it has been removed.
print(b1.get_holder_name())
#!/bin/bash
# shellcheck disable=SC1091
source ../libimport.bash
bash_import libhsm.bash
bash_import libdebug.bash
# Signal numbers for the example HSM's events (A..I) and their display names,
# used by send() when printing the event trace.
declare -gri SIG_A=0
declare -gri SIG_B=1
declare -gri SIG_C=2
declare -gri SIG_D=3
declare -gri SIG_E=4
declare -gri SIG_F=5
declare -gri SIG_G=6
declare -gri SIG_H=7
declare -gri SIG_I=8
declare -gra EVENT_NAMES=( A B C D E F G H I )
# One-time initial pseudo-state: resets FOO and transitions into s2.
state_initial() {
    printf "topState-INIT;"
    FOO=0
    STATE=s2 && return $RET_TRAN
}

# State `s`: parent of s1 and s2. Handles E (jump to s11) and I (consume
# when FOO is set); everything else bubbles up to TOP_STATE.
state_s() {
    case $1 in
        $SIG_ENTRY)
            printf "s-ENTRY;"
            return $RET_HANDLED
            ;;
        $SIG_EXIT)
            printf "s-EXIT;"
            return $RET_HANDLED
            ;;
        $SIG_INIT)
            printf "s-INIT;"
            STATE=s11 && return $RET_TRAN
            ;;
        $SIG_E)
            printf "s-E;"
            STATE=s11 && return $RET_TRAN
            ;;
        $SIG_I)
            # Guarded: only consumed when the FOO flag is currently set.
            if (( FOO != 0 )); then
                printf "s-I;"
                FOO=0
                return $RET_HANDLED
            fi
            ;;
    esac
    STATE=TOP_STATE && return $RET_PARENT
}
# State `s1` (child of s): self-transition on A, local transitions on B/C/F,
# a guarded choice on D (see choice1), and plain consumption of I.
state_s1() {
    case $1 in
        $SIG_ENTRY)
            printf "s1-ENTRY;"
            return $RET_HANDLED
            ;;
        $SIG_EXIT)
            printf "s1-EXIT;"
            return $RET_HANDLED
            ;;
        $SIG_INIT)
            printf "s1-INIT;"
            STATE=s11 && return $RET_TRAN
            ;;
        $SIG_A)
            printf "s1-A;"
            STATE=s1 && return $RET_TRAN
            ;;
        $SIG_B)
            printf "s1-B;"
            STATE=s11 && return $RET_TRAN
            ;;
        $SIG_C)
            printf "s1-C;"
            STATE=s2 && return $RET_TRAN
            ;;
        $SIG_D)
            # Delegate to the choice pseudo-state (guard on FOO).
            choice1 "$@"
            return $?
            ;;
        $SIG_F)
            printf "s1-F;"
            STATE=s211 && return $RET_TRAN
            ;;
        $SIG_I)
            printf "s1-I;"
            return $RET_HANDLED
            ;;
    esac
    STATE=s && return $RET_PARENT
}

# Leaf state `s11` (child of s1): guarded D, plus G/H transitions.
state_s11() {
    case $1 in
        $SIG_ENTRY)
            printf "s11-ENTRY;"
            return $RET_HANDLED
            ;;
        $SIG_EXIT)
            printf "s11-EXIT;"
            return $RET_HANDLED
            ;;
        $SIG_D)
            # Only fires when FOO is set; clears the flag on the way out.
            if (( FOO != 0 )); then
                printf "s11-D;"
                FOO=0
                STATE=s1 && return $RET_TRAN
            fi
            ;;
        $SIG_G)
            printf "s11-G;"
            STATE=s211 && return $RET_TRAN
            ;;
        $SIG_H)
            printf "s11-H;"
            STATE=s && return $RET_TRAN
            ;;
    esac
    STATE=s1 && return $RET_PARENT
}

# State `s2` (child of s): mirror of s1 with C/F transitions and a guarded I
# that sets FOO when it is clear.
state_s2() {
    case $1 in
        $SIG_ENTRY)
            printf "s2-ENTRY;"
            return $RET_HANDLED
            ;;
        $SIG_EXIT)
            printf "s2-EXIT;"
            return $RET_HANDLED
            ;;
        $SIG_INIT)
            printf "s2-INIT;"
            STATE=s211 && return $RET_TRAN
            ;;
        $SIG_C)
            printf "s2-C;"
            STATE=s1 && return $RET_TRAN
            ;;
        $SIG_F)
            printf "s2-F;"
            STATE=s11 && return $RET_TRAN
            ;;
        $SIG_I)
            if (( FOO == 0 )); then
                printf "s2-I;"
                FOO=1
                return $RET_HANDLED
            fi
            ;;
    esac
    STATE=s && return $RET_PARENT
}
# State `s21` (child of s2): self-transition on A, B into s211, G into s1.
state_s21() {
    case $1 in
        $SIG_ENTRY)
            printf "s21-ENTRY;"
            return $RET_HANDLED
            ;;
        $SIG_EXIT)
            printf "s21-EXIT;"
            return $RET_HANDLED
            ;;
        $SIG_INIT)
            printf "s21-INIT;"
            STATE=s211 && return $RET_TRAN
            ;;
        $SIG_A)
            printf "s21-A;"
            STATE=s21 && return $RET_TRAN
            ;;
        $SIG_B)
            printf "s21-B;"
            STATE=s211 && return $RET_TRAN
            ;;
        $SIG_G)
            printf "s21-G;"
            STATE=s1 && return $RET_TRAN
            ;;
    esac
    STATE=s2 && return $RET_PARENT
}

# Leaf state `s211` (child of s21): D back to s21, H up to s.
state_s211() {
    case $1 in
        $SIG_ENTRY)
            printf "s211-ENTRY;"
            return $RET_HANDLED
            ;;
        $SIG_EXIT)
            printf "s211-EXIT;"
            return $RET_HANDLED
            ;;
        $SIG_D)
            printf "s211-D;"
            STATE=s21 && return $RET_TRAN
            ;;
        $SIG_H)
            printf "s211-H;"
            STATE=s && return $RET_TRAN
            ;;
    esac
    STATE=s21 && return $RET_PARENT
}

# Choice pseudo-state for s1's D event: transition to s only when FOO == 0.
choice1() {
    if (( FOO == 0 )); then
        printf "s1-D;"
        FOO=1
        STATE=s && return $RET_TRAN
    fi
    return $RET_HANDLED
}

# Prints the event's display name, then dispatches it into the HSM.
send() {
    local SIG=$1
    printf "\n%s:" "${EVENT_NAMES[$SIG]}"
    hsm_dispatch $SIG
}
# Drives the demo machine through a fixed event script; the output is the
# semicolon-separated trace of entry/exit/transition actions.
example_machine() {
    local STATE # hsm state
    local -a HSM_PATH=() # hsm path
    local FOO
    hsm_init initial
    send $SIG_A
    send $SIG_B
    send $SIG_D
    send $SIG_E
    send $SIG_I
    send $SIG_F
    send $SIG_I
    send $SIG_I
    send $SIG_F
    send $SIG_A
    send $SIG_B
    send $SIG_D
    send $SIG_D
    send $SIG_E
    send $SIG_G
    send $SIG_H
    send $SIG_H
    send $SIG_C
    send $SIG_G
    send $SIG_C
    send $SIG_C
}

# Configures the bash debugger's watches, then runs the example machine.
main() {
    set_debugger_property locals.auto 1
    set_debugger_property watch.auto 1
    add_watch '$__EXITCODE__'
    add_watch '$STATE'
    add_watch '"$HSM_PATH[@]"'
    set_debugger_trap
    example_machine
}
|
package objects;
import java.util.Vector;
/**
 * Message carrying the property strings reported by a client.
 */
public class ClientPropertyMessage implements Message
{
    // Keep stable so serialized messages stay compatible across versions.
    private static final long serialVersionUID = 7125961928769121190L;

    /** Property strings from the client; order is preserved. */
    public Vector<String> properties;

    /**
     * Wraps the given property list for transport.
     * NOTE(review): the vector is stored by reference, not copied — callers
     * mutating it afterwards also mutate this message.
     */
    public ClientPropertyMessage(Vector<String> properties)
    {
        this.properties = properties;
    }
}
|
def letter_frequency(string):
    """Return a dict mapping each character of *string* to its occurrence count."""
    letter_counts = {}
    for letter in string:
        # dict.get replaces the original's explicit "in .keys()" membership test.
        letter_counts[letter] = letter_counts.get(letter, 0) + 1
    return letter_counts


# Fixed: the original printed letter_frequency(string) with `string` undefined,
# which raised NameError at import time; demonstrate with a concrete sample.
print(letter_frequency("hello world"))
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.pagination = void 0;
const discord_js_1 = require("discord.js");
// Fallback emoji for each pager button when the caller supplies none.
const defaultEmojis = {
    first: "⬅️",
    previous: "◀️",
    next: "▶️",
    last: "➡️",
    number: "#️⃣",
};
// Fallback discord.js button styles, keyed the same way as defaultEmojis.
const defaultStyles = {
    first: "PRIMARY",
    previous: "PRIMARY",
    next: "PRIMARY",
    last: "PRIMARY",
    number: "SUCCESS",
};
/**
 * Sends `embeds` to `channel` as one message with button-based page
 * navigation for `author`, plus optional fast-skip (first/last) buttons and
 * "page travel" (jump to a typed page number). Options and behavior are
 * unchanged except for the last-page off-by-one fix noted below.
 */
const pagination = async (options) => {
    const { author, channel, embeds, button, time, max, customFilter, fastSkip, pageTravel, } = options;
    let currentPage = 1; // 1-based index into `embeds`
    // Caller-supplied override (emoji/style) for a named button, if any.
    const getButtonData = (name) => {
        return button?.find((btn) => btn.name === name);
    };
    const generateButtons = (state) => {
        // A button is disabled when it cannot move the page (or when `state`
        // forces everything off, e.g. after the collector ends).
        const checkState = (name) => {
            if (["first", "previous"].includes(name) &&
                currentPage === 1)
                return true;
            if (["next", "last"].includes(name) &&
                currentPage === embeds.length)
                return true;
            return false;
        };
        let names = ["previous", "next"];
        if (fastSkip)
            names = ["first", ...names, "last"];
        if (pageTravel)
            names.push("number");
        return names.reduce((accumulator, name) => {
            accumulator.push(new discord_js_1.MessageButton()
                .setEmoji(getButtonData(name)?.emoji || defaultEmojis[name])
                .setCustomId(name)
                .setDisabled(state || checkState(name))
                .setStyle(getButtonData(name)?.style ||
                defaultStyles[name]));
            return accumulator;
        }, []);
    };
    const components = (state) => [
        new discord_js_1.MessageActionRow().addComponents(generateButtons(state)),
    ];
    // Clone the current embed and append/replace a "Page X of Y" footer.
    const changeFooter = () => {
        const embed = embeds[currentPage - 1];
        const newEmbed = new discord_js_1.MessageEmbed(embed);
        if (embed?.footer?.text) {
            return newEmbed.setFooter(`${embed.footer.text} - Page ${currentPage} of ${embeds.length}`, embed.footer.iconURL);
        }
        return newEmbed.setFooter(`Page ${currentPage} of ${embeds.length}`);
    };
    const initialMessage = await channel.send({
        embeds: [changeFooter()],
        components: components(),
    });
    // Restrict interactions to the original author by default.
    const defaultFilter = (interaction) => {
        if (!interaction.deferred)
            interaction.deferUpdate();
        return interaction.user.id === author.id;
    };
    const filter = customFilter || defaultFilter;
    const collectorOptions = () => {
        const opt = {
            filter,
            componentType: "BUTTON",
        };
        if (max)
            opt["max"] = max;
        if (time)
            opt["time"] = time;
        return opt;
    };
    const collector = channel.createMessageComponentCollector(collectorOptions());
    const pageTravelling = new Set();
    // Chat-based navigation: read page numbers from the channel for 30 seconds.
    const numberTravel = async () => {
        if (pageTravelling.has(author.id))
            return channel.send("Type `end` to stop page travelling!");
        const collector = channel.createMessageCollector({
            filter: (msg) => msg.author.id === author.id,
            time: 30000,
        });
        const numberTravelMessage = await channel.send(`${author.tag}, you have 30 seconds, send numbers in chat to change pages! Simply type \`end\` to exit from page travelling.`);
        pageTravelling.add(author.id);
        collector.on("collect", (message) => {
            if (message.content.toLowerCase() === "end") {
                message.delete().catch(() => { });
                return collector.stop();
            }
            const int = parseInt(message.content);
            // Fixed: accept the full valid range 1..embeds.length. The original
            // used `!(int < embeds.length)`, which wrongly rejected the last page.
            if (isNaN(int) || !(int <= embeds.length) || !(int >= 1))
                return;
            currentPage = int;
            initialMessage.edit({
                embeds: [changeFooter()],
                components: components(),
            });
            if (message.guild.me.permissions.has("MANAGE_MESSAGES"))
                message.delete();
        });
        collector.on("end", () => {
            if (numberTravelMessage.deletable)
                numberTravelMessage.delete();
            pageTravelling.delete(author.id);
        });
    };
    collector.on("collect", async (interaction) => {
        const id = interaction.customId;
        if (id === "first")
            currentPage = 1;
        if (id === "previous")
            currentPage--;
        if (id === "next")
            currentPage++;
        if (id === "last")
            currentPage = embeds.length;
        if (id === "number")
            await numberTravel();
        initialMessage.edit({
            embeds: [changeFooter()],
            components: components(),
        });
    });
    // When the collector expires, leave the message up with disabled buttons.
    collector.on("end", () => {
        initialMessage.edit({
            components: components(true),
        });
    });
};
exports.pagination = pagination;
//# sourceMappingURL=pagination.js.map |
<gh_stars>1-10
// AMD patch module (RxJS build output): attaches the `single` operator to
// Observable.prototype so `obs.single(...)` is available after importing it.
define(["require", "exports", '../../Observable', '../../operator/single'], function (require, exports, Observable_1, single_1) {
    "use strict";
    Observable_1.Observable.prototype.single = single_1.single;
});
//# sourceMappingURL=single.js.map |
/*-----------------------------------------------------------
Name: fisp
Author: <EMAIL>
Date: 2016.12
Copyright (c). All rights reserved.
------------------------------------------------------------*/
#include "../include/base.h"

#include <algorithm>
#include <cctype>
/*-----------------------------------------------------------
class String
------------------------------------------------------------*/
namespace fisp
{
namespace utility
{
/*-----------------------------------------------------------
class IRoot
------------------------------------------------------------*/
// Base object: starts in an invalid state until create() is called.
IRoot::IRoot()
    :meState(EState::State_MAX)
    ,meMemoryState(EMemory::Memory_Create)
{
}

// Releases owned resources, then marks both state machines invalid.
IRoot::~IRoot()
{
    destroy();
    meMemoryState = Memory_Max;
    meState = State_MAX;
}
// Copy constructor.
// Fixed: the old body called Tag::destroyMem on the *uninitialized* mpTag
// (undefined behavior) and copied nothing from `other`. Start with a null
// tag, then deep-copy every base member via copyOther().
IRoot::IRoot(const IRoot& other)
    :meState(other.meState)
    ,meMemoryState(other.meMemoryState)
{
    mpTag = nullptr;
    copyOther(other);
}
// NOTE(review): base assignment copies nothing; derived classes copy members
// through their own copyOther(). Confirm this no-op is intentional.
IRoot& IRoot::operator = (const IRoot& other)
{
    return *this;
}

// NOTE(review): `<` and `==` both return true unconditionally, which breaks
// strict-weak-ordering if IRoot is ever used as a container key — confirm.
bool IRoot::operator < (const IRoot& other) const
{
    return true;
}

bool IRoot::operator == (const IRoot& other) const
{
    return true;
}

// Runs doCreate() once; a second create() while already made is rejected.
bool IRoot::create(void* pData /* = nullptr */)
{
    if (State_MAKE == state())
        return false;
    bool rd = doCreate(pData);
    if (rd)
    {
        //Init();
        state(State_MAKE);
    }
    meMemoryState = EMemory::Memory_Create;
    return rd;
}

// Runs doDestroy() once; destroying an already-freed object is a no-op.
void IRoot::destroy()
{
    if (State_FREE == state())
        return;
    //UnInit();
    doDestroy();
    state(State_FREE);
}

// Deep-copies all base members, including the optional Tag.
void IRoot::copyOther(const IRoot& other)
{
    meMemoryState = other.meMemoryState;
    meState = other.meState;
    deepCopy<Tag>(mpTag, other.mpTag);
    mbEnable = other.mbEnable;
}
// --- memory-state accessors ---
void IRoot::memoryState(const EMemory& eState)
{
    meMemoryState = eState;
}

const EMemory& IRoot::memoryState() const
{
    return meMemoryState;
}

bool IRoot::isMemoryCreate() const
{
    return (Memory_Create == meMemoryState);
}

bool IRoot::isMemoryReference() const
{
    return (Memory_Refrence == meMemoryState);
}

// --- lifecycle-state accessors ---
// Out-of-range values are silently ignored.
void IRoot::state(const EState& eState)
{
    if (eState >= 0 && eState < State_MAX)
        meState = eState;
}

const EState& IRoot::state() const
{
    return meState;
}

void IRoot::enable(bool bEnable)
{
    mbEnable = bEnable;
}

bool IRoot::enable() const
{
    return mbEnable;
}

bool IRoot::isMake() const
{
    return (State_MAKE == meState);
}

bool IRoot::isFree() const
{
    return (State_FREE == meState);
}

// Stores a copy of `tag`, lazily allocating the owned Tag on first use.
void IRoot::tag(const Tag& tag)
{
    if (nullptr == mpTag)
    {
        mpTag = Tag::createMem<Tag>();
    }
    *mpTag = tag;
}

// May return nullptr when no tag was ever set.
Tag* IRoot::tag()
{
    return mpTag;
}

const Tag* IRoot::tag() const
{
    return mpTag;
}
// Returns the stored tag, or an empty fallback when none was ever set.
const Tag& IRoot::getTag() const
{
    if (nullptr != mpTag)
    {
        return *mpTag;
    }
    // Fixed: `return Tag();` returned a reference to a temporary (undefined
    // behavior). A function-local static keeps the fallback alive.
    static const Tag emptyTag;
    return emptyTag;
}
// A tag is "unknown" when absent or when its name is empty.
bool IRoot::isUnknowTag() const
{
    return (nullptr == mpTag || mpTag->name().isEmpty());
}

// Installs `flag` as the tag only if no meaningful tag exists yet.
void IRoot::isUnknowTagThenSet(const Tag& flag)
{
    if (isUnknowTag() && !flag.name().isEmpty())
    {
        tag(flag);
    }
}

// Default lifecycle hooks; subclasses override these to do the real work.
bool IRoot::doCreate(void* pData /* = NULL */)
{
    meMemoryState = Memory_Max;
    meState = State_MAX;
    return true;
}

void IRoot::doDestroy()
{
}
/*-----------------------------------------------------------
class String
------------------------------------------------------------*/
// Frequently used string constants.
// NOTE(review): Tab2/Tab4/Tab8 appear whitespace-collapsed in this copy of
// the file, and Terminate("\0") constructs from a C string and therefore
// yields an EMPTY string, not a one-character NUL string — confirm intent.
const String String::Space(" ");
const String String::Tab2(" ");
const String String::Tab4(" ");
const String String::Tab8(" ");
const String String::Enter("\r\n");
const String String::Terminate("\0");
const String String::Comma(",");
const String String::Semicolon(";");
const String String::Point(".");
// Default: empty, unencrypted string.
String::String()
    :mbEncryption(false)
{
    //m_String.clear();
}

String::~String()
{
    //m_String.clear();
}

// Interprets the raw bytes as a C string.
// NOTE(review): assumes `bt` is non-null and NUL-terminated — confirm callers.
String::String(const byte* bt)
    :mbEncryption(false)
    , m_String((const char*)bt)
{
}

String::String(const char* s)
    : mbEncryption(false)
    //, m_String(s)
{
    m_String = std::string(s);
}

// Wide-character input is narrowed through toString().
String::String(const wchar* s)
    : mbEncryption(false)
    //,m_String(s)
{
    m_WString = std::wstring(s);
    m_String = toString(m_WString);
}

String::String(const std::string& str)
    : m_String(str)
    , mbEncryption(false)
{
}

String::String(const std::wstring& wstr)
    : m_String("")
    , mbEncryption(false)
{
    m_String = toString(wstr);
}

// Single character; remaining array elements zero-fill, so the buffer is
// NUL-terminated.
String::String(char ch)
    :mbEncryption(false)
{
    char szStr[4] = { ch };
    m_String = szStr;
}

// Boolean renders as the literal text "true" or "false".
String::String(bool b)
    :mbEncryption(false)
{
    char szStr[8];
    b ? sprintf(szStr, "%s", "true") : sprintf(szStr, "%s", "false");
    m_String = szStr;
}

String::String(int index)
    :mbEncryption(false)
{
    char szStr[256];
    sprintf(szStr, "%d", index);
    m_String = szStr;
}

String::String(uint uStr)
    :mbEncryption(false)
{
    char szStr[256];
    sprintf(szStr, "%u", uStr);
    m_String = szStr;
}
// Signed long formatter.
String::String(long uStr)
    :mbEncryption(false)
{
    char szStr[256];
    // Fixed: "%lu" reinterpreted negative longs as unsigned; "%ld" matches
    // the signed argument type.
    sprintf(szStr, "%ld", uStr);
    m_String = szStr;
}
// Unsigned long formatter.
String::String(ulong uStr)
    :mbEncryption(false)
{
    char szStr[256];
    // Fixed: "%u" expects unsigned int; "%lu" matches a long-sized argument
    // (assumes ulong is a typedef of unsigned long — confirm in base.h).
    sprintf(szStr, "%lu", uStr);
    m_String = szStr;
}
// Formats with 15 decimal places, then strips trailing zeros while keeping
// at least one digit after the decimal point (so 1.0 renders as "1.0").
String::String(float fStr)
    :mbEncryption(false)
{
    char szStr[256];
    sprintf(szStr, "%.15f", fStr);
    m_String = szStr;
    ulong idx = 0;
    if (find(String(".")))
    {
        trimRight("0");
        //trimRight(".");
        // If every fractional digit was trimmed, restore a single "0".
        if (find(String("."), idx) && (getSize() == idx + 1))
        {
            operator += ("0");
        }
    }
}
//String::String(fdword dwStr)
//{
// fromDWord(dwStr);
//}
//String::String(char* szStr)
//{
// if(NULL == szMsg || strlen(szMsg) <= 0)
// return ;
// m_String = szStr;
//}
//String::String(char* szMsg, ...)
// :mbEncryption(false)
//{
// if (NULL == szMsg || strlen(szMsg) <= 0)
// return;
// const ulong uSize = strlen(szMsg) + 1;
// assert(uSize <= 65535);
// //char* msg = new char[uSize];
// //char msg[uSize];
// char msg[65535];
// va_list valist;
// va_start(valist, szMsg);
// vsprintf(msg, szMsg, valist);
// va_end(valist);
// m_String = msg;
// //delete []msg;
// //msg = NULL;
//}
// Copy constructor delegates to copyOther() for a full member copy.
String::String(const String& other)
    :mbEncryption(false)
{
    copyOther(other);
}
//String::String(String& s)
//{
// m_String.clear();
// m_String.append(s.m_String);
//}
//FRD String::create()
//{
// return F_OK;
//}
//FRD String::release()
//{
// m_String.clear();
// return F_OK;
//}
String& String::operator = (const String& other)
{
    IRoot::operator = (other);
    copyOther(other);
    return *this;
}

// NOTE(review): copyOther() also invokes IRoot::operator=, so assignment
// runs it twice; currently harmless because the base operator is a no-op.
void String::copyOther(const String& other)
{
    IRoot::operator = (other);
    m_String = other.m_String;
    mbEncryption = other.mbEncryption;
}

// Comparisons delegate to std::string::compare (byte-wise, case-sensitive).
bool String::operator == (const String& s) const
{
    return (m_String.compare(s.m_String) == 0);
}

bool String::operator != (const String& s) const
{
    return (m_String.compare(s.m_String) != 0);
}

bool String::operator > (const String& s) const
{
    return (m_String.compare(s.m_String) > 0);
}

bool String::operator < (const String& s) const
{
    return (m_String.compare(s.m_String) < 0);
}

String String::operator + (const String& s) const
{
    return String(m_String + s.m_String);
}

String& String::operator += (const String& s)
{
    m_String += s.m_String;
    return *this;
}

// Lifecycle hooks: reset to an empty, unencrypted string.
bool String::doCreate()
{
    m_String.clear();
    mbEncryption = false;
    return true;
}

void String::doRelease()
{
    m_String.clear();
    mbEncryption = false;
}
//LPWSTR String::sCharToWChar(const char* szStr, int size, const String::EnumCodePage& CodePage /* = CodePage_ACP */)
//{
// if (szStr == NULL || size <= 0)
// return NULL;
// LPWSTR wStr = new WCHAR[size];
// MultiByteToWideChar(CodePage, 0, szStr, size, wStr, size);
// return wStr;
//}
//LPCWSTR String::sCharToCWChar(const char* szStr, int size, const String::EnumCodePage& CodePage /* = CodePage_ACP */)
//{
// if (szStr == NULL || size <= 0)
// return NULL;
// LPWSTR wStr = new WCHAR[size];
// MultiByteToWideChar(CodePage, 0, szStr, size, wStr, size);
// return wStr;
//}
//LPTSTR String::sWCharToChar(LPCWSTR wStr, int size, EnumCodePage CodePage)
//{
// if (wStr == NULL || size <= 0)
// return NULL;
// LPCPINFO cp = NULL;
// GetCPInfo(CodePage, cp);
// LPSTR tStr = new char[size];
// WideCharToMultiByte(CodePage, 0, wStr, size, tStr, size, NULL, NULL);
// return (LPTSTR)tStr;
//}
//LPCTSTR String::sWCharToCChar(LPCWSTR wStr, int size, EnumCodePage CodePage)
//{
// if (wStr == NULL || size <= 0)
// return NULL;
// LPCPINFO cp = NULL;
// GetCPInfo(CodePage, cp);
// LPSTR tStr = new char[size];
// WideCharToMultiByte(CodePage, 0, wStr, size, tStr, size, NULL, NULL);
// return (LPCTSTR)tStr;
//}
// Replaces the contents; a null pointer clears the string.
void String::setString(const char* s)
{
    if (NULL == s)
    {
        m_String.clear();
        return;
    }
    m_String = s;
}

void String::setString(const String& s)
{
    m_String = s.m_String;
}

// Appends; a null pointer is ignored.
void String::append(const char* s)
{
    if (s == NULL)
        return;
    m_String.append(s);
}

void String::append(const String& s)
{
    m_String += s.m_String;
}

// Inserts `s` at `index`.
// NOTE(review): `count` is validated but otherwise unused — the text is
// inserted exactly once; `index >= 0` is a tautology for an unsigned type.
bool String::insert(ulong index, const String& s, ulong count /*= 1*/)
{
    FCHECK(index >= 0 && index <= getSize() && !s.isEmpty() && count > 0, false);
    m_String.insert(index, s.getString());
    return true;
}

bool String::insert(ulong uIndex, char ch)
{
    m_String.insert(uIndex, 1, ch);
    return true;
}
bool String::setAt(ulong uIndex, char ch)
{
replace(uIndex, 1, &ch);
return true;
}
// Bounds-checked read; std::string::at throws std::out_of_range when invalid.
char String::getAt(ulong uIndex) const
{
    char ch = m_String.at(uIndex);
    return ch;
}
// XOR-obfuscates every byte with 0xfa. A byte whose XOR result would be NUL
// (i.e. 0xfa itself) is skipped so the string length never changes; the same
// skip rule in decryption() makes the transform round-trip correctly.
void String::encryption()
{
    if (isEmpty())
    {
        return;
    }
    const ulong uSize = getSize();
    for (ulong i = 0; i<uSize; i++)
    {
        char ch = getAt(i);
        ch ^= 0xfa;
        if (0 == ch)
            continue;
        setAt(i, ch);
    }
    mbEncryption = true;
}

// Inverse of encryption(): the same XOR with the same NUL-skip rule.
void String::decryption()
{
    if (isEmpty())
    {
        return;
    }
    //char szVal[1];
    const ulong uSize = getSize();
    for (ulong i = 0; i<uSize; i++)
    {
        char ch = getAt(i);
        ch ^= 0xfa;
        if (0 == ch)
            continue;
        setAt(i, ch);
    }
    mbEncryption = false;
}

bool String::isEncryption() const
{
    return mbEncryption;
}

// Thin wrapper over std::string::replace; asserts the range is valid and
// that strReplace is a non-null, NUL-terminated C string.
void String::replace(ulong index, ulong count, const char* strReplace)
{
    assert(index >= 0 && index < getSize() && count > 0 && strReplace != NULL);
    m_String.replace(index, count, strReplace);
}
void String::replace(const String& strOld, const String& strNew, ulong uFrom /* = 0 */, ulong uCount /* = 0 */, bool bAll /* = true */)
{
assert(!strOld.isEmpty() && !isEmpty());
assert(uFrom >= 0 && uFrom < getSize());
if (uCount <= 0)
{
uCount = getSize() - uFrom;
}
assert(uFrom + uCount <= getSize());
bool bSameSize = (strOld.getSize() == strNew.getSize());
ulong uSize = strOld.getSize();
ulong uIndex = 0;
while (find(strOld, uIndex, uFrom, uFrom + uCount))
{
if (bSameSize)
{
replace(uIndex, uSize, strNew.getChar());
}
else
{
//remove();
//insert(uIndex)
}
if (!bAll)
break;
}
}
// Splits at character position `index` into left/right halves; with
// bIgnoreDelimit the character at `index` is dropped from the right half.
bool String::split(ulong index, String& strLeft, String& strRight, bool bIgnoreDelimit /* = false */) const
{
    if (!Maths::isBetween(index, getSize()))
        return false;
    const string l = m_String.substr(0, index);
    if (bIgnoreDelimit)
        index++;
    const string r = (index <= m_String.size() - 1) ? m_String.substr(index, getSize()) : string("");
    strLeft = String(l.c_str());
    strRight = String(r.c_str());
    return true;
}

// Splits around the first occurrence of `delimit`; fails when it is absent
// (find returns npos, which the positional overload rejects).
bool String::split(const String& delimit, String& strLeft, String& strRight, bool bIgnoreDelimit /* = false */) const
{
    FCHECK(!isEmpty() && !delimit.isEmpty(), false);
    String str(delimit);
    ulong index = m_String.find(str.getString().c_str(), 0);
    return split(index, strLeft, strRight, bIgnoreDelimit);
}

// Tokenizes on `delimit`; empty substrings are skipped unless requested.
TArray<String> String::split(const String& delimit, bool bIgnoreEmptySubString /* = true */) const
{
    TArray<String> strList;
    strList.clear();
    if (isEmpty() || delimit.isEmpty())
        return strList;
    String strTemp(m_String.c_str());
    String strLine("");
    ulong index = 0;
    while (!strTemp.isEmpty() && strTemp.find(delimit, index))
    {
        strTemp.subString(strLine, 0, index);
        if (!strLine.isEmpty() || !bIgnoreEmptySubString)
            strList.add(strLine);
        strTemp.remove(strLine + delimit);
    }
    // Whatever follows the last delimiter becomes the final token.
    if (!strTemp.isEmpty())
    {
        strList.add(strTemp);
    }
    return strList;
}

// Convenience overload writing into an out-array; true iff any token found.
bool String::split(const String& delimit, TArray<String>& strArray, bool bIgnoreEmptySubString /* = true */) const
{
    FCHECK(!isEmpty() && !delimit.isEmpty(), false);
    strArray = split(delimit, bIgnoreEmptySubString);
    return strArray.size() > 0;
}
//TArrayV<String> String::split(const String& delimit) const
//{
// TArrayV<String> strList;
// strList.clear();
// if(isEmpty() || delimit.isEmpty())
// return strList;
// String strTemp(m_String.c_str());
// String strLine("");
// ulong index = 0;
// while(!strTemp.isEmpty() && strTemp.find(delimit,index))
// {
// strTemp.subString(strLine,0,index);
// if(!strLine.isEmpty())
// strList.append(strLine);
// strTemp.remove(strLine + delimit);
// }
// if(!strTemp.isEmpty())
// {
// strList.append(strTemp);
// }
// return strList;
//}
//bool String::split(const String& delimit,TArrayV<String>& strArray) const
//{
// strArray.clear();
// if(isEmpty() || delimit.isEmpty())
// return false;
// String strTemp(m_String.c_str());
// String strLine("");
// ulong index = 0;
// while(!strTemp.isEmpty() && strTemp.find(delimit,index))
// {
// strTemp.subString(strLine,0,index);
// if(!strLine.isEmpty())
// strArray.append(strLine);
// strTemp.remove(strLine + delimit);
// }
// if(!strTemp.isEmpty())
// {
// strArray.append(strTemp);
// }
// return true;
//}
// Copies `count` characters starting at `from` into `sub`; on any invalid
// range `sub` is simply cleared and returned empty.
const String& String::subString(String& sub, ulong from, ulong count) const
{
    sub.clear();
    if (!Maths::isBetween(from, getSize()) || !Maths::isBetween(count - 1, getSize()) || (from + count - 1 > getSize()))
        return sub;
    string strRe = m_String.substr(from, count);
    sub.setString(strRe.c_str());
    return sub;
}

// Extracts the text between the first strFrom (at/after uFrom) and the next
// strTo. With bDelimit the delimiters are included; uIndexSubStr receives
// the start position of the extracted text.
bool String::subString(String& strSub, const String& strFrom, const String& strTo, ulong &uIndexSubStr, bool bDelimit /* = true */, ulong uFrom /* = 0 */) const
{
    FCHECK(!isEmpty() && !strFrom.isEmpty() && !strTo.isEmpty() && uFrom >= 0 && uFrom < m_String.size(), false);
    ulong idxFrom, idxTo;
    bool bOk = find(strFrom, idxFrom, uFrom);
    if (bOk && idxFrom + 1 < m_String.size())
    {
        bOk = find(strTo, idxTo, idxFrom + 1);
    }
    else
    {
        bOk = false;
    }
    if (!bOk)
    {
        strSub.clear();
        return false;
    }
    if (!bDelimit)
    {
        // Exclude the delimiters themselves from the result.
        idxFrom += strFrom.getSize();
        idxTo--;
    }
    else
    {
        idxTo += strTo.getSize() - 1;
    }
    assert(idxFrom >= 0 && idxFrom < m_String.size());
    assert(idxTo >= idxFrom && idxTo < m_String.size());
    subString(strSub, idxFrom, idxTo - idxFrom + 1);
    uIndexSubStr = idxFrom;
    return true;
}

// Repeatedly extracts every strFrom...strTo span into subArray.
const TArray<String>& String::subString(TArray<String>& subArray, const String& strFrom, const String& strTo, bool bDelimit /* = true */, ulong uFrom /* = 0 */) const
{
    subArray.clear();
    FCHECK(!isEmpty() && !strFrom.isEmpty() && !strTo.isEmpty() && uFrom >= 0 && uFrom < m_String.size(), subArray);
    ulong idxFrom = uFrom;
    String strSub;
    while (idxFrom < m_String.size())
    {
        if (!subString(strSub, strFrom, strTo, idxFrom, bDelimit, idxFrom))
        {
            break;
        }
        if (!strSub.isEmpty())
        {
            subArray.add(strSub);
        }
        // Continue scanning after the span just extracted.
        idxFrom += strSub.getSize();
    }
    return subArray;
}
std::string& String::getString()
{
    return m_String;
}

const string& String::getString() const
{
    return m_String;
}

// Widened copy of the narrow buffer.
std::wstring String::getWString() const
{
    //wstring wstr(m_String.begin(),m_String.end());
    //return wstr;
    return toWString(m_String);
}

ulong String::getSize() const
{
    return m_String.size();
}

// Treats a leading NUL the same as a genuinely empty buffer.
bool String::isEmpty() const
{
    return (m_String.empty() || ('\0' == m_String.at(0)));
}

// Erases [index, index+count); the removed text is optionally returned.
bool String::remove(ulong index, ulong count, String* strRemoved /* = NULL */)
{
    FCHECK(index >= 0 && index < getSize() && index + count <= getSize(), false);
    string strRemovedTemp = m_String.substr(index, count);
    m_String.erase(index, count);
    if (NULL != strRemoved)
    {
        *strRemoved = String(strRemovedTemp.c_str());
    }
    return true;
}

// Removes the first (or, with bAll, every) occurrence of `sub`. When no
// occurrence remains, find() yields npos and the inner remove() fails its
// range check, ending the loop.
bool String::remove(const String& sub, bool bAll /* = false */)
{
    ulong index = m_String.find(sub.getString());
    bool bOk = remove(index, sub.getSize());
    if (bAll && bOk)
    {
        bool bRemove = true;
        while (bRemove)
        {
            index = m_String.find(sub.getString());
            bRemove = remove(index, sub.getSize());
        }
    }
    return bOk;
}
void String::toUpper()
{
char* str = const_cast<char*>(m_String.c_str());
strupr(str);
str = NULL;
}
void String::toLower()
{
char* str = const_cast<char*>(m_String.c_str());
strlwr(str);
str = NULL;
}
// Case-insensitive equality: lower-case copies of both sides and compare.
bool String::compareIgnoreCase(const String& strCmp) const
{
String strSrc(*this), strDec(strCmp);
strSrc.toLower();
strDec.toLower();
return strSrc == strDec;
}
// Strip every leading and trailing occurrence of strTrim.
void String::trim(const String& strTrim /* = " " */)
{
trimLeft(strTrim);
trimRight(strTrim);
}
// Repeatedly remove strTrim while it is the string's prefix.
void String::trimLeft(const String& strTrim /* = " " */)
{
FCHECK(!isEmpty() && !strTrim.isEmpty(), void());
ulong uTrimSize = strTrim.getSize();
ulong idx;
String strRemove;
while (m_String.size() > 0)
{
// Only a match at position 0 is a prefix; anything else ends the loop.
if (find(strTrim, idx) && idx == 0)
{
remove(idx, uTrimSize, &strRemove);
assert(strRemove == strTrim);
}
else
{
break;
}
}
}
// Repeatedly remove strTrim while it is the string's suffix.
void String::trimRight(const String& strTrim /* = " " */)
{
FCHECK(!isEmpty() && !strTrim.isEmpty(), void());
ulong uTrimSize = strTrim.getSize(), uCnt = getSize();
ulong idx;
String strRemove;
while (m_String.size() > 0)
{
// Search from (size - trim size); a hit ending exactly at uCnt is a
// suffix.  NOTE(review): when strTrim is longer than the string, the
// unsigned subtraction wraps to a huge 'from', which find() rejects,
// so the loop exits -- accidental but currently safe.
if (find(strTrim, idx, getSize() - strTrim.getSize()) && idx + uTrimSize == uCnt)
{
remove(idx, uTrimSize, &strRemove);
uCnt = getSize();
assert(strRemove == strTrim);
}
else
{
break;
}
}
}
// Case-insensitive containment test.
bool String::findIgnoreCase(const String& sub) const
{
String strSrc(*this), strDec(sub);
strSrc.toLower();
strDec.toLower();
return strSrc.find(strDec);
}
// Containment test; discards the match index.
bool String::find(const String& sub) const
{
ulong idx = 0;
return find(sub, idx, 0);
}
// Find 'sub' starting at 'from' (forward, or searching backward from
// 'from' when bReverse is set).  On success 'index' receives the
// ABSOLUTE match position.  std::string's npos becomes a huge ulong
// after the cast, which Maths::isBetween() then rejects.
bool String::find(const String& sub, ulong& index, ulong from /* = 0 */, bool bReverse /* = false */) const
{
if (isEmpty() || sub.isEmpty() || from < 0 || from >= getSize())
return false;
if (bReverse)
index = (ulong)m_String.rfind(sub.getChar(), from);
else
index = (ulong)m_String.find(sub.getChar(), from);
return Maths::isBetween(index, getSize());
}
// Search for 'sub' inside the character range [from, to).
// On success 'index' receives the match position RELATIVE to 'from'
// (callers such as wordWrap() rely on this relative offset).
//
// Fix: the old guard 'to - from <= 0' was evaluated on unsigned values,
// so a caller passing to < from produced a huge wrapped difference and
// the guard never fired; 'to <= from' compares the operands directly.
// Also reject 'from' past the end, which would make substr() throw.
bool String::find(const String& sub, ulong& index, ulong from, ulong to) const
{
    if (isEmpty() || sub.isEmpty() || to <= from || from >= getSize())
        return false;
    // Carve out the window and reuse the forward-search overload on it.
    std::string str = m_String.substr(from, to - from);
    String strSub(str.c_str());
    bool bRe = strSub.find(sub, index, 0);
    return (bRe && Maths::isBetween(index, getSize()));
}
// Count (possibly overlapping) occurrences of 'sub' by repeatedly
// searching one character past the previous hit.
ulong String::getSubStringCount(const String& sub) const
{
    if (isEmpty() || sub.isEmpty())
        return 0;
    ulong uCount = 0;
    for (ulong uIndex = 0, uFrom = 0; find(sub, uIndex, uFrom); uFrom = uIndex + 1)
    {
        ++uCount;
    }
    return uCount;
}
// Drop all characters.
void String::clear()
{
m_String.clear();
}
// Mutable pointer to the internal buffer.
// NOTE(review): writing through c_str()'s pointer is undefined behavior
// for std::string; callers should treat this as read-only.
char* String::getChar()
{
return (char*)(m_String.c_str());
}
// Read-only pointer to the internal NUL-terminated buffer.
const char* String::getChar() const
{
return m_String.c_str();
}
// Return the byte at position uIdx, or 0 when uIdx is out of range.
// Fix: the old code only checked for an empty string, so any
// uIdx >= size() indexed past the end of m_String (undefined behavior).
// A bounds check subsumes the empty-string test.
byte String::getASCII(ulong uIdx /* = 0 */) const
{
    byte ascii = 0;
    if (uIdx < m_String.size())
    {
        ascii = m_String[uIdx];
    }
    return ascii;
}
// Copy the text (with terminator) into sz; uSize must exceed getSize().
// Returns the number of characters copied, or 0 on bad arguments.
ulong String::getChar(char* sz, ulong uSize) const
{
FCHECK(NULL != sz && uSize > getSize(), 0);
strcpy(sz, getChar());
return getSize();
}
// Copy the raw bytes into bt (buffer zero-filled first); uSize must
// exceed getSize().  Returns the byte count copied, 0 on bad arguments.
ulong String::getByte(byte* bt, ulong uSize) const
{
FCHECK(NULL != bt && uSize > getSize(), 0);
memset(bt, 0, uSize);
memcpy(bt, getChar(), getSize());
return getSize();
}
// Wide copy of the stored text (delegates to the static locale helper).
std::wstring String::toWString() const
{
return toWString(m_String);
}
// Narrow copy of the stored text.
std::string String::toString() const
{
return m_String;
}
// Wide C-string cached in the m_WString member; the returned pointer
// stays valid until the next toWChar() call or destruction.
const wchar_t* String::toWChar()
{
m_WString = toWString();
return m_WString.c_str();
}
// Wide C-string backed by the caller-supplied wstrTemp, putting the
// pointer's lifetime under the caller's control (const-safe variant).
const wchar_t* String::toWChar(std::wstring& wstrTemp) const
{
wstrTemp = toWString();
return wstrTemp.c_str();
}
//LPTSTR String::getTChar()
//{
// return getWChar();
//}
//LPCTSTR String::getCTChar() const
//{
// return (TCHAR*)m_String.c_str();
//}
//LPWSTR String::getWChar()
//{
// int len = (int)m_String.size() + 1;
// return sCharToWChar(m_String.c_str(), len);
//}
//LPCWSTR String::getCWChar() const
//{
// int len = (int)m_String.size() + 1;//sizeof(m_String.c_str());
// return sCharToCWChar(m_String.c_str(), len);
//}
// True when the string looks like a decimal number: an optional single
// leading '+' or '-', decimal digits, and at most one '.'.
//
// Fix: the old version accepted strings containing no digit at all
// ("+", "-", "." and "+." all returned true, then parsed as 0 by the
// value overloads); we now additionally require at least one digit.
bool String::isDigital() const
{
    if (isEmpty())
        return false;
    uint uSize = getSize();
    uint uDotSum = 0;      // '.' seen so far (max one allowed)
    uint uDigitSum = 0;    // '0'..'9' seen so far (at least one required)
    char ch;
    for (uint i = 0; i < uSize; i++)
    {
        ch = m_String.at(i);
        if (ch == '+' || ch == '-')
        {
            // A sign is only legal as the very first character; that also
            // rules out a second sign anywhere in the string.
            if (i != 0)
                return false;
        }
        else if (ch == '.')
        {
            uDotSum++;
            if (uDotSum > 1)
                return false;
        }
        else if (ch >= '0' && ch <= '9')
        {
            uDigitSum++;
        }
        else
        {
            return false;
        }
    }
    return uDigitSum > 0;
}
// Parse the string as a float when it passes isDigital().
bool String::isDigital(float& val) const
{
if (isDigital())
{
val = (float)(atof(m_String.c_str()));
return true;
}
return false;
}
// Store the raw bit pattern of the parsed float into 'val'
// (deliberate type-pun; see the commented PFFloatToDW reference).
bool String::isDigital(dword& val) const
{
float fVal = 0.f;
if (isDigital(fVal))
{
val = *((dword*)(&fVal));//Maths::PFFloatToDW(fVal);
return true;
}
return false;
}
// Parse as float, then truncate its absolute value to an unsigned int.
bool String::isDigital(uint& val) const
{
float fVal = 0.f;
if (isDigital(fVal))
{
fVal = fabs(fVal);
val = (uint)fVal;//Maths::PFFloatToDW(fVal);
return true;
}
return false;
}
//bool String::isDigital(fint& val) const
//{
// if(isEmpty())
// return false;
// fuint uSize = getSize();
// fuint uDotSum = 0;
// char ch;
// for(fuint i=0; i<uSize; i++)
// {
// ch = m_String.at(i);
// if(ch == '.')
// {
// uDotSum ++;
// if(uDotSum > 1)
// return false;
// else
// continue;
// }
// if(ch < '0' || ch > '9')
// return false;
// }
// val = atoi(m_String.c_str());
// return true;
//}
//bool String::isDigital(flong& val) const
//{
// if(isEmpty())
// return false;
// fuint uSize = getSize();
// fuint uDotSum = 0;
// char ch;
// for(fuint i=0; i<uSize; i++)
// {
// ch = m_String.at(i);
// if(ch == '.')
// {
// uDotSum ++;
// if(uDotSum > 1)
// return false;
// else
// continue;
// }
// if(ch < '0' || ch > '9')
// return false;
// }
// val = atol(m_String.c_str());
// return true;
//}
// Narrow -> wide conversion via mbstowcs under the "chs" locale; the
// caller's locale is restored before returning.
// NOTE(review): the heap buffer leaks if std::wstring's constructor
// throws, and "chs" is a Windows-specific locale name -- confirm the
// intended target platforms.
std::wstring String::toWString(const std::string& s)
{
std::string curLocale = setlocale(LC_ALL, NULL); // curLocale = "C";
setlocale(LC_ALL, "chs");
const char* _Source = s.c_str();
size_t _Dsize = s.size() + 1;
wchar_t *_Dest = new wchar_t[_Dsize];
wmemset(_Dest, 0, _Dsize);
mbstowcs(_Dest, _Source, _Dsize);
std::wstring result = _Dest;
delete[]_Dest;
setlocale(LC_ALL, curLocale.c_str());
return result;
}
// Wide -> narrow conversion via wcstombs; mirror of toWString above.
// Reserves two bytes per wide character plus the terminator.
std::string String::toString(const std::wstring& ws)
{
string curLocale = setlocale(LC_ALL, NULL); // curLocale = "C";
setlocale(LC_ALL, "chs");
const wchar_t* _Source = ws.c_str();
size_t _Dsize = 2 * ws.size() + 1;
char *_Dest = new char[_Dsize];
memset(_Dest, 0, _Dsize);
wcstombs(_Dest, _Source, _Dsize);
string result = _Dest;
delete[]_Dest;
setlocale(LC_ALL, curLocale.c_str());
return result;
}
// In-place encrypt helper for a plain std::string, delegating to the
// member encryption() (defined elsewhere in this file).
void String::encryption(std::string& s)
{
String str(s);
str.encryption();
s = str.getString();
}
// In-place decrypt helper; mirror of encryption() above.
void String::decryption(std::string& s)
{
String str(s);
str.decryption();
s = str.getString();
}
// Stub: path of the running executable.  The Win32 implementation is
// commented out pending a portable replacement; returns an empty String.
String String::sGetExePath()
{
return String();
//TCHAR sz[MAX_PATH] = { 0 };
//::GetModuleFileName(NULL, sz, MAX_PATH);
//String str(sz);
//ulong uIdx = 0, uFrom = 0;
//while (str.find(String("\\"), uIdx, uFrom, false))
//{
// uFrom = uIdx + 1;
//}
//str.remove(uFrom, str.getSize() - uFrom);
//return str;
}
// Stub: current working directory (Win32 body commented out).
String String::sGetCurrentPath()
{
return String();
//WCHAR path[MAX_PATH];
//::GetCurrentDirectory(MAX_PATH, path);
//return String(path);
}
// Stub: system temporary directory (Win32 body commented out).
String String::sGetSystemTempPath()
{
return String();
//WCHAR path[MAX_PATH];
//::GetTempPath(MAX_PATH, path);
//return String(path);
}
// Stub: formatted local time, optionally with the day of week appended
// and/or written to *dwDayOfWeek (Win32 body commented out).
String String::sGetSystemTime(const String strDelimit /* = "," */, bool bAddDayOfWeek /* = false */, dword* dwDayOfWeek /* = NULL */)
{
return String();
//char day[16], time[16];
//SYSTEMTIME st;
//::GetLocalTime(&st);
////sprintf(time,"%04d-%02d-%02d, %02d:%02d:%02d %03d ",st.wYear,st.wMonth,st.wDay,st.wHour,st.wMinute,st.wSecond,st.wMilliseconds);
//sprintf(day, "%04d-%02d-%02d", st.wYear, st.wMonth, st.wDay);
//sprintf(time, "%02d:%02d:%02d %03d ", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
//String strTime(day);
//strTime += strDelimit;
//strTime += String(time);
//if (bAddDayOfWeek)
//{
// String dow;
// switch (st.wDayOfWeek)
// {
// case 0: dow = "Sunday"; break;
// case 1: dow = "Monday"; break;
// case 2: dow = "Tuesday"; break;
// case 3: dow = "Wednesday"; break;
// case 4: dow = "Thursday"; break;
// case 5: dow = "Friday"; break;
// case 6: dow = "Saturday"; break;
// default:
// dow = "";
// break;
// }
// strTime += dow;
//}
//if (NULL != dwDayOfWeek)
//{
// *dwDayOfWeek = st.wDayOfWeek;
//}
//return strTime;
}
// Return the file-name component after the last '/' or '\\' separator;
// when neither separator occurs, the whole input is returned unchanged.
String String::sExtractFileName(const String& strPathFile)
{
assert(!strPathFile.isEmpty());
String strRe("");
ulong uIndex, uIndex_0;
// Reverse search from the last character for both separator styles.
const ulong uFrom = strPathFile.getSize() - 1;
bool bFind = strPathFile.find("/", uIndex, uFrom, true);
bool bFind_0 = strPathFile.find("\\", uIndex_0, uFrom, true);
if (bFind && bFind_0)
{
// Both present: keep whichever separator is closest to the end.
uIndex = (uIndex >= uIndex_0) ? uIndex : uIndex_0;
}
else if (bFind)
{
;//uIndex = uIndex
}
else if (bFind_0)
{
uIndex = uIndex_0;
}
else
{
// No separator at all: the path is already a bare file name.
strRe = strPathFile;
return strRe;
}
uIndex++;
strPathFile.subString(strRe, uIndex, strPathFile.getSize() - uIndex);
return strRe;
}
// Build a string made of uSize copies of String::Space.
String String::sSpace(ulong uSize)
{
    String strResult("");
    while (uSize-- > 0)
    {
        strResult += String::Space;
    }
    return strResult;
}
// Produce one pseudo-random character (values 0..255 when bWChar is set,
// otherwise 0..127) by shuffling an identity table a random number of
// times and taking its first entry.  Optional outputs: *singleChar gets
// the character, *strWhole the whole shuffled table.
// NOTE(review): uTurns = rand() % 5 can be zero, leaving the table
// unshuffled so the result is always character 0; std::random_shuffle
// was removed in C++17 -- confirm the target toolchain.
String String::sRandomChar(char* singleChar /* = NULL */, bool bWChar /* = false */, String* strWhole /* = NULL */)
{
const uint uMax = bWChar ? 256 : 128;
char flag[256];
for (uint i = 0; i<256; i++)
flag[i] = i;
const uint uTurns = rand() % 5;
for (uint i = 0; i<uTurns; i++)
std::random_shuffle(flag, flag + uMax);
char ch = flag[0];
if (NULL != singleChar)
*singleChar = ch;
if (NULL != strWhole)
*strWhole = String(flag);
String strRe(ch);
return strRe;
}
// Build a pseudo-random string of uCnt characters by concatenating whole
// shuffled tables (uMax characters each) plus one trimmed partial table.
String String::sRandomString(uint uCnt, bool bWChar /* = false */)
{
String strRe, strTemp;
const uint uMax = bWChar ? 256 : 128;
uint uTimes = uCnt / uMax;
for (uint i = 0; i<uTimes; i++)
{
sRandomChar(NULL, bWChar, &strTemp);
strRe += strTemp;
}
// Remainder: take one more table and drop its tail.
uTimes = uCnt - uTimes * uMax;
assert(uTimes < uMax);
if (uTimes > 0)
{
sRandomChar(NULL, bWChar, &strTemp);
strTemp.remove(uTimes, strTemp.getSize() - uTimes);
strRe += strTemp;
}
return strRe;
}
// sToValue family: parse strValue into the out-parameter, returning
// false only for an empty input.
// NOTE(review): atof/atoi/atol never throw C++ exceptions, so every
// catch(...) below is dead code, and non-numeric input silently parses
// as 0 with a 'true' return -- confirm whether callers rely on that.
bool String::sToValue(float& val, const String& strValue)
{
CHECK_EXP(!strValue.isEmpty(), false);
try
{
val = (float)(atof(strValue.getChar()));
return true;
}
//catch (CMemoryException* e)
//{
// return false;
//}
//catch (CFileException* e)
//{
// return false;
//}
//catch (CException* e)
//{
// return false;
//}
catch (...)
{
return false;
}
}
// Integer variant of sToValue (atoi).
bool String::sToValue(int& val, const String& strValue)
{
CHECK_EXP(!strValue.isEmpty(), false);
try
{
val = atoi(strValue.getChar());
return true;
}
//catch (CMemoryException* e)
//{
// return false;
//}
//catch (CFileException* e)
//{
// return false;
//}
//catch (CException* e)
//{
// return false;
//}
catch (...)
{
return false;
}
}
// Long variant of sToValue (atol).
bool String::sToValue(long& val, const String& strValue)
{
CHECK_EXP(!strValue.isEmpty(), false);
try
{
val = atol(strValue.getChar());
return true;
}
//catch (CMemoryException* e)
//{
// return false;
//}
//catch (CFileException* e)
//{
// return false;
//}
//catch (CException* e)
//{
// return false;
//}
catch (...)
{
return false;
}
}
// Unsigned-long variant; note it still parses via the SIGNED atol.
bool String::sToValue(ulong& val, const String& strValue)
{
CHECK_EXP(!strValue.isEmpty(), false);
try
{
val = atol(strValue.getChar());
return true;
}
//catch (CMemoryException* e)
//{
// return false;
//}
//catch (CFileException* e)
//{
// return false;
//}
//catch (CException* e)
//{
// return false;
//}
catch (...)
{
return false;
}
}
// Store the decimal text of a byte value.
// Fix: replaces the non-standard, non-portable _itoa() with snprintf,
// whose explicit size bound also makes a buffer overflow impossible.
// (stdio is already in use elsewhere in this file.)
void String::fromByte(byte byVal)
{
    char sz[256];
    snprintf(sz, sizeof(sz), "%u", (unsigned int)byVal);
    m_String = sz;
}
// Parse the text as an int and truncate to a byte; 0 when empty.
byte String::toByte() const
{
FCHECK(!isEmpty(), 0);
uint val = atoi(getChar());
return (byte)val;
}
// bKeep == true : store the decimal text of the value.
// bKeep == false: treat the dword as a pointer to a C string and copy
//                 that string (legacy pointer-smuggling counterpart of
//                 toDWord(false)).  NOTE(review): only valid where a
//                 pointer fits in a dword -- broken on 64-bit targets.
//
// Fixes: (1) the old code declared a second 'szStr' in the else-branch
// that shadowed the outer buffer, and the unconditional trailing
// 'm_String = szStr' then overwrote the result with the contents of the
// outer UNINITIALIZED buffer; (2) the non-standard _ltoa() is replaced
// with snprintf.
void String::fromDWord(dword dwStr, bool bKeep /* = true */)
{
    if (bKeep)
    {
        char szStr[256];
        snprintf(szStr, sizeof(szStr), "%lu", (unsigned long)dwStr);
        m_String = szStr;
    }
    else
    {
        m_String = (char*)dwStr;
    }
}
// bKeep == true : parse the decimal text into a dword.
// bKeep == false: return the ADDRESS of the internal buffer cast to a
// dword (legacy pointer-smuggling counterpart of fromDWord(..., false)).
// NOTE(review): that pointer cast truncates on 64-bit platforms.
dword String::toDWord(bool bKeep /* = true */) const
{
dword dwStr = 0;
if (bKeep)
{
dwStr = (dword)atol(m_String.c_str());
}
else
{
dwStr = (dword)(m_String.c_str());
}
return dwStr;
}
// Numeric conversions.  All return 0 for an empty string (via FCHECK)
// and silently yield 0 for non-numeric text (ato* semantics).
float String::toFloat() const
{
FCHECK(!isEmpty(), 0);
return (float)(atof(getChar()));
}
int String::toInt() const
{
FCHECK(!isEmpty(), 0);
return atoi(getChar());
}
// NOTE(review): parses via the signed atoi, so values above INT_MAX and
// negative inputs wrap -- confirm callers accept that.
uint String::toUInt() const
{
FCHECK(!isEmpty(), 0);
return atoi(getChar());
}
long String::toLong() const
{
FCHECK(!isEmpty(), 0);
return atol(getChar());
}
// NOTE(review): same signed-parse caveat as toUInt(), via atol.
ulong String::toULong() const
{
FCHECK(!isEmpty(), 0);
return atol(getChar());
}
// Truncates the long conversion to a word.
word String::toWord() const
{
return (word)(toLong());
}
//fdword String::toDWord() const
//{
// return toLong();
//}
// Map "true"/"false" (case-insensitive) to a bool; anything else --
// including the empty string -- yields false.  The assert documents the
// expected vocabulary in debug builds.
bool String::toBool() const
{
    assert(checkFormat(String("true,false")));
    return compareIgnoreCase("true");
}
// Split the comma-separated vocabulary and test membership.
bool String::checkFormat(const String& strPredefine, bool bCanBeEmpty /* = true */, bool bIgnoreCase /* = true */) const
{
TArray<String> strArray;
strArray = strPredefine.split(",");
return checkFormat(strArray, bCanBeEmpty, bIgnoreCase);
}
// True when this string equals one of the predefined values (optionally
// ignoring case); an empty string is accepted when bCanBeEmpty is set.
bool String::checkFormat(const TArray<String>& aPredefine, bool bCanBeEmpty /* = true */, bool bIgnoreCase /* = true */) const
{
if (bCanBeEmpty && isEmpty())
return true;
bool bHas = false;
for (uint i = 0; i<aPredefine.size(); i++)
{
//bHas = bIgnoreCase? aPredefine[i].compareIgnoreCase(*this) : (aPredefine[i] == *this);
bHas = bIgnoreCase ? aPredefine.getAt(i).compareIgnoreCase(*this) : (aPredefine.getAt(i) == *this);
if (bHas)
return true;
}
return false;
}
// Re-wrap the text: first normalize strEnterFlag to the canonical
// String::Enter sequence, then insert String::Enter every uSizePerLine
// characters on stretches that contain no break within the window.
void String::wordWrap(const String& strEnterFlag, uint uSizePerLine /* = 0 */)
{
if (isEmpty())
return;
if (!strEnterFlag.isEmpty())
{
replace(strEnterFlag, String::Enter);
}
if (uSizePerLine > 0 && uSizePerLine < getSize())
{
ulong uIndex = 0, uFrom = 0;
const ulong uCnt = uSizePerLine + String::Enter.getSize();
bool bFind = false;
while (uFrom < getSize())
{
// The ranged find() overload returns uIndex RELATIVE to uFrom,
// hence the 'uFrom += uIndex + ...' advance below.
bFind = find(String::Enter, uIndex, uFrom, uFrom + uCnt);
if (bFind)
{
uFrom += uIndex + String::Enter.getSize();
continue;
}
else
{
// No break inside the window: force one after uSizePerLine chars.
insert(uFrom + uSizePerLine, String::Enter);
uFrom += uCnt;
}
}
}
}
//EnumClassType String::getClassType(String* pStrClassName /* = NULL */) const
//{
// if (NULL != pStrClassName)
// *pStrClassName = "String";
// return Class_FString;
//}
//EnumClassType String::sGetClassType()
//{
// return Class_FString;
//}
//EnumResourceType String::sGetResourceType()
//{
// return Resource_RawString;
//}
/*-----------------------------------------------------------
class Tag
------------------------------------------------------------*/
// Tag constructors: each initializes the name/id members, runs the
// IRoot-style create() hook, then (for the value overloads) converts the
// argument to the name via a String constructor.  The commented
// 'mpStrName' lines are remnants of an earlier heap-allocated design.
Tag::Tag()
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
create();
}
// Named tag.
Tag::Tag(const String& name)
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
create();
//*mpStrName = name;
mstrName = name;
}
// Tag named after an int value.
Tag::Tag(int iTag)
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
create();
//*mpStrName = String(iTag);
mstrName = String(iTag);
}
// Tag named after an unsigned int value.
Tag::Tag(uint uTag)
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
create();
//*mpStrName = String(uTag);
mstrName = String(uTag);
}
// Tag named after a long value.
Tag::Tag(long lTag)
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
create();
//*mpStrName = String(lTag);
mstrName = String(lTag);
}
// Tag named after an unsigned long value.
Tag::Tag(ulong uTag)
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
create();
//*mpStrName = String(uTag);
mstrName = String(uTag);
}
// Tag named after a float value.
Tag::Tag(float fTag)
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
create();
//*mpStrName = String(fTag);
mstrName = String(fTag);
}
// Tag named after a C string.
Tag::Tag(char* szTag)
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
create();
//*mpStrName = String(szTag);
mstrName = String(szTag);
}
// Copy constructor delegates to copyOther() (which also copies IRoot).
Tag::Tag(const Tag &other)
//:mpStrName(NULL)
:mstrName("")
, muID(0)
{
copyOther(other);
}
Tag::~Tag()
{
destroy();
}
// Tags are equal when both the name and the numeric id match.
bool Tag::operator ==(const Tag& other) const
{
return (mstrName == other.mstrName && muID == other.muID);
//return (NULL != mpStrName && NULL != other.mpStrName && *mpStrName == other.getName() && muID == other.muID);
}
bool Tag::operator !=(const Tag& other) const
{
return !(*this == (other));
}
//const Tag& Tag::operator =(const Tag& other) const
//{
// if(*this == other)
// return *this;
// //m_strName = other.getName();
// return *this;
//}
Tag& Tag::operator = (const Tag& other)
{
copyOther(other);
return *this;
}
// Appends the other tag's name to this tag's name; the id is untouched.
const Tag& Tag::operator += (const Tag& other)
{
//assert(NULL != mpStrName);
//*mpStrName += other.getName();
mstrName += other.mstrName;
return *this;
}
// Ordering: primary key is the id; names break ties when ids are equal.
bool Tag::operator <(const Tag& other) const
{
return (muID == other.muID) ? (mstrName < other.mstrName) : (muID < other.muID);
//return (muID == other.muID)? (*mpStrName < other.getName()) : (muID < other.muID);
}
// Returns a copy of this tag whose name is the two names concatenated.
Tag Tag::operator + (const Tag& other) const
{
Tag tagResult(*this);
tagResult.name(mstrName + other.mstrName);
//tagResult.getName() += other.getName();
return tagResult;
}
// Copy the base-class state, then the name/id pair (skipped when the
// tags already compare equal).
void Tag::copyOther(const Tag& other)
{
IRoot::operator = (other);
if (*this == other)
return;
mstrName = other.mstrName;
//String::sDeepCopy<String>(mpStrName,other.mpStrName);
muID = other.muID;
}
// create() hook: reset to the empty name / zero id state.
bool Tag::doCreate()
{
mstrName = String("");
//mpStrName = String::sNew<String>();
muID = 0;
return true;
}
// destroy() hook: mirror of doCreate().
void Tag::doDestroy()
{
mstrName = String("");
//String::sDelete<String>(mpStrName);
muID = 0;
}
// Name accessor.
const String& Tag::name() const
{
//assert(NULL != mpStrName);
//return *mpStrName;
return mstrName;
}
// Name mutator.
void Tag::name(const String& name)
{
//if(NULL == mpStrName)
// create();
//*mpStrName = name;
mstrName = name;
}
// Id mutator.
void Tag::id(ulong uID)
{
muID = uID;
}
// Id accessor.
ulong Tag::id() const
{
return muID;
}
// A tag is empty when its name is empty (the id is ignored).
bool Tag::isEmpty() const
{
//if(NULL == mpStrName)
// return true;
//return mpStrName->isEmpty();
return mstrName.isEmpty();
}
// Reset to the empty name / zero id state.
void Tag::clear()
{
//if(NULL != mpStrName)
// mpStrName->clear();
mstrName.clear();
muID = 0;
}
/*-----------------------------------------------------------
class Maths
------------------------------------------------------------*/
Maths::Maths()
{
}
Maths::~Maths()
{
}
// Epsilon used by the isNear* comparisons.
// NOTE(review): 1e-10 is below float's ~1e-7 precision near 1.0, so the
// band is effectively zero for values of ordinary magnitude -- confirm.
float Maths::tolerance()
{
return 0.0000000001f;
}
// Largest unsigned 16-bit value.
int Maths::maxSize16()
{
return 65535; //2^16 -1
}
// NOTE(review): 4294967295 (2^32-1) does not fit in a signed int; the
// conversion is implementation-defined (typically yields -1).  The
// return type cannot be changed without breaking callers -- flagging
// rather than fixing.
int Maths::maxSize32()
{
return 4294967295; //2^32 -1;
}
// Most negative float magnitude used as a sentinel "minimum".
float Maths::minSizeFloat()
{
return -1.7976931348623158e+38f;//2.2250738585072014e-308;//DBL_MIN
}
// Largest float magnitude used as a sentinel "maximum".
float Maths::maxSizeFloat()
{
return 1.7976931348623158e+38f;//1.7976931348623158e+308;//DBL_MAX
}
float Maths::PI()
{
return 3.14159265358979323846f;
}
float Maths::PIHalf()
{
return 1.57079632679489661923f;
}
float Maths::radianPerDegreen()
{
return 0.0174533f; // PI/180.0
}
float Maths::degreenPerRadian()
{
return 57.29578f; // 180.0/PI
}
// Radians -> degrees.
float Maths::radianToAngle(float radian)
{
return (radian * 180.f / PI());
}
// Degrees -> radians.
float Maths::angleToRadian(float angle)
{
return (angle * PI() / 180.f);
}
// Exact zero test (deliberately strict; see isNearZero for the
// tolerance-based variant).
bool Maths::isZero(float val)
{
return val == 0.f;
}
// True when |val| lies within the tolerance band around zero.
// Fix: the old test '(val < tolerance()) && (val >= -1.f * tolerance())'
// was asymmetric (exclusive on the positive edge, inclusive on the
// negative) and inconsistent with isNear(), which uses an inclusive
// |a - b| <= tolerance() comparison.
bool Maths::isNearZero(float val)
{
    return fabs(val) <= tolerance();
}
// Tolerance-based equality: |val - cmp| <= tolerance().
bool Maths::isNear(float val, float cmp)
{
return fabs(val - cmp) <= Maths::tolerance();
}
// Exact float equality (deliberate; see isNear for the tolerant form).
bool Maths::isEqual(float val, float cmp)
{
return val == cmp;
}
// Half-open range test: min <= val < max.
bool Maths::isBetween(float val, float max, float min /* = 0.0f */)
{
return (val >= min && val < max);
}
// Unsigned overload of the half-open range test.
bool Maths::isBetween(ulong val, ulong max, ulong min /* = 0 */)
{
return (val >= min && val < max);
}
// Reinterpret the bit pattern of a dword as a float.
// Fix: the old '*(float*)(&dwordVal)' cast violates the strict-aliasing
// rule (undefined behavior); memcpy is the well-defined way to type-pun
// and compiles to the same single move on mainstream compilers.
// NOTE(review): assumes sizeof(dword) == sizeof(float) == 4 -- confirm
// the dword typedef.
float Maths::DWToFloat(dword dwordVal)
{
    float fVal = 0.f;
    memcpy(&fVal, &dwordVal, sizeof(fVal));
    return fVal;
}
// Reinterpret the bit pattern of a float as a dword (inverse of
// DWToFloat; same strict-aliasing fix applies).
dword Maths::FloatToDW(float floatVal)
{
    dword dwVal = 0;
    memcpy(&dwVal, &floatVal, sizeof(dwVal));
    return dwVal;
}
/*-----------------------------------------------------------
class FUnit
------------------------------------------------------------*/
// Global world-units-per-pixel scale shared by all FUnit conversions.
float FUnit::mfUnitPerPixel = 10;
FUnit::FUnit()
{
;
}
FUnit::~FUnit()
{
;
}
// World units -> map (pixel) units.
float FUnit::getMapValue(float meter)
{
assert(mfUnitPerPixel > 0.f);
return (meter / mfUnitPerPixel);
}
// Map (pixel) units -> world units.
float FUnit::getActualValue(float pixel)
{
assert(mfUnitPerPixel > 0.f);
return (pixel * mfUnitPerPixel);
}
// Update the global scale; non-positive values are rejected (asserting
// in debug builds, silently ignored in release).
void FUnit::setScale(float unitPerPixel)
{
assert(!Maths::isNearZero(unitPerPixel));
if (unitPerPixel > 0.f)
{
mfUnitPerPixel = unitPerPixel;
}
}
float FUnit::getScale()
{
return mfUnitPerPixel;
}
// Reciprocal of the scale (pixels per world unit).
float FUnit::getMapScale()
{
assert(mfUnitPerPixel > 0.f);
return 1.f / mfUnitPerPixel;
}
/*-----------------------------------------------------------
class ThreadLock
------------------------------------------------------------*/
// Thin mutex wrapper; the commented lines are the retired Win32
// CRITICAL_SECTION implementation it replaced.
ThreadLock::ThreadLock()
{
//InitializeCriticalSection(&mCriSec);
}
ThreadLock::~ThreadLock()
{
//DeleteCriticalSection(&mCriSec);
}
// Blocks until the lock is acquired.
void ThreadLock::lock()
{
mMutex.lock();
//EnterCriticalSection(&mCriSec);
}
// Releases the lock; must be paired with a prior lock() on this thread.
void ThreadLock::unLock()
{
mMutex.unlock();
//LeaveCriticalSection(&mCriSec);
}
}
} |
package cm.xxx.minos.data.struct;
/**
* Description:
* Author: lishangmin
* Created: 2018-08-12 19:04
*/
public class Recursion {
// Recursively remove every node whose element equals 'val': rebuild the
// tail first, then drop the current node when it matches.
// NOTE(review): uses the raw ListNode type; 'head.e.equals(val)' relies
// on autoboxing the int to Integer -- confirm ListNode's element type.
public static ListNode removeElement(ListNode head,int val){
if(head == null){
return null;
}
head.next = removeElement(head.next,val);
if(head.e.equals(val)){
return head.next;
}else{
return head;
}
}
// Print each element front-to-back while walking the list recursively;
// the list structure is returned unchanged.
public static ListNode print(ListNode head){
if(head == null){
return null;
}
System.out.println(head.e);
head.next = print(head.next);
return head;
}
// Demo: build a list from an array, remove every 4, print the result.
public static void main(String[] args) {
Integer[] arr = {21,2,4,8,4};
ListNode<Integer> listNode = new ListNode<>(arr);
System.out.println(listNode);
listNode = removeElement(listNode,4);
System.out.println(listNode);
ListNode arrs = print(listNode);
System.out.println(arrs);
}
}
|
Step 1: Initialize two index pointers i and j to 0.
Step 2: Create a new empty array C whose capacity is the combined length of arrays A and B.
Step 3: Iterate until C contains every element of A and B.
Step 4: At each step, compare the current element A[i] with the current element B[j]; append the smaller of the two to C and increment that array's index pointer. Once one array is exhausted, append all remaining elements of the other array.
Step 5: In the end, calculate the median of C. If the total number of elements in C is odd, return the middle element, otherwise return the average of the two middle elements. |
<gh_stars>0
import { AppRoutes } from '../src/@types/routes.type';
type RouteName = Pick<AppRoutes[number], 'name'>;
/**
 * Normalize a possibly-null route name into a RouteName object,
 * substituting the empty string when no name is given.
 */
export function routeName(name: string | null): RouteName {
  return { name: name ?? '' };
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.