text stringlengths 1 1.05M |
|---|
/*! React Starter Kit | MIT License | http://www.reactstarterkit.com/ */
import React, { Component, PropTypes } from 'react';
import ThemeManager from 'material-ui/lib/styles/theme-manager';
import MyRawTheme from '../../theme';
/**
 * Server-side HTML document shell.
 *
 * Rendered once on the server: `body` carries the pre-rendered app markup,
 * `css` the collected critical styles. Provides the material-ui theme to
 * descendants via child context.
 */
class Html extends Component {

  static propTypes = {
    title: PropTypes.string,
    description: PropTypes.string,
    css: PropTypes.string,
    body: PropTypes.string.isRequired,
  };

  // Consistency fix: use the destructured `PropTypes` import here, matching
  // `propTypes` above (was `React.PropTypes.object`).
  static childContextTypes = {
    muiTheme: PropTypes.object,
  };

  static defaultProps = {
    title: 'Title',
    description: '',
  };

  // Expose the material-ui theme built from the app's raw theme definition.
  getChildContext() {
    return {
      muiTheme: ThemeManager.getMuiTheme(MyRawTheme),
    };
  }

  // Base inline styles for the document; applied to <html> and <body>.
  getStyles() {
    return {
      html: {
        color: '#222',
        fontWeight: 100,
        fontSize: '1em',
        fontFamily: "'Segoe UI', 'HelveticaNeue-Light', sans-serif",
        lineHeight: 1.375,
        height: '100%',
      },
      body: {
        height: '100%',
      },
    };
  }

  render() {
    const styles = this.getStyles();
    return (
      <html className="no-js" lang="" style={styles.html}>
      <head>
        <meta charSet="utf-8" />
        <meta httpEquiv="X-UA-Compatible" content="IE=edge" />
        <title>{this.props.title}</title>
        <meta name="description" content={this.props.description} />
        <meta name="viewport" content="width=device-width, initial-scale=1" />
        <link rel="apple-touch-icon" href="apple-touch-icon.png" />
        {/* Server-collected styles; trusted, produced by our own build. */}
        <style id="css" dangerouslySetInnerHTML={{__html: this.props.css}} />
      </head>
      <body style={styles.body}>
        {/* Pre-rendered app markup; React re-hydrates into this node. */}
        <div id="app" dangerouslySetInnerHTML={{__html: this.props.body}} />
        <script src="/app.js"></script>
      </body>
      </html>
    );
  }
}

export default Html;
|
<reponame>ngochai94/laminext
package com.raquo.airstream.eventstream
import com.raquo.laminar.api.L._
import com.raquo.airstream.core.Transaction
import com.raquo.airstream.common.InternalNextErrorObserver
import com.raquo.airstream.common.SingleParentObservable
import scala.concurrent.duration.FiniteDuration
import scala.scalajs.js
/**
 * An event stream that re-emits every event of `parent` after a per-event
 * delay computed by `projectDelayMillis`.
 *
 * Errors are NOT delayed: they are re-emitted immediately (see `onError`).
 */
class DelayForEventStream[A](
  override protected val parent: EventStream[A],
  projectDelayMillis: A => FiniteDuration
) extends EventStream[A]
  with SingleParentObservable[A, A]
  with InternalNextErrorObserver[A] {

  /**
   * Async stream, so reset rank
   */
  override protected[airstream] val topoRank: Int = 1

  // Schedule the value to fire in a fresh Transaction once the projected
  // delay elapses. NOTE(review): an exception thrown by
  // `projectDelayMillis` would propagate synchronously out of onNext —
  // confirm that is intended.
  override protected[airstream] def onNext(nextValue: A, transaction: Transaction): Unit = {
    val _ = js.timers.setTimeout(projectDelayMillis(nextValue)) {
      val _ = new Transaction(fireValue(nextValue, _))
    }
  }

  // Errors bypass the delay and are re-emitted right away in a new Transaction.
  override def onError(nextError: Throwable, transaction: Transaction): Unit = {
    val _ = new Transaction(fireError(nextError, _))
  }
}
|
#!/bin/bash
# Build and install Diffutils into the /tools prefix (temporary tools).

# Configure for the temporary-tools prefix.
./configure --prefix=/tools

# Build the package.
make

# The test suite is optional for the temporary tools; run it only when
# explicitly requested via the TESTING environment variable.
if [ "$TESTING" = "True" ]; then
    make check
fi

# Install the package.
make install
|
#include "../src/float.hpp"
#include <iostream>
#include <cmath>
using namespace std;
// Smoke test for the Float wrapper (../src/float.hpp). Each numbered section
// exercises one operator; on the first failure it reports file/line to
// stderr and returns a distinct non-zero exit code (1..25), so the failing
// operator can be identified from the exit status alone.
int main()
{
    constexpr double PI = 3.14159265358979323846;
    cout << "Hello World!\n";
    // Initialization + static_cast
    {
        Float f{4.2f};
        Float g{3.2f};
        float vf = static_cast<float>( g );
        cout << "-" << f << " " << vf << endl;
    }
    // Opposite value
    {
        Float f{4.2f};
        Float fe{ -4.2f};
        if ( ( -f ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 1;
        }
    }
    // postfix '+' : must return the OLD value, then hold the incremented one
    {
        Float f{4.2f};
        Float fe{4.2f};
        Float fee{f.v + 1.0f};
        if ( ( f++ ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 2;
        }
        if ( f != fee )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 3;
        }
    }
    // prefix '+' : must return the NEW (incremented) value
    {
        Float f{4.2f};
        Float fe{f.v + 1.0f};
        if ( ( ++f ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 4;
        }
    }
    // postfix '-'
    {
        Float f{4.2f};
        Float fe{4.2f};
        Float fee{f.v - 1.0f};
        if ( ( f-- ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 5;
        }
        if ( f != fee )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 6;
        }
    }
    // prefix '-'
    {
        Float f{4.2f};
        Float fe{f.v - 1.0f};
        if ( ( --f ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 7;
        }
    }
    // '+'  (expected values use the same float arithmetic, so .v must match bit-for-bit)
    {
        Float f{4.2f};
        Float fe{4.2f + 4.2f};
        if ( ( f + f ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 8;
        }
    }
    // '-'
    {
        Float f{4.2f};
        Float fe{4.2f - 4.2f};
        if ( ( f - f ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 9;
        }
    }
    // '*'
    {
        Float f{4.2f};
        Float fe{4.2f * 4.2f};
        if ( ( f * f ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 10;
        }
    }
    // '/'
    {
        Float f{10.0f};
        Float fe{10.0f / 2.0f};
        if ( ( f / Float{2.0f} ).v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 11;
        }
    }
    // '+='
    {
        Float f{4.2f};
        Float fe{4.2f + 4.2f};
        f += f;
        if ( f.v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 12;
        }
    }
    // '-='
    {
        Float f{4.2f};
        Float fe{4.2f - 4.2f};
        f -= f;
        if ( f.v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 13;
        }
    }
    // '*='
    {
        Float f{4.2f};
        Float fe{4.2f * 4.2f};
        f *= f;
        if ( f.v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 14;
        }
    }
    // '/='
    {
        Float f{10.0f};
        Float fe{10.0f / 2.0f};
        f /= Float{2.0f};
        if ( f.v != fe.v )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 15;
        }
    }
    // '==' easy
    {
        Float f{4.2f};
        Float fe{4.2f};
        bool res = ( fe == f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 16;
        }
    }
    // '==' medium — 0.3333333333f vs 1/3: presumably Float::operator== uses a
    // tolerance-based comparison rather than exact equality; TODO confirm in
    // float.hpp.
    {
        Float f{0.3333333333f};
        Float fe{( 1.0f / 3.0f )};
        bool res = ( fe == f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 17;
        }
    }
    // '==' hard
    {
        Float f = FloatBox::fbox<decltype( PI )>( PI );
        Float fe{static_cast<float>( PI )};
        bool res = ( fe == f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 18;
        }
    }
    // '!=' easy
    {
        Float f{4.2f};
        Float fe{4.3f};
        bool res = ( fe != f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 19;
        }
    }
    // '!=' medium
    {
        Float f{0.3334343333f};
        Float fe{( 1.0f / 3.0f )};
        bool res = ( fe != f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 20;
        }
    }
    // '!=' hard
    {
        Float f = FloatBox::fbox<decltype( PI )>( PI );
        Float fe{static_cast<float>( PI ) + static_cast<float>( 1.0 / PI )};
        bool res = ( fe != f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 21;
        }
    }
    // '<'
    {
        Float f{4.2f};
        Float fe{4.1f};
        bool res = ( fe < f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 22;
        }
    }
    // '>'
    {
        Float f{4.2f};
        Float fe{4.3f};
        bool res = ( fe > f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 23;
        }
    }
    // '<='
    {
        Float f{4.2f};
        Float fe{4.1f};
        bool res = ( fe <= f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 24;
        }
    }
    // '>='
    {
        Float f{4.2f};
        Float fe{4.3f};
        bool res = ( fe >= f );
        if ( !res )
        {
            cerr << "failure @" << __FILE__ << ":" << __LINE__ << "\n";
            return 25;
        }
    }
    // fbox — factory helper; must accept float and int, must reject strings
    {
        using namespace FloatBox;
        float x = 3.14f;
        Float f1 = fbox( x );
        Float f2 = fbox( 42 );
        ///Float wrong = fbox("hello"); // must not compile
        cout << f1 << " " << f2 << "\n";
    }
    cout << "SUCCESS\n";
    return 0;
}
|
// LanguageError.swift

/// Errors thrown by the language-detection API.
enum LanguageError: Error {
    /// The language of the input text could not be determined.
    case languageDetectionFailed
}

/// Detects the language of the given text and returns its name.
///
/// - Parameter text: The text to analyze.
/// - Returns: The detected language.
/// - Throws: `LanguageError.languageDetectionFailed`. Currently this is a
///   stub and ALWAYS throws — the detection logic is not yet implemented.
func detectLanguage(_ text: String) throws -> String {
    // Implement the language detection logic here
    // For example, using a third-party library or custom algorithm
    // If language detection fails, throw an error
    throw LanguageError.languageDetectionFailed
}

// Example usage
do {
    let detectedLanguage = try detectLanguage("Bonjour tout le monde")
    print("Detected language: \(detectedLanguage)")
} catch {
    print("Error detecting language: \(error)")
}
import React from 'react'
import { Link } from 'react-router-dom'
import StdProps from '../../../common/std-props'
import _ from 'lodash'
/** Props for {@link NavBar}. */
export interface NavBarProps extends StdProps {
    /** When true, the brand icon image is omitted. */
    noIcon?: boolean;
}

/**
 * Top navigation bar.
 *
 * Renders the brand icon (unless `noIcon`), the site title, and two
 * Bootstrap dropdown menus. All remaining props are spread onto the root
 * <nav> element.
 *
 * Fixes: the two dropdown toggles previously shared `id="navbardrop"`,
 * which is invalid HTML (duplicate ids) — they now have unique ids.
 */
export default function NavBar(props: NavBarProps) {
    const { className, noIcon, ...attr } = props;
    return <nav className={`navbar navbar-expand-sm bg-light navbar-light ${className}`} {...attr}>
        <div className="container">
            {
                noIcon === true ? null : <Link className="navbar-brand" to="/">
                    <img style={{ height: "30px" }} className="img-fluid" src={require("../../../app/icon/icon.png")} alt="" />
                </Link>
            }
            <Link className="navbar-brand" to="/">Yin's Cat Paw</Link>
            <button className="navbar-toggler" type="button" data-toggle="collapse" data-target="#collapsibleNavbar">
                <span className="navbar-toggler-icon"></span>
            </button>
            <div className="collapse navbar-collapse justify-content-end" id="collapsibleNavbar">
                <ul className="navbar-nav" >
                    <li className="nav-item">
                        <Link className="nav-link" to="/flow">Flow</Link>
                    </li>
                    <li className="nav-item dropdown">
                        <a className="nav-link dropdown-toggle" href="#" id="navbarDropMore" data-toggle="dropdown">
                            更多
                        </a>
                        <div className="dropdown-menu">
                            <Link className="dropdown-item" to="/fragment/loveclock">The Clock</Link>
                            <Link className="dropdown-item" to="/read/tea">茶</Link>
                            {/* <Link className="dropdown-item" to="/fragment/links">万箭穿心</Link> */}
                            <Link className="dropdown-item" to="/read/loveletters">情书</Link>
                        </div>
                    </li>
                    <li className="nav-item dropdown">
                        <a className="nav-link dropdown-toggle" href="#" id="navbarDropAbout" data-toggle="dropdown">
                            关于
                        </a>
                        <div className="dropdown-menu">
                            <Link className="dropdown-item" to="/about">关于</Link>
                            <Link className="dropdown-item" to="/about/opensource">开源</Link>
                            <Link className="dropdown-item" to="/about/install">安装</Link>
                        </div>
                    </li>
                </ul>
            </div>
        </div>
    </nav>
}
<filename>src/main/java/org/la4j/vector/SparseVector.java
/*
* Copyright 2011-2014, by <NAME> and Contributors.
*
* This file is part of la4j project (http://la4j.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributor(s): -
*
*/
package org.la4j.vector;
import java.io.IOException;
import java.io.InputStream;
import java.text.NumberFormat;
import java.util.Collection;
import java.util.Map;
import java.util.Random;
import org.la4j.iterator.VectorIterator;
import org.la4j.Matrix;
import org.la4j.matrix.sparse.CRSMatrix;
import org.la4j.matrix.ColumnMajorSparseMatrix;
import org.la4j.matrix.RowMajorSparseMatrix;
import org.la4j.Vector;
import org.la4j.Vectors;
import org.la4j.vector.functor.VectorAccumulator;
import org.la4j.vector.functor.VectorProcedure;
import org.la4j.operation.VectorMatrixOperation;
import org.la4j.operation.VectorOperation;
import org.la4j.operation.VectorVectorOperation;
import org.la4j.vector.sparse.CompressedVector;
/**
* A sparse vector.
*
* A vector represents an array of elements. It can be re-sized.
*
* A sparse data structure does not store blank elements, and instead just stores
* elements with values. A sparse data structure can be initialized with a large
* length but take up no storage until the space is filled with non-zero elements.
*
* However, there is a performance cost. Fetch/store operations take O(log n)
* time instead of the O(1) time of a dense data structure.
*
*/
public abstract class SparseVector extends Vector {

    /** Number of non-zero elements currently stored in this vector. */
    protected int cardinality;

    public SparseVector(int length) {
        this(length, 0);
    }

    public SparseVector(int length, int cardinality) {
        super(length);
        this.cardinality = cardinality;
    }

    /**
     * Creates a zero {@link SparseVector} of the given {@code length}.
     *
     * @param length the length of the new vector
     *
     * @return a zero sparse vector
     */
    public static SparseVector zero(int length) {
        return CompressedVector.zero(length);
    }

    /**
     * Creates a zero {@link SparseVector} of the given {@code length} with
     * the given {@code capacity}.
     *
     * @param length the length of the new vector
     * @param capacity the initial capacity (number of pre-allocated slots)
     *
     * @return a zero sparse vector
     */
    public static SparseVector zero(int length, int capacity) {
        // Fixed: previously the capacity argument was silently ignored.
        return CompressedVector.zero(length, capacity);
    }

    /**
     * Creates a random {@link SparseVector} of the given {@code length} with
     * the given {@code density}.
     *
     * @param length the length of the new vector
     * @param density the fraction of non-zero elements
     * @param random the source of randomness
     *
     * @return a random sparse vector
     */
    public static SparseVector random(int length, double density, Random random) {
        return CompressedVector.random(length, density, random);
    }

    /**
     * Creates a new {@link SparseVector} from the given {@code array} with
     * compressing (copying) the underlying array.
     *
     * @param array the source array
     *
     * @return a compressed copy of {@code array}
     */
    public static SparseVector fromArray(double[] array) {
        return CompressedVector.fromArray(array);
    }

    /**
     * Parses {@link SparseVector} from the given CSV string.
     *
     * @param csv the CSV string representing a vector
     *
     * @return a parsed vector
     */
    public static SparseVector fromCSV(String csv) {
        return Vector.fromCSV(csv).to(Vectors.SPARSE);
    }

    /**
     * Parses {@link SparseVector} from the given Matrix Market.
     *
     * @param is the input stream in Matrix Market format
     *
     * @return a parsed vector
     * @exception IOException if an I/O error occurs.
     */
    public static SparseVector fromMatrixMarket(InputStream is) throws IOException {
        return Vector.fromMatrixMarket(is).to(Vectors.SPARSE);
    }

    /**
     * Creates new {@link SparseVector} from collection
     *
     * @param list value list
     *
     * @return created vector
     */
    public static SparseVector fromCollection(Collection<? extends Number> list) {
        return Vector.fromCollection(list).to(Vectors.SPARSE);
    }

    /**
     * Creates new {@link SparseVector} from given index-value map
     *
     * @param map the index-to-value map of non-zero elements
     * @param length the length of the new vector
     *
     * @return created vector
     */
    public static SparseVector fromMap(Map<Integer, ? extends Number> map, int length) {
        return CompressedVector.fromMap(map, length);
    }

    /**
     * Returns the cardinality (the number of non-zero elements)
     * of this sparse vector.
     *
     * @return the cardinality of this vector
     */
    public int cardinality() {
        return cardinality;
    }

    /**
     * Returns the density (non-zero elements divided by total elements)
     * of this sparse vector.
     *
     * @return the density of this vector
     */
    public double density() {
        return cardinality / (double) length;
    }

    @Override
    public double get(int i) {
        return getOrElse(i, 0.0);
    }

    /**
     * Gets the specified element, or a {@code defaultValue} if there
     * is no actual element at index {@code i} in this sparse vector.
     *
     * @param i the element's index
     * @param defaultValue the default value
     *
     * @return the element of this vector or a default value
     */
    public abstract double getOrElse(int i, double defaultValue);

    /**
     * Whether or not the specified element is zero.
     *
     * @param i element's index
     *
     * @return {@code true} if specified element is zero, {@code false} otherwise
     */
    public boolean isZeroAt(int i) {
        return !nonZeroAt(i);
    }

    /**
     * Whether or not the specified element is not zero.
     *
     * @param i element's index
     *
     * @return {@code true} if specified element is not zero, {@code false} otherwise
     */
    public abstract boolean nonZeroAt(int i);

    /**
     * Folds non-zero elements of this vector with given {@code accumulator}.
     *
     * @param accumulator the vector accumulator
     *
     * @return the accumulated value
     */
    public double foldNonZero(VectorAccumulator accumulator) {
        eachNonZero(Vectors.asAccumulatorProcedure(accumulator));
        return accumulator.accumulate();
    }

    /**
     * Applies given {@code procedure} to each non-zero element of this vector.
     *
     * @param procedure the vector procedure
     */
    public void eachNonZero(VectorProcedure procedure) {
        VectorIterator it = nonZeroIterator();
        while (it.hasNext()) {
            double x = it.next();
            int i = it.index();
            procedure.apply(i, x);
        }
    }

    @Override
    public Vector add(double value) {
        // Adding a scalar makes zero entries non-zero, so the result is dense:
        // start from a constant vector and overwrite the non-zero positions.
        Vector result = DenseVector.constant(length, value);
        VectorIterator it = nonZeroIterator();
        while (it.hasNext()) {
            double x = it.next();
            int i = it.index();
            result.set(i, x + value);
        }
        return result;
    }

    @Override
    public Vector multiply(double value) {
        // Scaling preserves sparsity: only non-zero entries need touching.
        Vector result = blank();
        VectorIterator it = nonZeroIterator();
        while (it.hasNext()) {
            double x = it.next();
            int i = it.index();
            result.set(i, x * value);
        }
        return result;
    }

    @Override
    public double max() {
        double max = foldNonZero(Vectors.mkMaxAccumulator());
        // Implicit zero entries participate in the maximum.
        return (max > 0.0) ? max : 0.0;
    }

    @Override
    public double min() {
        double min = foldNonZero(Vectors.mkMinAccumulator());
        // Implicit zero entries participate in the minimum.
        return (min < 0.0) ? min : 0.0;
    }

    @Override
    public double euclideanNorm() {
        return foldNonZero(Vectors.mkEuclideanNormAccumulator());
    }

    @Override
    public double manhattanNorm() {
        return foldNonZero(Vectors.mkManhattanNormAccumulator());
    }

    @Override
    public double infinityNorm() {
        double norm = foldNonZero(Vectors.mkInfinityNormAccumulator());
        return (norm > 0.0) ? norm : 0.0;
    }

    /**
     * Returns a non-zero vector iterator.
     *
     * @return a non-zero vector iterator
     */
    public abstract VectorIterator nonZeroIterator();

    @Override
    public <T extends Vector> T to(VectorFactory<T> factory) {
        T result = factory.apply(length);
        VectorIterator it = nonZeroIterator();
        while (it.hasNext()) {
            double x = it.next();
            int i = it.index();
            result.set(i, x);
        }
        return result;
    }

    @Override
    public int hashCode() {
        int result = 17;
        VectorIterator it = nonZeroIterator();
        while (it.hasNext()) {
            long x = it.next().longValue();
            long i = (long) it.index();
            result = 37 * result + (int) (x ^ (x >>> 32));
            result = 37 * result + (int) (i ^ (i >>> 32));
        }
        return result;
    }

    @Override
    public <T> T apply(VectorOperation<T> operation) {
        operation.ensureApplicableTo(this);
        return operation.apply(this);
    }

    @Override
    public <T> T apply(VectorVectorOperation<T> operation, Vector that) {
        return that.apply(operation.partiallyApply(this));
    }

    @Override
    public <T> T apply(VectorMatrixOperation<T> operation, Matrix that) {
        return that.apply(operation.partiallyApply(this));
    }

    @Override
    public Matrix toRowMatrix() {
        VectorIterator it = nonZeroIterator();
        Matrix result = CRSMatrix.zero(1, length);
        while (it.hasNext()) {
            double x = it.next();
            int j = it.index();
            result.set(0, j, x);
        }
        return result;
    }

    @Override
    public Matrix toColumnMatrix() {
        VectorIterator it = nonZeroIterator();
        Matrix result = ColumnMajorSparseMatrix.zero(length, 1);
        while (it.hasNext()) {
            double x = it.next();
            int i = it.index();
            result.set(i, 0, x);
        }
        return result;
    }

    @Override
    public Matrix toDiagonalMatrix() {
        VectorIterator it = nonZeroIterator();
        Matrix result = RowMajorSparseMatrix.zero(length, length);
        while (it.hasNext()) {
            double x = it.next();
            int i = it.index();
            result.set(i, i, x);
        }
        return result;
    }

    @Override
    public String toMatrixMarket(NumberFormat formatter) {
        StringBuilder out = new StringBuilder();
        VectorIterator it = nonZeroIterator();
        out.append("%%MatrixMarket vector coordinate real\n");
        out.append(length).append(' ').append(cardinality).append('\n');
        while (it.hasNext()) {
            double x = it.next();
            int i = it.index();
            // Matrix Market indices are 1-based.
            out.append(i + 1).append(' ').append(formatter.format(x)).append('\n');
        }
        return out.toString();
    }

    /**
     * Ensures the provided index is in the bounds of this {@link SparseVector}.
     *
     * @param i The index to check.
     */
    protected void ensureIndexIsInBounds(int i) {
        if (i < 0 || i >= length) {
            throw new IndexOutOfBoundsException("Index '" + i + "' is invalid.");
        }
    }
}
|
#! /bin/bash
# Prepares TCGA normal-sample expression data: scrapes cancer-type
# abbreviations from the TCGA site (R), then merges them with the GEO
# expression/metadata files (Python) into gzipped TSV outputs.

redirectedTempFolder=tmp
PatientCancerType="$redirectedTempFolder/GSE62944_06_01_15_TCGA_24_Normal_CancerType_Samples.txt.gz"
NormalTPM="$redirectedTempFolder/GSM1697009_06_01_15_TCGA_24.normal_Rsubread_TPM.txt.gz"
tcgaHtml="$redirectedTempFolder/tcga_abbreviations.html"
nameToAbbreviation="$redirectedTempFolder/nameToAbbreviation.txt"
dataOutFilegz=data.tsv.gz
metadataOutFilegz=metadata.tsv.gz

#source activate WishBuilderDependencies

# Quote all expansions so paths survive word-splitting/globbing.
Rscript scrapeWebTCGA.R "$tcgaHtml" "$nameToAbbreviation"
python parse.py "$PatientCancerType" "$NormalTPM" "$dataOutFilegz" "$metadataOutFilegz" "$nameToAbbreviation"
|
import * as requestContext from 'express-http-context';
import { WebSocketChannel, Channel } from '../common';
import ws = require('ws');
import { Dispatcher } from './dispatcher';
import { Session } from './session/session-protocol';
import { Cookies } from './cookies';
import { Request, Response } from './http/http-protocol';
import { WebSocketChannelStrategy, ChannelStrategy, HttpChannelStrategy, CheckAliveWS } from './channel';
// Scopes in which a context attribute can be stored and looked up.
export enum AttributeScope { App, Request, Session }

// Keys under which per-request objects are stashed in the
// express-http-context store.
export const CURRENT_CONTEXT_REQUEST_KEY = 'CurrentContextRequest';
export const CURRENT_COOKIES_REQUEST_KEY = 'CurrentCookiesRequest';
export const CURRENT_SESSION_REQUEST_KEY = 'CurrentSessionRequest';
export const CURRENT_CHANNEL_STRATEGY_REQUEST_KEY = 'CurrentChannelStrategyRequest';

// Application-scoped attributes, shared across all requests of the process.
const appAttrs = new Map<string, any>();

/**
 * A request-processing context: the current request/response pair plus the
 * channel strategy used to talk back to the client (HTTP or WebSocket).
 */
export interface Context {
    readonly request: Request;
    readonly response: Response;
    channelStrategy?: ChannelStrategy;
}
/**
 * Static accessors for the per-request context, backed by
 * express-http-context's continuation-local storage. All getters return
 * whatever was stored for the CURRENT request chain.
 */
export namespace Context {

    /** Runs `fn` inside the request-context namespace so values set during
     *  the call are visible to the rest of the async chain. */
    export function run(fn: (...args: any[]) => void) {
        requestContext.ns.run(fn);
    }

    export function setCurrent(context: Context) {
        requestContext.set(CURRENT_CONTEXT_REQUEST_KEY, context);
    }

    export function getCurrent<T extends Context>(): T {
        return requestContext.get(CURRENT_CONTEXT_REQUEST_KEY);
    }

    export function getRequest(): Request {
        return getCurrent().request;
    }

    export function getResponse(): Response {
        return getCurrent().response;
    }

    export function getCookies(): Cookies {
        return requestContext.get(CURRENT_COOKIES_REQUEST_KEY);
    }

    export function setCookies(cookies: Cookies): void {
        requestContext.set(CURRENT_COOKIES_REQUEST_KEY, cookies);
    }

    export function getSession(): Session {
        return requestContext.get(CURRENT_SESSION_REQUEST_KEY);
    }

    export function setSession(session: Session): void {
        requestContext.set(CURRENT_SESSION_REQUEST_KEY, session);
    }

    /** Correctly-spelled accessor for the current channel strategy. */
    export function getChannelStrategy(): ChannelStrategy {
        return requestContext.get(CURRENT_CHANNEL_STRATEGY_REQUEST_KEY);
    }

    /** Correctly-spelled setter for the current channel strategy. */
    export function setChannelStrategy(channelStrategy: ChannelStrategy): void {
        requestContext.set(CURRENT_CHANNEL_STRATEGY_REQUEST_KEY, channelStrategy);
    }

    /** @deprecated Misspelling kept for backward compatibility — use
     *  {@link getChannelStrategy}. */
    export function getChannalStrategy(): ChannelStrategy {
        return getChannelStrategy();
    }

    /** @deprecated Misspelling kept for backward compatibility — use
     *  {@link setChannelStrategy}. */
    export function setChannalStrategy(channelStrategy: ChannelStrategy): void {
        setChannelStrategy(channelStrategy);
    }

    /**
     * Stores `value` under `key` in the given scope
     * (request scope by default).
     */
    export function setAttr(key: string, value: any, scope: AttributeScope = AttributeScope.Request) {
        if (scope === AttributeScope.Request) {
            requestContext.set(key, value);
        } else if (scope === AttributeScope.Session) {
            getSession()[key] = value;
        } else {
            appAttrs.set(key, value);
        }
    }

    /**
     * Reads `key` from the given scope; when `scope` is omitted, searches
     * request, then session, then application scope, returning the first
     * non-falsy hit.
     */
    export function getAttr<T>(key: string, scope?: AttributeScope): T {
        if (scope) {
            if (scope === AttributeScope.Request) {
                return requestContext.get(key);
            } else if (scope === AttributeScope.Session) {
                return getSession()[key];
            } else {
                return appAttrs.get(key);
            }
        } else {
            let value = requestContext.get(key);
            value = value ? value : getSession()[key];
            return value ? value : appAttrs.get(key);
        }
    }
}
/** Context for a plain HTTP request; replies travel over the HTTP response. */
export class HttpContext implements Context {
    readonly channelStrategy = new HttpChannelStrategy();
    constructor(public request: Request, public response: Response) {
    }
}
/**
 * Context for a WebSocket connection. Parses incoming messages, dispatches
 * them inside a fresh request context, and keeps the connection alive with
 * periodic pings.
 */
export class WebSocketContext extends HttpContext {
    protected message: Channel.Message;
    protected socket: CheckAliveWS;
    protected checkAliveTimeout = 30000;
    // Keep-alive timer handle. Fixed leak: the interval used to be created
    // per connection and never cleared, accumulating one live timer per
    // WebSocket for the lifetime of the process.
    protected checkAliveTimer: ReturnType<typeof setInterval>;
    protected channels = new Map<number, WebSocketChannel>();
    channelStrategy: ChannelStrategy;

    constructor(request: Request, response: Response, protected readonly server: ws.Server, socket: ws, protected dispatcher: Dispatcher<WebSocketContext>) {
        super(request, response);
        this.socket = <CheckAliveWS>socket;
        this.socket.alive = true;
        this.channelStrategy = new WebSocketChannelStrategy(this.socket, this.channels);
        socket.on('pong', () => this.socket.alive = true);
        // Ping every client periodically; terminate those that missed a pong.
        // NOTE(review): this sweeps ALL server clients although the timer is
        // per-connection — confirm whether a single server-level sweep is the
        // intended design.
        this.checkAliveTimer = setInterval(() => {
            server.clients.forEach((s: CheckAliveWS) => {
                if (s.alive === false) {
                    s.terminate();
                    return;
                }
                s.alive = false;
                s.ping();
            });
        }, this.checkAliveTimeout);
        this.socket.on('message', data => {
            this.message = JSON.parse(data.toString());
            Context.run(() => this.dispatcher.dispatch(this));
        });
        this.socket.on('error', err => {
            for (const channel of this.channels.values()) {
                channel.fireError(err);
            }
        });
        this.socket.on('close', (code, reason) => {
            // Stop the keep-alive sweep for this connection (leak fix).
            clearInterval(this.checkAliveTimer);
            for (const channel of [...this.channels.values()]) {
                channel.close(code, reason);
            }
            this.channels.clear();
        });
    }
}
|
<gh_stars>0
import { expectEvals } from "test/common";
// Specs for the expression evaluator's list builtins. `expectEvals`
// evaluates the expression string and asserts the result equals the
// second argument.
describe("map()", () => {
    test("empty list", () => expectEvals("map([], (k) => 6)", []));
    test("number list", () => expectEvals("map([1, 2, 3], (k) => k + 4)", [5, 6, 7]));
    test("string list", () => expectEvals('map(["a", "be", "ced"], (k) => length(k))', [1, 2, 3]));
});

describe("filter()", () => {
    test("empty list", () => expectEvals("filter(list(), (k) => true)", []));
    test("number list", () => expectEvals("filter(list(1, 2, 3), (k) => k >= 2)", [2, 3]));
});

describe("sum()", () => {
    test("number list", () => expectEvals("sum(list(2, 3, 1))", 6));
    // sum() concatenates strings.
    test("string list", () => expectEvals('sum(list("a", "b", "c"))', "abc"));
    // sum() of an empty list is null, not 0.
    test("empty list", () => expectEvals("sum(list())", null));
});

describe("any()", () => {
    test("true, false", () => expectEvals("any(true, false)", true));
    test("[true, false]", () => expectEvals("any(list(true, false))", true));
});

describe("all()", () => {
    test("true, false", () => expectEvals("all(true, false)", false));
    // NOTE(review): expects true even though the list holds `false` —
    // presumably a non-empty list argument is treated as a single truthy
    // value here rather than being flattened; confirm against the evaluator.
    test("true, [false]", () => expectEvals("all(true, list(false))", true));
    test("[true, false]", () => expectEvals("all(list(true, false))", false));
    // Vectorized predicates: regexmatch over a list yields a list of booleans.
    test("vectorized", () => {
        expectEvals("all(regexmatch(\"a+\", list(\"a\", \"aaaa\")))", true);
        expectEvals("all(regexmatch(\"a+\", list(\"a\", \"aaab\")))", false);
        expectEvals("any(regexmatch(\"a+\", list(\"a\", \"aaab\")))", true);
    });
});
<filename>scintilla/include/Sci_Position.h
// Scintilla source code edit control
/** @file Sci_Position.h
** Define the Sci_Position type used in Scintilla's external interfaces.
** These need to be available to clients written in C so are not in a C++ namespace.
**/
// Copyright 2015 by <NAME> <<EMAIL>>
// The License.txt file describes the conditions under which this software may be distributed.
#pragma once
#if defined(__cplusplus)
#include <cstddef>
#else
#include <stddef.h>
#endif
// Basic signed type used throughout interface
typedef ptrdiff_t Sci_Position;
typedef ptrdiff_t Sci_Line;
// Unsigned variant used for ILexer::Lex and ILexer::Fold
typedef size_t Sci_PositionU;
// For Sci_CharacterRange, previously defined as long to be compatible with Win32 CHARRANGE.
// long is 32-bit on LLP64 system (e.g. 64-bit Windows), thus can not be used to access text beyond 2 GiB.
// ScintillaWin::EditMessage() added translations between CHARRANGE and Sci_CharacterRange.
// Using legacy Win32 EM_* messages and related structures to interact with Scintilla
// is deprecated, supporting for WIN32 EM_* messages will be removed in the future.
typedef Sci_Position Sci_PositionCR;
#ifdef _WIN32
#define SCI_METHOD __stdcall
#else
#define SCI_METHOD
#endif
#if defined(__cplusplus)
// Minimal constexpr min/max helpers, provided so Scintilla code can avoid
// pulling in <algorithm> (and avoid clashes with Windows min/max macros).
namespace sci {

template <typename T>
constexpr T min(T x, T y) noexcept {
    return (x < y) ? x : y;
}

template <typename T>
constexpr T max(T x, T y) noexcept {
    return (x > y) ? x : y;
}

}
#endif
|
Rails.application.routes.draw do
  # Admin CRUD for all core resources; admin home is the universities index.
  namespace :admin do
    resources :univs
    resources :faculties
    resources :comps
    resources :users
    resources :kosens
    resources :news
    resources :events
    root to: "univs#index"
  end

  # Public resources.
  resources :univs #param: :name
  resources :faculties #param: :s_name

  #get '/career' => "top#coming_soon"
  get '/career' => "career#index"
  # Transfer page accepts both GET and form POST.
  get '/transfer' => "transfer#index"
  post '/transfer' => "transfer#index"
  get '/news' => "news#index"
  get '/event' => "event#index"
  get '/about' => "about#index"

  # Detail pages keyed by name (news by numeric id).
  get '/transfer/:name' => "transfer#show", as: :transfer_show
  get '/career/:name' => "career#show", as: :career_show
  get '/event/:name' => "event#show", as: :event_show
  get '/news/:id' => "news#show", as: :news_show
  get '/coming_soon' => "top#coming_soon"

  # Authentication. NOTE(review): '/registar' looks like a misspelling of
  # '/register', but the path is part of the public URL surface — confirm
  # before renaming.
  get '/login/:id' => "login#show"
  get '/login' => "login#login_form"
  post '/login' => "login#login"
  get '/registar' => "login#new"
  post '/registar' => "login#create"
  get '/logout' => "login#logout"

  root 'top#index'
  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
|
package api_test
import (
. "cf/api"
"cf/net"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"net/http"
"net/http/httptest"
testapi "testhelpers/api"
testconfig "testhelpers/configuration"
testnet "testhelpers/net"
)
// Verifies that GetSshDetails issues
// GET /v2/apps/:guid/instances/:index/ssh against the Cloud Controller and
// maps the JSON response onto the SSH detail fields.
var _ = Describe("AppSshRepository", func() {
	It("TestGetSshCurrentSpace", func() {
		getAppSshInfoRequest := testapi.NewCloudControllerTestRequest(testnet.TestRequest{
			Method:   "GET",
			Path:     "/v2/apps/my-app-guid/instances/0/ssh",
			Response: testnet.TestResponse{Status: http.StatusOK, Body: getSshInfoResponseBody},
		})
		ts, handler, repo := createSshInfoRepo([]testnet.TestRequest{getAppSshInfoRequest})
		defer ts.Close()
		apiResponse, sshDetails := repo.GetSshDetails("my-app-guid", 0)
		Expect(handler.AllRequestsCalled()).To(BeTrue())
		Expect(apiResponse.IsSuccessful()).To(BeTrue())
		Expect(sshDetails.Ip).To(Equal("10.0.0.1"))
		Expect(sshDetails.Port).To(Equal(1234))
		Expect(sshDetails.User).To(Equal("vcap"))
		Expect(sshDetails.SshKey).To(Equal("fakekey"))
	})
})

// Canned Cloud Controller JSON for the SSH-info endpoint.
var getSshInfoResponseBody = `
{
"ip": "10.0.0.1",
"sshkey": "fakekey",
"user": "vcap",
"port": 1234
}`

// createSshInfoRepo spins up a TLS test server that serves the given canned
// requests and returns it together with its handler and a repository wired
// to talk to it. Callers must Close() the returned server.
func createSshInfoRepo(requests []testnet.TestRequest) (ts *httptest.Server, handler *testnet.TestHandler, repo AppSshRepository) {
	ts, handler = testnet.NewTLSServer(requests)
	configRepo := testconfig.NewRepositoryWithDefaults()
	configRepo.SetApiEndpoint(ts.URL)
	gateway := net.NewCloudControllerGateway()
	repo = NewCloudControllerAppSshRepository(configRepo, gateway)
	return
}
|
/**
 * Returns the string `'bar'`.
 *
 * Declared `async`, so callers always receive a Promise that resolves to
 * `'bar'`.
 *
 * @returns {Promise<string>} resolves to `'bar'`
 */
const foo = async () => 'bar';
|
#!/bin/sh
# Rebuild the native addon from scratch; abort on the first failure.
set -e

# Clean previous build output only when it exists.
if [ -d build ]; then
    node-gyp clean
fi

node-gyp configure build install
|
{
    # Script lives one level below the site root.
    cd ..
    echo " ################# Cleaning Site #################"
    jekyll clean;
    echo " ################# Building Site #################"
    jekyll build;
    # -I: incremental regeneration; --drafts: include _drafts posts.
    echo " ################# Serving Site with DRAFTS in interactive Mode at $(date) #################"
    jekyll serve -I --drafts;
}
require 'pry'
# A US state listed on koa.com. Each instance keeps its display name, the
# KOA listing URL derived from that name, and the campgrounds scraped for
# it. Every created instance registers itself in State.all.
class State
  attr_accessor :name, :url, :campgrounds

  @@all = []

  def initialize(name)
    @name = name
    @campgrounds = []
    # KOA state pages are keyed by the lower-cased, hyphenated state name.
    @url = "https://koa.com/states-provinces/#{@name.downcase.gsub(" ", "-")}/"
    @@all << self
  end

  # All State instances created so far.
  def self.all
    @@all
  end

  # Scrapes the KOA campgrounds index and creates one State per listed
  # state (the first 47 entries after splitting on "Campgrounds in ").
  # NOTE(review): relies on Nokogiri and OpenURI (`open`) being required
  # elsewhere — this file itself does not require them; confirm.
  def self.scrape_koa_states
    doc = Nokogiri::HTML(open("https://koa.com/campgrounds/"))
    states = doc.search("a h4").text
    states_array = states.split("Campgrounds in ")
    new_array = states_array[1, 47]
    # Instantiation registers each state in @@all; the block's value is
    # unused (previously the block pointlessly reassigned its parameter).
    new_array.each { |s| self.new(s) }
  end

  # Prints the 47 scraped states as a fixed 5-column numbered menu.
  # Assumes scrape_koa_states has already populated State.all.
  def self.list_display
    puts "------------------------------------------------------------------------------------------------------"
    puts " 1. #{self.all[0].name} 2. #{self.all[1].name} 3. #{self.all[2].name} 4. #{self.all[3].name} 5. #{self.all[4].name}"
    puts " 6. #{self.all[5].name} 7. #{self.all[6].name} 8. #{self.all[7].name} 9. #{self.all[8].name} 10. #{self.all[9].name}"
    puts "11. #{self.all[10].name} 12. #{self.all[11].name} 13. #{self.all[12].name} 14. #{self.all[13].name} 15. #{self.all[14].name}"
    puts "16. #{self.all[15].name} 17. #{self.all[16].name} 18. #{self.all[17].name} 19. #{self.all[18].name} 20. #{self.all[19].name}"
    puts "21. #{self.all[20].name} 22. #{self.all[21].name} 23. #{self.all[22].name} 24. #{self.all[23].name} 25. #{self.all[24].name}"
    puts "26. #{self.all[25].name} 27. #{self.all[26].name} 28. #{self.all[27].name} 29. #{self.all[28].name} 30. #{self.all[29].name}"
    puts "31. #{self.all[30].name} 32. #{self.all[31].name} 33. #{self.all[32].name} 34. #{self.all[33].name} 35. #{self.all[34].name}"
    puts "36. #{self.all[35].name} 37. #{self.all[36].name} 38. #{self.all[37].name} 39. #{self.all[38].name} 40. #{self.all[39].name}"
    puts "41. #{self.all[40].name} 42. #{self.all[41].name} 43. #{self.all[42].name} 44. #{self.all[43].name} 45. #{self.all[44].name}"
    puts "46. #{self.all[45].name} 47. #{self.all[46].name}"
    puts "------------------------------------------------------------------------------------------------------"
    puts
  end
end
<!DOCTYPE html>
<html>
<head>
<title>Employee Salaries</title>
</head>
<body>
<h2>Employee Salaries</h2>
<table>
<thead>
<tr>
<th>Name</th>
<th>Salary</th>
</tr>
</thead>
<tbody>
<tr>
<td>John</td>
<td>$50000</td>
</tr>
<tr>
<td>Jane</td>
<td>$40000</td>
</tr>
<tr>
<td>Adam</td>
<td>$35000</td>
</tr>
</tbody>
</table>
</body>
</html> |
#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>
#include <pybind11/stl.h>
#include "export_cell_vector.hxx"
#include "nifty/python/converter.hxx"
#include "nifty/cgp/bounds.hxx"
namespace py = pybind11;
typedef std::vector<nifty::cgp::CellBounds<2,0> > VectorCellBounds2_0;
typedef std::vector<nifty::cgp::CellBounds<2,1> > VectorCellBounds2_1;
PYBIND11_MAKE_OPAQUE(VectorCellBounds2_0);
PYBIND11_MAKE_OPAQUE(VectorCellBounds2_1);
namespace nifty{
namespace cgp{
// Adds the Python sequence protocol (__len__/__getitem__/__iter__) to an
// already-declared pybind11 class wrapping a CGP cell-bounds container.
// DIM / CELL_TYPE are carried for symmetry with the caller; `m` is unused.
template<
    size_t DIM,
    size_t CELL_TYPE,
    class CLS
>
void exportCellBoundsT(py::module & m, py::class_<CLS> & pyCls) {
    pyCls
        .def("__len__", &CLS::size)
        .def("__getitem__", [](const CLS & self, uint32_t i){
            // Guard against out-of-range access from Python: the raw
            // self[i] would be undefined behavior.  Raising IndexError
            // also makes Python's fallback iteration protocol terminate.
            if(i >= self.size()){
                throw py::index_error();
            }
            return self[i];
        })
        .def("__iter__",[](const CLS & self){
            const auto begin = &(self[0]);
            return py::make_iterator(begin, begin + self.size());
        // keep_alive<0,1>: keep the container alive while its iterator lives.
        }, py::keep_alive<0, 1>())
    ;
}
// Registers the 2D CGP bounds / bounded-by types and their vector wrappers
// with Python.  Each braced section exports one cell type; in 2D: cell 0 =
// junction points, cell 1 = boundary segments, cell 2 = regions.
void exportBounds2D(py::module & m) {

    // cell 0 bounds
    {
        typedef CellBounds<2,0> Cell0Bounds2D;
        typedef CellBoundsVector<2,0> Cells0BoundsVector2D;

        const std::string clsName = std::string("Cell0Bounds2D");
        auto cls = py::class_<Cell0Bounds2D>(m, clsName.c_str());
        exportCellBoundsT<2, 0, Cell0Bounds2D>(m, cls);

        const std::string clsNameVec = std::string("Cell0BoundsVector2D");
        auto clsVec = py::class_<Cells0BoundsVector2D>(m, clsNameVec.c_str());
        clsVec
            // Expose the whole vector as an (n, 4) uint32 array; entries
            // with fewer than 4 bounds are zero-padded on the right.
            .def("__array__",[](const Cells0BoundsVector2D & self){
                nifty::marray::PyView<uint32_t> ret({size_t(self.size()),size_t(4)});
                for(size_t ci=0 ;ci<self.size(); ++ci){
                    for(size_t i=0; i<self[ci].size(); ++i){
                        ret(ci,i) = self[ci][i];
                    }
                    for(size_t i=self[ci].size(); i<4; ++i){
                        ret(ci,i) = 0;
                    }
                }
                return ret;
            })
        ;
        exportCellVector<Cells0BoundsVector2D>(m, clsVec);
    }

    // cell 1 bounds
    {
        typedef CellBounds<2,1> Cell1Bounds2D;
        typedef CellBoundsVector<2,1> Cell1BoundsVector2D;

        const std::string clsName = std::string("Cell1Bounds2D");
        auto cls = py::class_<Cell1Bounds2D>(m, clsName.c_str());
        exportCellBoundsT<2, 1, Cell1Bounds2D>(m, cls);

        const std::string clsNameVec = std::string("Cell1BoundsVector2D");
        auto clsVec = py::class_<Cell1BoundsVector2D>(m, clsNameVec.c_str());
        clsVec
            // A cell-1 (boundary) always bounds exactly two regions, so the
            // array view is a fixed (n, 2) uint32 matrix.
            .def("__array__",[](const Cell1BoundsVector2D & self){
                nifty::marray::PyView<uint32_t> ret({size_t(self.size()),size_t(2)});
                for(uint32_t ci=0 ;ci<self.size(); ++ci){
                    ret(ci,0) = self[ci][0];
                    ret(ci,1) = self[ci][1];
                }
                return ret;
            })
        ;
        exportCellVector<Cell1BoundsVector2D>(m, clsVec);
    }

    // cell 1 bounded by (in 2D junctions of boundaries)
    {
        typedef CellBoundedBy<2,1> Cell1BoundedBy2D;
        typedef CellBoundedByVector<2,1> Cell1BoundedByVector2D;

        const std::string clsName = std::string("Cell1BoundedBy2D");
        auto cls = py::class_<Cell1BoundedBy2D>(m, clsName.c_str());
        exportCellBoundsT<2, 1, Cell1BoundedBy2D>(m, cls);

        const std::string clsNameVec = std::string("Cell1BoundedByVector2D");
        auto clsVec = py::class_<Cell1BoundedByVector2D>(m, clsNameVec.c_str());
        clsVec
            // Constructed by inverting the cell-0 bounds relation.
            .def(py::init<const CellBoundsVector<2,0 > &>())
            .def("__array__",[](const Cell1BoundedByVector2D & self){
                nifty::marray::PyView<uint32_t> ret({size_t(self.size()),size_t(2)});
                for(uint32_t ci=0 ;ci<self.size(); ++ci){
                    const auto & b = self[ci];
                    ret(ci,0) = b[0];
                    ret(ci,1) = b[1];
                }
                return ret;
            })
            // Returns the 1-based labels of all cell-1 entities whose
            // bounded-by list has exactly `size` elements.
            .def("cellsWithCertainBoundedBySize",[](const Cell1BoundedByVector2D & self,const size_t size){
                std::vector<uint32_t> cell1Labels;
                for(uint32_t ci=0 ;ci<self.size(); ++ci){
                    const auto & b = self[ci];
                    if(b.size() == size){
                        // labels are 1-based (0 is background/unused)
                        cell1Labels.push_back(ci+1);
                    }
                }
                nifty::marray::PyView<uint32_t> ret({size_t(cell1Labels.size())});
                for(auto i=0; i<ret.size(); ++i){
                    ret[i] = cell1Labels[i];
                }
                return ret;
            })
        ;
        exportCellVector<Cell1BoundedByVector2D>(m, clsVec);
    }

    // cell 2 bounded by (in 2D boundaries of regions)
    {
        typedef CellBoundedBy<2,2> Cell2BoundedBy2D;
        typedef CellBoundedByVector<2,2> Cell2BoundedByVector2D;

        const std::string clsName = std::string("Cell2BoundedBy2D");
        auto cls = py::class_<Cell2BoundedBy2D>(m, clsName.c_str());
        exportCellBoundsT<2, 2, Cell2BoundedBy2D>(m, cls);

        const std::string clsNameVec = std::string("Cell2BoundedByVector2D");
        auto clsVec = py::class_<Cell2BoundedByVector2D>(m, clsNameVec.c_str());
        clsVec
            .def(py::init<const CellBoundsVector<2,1 > &>())
            // No __array__ here: per-region boundary lists are ragged, so a
            // rectangular view is not well defined (kept disabled upstream).
            //.def("__array__",[](const Cell2BoundedByVector2D & self){
            //    nifty::marray::PyView<uint32_t> ret({size_t(self.size()),size_t(2)});
            //    for(uint32_t ci=0 ;ci<self.size(); ++ci){
            //        ret(ci,0) = self[ci][0];
            //        ret(ci,1) = self[ci][1];
            //    }
            //    return ret;
            //})
        ;
        exportCellVector<Cell2BoundedByVector2D>(m, clsVec);
    }

    // bounds
    {
        typedef TopologicalGrid<2> TopologicalGridType;
        typedef Bounds<2> BoundsType;

        const std::string clsName = std::string("Bounds2D");
        py::class_<BoundsType>(m, clsName.c_str())
            .def(py::init<const TopologicalGridType &>())
            //.def("__init__",[](
            //    BoundsType & self,
            //    const TopologicalGridType & tgrid
            //){
            //    new (&self) BoundsType(tgrid);
            //})
            //.def("bounds",
            //)
            // Accessors return internal vectors by reference;
            // reference_internal ties their lifetime to the Bounds object.
            .def("cell0Bounds",[](
                const BoundsType & self
            ){
                //return self.bounds0_;
                return self. template bounds<0>();
                //return r;
            },py::return_value_policy::reference_internal)
            .def("cell1Bounds",[](
                const BoundsType & self
            ){
                //return self.bounds1_;
                return self. template bounds<1>();
                //return r;
            },py::return_value_policy::reference_internal)
        ;
    }
}
// Public entry point: currently only the 2D bounds types are exported.
void exportBounds(py::module & m) {
    exportBounds2D(m);
}
}
}
|
<gh_stars>0
import logging
from cmd import Cmd
from bostonregression import BostonRegression
from preparedata import PrepareData
from testaskjunoace import TestAskJunoACE
from askjunoace import AskJunoACE
from askjunoapi import AskJunoAPI
logger = logging.getLogger("ACE")
class HotDogCmdFactory(Cmd):
    """Interactive shell (built on ``cmd.Cmd``) for the hotdog ML workflows.

    Each ``do_<name>`` method implements one prompt command; the ``cmd``
    module dispatches ``<name>`` typed at the prompt to it.  ``config_obj``
    must be assigned by the caller before commands that construct engines
    (prepup, cpltrain, cpleval) are used.
    """

    # Command names advertised to the user (duplicate 'calihead' removed).
    commands = ['prepup', 'feature', 'train', 'test', 'eval', 'bostonmodel',
                'calimeta', 'calihead', 'bostonmeta', 'kfold', 'ajfit',
                'ajfit2', 'cpltrain', 'cpleval']
    # Shared configuration object; None until the caller assigns one.
    config_obj = None

    def do_cpltrain(self, input_file):
        """Load, normalise, split and train on input_file, then save the model."""
        try:
            logger.info('Initialising ace engine')
            aj = AskJunoACE(self.config_obj, input_file)
            logger.info('Loading data for the ace engine to get ready')
            aj.load_data()
            logger.info('Normalising data for the ace engine to get ready')
            aj.normalize_data()
            logger.info('Splitting training and testing data for the ace engine ')
            aj.test_train_split()
            logger.info('Initializing the deep learning engine')
            aj.model_init()
            logger.info('Training the deep learning engine')
            aj.model_train()
            aj.model_save()
        except (RuntimeError, TypeError, NameError):
            # The previous handler read error.original_traceback, an attribute
            # these exception types do not have (it would itself raise
            # AttributeError).  logger.exception logs message + traceback.
            logger.exception("Error training model")

    def do_cpleval(self, input_file):
        """Restore the saved model and evaluate it against input_file."""
        try:
            eval_model = AskJunoAPI(self.config_obj)
            eval_model.restore_model()
            eval_model.test(input_file)
        except (RuntimeError, TypeError, NameError):
            logger.exception("Error evaluating model")

    def do_ajfit(self, args):
        """Run the fit_1 experiment against the bundled omega.csv dataset."""
        try:
            aj = TestAskJunoACE()
            aj.fit_1('/home/jpvel/Workspace/hotdogworkspace/hotdog/data/omega.csv')
        except (RuntimeError, TypeError, NameError):
            logger.exception("Error running ajfit")

    def do_ajfit2(self, args):
        """Run the fit_2 experiment against the bundled omega.csv dataset."""
        try:
            aj = TestAskJunoACE()
            aj.fit_2('/home/jpvel/Workspace/hotdogworkspace/hotdog/data/omega.csv')
        except (RuntimeError, TypeError, NameError):
            logger.exception("Error running ajfit2")

    def do_prepup(self, args):
        """Prepare the raw input data via the configured PrepareData step."""
        try:
            prepareData = PrepareData(self.config_obj)
            prepareData.execute()
        except (RuntimeError, TypeError, NameError):
            logger.exception("Error preparing data")

    def do_bostonmeta(self, args):
        """Print metadata for the Boston housing dataset."""
        br = BostonRegression()
        br.showbostonmeta()

    def do_calihead(self, args):
        """Print the head rows of the California housing dataset."""
        br = BostonRegression()
        br.showhead()

    def do_calimeta(self, args):
        """Print metadata for the California housing dataset."""
        br = BostonRegression()
        br.showmetadata()

    def do_bostonmodel(self, args):
        """Build, train and test the Boston regression model."""
        br = BostonRegression()
        br.build_model()
        br.train_model()
        br.test()

    def do_kfold(self, args):
        """Run k-fold validation and plot the per-fold results."""
        br = BostonRegression()
        br.kfold_validation()
        br.plot_kfold()

    def do_quit(self, args):
        """Quits the program."""
        logger.info("Quitting")
        raise SystemExit
|
// Doxygen-generated navigation data for the "SAI configure" control group:
// each entry is [display title, page URL, subtree id].  Do not hand-edit;
// regenerate with Doxygen instead.
var group__sai__configure__control =
[
    [ "SAI Mode", "group__sai__mode__control.html", "group__sai__mode__control" ],
    [ "SAI Synchronization", "group__sai__sync__control.html", "group__sai__sync__control" ],
    [ "SAI Protocol", "group__sai__protocol__control.html", "group__sai__protocol__control" ],
    [ "SAI Data Size", "group__sai__data__bits__control.html", "group__sai__data__bits__control" ],
    [ "SAI Bit Order", "group__sai__bit__order__control.html", "group__sai__bit__order__control" ],
    [ "SAI Mono Mode", "group__sai__mono__control.html", "group__sai__mono__control" ],
    [ "SAI Companding", "group__sai__companding__control.html", "group__sai__companding__control" ],
    [ "SAI Clock Polarity", "group__sai__clock__pol__control.html", "group__sai__clock__pol__control" ],
    [ "SAI Frame", "group__sai__frame__control.html", "group__sai__frame__control" ],
    [ "SAI Slot", "group__sai__slot__control.html", "group__sai__slot__control" ],
    [ "SAI Master Clock Pin", "group__sai__mclk__pin__control.html", "group__sai__mclk__pin__control" ],
    [ "SAI Master Clock Prescaler", "group__sai__mclk__pres__control.html", "group__sai__mclk__pres__control" ]
];
#!/bin/sh
# Builds and launches a MySQL/MariaDB container from INPUT_* environment
# variables (GitHub-Action-style inputs).  Requires either a root password
# or a user+password pair; exits 1 otherwise.
# NOTE(review): values are interpolated unquoted into a string later run via
# `sh -c`, so passwords containing spaces or shell metacharacters will break
# the command -- confirm inputs are restricted upstream.
docker_run="docker run"
if [ -n "$INPUT_MYSQL_ROOT_PASSWORD" ]; then
  # Root password wins over a named user when both are supplied.
  echo "Root password not empty, use root superuser"
  docker_run="$docker_run -e MYSQL_ROOT_PASSWORD=$INPUT_MYSQL_ROOT_PASSWORD"
elif [ -n "$INPUT_MYSQL_USER" ]; then
  if [ -z "$INPUT_MYSQL_PASSWORD" ]; then
    echo "The mysql password must not be empty when mysql user exists"
    exit 1
  fi
  echo "Use specified user and password"
  # Root gets a random password; only the named user is usable.
  docker_run="$docker_run -e MYSQL_RANDOM_ROOT_PASSWORD=true -e MYSQL_USER=$INPUT_MYSQL_USER -e MYSQL_PASSWORD=$INPUT_MYSQL_PASSWORD"
else
  echo "Both root password and superuser are empty, must contains one superuser"
  exit 1
fi
if [ -n "$INPUT_MYSQL_DATABASE" ]; then
  echo "Use specified database"
  docker_run="$docker_run -e MYSQL_DATABASE=$INPUT_MYSQL_DATABASE"
fi
# Detached container with host-port mapping; remaining flags are passed to
# mysqld itself (port, charset, collation, auth plugin).
docker_run="$docker_run -d -p $INPUT_HOST_PORT:$INPUT_CONTAINER_PORT $INPUT_MYSQL_VARIANT:$INPUT_MYSQL_VERSION --port=$INPUT_CONTAINER_PORT"
docker_run="$docker_run --character-set-server=$INPUT_CHARACTER_SET_SERVER --collation-server=$INPUT_COLLATION_SERVER --default-authentication-plugin=$INPUT_AUTHENTICATION_PLUGIN"
sh -c "$docker_run"
|
<reponame>leongaban/redux-saga-exchange
import makeCommunicationActionCreators from 'shared/helpers/redux/communication/makeCommunicationActionCreators';
import * as NS from './../../namespace';
// Communication action creators for the admin announcements feature.  Each
// makeCommunicationActionCreators call produces an execute/success/fail
// triple bound to the given redux action-type strings.
export const { execute: loadAnnouncements, completed: loadAnnouncementsSuccess, failed: loadAnnouncementsFail } =
  makeCommunicationActionCreators<NS.ILoad, NS.ILoadSuccess, NS.ILoadFail>(
    'ANNOUNCEMENT_ADMIN:LOAD',
    'ANNOUNCEMENT_ADMIN:LOAD_SUCCESS',
    'ANNOUNCEMENT_ADMIN:LOAD_FAIL',
  );

// NOTE(review): naming is inconsistent -- saveAnnouncement (singular) vs
// saveAnnouncementsSuccess/Fail (plural).  These are exported names, so
// renaming would break importers; flagging only.
export const { execute: saveAnnouncement, completed: saveAnnouncementsSuccess, failed: saveAnnouncementsFail } =
  makeCommunicationActionCreators<NS.ISave, NS.ISaveSuccess, NS.ISaveFail>(
    'ANNOUNCEMENT_ADMIN:SAVE',
    'ANNOUNCEMENT_ADMIN:SAVE_SUCCESS',
    'ANNOUNCEMENT_ADMIN:SAVE_FAIL',
  );
|
<filename>spec/tfind_spec.rb<gh_stars>0
require_relative "spec_helper"
# Specs for WithEthics::Tfind: default construction and test-folder search.
module WithEthics
  describe Tfind do
    describe "init" do
      it "should have ruby defaults" do
        t = Tfind.new
        expect(t.target).to eq('ruby')
      end
      it "should pick default folders" do
        pt = ['test', 'spec']
        t = Tfind.new paths: pt
        expect(t.paths).to eq(pt)
      end
    end
    describe "search" do
      before do
        @t = Tfind.new
        @cur = Dir.pwd
      end
      # Dir.glob / Dir.entries are stubbed so no real filesystem is touched;
      # search is expected to return [folder, [matching files]].
      it "should find easy test folder" do
        allow(Dir).to receive(:glob).with("#{ @cur }/**/test").and_return(["#{ @cur }/test"])
        allow(Dir).to receive(:entries).and_return(['.', '..', "some_test.rb"])
        pref = "#{ @cur }/test"
        expect(@t.search).to eq([pref, ["#{ pref }/some_test.rb"]] )
      end
      it "should find lower level test folder" do
        allow(Dir).to receive(:glob).with("#{ @cur }/**/test").and_return(["#{ @cur }/one/test"])
        allow(Dir).to receive(:entries).and_return(['.', '..', "some_test.rb"])
        pref = "#{ @cur }/one/test"
        expect(@t.search).to eq([pref, ["#{ pref }/some_test.rb"]] )
      end
    end
  end
end
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bus.management;
/**
*
* @author
*/
public class BusManagement {

    /**
     * Application entry point: creates the main window, centres it on the
     * screen and makes it visible.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        // Swing/AWT components must be created and shown on the Event
        // Dispatch Thread, not on the launcher thread; schedule the UI
        // construction there instead of running it directly in main.
        java.awt.EventQueue.invokeLater(new Runnable() {
            @Override
            public void run() {
                MainScreen mr = new MainScreen();
                mr.setLocationRelativeTo(null);
                mr.setVisible(true);
            }
        });
    }
}
|
#!/usr/bin/env bash
# Extracts the crackable hash from an Office document via office2john.py and
# writes it to DEST/<basename>.hash; prints the raw tool output on failure.
. "${BASH_SOURCE%/*}/common-include.sh" || exit 1

FILE="$1"
DEST="$2"
if [ $# -ne 2 ]; then
  USAGE "FILE" "DEST" && exit 1
fi

# Example of the hash format being extracted:
# [file].xlsx:$office$*2007*20*128*16*d13e91007439a2fbe62b96c790d4e243*8b2ee1b9ea360e13d75dec51d9c5fe44*6cb9a97be306019f1f1d2bb7bfa987f2122e6d48
# NOTE(review): JTR path is hard-coded to a personal checkout; gsed implies
# macOS with GNU sed installed -- confirm on target machines.
JTR="$HOME/Development/GitHub/magnumripper/JohnTheRipper/run/office2john.py"
RESULT="$("$JTR" "$FILE")"
# Keep only the $office$... portion of the matching line.
HASH="$(echo "$RESULT" | grep '\$office\$' | gsed -r 's/.*(\$office\$.*)/\1/')"
if [ -n "$HASH" ]; then
  echo "$HASH" | tee "$DEST/$(STRIP_EXTENSION "$(basename "$FILE")").hash"
else
  echo "$RESULT"
fi
|
#!/usr/bin/env bash
# Installs Clojure (system-wide) plus the Leiningen build tool, guarded by a
# marker file so the installation only runs once.
set -e

MARKER=$(basename "${BASH_SOURCE%.*}")

###############################################################################
# Clojure (+ Leiningen)
###############################################################################
# URL: https://clojure.org
###############################################################################

NAME="Clojure (+ Leiningen)"

###############################################################################
# CLOJURE_VERSION -- may be pre-set by the caller; otherwise pin a default.
[ -z "${CLOJURE_VERSION}" ] && CLOJURE_VERSION="1.10.3.986"
###############################################################################

# Quote expansions so an unusual version string cannot be word-split.
CLOJURE_URL="https://download.clojure.org/install/linux-install-${CLOJURE_VERSION}.sh"
# Fixed internal variable name typo (was LEININUNG_URL).
LEININGEN_URL="https://raw.githubusercontent.com/technomancy/leiningen/stable/bin/lein"
CLOJURE_TEMP_FILE="$(mktemp -u).sh"

###############################################################################

print_banner "$NAME" "CLOJURE_VERSION=$CLOJURE_VERSION"

if [ ! -f "$MARKER_DIRECTORY"/"$MARKER" ]; then
  # Download the official installer and run it as root, then fetch the lein
  # script, make it executable, and run it once so it self-installs; finally
  # drop the marker so future runs skip the work.
  curl -L "$CLOJURE_URL" -o "$CLOJURE_TEMP_FILE" \
    && sudo bash "$CLOJURE_TEMP_FILE" \
    && rm "$CLOJURE_TEMP_FILE" \
    && curl -L "$LEININGEN_URL" -o "$BIN_DIRECTORY"/lein \
    && chmod a+x "$BIN_DIRECTORY"/lein \
    && "$BIN_DIRECTORY"/lein \
    && finish_install "$MARKER" "CLOJURE_VERSION=$CLOJURE_VERSION"
else
  already_installed "$MARKER"
fi
|
// Jest spec for productExceptSelf: each output element is the product of all
// other input elements (no division by self).
const productExceptSelf = require('.');

test('Given [1, 2, 4, 16] the product of all the sums except itself would be [128, 64, 32, 8]', () => {
  expect(productExceptSelf([1, 2, 4, 16])).toEqual([128, 64, 32, 8]);
});
|
package com.xyoye.common_component.utils;
import android.content.Context;
import com.xyoye.common_component.base.app.BaseApplication;
/**
* Created by xyoye on 2021/1/6.
*/
public class SecurityHelper {
    // Sentinel value the native layer returns when key lookup fails.
    private static final String ERROR_RESULT = "error";
    // Opaque key selectors understood by the native getKey() implementation.
    private static final int KEY_DANDAN = 0xC1000001;
    private static final int KEY_BUGLY = 0xC1000002;

    private final Context appContext;

    static {
        // Load the JNI library backing getKey()/buildHash().
        System.loadLibrary("security");
    }

    // Private: instances are only reachable via the lazy Holder singleton.
    private SecurityHelper() {
        appContext = BaseApplication.Companion.getAppContext();
    }

    // Initialization-on-demand holder: instance created on first getInstance().
    private static class Holder {
        static SecurityHelper instance = new SecurityHelper();
    }

    /** Returns the process-wide singleton. */
    public static SecurityHelper getInstance() {
        return Holder.instance;
    }

    /** Bugly crash-reporting app id, resolved by the native library. */
    public String getBuglyId() {
        return getKey(KEY_BUGLY, appContext);
    }

    /** DanDan API app id, resolved by the native library. */
    public String getAppId() {
        return getKey(KEY_DANDAN, appContext);
    }

    /** Hash of hashInfo computed natively (algorithm defined in the .so). */
    public String buildHash(String hashInfo) {
        return buildHash(hashInfo, appContext);
    }

    /**
     * True when the native layer recognises this build/signature, i.e. key
     * lookup did not return the "error" sentinel.
     */
    public Boolean isOfficialApplication() {
        return !ERROR_RESULT.equals(getAppId());
    }

    private static native String getKey(int position, Context context);

    private static native String buildHash(String hashInfo, Context context);
}
|
<reponame>eengineergz/Lambda<filename>7-assets/past-student-repos/Lambda-School-master/Week 15/webauth-i-challenge/data/seeds/04-todos.js
exports.seed = function(knex) {
return knex("todos")
.truncate()
.then(() => {
return knex("todos").insert([
{
resource_id: 2,
project_id: 1
},
{
resource_id: 3,
project_id: 1
},
{
resource_id: 5,
project_id: 1
},
{
resource_id: 4,
project_id: 2
},
{
resource_id: 5,
project_id: 2
},
{
resource_id: 1,
project_id: 3
},
{
resource_id: 2,
project_id: 3
},
{
resource_id: 5,
project_id: 3
}
]);
});
};
|
//package mindustry.desktop.steam;
//
//import arc.*;
//import arc.func.*;
//import arc.struct.*;
//import arc.util.*;
//import arc.util.pooling.*;
//import com.codedisaster.steamworks.*;
//import com.codedisaster.steamworks.SteamFriends.*;
//import com.codedisaster.steamworks.SteamMatchmaking.*;
//import com.codedisaster.steamworks.SteamNetworking.*;
//import mindustry.core.GameState.*;
//import mindustry.core.*;
//import mindustry.game.EventType.*;
//import mindustry.game.*;
//import mindustry.net.ArcNetProvider.*;
//import mindustry.net.*;
//import mindustry.net.Net.*;
//import mindustry.net.Packets.*;
//
//import java.io.*;
//import java.nio.*;
//import java.util.concurrent.*;
//
//import static mindustry.Vars.*;
//
//public class SNet implements SteamNetworkingCallback, SteamMatchmakingCallback, SteamFriendsCallback, NetProvider{
// public final SteamNetworking snet = new SteamNetworking(this);
// public final SteamMatchmaking smat = new SteamMatchmaking(this);
// public final SteamFriends friends = new SteamFriends(this);
//
// final NetProvider provider;
//
// final PacketSerializer serializer = new PacketSerializer();
// final ByteBuffer writeBuffer = ByteBuffer.allocateDirect(1024 * 4);
// final ByteBuffer readBuffer = ByteBuffer.allocateDirect(1024 * 4);
//
// final CopyOnWriteArrayList<SteamConnection> connections = new CopyOnWriteArrayList<>();
// final IntMap<SteamConnection> steamConnections = new IntMap<>(); //maps steam ID -> valid net connection
//
// SteamID currentLobby, currentServer;
// Cons<Host> lobbyCallback;
// Runnable lobbyDoneCallback, joinCallback;
//
// public SNet(NetProvider provider){
// this.provider = provider;
//
// Events.on(ClientLoadEvent.class, e -> Core.app.addListener(new ApplicationListener(){
// //read packets
// int length;
// SteamID from = new SteamID();
//
// @Override
// public void update(){
// while((length = snet.isP2PPacketAvailable(0)) != 0){
// try{
// readBuffer.position(0);
// snet.readP2PPacket(from, readBuffer, 0);
// int fromID = from.getAccountID();
// Object output = serializer.read(readBuffer);
//
// if(net.server()){
// SteamConnection con = steamConnections.get(fromID);
// try{
// //accept users on request
// if(con == null){
// con = new SteamConnection(SteamID.createFromNativeHandle(from.handle()));
// Connect c = new Connect();
// c.addressTCP = "steam:" + from.getAccountID();
//
// Log.info("&bRecieved STEAM connection: {0}", c.addressTCP);
//
// steamConnections.put(from.getAccountID(), con);
// connections.add(con);
// net.handleServerReceived(con, c);
// }
//
// net.handleServerReceived(con, output);
// }catch(Throwable e){
// Log.err(e);
// }
// }else if(currentServer != null && fromID == currentServer.getAccountID()){
// net.handleClientReceived(output);
// }
// }catch(SteamException e){
// e.printStackTrace();
// }
// }
// }
// }));
//
// Events.on(WaveEvent.class, e -> {
// if(currentLobby != null && net.server()){
// smat.setLobbyData(currentLobby, "wave", state.wave + "");
// }
// });
// }
//
// public boolean isSteamClient(){
// return currentServer != null;
// }
//
// @Override
// public void connectClient(String ip, int port, Runnable success) throws IOException{
// if(ip.startsWith("steam:")){
// String lobbyname = ip.substring("steam:".length());
// try{
// SteamID lobby = SteamID.createFromNativeHandle(Long.parseLong(lobbyname));
// joinCallback = success;
// smat.joinLobby(lobby);
// }catch(NumberFormatException e){
// throw new IOException("Invalid Steam ID: " + lobbyname);
// }
// }else{
// provider.connectClient(ip, port, success);
// }
// }
//
// @Override
// public void sendClient(Object object, SendMode mode){
// if(isSteamClient()){
// if(currentServer == null){
// Log.info("Not connected, quitting.");
// return;
// }
//
// try{
// writeBuffer.limit(writeBuffer.capacity());
// writeBuffer.position(0);
// serializer.write(writeBuffer, object);
// writeBuffer.flip();
//
// snet.sendP2PPacket(currentServer, writeBuffer, mode == SendMode.tcp ? P2PSend.Reliable : P2PSend.UnreliableNoDelay, 0);
// }catch(Exception e){
// net.showError(e);
// }
// Pools.free(object);
// }else{
// provider.sendClient(object, mode);
// }
// }
//
//
// @Override
// public void disconnectClient(){
// if(isSteamClient()){
// if(currentLobby != null){
// smat.leaveLobby(currentLobby);
// snet.closeP2PSessionWithUser(currentServer);
// currentServer = null;
// currentLobby = null;
// net.handleClientReceived(new Disconnect());
// }
// }else{
// provider.disconnectClient();
// }
// }
//
// @Override
// public void discoverServers(Cons<Host> callback, Runnable done){
// smat.addRequestLobbyListResultCountFilter(32);
// smat.requestLobbyList();
// lobbyCallback = callback;
// lobbyDoneCallback = done;
// }
//
// @Override
// public void pingHost(String address, int port, Cons<Host> valid, Cons<Exception> failed){
// provider.pingHost(address, port, valid, failed);
// }
//
// @Override
// public void hostServer(int port) throws IOException{
// provider.hostServer(port);
// smat.createLobby(Core.settings.getBool("publichost") ? LobbyType.Public : LobbyType.FriendsOnly, Core.settings.getInt("playerlimit"));
//
// Core.app.post(() -> Core.app.post(() -> Core.app.post(() -> Log.info("Server: {0}\nClient: {1}\nActive: {2}", net.server(), net.client(), net.active()))));
// }
//
// public void updateLobby(){
// if(currentLobby != null && net.server()){
// smat.setLobbyType(currentLobby, Core.settings.getBool("publichost") ? LobbyType.Public : LobbyType.FriendsOnly);
// smat.setLobbyMemberLimit(currentLobby, Core.settings.getInt("playerlimit"));
// }
// }
//
// @Override
// public void closeServer(){
// provider.closeServer();
//
// if(currentLobby != null){
// smat.leaveLobby(currentLobby);
// for(SteamConnection con : steamConnections.values()){
// con.close();
// }
// currentLobby = null;
// }
//
// steamConnections.clear();
// }
//
// @Override
// public Iterable<? extends NetConnection> getConnections(){
// //merge provider connections
// CopyOnWriteArrayList<NetConnection> connectionsOut = new CopyOnWriteArrayList<>(connections);
// for(NetConnection c : provider.getConnections()) connectionsOut.add(c);
// return connectionsOut;
// }
//
// void disconnectSteamUser(SteamID steamid){
// //a client left
// int sid = steamid.getAccountID();
// snet.closeP2PSessionWithUser(steamid);
//
// if(steamConnections.containsKey(sid)){
// SteamConnection con = steamConnections.get(sid);
// net.handleServerReceived(con, new Disconnect());
// steamConnections.remove(sid);
// connections.remove(con);
// }
// }
//
// @Override
// public void onFavoritesListChanged(int i, int i1, int i2, int i3, int i4, boolean b, int i5){
//
// }
//
// @Override
// public void onLobbyInvite(SteamID steamIDUser, SteamID steamIDLobby, long gameID){
// Log.info("onLobbyInvite {0} {1} {2}", steamIDLobby.getAccountID(), steamIDUser.getAccountID(), gameID);
// }
//
// @Override
// public void onLobbyEnter(SteamID steamIDLobby, int chatPermissions, boolean blocked, ChatRoomEnterResponse response){
// Log.info("enter lobby {0} {1}", steamIDLobby.getAccountID(), response);
//
// if(response != ChatRoomEnterResponse.Success){
// ui.loadfrag.hide();
// ui.showErrorMessage(Core.bundle.format("cantconnect", response.toString()));
// return;
// }
//
// logic.reset();
// net.reset();
// state.set(State.menu);
//
// currentLobby = steamIDLobby;
// currentServer = smat.getLobbyOwner(steamIDLobby);
//
// Log.info("Connect to owner {0}: {1}", currentServer.getAccountID(), friends.getFriendPersonaName(currentServer));
//
// if(joinCallback != null){
// joinCallback.run();
// joinCallback = null;
// }
//
// Connect con = new Connect();
// con.addressTCP = "steam:" + currentServer.getAccountID();
//
// net.setClientConnected();
// net.handleClientReceived(con);
//
// Core.app.post(() -> Core.app.post(() -> Core.app.post(() -> Log.info("Server: {0}\nClient: {1}\nActive: {2}", net.server(), net.client(), net.active()))));
// }
//
// @Override
// public void onLobbyDataUpdate(SteamID steamID, SteamID steamID1, boolean b){
//
// }
//
// @Override
// public void onLobbyChatUpdate(SteamID lobby, SteamID who, SteamID changer, ChatMemberStateChange change){
// Log.info("lobby {0}: {1} caused {2}'s change: {3}", lobby.getAccountID(), who.getAccountID(), changer.getAccountID(), change);
// if(change == ChatMemberStateChange.Disconnected || change == ChatMemberStateChange.Left){
// if(net.client()){
// //host left, leave as well
// if(who.equals(currentServer) || who.equals(currentLobby)){
// net.disconnect();
// Log.info("Current host left.");
// }
// }else{
// //a client left
// disconnectSteamUser(who);
// }
// }
// }
//
// @Override
// public void onLobbyChatMessage(SteamID steamID, SteamID steamID1, ChatEntryType chatEntryType, int i){
//
// }
//
// @Override
// public void onLobbyGameCreated(SteamID steamID, SteamID steamID1, int i, short i1){
//
// }
//
// @Override
// public void onLobbyMatchList(int matches){
// Log.info("found {0} matches {1}", matches, lobbyDoneCallback);
//
// if(lobbyDoneCallback != null){
// Array<Host> hosts = new Array<>();
// for(int i = 0; i < matches; i++){
// try{
// SteamID lobby = smat.getLobbyByIndex(i);
// Host out = new Host(
// smat.getLobbyData(lobby, "name"),
// "steam:" + lobby.handle(),
// smat.getLobbyData(lobby, "mapname"),
// Strings.parseInt(smat.getLobbyData(lobby, "wave"), -1),
// smat.getNumLobbyMembers(lobby),
// Strings.parseInt(smat.getLobbyData(lobby, "version"), -1),
// smat.getLobbyData(lobby, "versionType"),
// Gamemode.valueOf(smat.getLobbyData(lobby, "gamemode")),
// smat.getLobbyMemberLimit(lobby),
// ""
// );
// hosts.add(out);
// }catch(Exception e){
// Log.err(e);
// }
// }
//
// hosts.sort(Structs.comparingInt(h -> -h.players));
// hosts.each(lobbyCallback);
//
// lobbyDoneCallback.run();
// }
// }
//
// @Override
// public void onLobbyKicked(SteamID steamID, SteamID steamID1, boolean b){
// Log.info("Kicked: {0} {1} {2}", steamID, steamID1, b);
// }
//
// @Override
// public void onLobbyCreated(SteamResult result, SteamID steamID){
// if(!net.server()){
// Log.info("Lobby created on server: {0}, ignoring.", steamID);
// return;
// }
//
// Log.info("Lobby {1} created? {0}", result, steamID.getAccountID());
// if(result == SteamResult.OK){
// currentLobby = steamID;
//
// smat.setLobbyData(steamID, "name", player.name);
// smat.setLobbyData(steamID, "mapname", world.getMap() == null ? "Unknown" : state.rules.zone == null ? world.getMap().name() : state.rules.zone.localizedName);
// smat.setLobbyData(steamID, "version", Version.build + "");
// smat.setLobbyData(steamID, "versionType", Version.type);
// smat.setLobbyData(steamID, "wave", state.wave + "");
// smat.setLobbyData(steamID, "gamemode", Gamemode.bestFit(state.rules).name() + "");
// }
// }
//
// public void showFriendInvites(){
// if(currentLobby != null){
// friends.activateGameOverlayInviteDialog(currentLobby);
// Log.info("Activating overlay dialog");
// }
// }
//
// @Override
// public void onFavoritesListAccountsUpdated(SteamResult steamResult){
//
// }
//
// @Override
// public void onP2PSessionConnectFail(SteamID steamIDRemote, P2PSessionError sessionError){
// if(net.server()){
// Log.info("{0} has disconnected: {1}", steamIDRemote.getAccountID(), sessionError);
// disconnectSteamUser(steamIDRemote);
// }else if(steamIDRemote.equals(currentServer)){
// Log.info("Disconnected! {1}: {0}", steamIDRemote.getAccountID(), sessionError);
// net.handleClientReceived(new Disconnect());
// }
// }
//
// @Override
// public void onP2PSessionRequest(SteamID steamIDRemote){
// Log.info("Connection request: {0}", steamIDRemote.getAccountID());
// if(net.server()){
// Log.info("Am server, accepting request from " + steamIDRemote.getAccountID());
// snet.acceptP2PSessionWithUser(steamIDRemote);
// }
// }
//
// @Override
// public void onSetPersonaNameResponse(boolean b, boolean b1, SteamResult steamResult){
//
// }
//
// @Override
// public void onPersonaStateChange(SteamID steamID, PersonaChange personaChange){
//
// }
//
// @Override
// public void onGameOverlayActivated(boolean b){
//
// }
//
// @Override
// public void onGameLobbyJoinRequested(SteamID lobby, SteamID steamIDFriend){
// Log.info("onGameLobbyJoinRequested {0} {1}", lobby, steamIDFriend);
// smat.joinLobby(lobby);
// }
//
// @Override
// public void onAvatarImageLoaded(SteamID steamID, int i, int i1, int i2){
//
// }
//
// @Override
// public void onFriendRichPresenceUpdate(SteamID steamID, int i){
//
// }
//
// @Override
// public void onGameRichPresenceJoinRequested(SteamID steamID, String connect){
// Log.info("onGameRichPresenceJoinRequested {0} {1}", steamID, connect);
// }
//
// @Override
// public void onGameServerChangeRequested(String server, String password){
//
// }
//
// public class SteamConnection extends NetConnection{
// final SteamID sid;
// final P2PSessionState state = new P2PSessionState();
//
// public SteamConnection(SteamID sid){
// super(sid.getAccountID() + "");
// this.sid = sid;
// Log.info("Create STEAM client {0}", sid.getAccountID());
// }
//
// @Override
// public void send(Object object, SendMode mode){
// try{
// writeBuffer.limit(writeBuffer.capacity());
// writeBuffer.position(0);
// serializer.write(writeBuffer, object);
// writeBuffer.flip();
//
// snet.sendP2PPacket(sid, writeBuffer, mode == SendMode.tcp ? object instanceof StreamChunk ? P2PSend.ReliableWithBuffering : P2PSend.Reliable : P2PSend.UnreliableNoDelay, 0);
// }catch(Exception e){
// Log.err(e);
// Log.info("Error sending packet. Disconnecting invalid client!");
// close();
//
// SteamConnection k = steamConnections.get(sid.getAccountID());
// if(k != null) steamConnections.remove(sid.getAccountID());
// }
// }
//
// @Override
// public boolean isConnected(){
// snet.getP2PSessionState(sid, state);
// return true;//state.isConnectionActive();
// }
//
// @Override
// public void close(){
// disconnectSteamUser(sid);
// }
// }
//}
|
<reponame>Ziezi/Programming-Principles-and-Practice-Using-C-by-Bjarne-Stroustrup-
/*
TITLE Add Help Function Chapter7Exercise6.h
"Programming: Principles and Practice Using C++" by <NAME>
COMMENT
Objective: Add a function that gives information about the
calculator functionality, capacity and use.
Input: -
Output: -
Author: <NAME>
Date: 25.02.2015
*/
#include <iostream>
#include <string>
#include <vector>
#include "Chapter7Exercise6.h"
// Entry point: prints a welcome banner and hands control to the calculator
// loop implemented in Chapter7Exercise6.h (calculate()).
int main()
{
    std::cout << "\t\tWelcome to our calculator!" << std::endl;
    calculate();
}
import adafruit_irremote
import board
import digitalio
import pulseio
class IRLedController:
    """Pairs an IR receiver with an LED on CircuitPython hardware:
    decoded remote-control button codes toggle the LED."""

    def __init__(self, ir_receiver_pin, led_pin):
        # PulseIn captures raw IR pulse timings; idle_state=True means the
        # line rests high between pulses.
        self.ir_receiver = pulseio.PulseIn(ir_receiver_pin, maxlen=100, idle_state=True)
        self.led = digitalio.DigitalInOut(led_pin)
        self.led.direction = digitalio.Direction.OUTPUT

    def decode_ir_signal(self):
        """Block until pulses arrive and decode them to a button code.

        Returns None on an NEC repeat frame (button held down).
        NOTE(review): decode_bits may raise IRDecodeException on noise,
        which is not caught here -- confirm callers tolerate that.
        """
        decoder = adafruit_irremote.GenericDecode()
        pulses = decoder.read_pulses(self.ir_receiver)
        try:
            button_code = decoder.decode_bits(pulses, debug=False)
            return button_code
        except adafruit_irremote.IRNECRepeatException:
            return None

    def control_led(self, button_code):
        """Toggle the LED when the expected button code is seen."""
        if button_code is not None:
            if button_code == 0x00FF30CF:  # Example button code, replace with actual codes
                self.led.value = not self.led.value
const mongoose = require('mongoose');
const fs = require("fs");
const path = require("path");
// Connects mongoose to the local todo-db and re-exports every model module
// found in app/models (keyed by file name without extension).
// NOTE(review): readdirSync('./app/models') resolves against process.cwd()
// while require(`./${file}`) resolves against this module's directory --
// this only lines up when this file lives in app/models and the process is
// started from the project root; confirm.
module.exports.connect = () => {
  const mongoUri = "mongodb://localhost/todo-db";
  // mongoose.connect returns a promise; passed through to the caller.
  const mongoDB = mongoose.connect(mongoUri);

  fs
    .readdirSync('./app/models')
    .filter((file) => file !== 'index.js')
    .forEach((file) => {
      module.exports[file.split('.')[0]] = require(`./${file}`);
    });

  return mongoDB
};
import { put, takeLatest } from 'redux-saga/effects';
import ModelchimpClient from 'utils/modelchimpClient';
import { LOAD_EXPERIMENT_DETAIL_METRIC } from './constants';
import {
loadExperimentMetricSuccessAction,
loadExperimentMetricErrorAction,
} from './actions';
/**
 * Worker saga: fetch the metric data for one experiment and dispatch a
 * success or error action depending on the outcome.
 */
export function* getExperimentMetricData({ modelId }) {
  const endpoint = `experiment/${modelId}/metric/`;
  try {
    const data = yield ModelchimpClient.get(endpoint);
    yield put(loadExperimentMetricSuccessAction(data));
  } catch (err) {
    yield put(loadExperimentMetricErrorAction(err));
  }
}
// Watcher saga: re-run the metric fetch on every LOAD_EXPERIMENT_DETAIL_METRIC
// action, cancelling any in-flight fetch (takeLatest semantics).
export default function* experimentMetricData() {
  yield takeLatest(LOAD_EXPERIMENT_DETAIL_METRIC, getExperimentMetricData);
}
|
#!/bin/bash
set -e

# Fail when any markdown file still contains the "_enter" placeholder
# marker that the author is expected to replace.
if grep _enter *.md > /dev/null; then
    echo "Replace all text having _enter with your input"
    exit 1
fi
#!/bin/bash
# Switch this host from Wi-Fi client mode into access-point mode.

# Stop the AP-related services before swapping their configuration.
systemctl stop hostapd dhcpcd dnsmasq
#systemctl stop NetworkManager
# Take the managed Wi-Fi interface down and make sure it is not soft-blocked.
nmcli radio wifi off
rfkill unblock wlan
# enable the AP
sudo cp config/hostapd /etc/default/hostapd
sudo cp config/dhcpcd.conf /etc/dhcpcd.conf
sudo cp config/dnsmasq.conf /etc/dnsmasq.conf
# Bring DHCP/DNS back first, then the AP daemon after a short settle delay.
systemctl restart dhcpcd dnsmasq
sleep 1
systemctl restart hostapd
#sudo reboot now
<reponame>metermd/has_salt<filename>test/has_salt_test.rb
require 'minitest/autorun'
require 'minitest/unit'
require 'minitest/pride'
require 'active_record'
require 'has_salt'

# All test models share a throwaway in-memory SQLite database.
ActiveRecord::Base.establish_connection adapter: 'sqlite3',
                                        database: ':memory:'

class BaseTable < ActiveRecord::Base
end

class HasSaltTest < MiniTest::Test
  # Recreate the schema before every test; capture_io silences the
  # schema-migration chatter.
  def setup
    capture_io do
      ActiveRecord::Schema.define(version: 1) do
        create_table :base_tables do |t|
          t.column :type, :string
          t.column :salt, :string
          t.column :sodium, :string
          t.column :salt16, :string, limit: 16
        end
      end
    end
  end

  def teardown
    capture_io do
      ActiveRecord::Base.connection.tables.each do |table|
        ActiveRecord::Base.connection.drop_table(table)
      end
    end
  end

  class DefaultSettings < BaseTable
    has_salt
  end

  # Just makes sure the module is included and can be instantiated.
  def test_basic_functionality
    DefaultSettings.create!
  end

  def test_defaults
    u = DefaultSettings.create!
    assert_equal HasSalt::DEFAULT_LENGTH, u.salt.size
  end

  def test_explicit_resalt
    u = DefaultSettings.create!
    assert_equal HasSalt::DEFAULT_LENGTH, u.salt.size
    salt1 = u.salt
    # This should not change the salt
    u.generate_salt
    assert_equal salt1, u.salt
    u.generate_salt!
    assert_equal HasSalt::DEFAULT_LENGTH, u.salt.size
    refute_equal salt1, u.salt
  end

  class OnlyTest < BaseTable
    has_salt only: -> { nevar! }
    has_salt column: :sodium, only: -> { yaaaase! }
    has_salt column: :salt16, only: :yaaaase!
    def nevar!
      false
    end
    def yaaaase!
      true
    end
  end

  def test_only
    u = OnlyTest.create!
    # assert_nil/refute_nil rather than assert_equal nil (deprecated and an
    # error in Minitest 6).
    assert_nil u.salt
    assert_equal HasSalt::DEFAULT_LENGTH, u.sodium.size
    refute_nil u.salt16
  end

  class ExplicitName < BaseTable
    has_salt column: :sodium
  end

  def test_explicit_name
    u = ExplicitName.create!
    assert_nil u.salt
    assert_equal HasSalt::DEFAULT_LENGTH, u.sodium.size
  end

  class ExplicitLength < BaseTable
    has_salt length: 15
  end

  def test_explicit_length
    u = ExplicitLength.create!
    assert_equal 15, u.salt.size
  end

  class ValidationBasedLength < BaseTable
    has_salt
    validates_length_of :salt, is: 81
  end

  # has_salt infers the salt length from an exact length validation.
  def test_validation_based_length
    u = ValidationBasedLength.create!
    assert_equal 81, u.salt.size
  end

  class ValidationRangeBasedLength < BaseTable
    has_salt
    validates_length_of :salt, in: 5..100
  end

  def test_validation_range_based_length
    u = ValidationRangeBasedLength.create!
    assert_equal 64, u.salt.size
  end

  class SchemaBasedLength < BaseTable
    has_salt column: :salt16
  end

  # has_salt infers the salt length from the column limit in the schema.
  def test_schema_based_length
    u = SchemaBasedLength.create!
    assert_equal 16, u.salt16.size
  end
end
|
# Change the address prefixes from Monero to Swap in node_modules/@mymonero/mymonero-nettype/index.js (nettype module)
# NOTE: `sed -i ''` is the BSD/macOS in-place form; on GNU sed use `sed -i` without the empty string.
# Mainnet prefixes.
sed -i '' 's/__MAINNET_CRYPTONOTE_PUBLIC_ADDRESS_BASE58_PREFIX = 18/__MAINNET_CRYPTONOTE_PUBLIC_ADDRESS_BASE58_PREFIX = 10343/g' node_modules/@mymonero/mymonero-nettype/index.js
sed -i '' 's/__MAINNET_CRYPTONOTE_PUBLIC_INTEGRATED_ADDRESS_BASE58_PREFIX = 19/__MAINNET_CRYPTONOTE_PUBLIC_INTEGRATED_ADDRESS_BASE58_PREFIX = 13671/g' node_modules/@mymonero/mymonero-nettype/index.js
sed -i '' 's/__MAINNET_CRYPTONOTE_PUBLIC_SUBADDRESS_BASE58_PREFIX = 42/__MAINNET_CRYPTONOTE_PUBLIC_SUBADDRESS_BASE58_PREFIX = 11368/g' node_modules/@mymonero/mymonero-nettype/index.js
# Testnet prefixes.
sed -i '' 's/__TESTNET_CRYPTONOTE_PUBLIC_ADDRESS_BASE58_PREFIX = 53/__TESTNET_CRYPTONOTE_PUBLIC_ADDRESS_BASE58_PREFIX = 23325/g' node_modules/@mymonero/mymonero-nettype/index.js
sed -i '' 's/__TESTNET_CRYPTONOTE_PUBLIC_INTEGRATED_ADDRESS_BASE58_PREFIX = 54/__TESTNET_CRYPTONOTE_PUBLIC_INTEGRATED_ADDRESS_BASE58_PREFIX = 20894/g' node_modules/@mymonero/mymonero-nettype/index.js
sed -i '' 's/__TESTNET_CRYPTONOTE_PUBLIC_SUBADDRESS_BASE58_PREFIX = 63/__TESTNET_CRYPTONOTE_PUBLIC_SUBADDRESS_BASE58_PREFIX = 25628/g' node_modules/@mymonero/mymonero-nettype/index.js
# Stagenet prefixes (mapped to the same values as mainnet).
sed -i '' 's/__STAGENET_CRYPTONOTE_PUBLIC_ADDRESS_BASE58_PREFIX = 24/__STAGENET_CRYPTONOTE_PUBLIC_ADDRESS_BASE58_PREFIX = 10343/g' node_modules/@mymonero/mymonero-nettype/index.js
sed -i '' 's/__STAGENET_CRYPTONOTE_PUBLIC_INTEGRATED_ADDRESS_BASE58_PREFIX = 25/__STAGENET_CRYPTONOTE_PUBLIC_INTEGRATED_ADDRESS_BASE58_PREFIX = 13671/g' node_modules/@mymonero/mymonero-nettype/index.js
sed -i '' 's/__STAGENET_CRYPTONOTE_PUBLIC_SUBADDRESS_BASE58_PREFIX = 36/__STAGENET_CRYPTONOTE_PUBLIC_SUBADDRESS_BASE58_PREFIX = 11368/g' node_modules/@mymonero/mymonero-nettype/index.js
<gh_stars>0
import React, { Component } from 'react'
import DashboardConnect from './DashboardConnect'
import { alert } from '../../alerts/Alerts'
import renderContent, { TABS } from './DashboardRenderer'
import LearnScreen from '../learn/LearnScreen'
import showAbout from '../utils/AboutPopup'
/**
 * Dashboard screen: delegates all rendering to DashboardRenderer and
 * supplies the overflow-menu options (Learn more / About).
 */
class DashboardScreen extends Component {
  static KEY = 'DashboardScreen'

  state = {
    activeTabIndex: TABS.STATS.index
  }

  render() {
    const menuOptions = [
      {
        name: 'Learn more',
        action: () => this.props.navigation.navigate(LearnScreen.KEY)
      },
      {
        name: 'About',
        action: showAbout
      }
    ]
    return renderContent({
      ministry: this.props.ministry,
      component: this,
      options: menuOptions
    })
  }
}

export default DashboardConnect.connect(DashboardScreen)
#!/bin/bash
# Fetch the Broadway H.264 decoder assets and rewrite their asset paths.
set -e
# Remove previous downloads so wget -nc starts from a clean slate.
rm -f -- *.js avc.*
# -nc skips files that already exist; `|| true` tolerates partial mirrors.
wget -nc \
    https://mbebenita.github.io/Broadway/{Decoder,YUVCanvas,Player,stream}.js \
    https://mbebenita.github.io/Broadway/avc.{wasm,wast,temp.asm.js} || true
# Point Decoder.js at the avc.* assets under the broadway/ prefix.
sed -ri 's`"(avc\.)(wasm|wast|temp\.asm\.js)"`"broadway/\1\2"`g' Decoder.js
<gh_stars>1-10
import clsx from 'clsx';
import { ButtonHTMLAttributes } from 'react';
import { Loader } from '../Loader';
import styles from './Button.module.scss';
interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
children: React.ReactNode;
loading?: boolean;
btnType?: 'primary' | 'secondary';
}
const Button = ({ children, loading, btnType = 'primary', ...rest }: Props) => {
return (
<button {...rest} className={clsx(styles.default_btn, styles[btnType])}>
{loading ? <Loader /> : children}
</button>
);
};
export default Button;
|
import sys
import requests
from typing import List
def check_subdomains(subdomains: List[str], domain: str, timeout: float = 5.0) -> None:
    """Probe each subdomain over HTTP and print whether it responds with 200.

    Args:
        subdomains: Host labels to try, e.g. ``["www", "blog"]``.
        domain: Base domain appended to each label, e.g. ``"example.com"``.
        timeout: Per-request timeout in seconds. Without a timeout a single
            unresponsive host would block the whole scan indefinitely.
    """
    for sub in subdomains:
        url_to_check = f"http://{sub}.{domain}"
        try:
            response = requests.get(url_to_check, timeout=timeout)
            available = response.status_code == 200
        except requests.RequestException:
            # DNS failure, connection refused, timeout, etc.
            available = False
        if available:
            print(f"{url_to_check} is available")
        else:
            print(f"{url_to_check} is not available")
# Example usage
subdomains = ["www", "blog", "shop"]  # host labels to probe
domain = "example.com"                # base domain they are appended to
check_subdomains(subdomains, domain)
package redis
import (
"reflect"
"testing"
"github.com/NYTimes/video-transcoding-api/config"
"github.com/NYTimes/video-transcoding-api/db"
"github.com/NYTimes/video-transcoding-api/db/redis/storage"
)
// TestCreateLocalPreset verifies that CreateLocalPreset persists the preset
// as a Redis hash under the "localpreset:<name>" key with the expected fields.
func TestCreateLocalPreset(t *testing.T) {
	err := cleanRedis()
	if err != nil {
		t.Fatal(err)
	}
	// Construct the config inline, consistent with the sibling tests.
	repo, err := NewRepository(&config.Config{Redis: new(storage.Config)})
	if err != nil {
		t.Fatal(err)
	}
	preset := db.LocalPreset{
		Name: "test",
		Preset: db.Preset{
			Name: "test",
		},
	}
	err = repo.CreateLocalPreset(&preset)
	if err != nil {
		t.Fatal(err)
	}
	client := repo.(*redisRepository).storage.RedisClient()
	defer client.Close()
	items, err := client.HGetAll("localpreset:" + preset.Name).Result()
	if err != nil {
		t.Fatal(err)
	}
	expectedItems := map[string]string{
		"preset_name":    "test",
		"preset_twopass": "false",
	}
	if !reflect.DeepEqual(items, expectedItems) {
		t.Errorf("Wrong preset hash returned from Redis. Want %#v. Got %#v", expectedItems, items)
	}
}
// TestCreateLocalPresetDuplicate checks that creating the same local preset
// twice yields db.ErrLocalPresetAlreadyExists on the second attempt.
func TestCreateLocalPresetDuplicate(t *testing.T) {
	if err := cleanRedis(); err != nil {
		t.Fatal(err)
	}
	repo, err := NewRepository(&config.Config{Redis: new(storage.Config)})
	if err != nil {
		t.Fatal(err)
	}
	localPreset := db.LocalPreset{
		Name:   "test",
		Preset: db.Preset{Name: "test"},
	}
	if err := repo.CreateLocalPreset(&localPreset); err != nil {
		t.Fatal(err)
	}
	if err := repo.CreateLocalPreset(&localPreset); err != db.ErrLocalPresetAlreadyExists {
		t.Errorf("Got wrong error. Want %#v. Got %#v", db.ErrLocalPresetAlreadyExists, err)
	}
}
// TestUpdateLocalPreset verifies that UpdateLocalPreset rewrites the stored
// hash for an existing preset.
func TestUpdateLocalPreset(t *testing.T) {
	err := cleanRedis()
	if err != nil {
		t.Fatal(err)
	}
	repo, err := NewRepository(&config.Config{Redis: new(storage.Config)})
	if err != nil {
		t.Fatal(err)
	}
	preset := db.LocalPreset{
		Name: "test",
		Preset: db.Preset{
			Name: "test",
		},
	}
	err = repo.CreateLocalPreset(&preset)
	if err != nil {
		t.Fatal(err)
	}
	// Change the inner preset name and persist the update.
	preset.Preset.Name = "test-different"
	err = repo.UpdateLocalPreset(&preset)
	if err != nil {
		t.Fatal(err)
	}
	client := repo.(*redisRepository).storage.RedisClient()
	defer client.Close()
	// The hash key is derived from LocalPreset.Name, which did not change.
	items, err := client.HGetAll("localpreset:" + preset.Name).Result()
	if err != nil {
		t.Fatal(err)
	}
	expectedItems := map[string]string{
		"preset_name":    "test-different",
		"preset_twopass": "false",
	}
	if !reflect.DeepEqual(items, expectedItems) {
		t.Errorf("Wrong presetmap hash returned from Redis. Want %#v. Got %#v", expectedItems, items)
	}
}
// TestUpdateLocalPresetNotFound checks that updating a preset that was never
// created reports db.ErrLocalPresetNotFound.
func TestUpdateLocalPresetNotFound(t *testing.T) {
	if err := cleanRedis(); err != nil {
		t.Fatal(err)
	}
	repo, err := NewRepository(&config.Config{Redis: new(storage.Config)})
	if err != nil {
		t.Fatal(err)
	}
	missing := db.LocalPreset{
		Name:   "non-existent",
		Preset: db.Preset{Name: "test"},
	}
	if err := repo.UpdateLocalPreset(&missing); err != db.ErrLocalPresetNotFound {
		t.Errorf("Wrong error returned by UpdateLocalPreset. Want ErrLocalPresetNotFound. Got %#v.", err)
	}
}
// TestDeleteLocalPreset verifies that deleting an existing preset removes its
// hash from Redis entirely.
func TestDeleteLocalPreset(t *testing.T) {
	err := cleanRedis()
	if err != nil {
		t.Fatal(err)
	}
	repo, err := NewRepository(&config.Config{Redis: new(storage.Config)})
	if err != nil {
		t.Fatal(err)
	}
	preset := db.LocalPreset{
		Name: "test",
		Preset: db.Preset{
			Name: "test",
		},
	}
	err = repo.CreateLocalPreset(&preset)
	if err != nil {
		t.Fatal(err)
	}
	err = repo.DeleteLocalPreset(&db.LocalPreset{Name: preset.Name})
	if err != nil {
		t.Fatal(err)
	}
	client := repo.(*redisRepository).storage.RedisClient()
	// Close the connection like the sibling tests do (was previously leaked).
	defer client.Close()
	result := client.HGetAll("localpreset:test")
	if len(result.Val()) != 0 {
		t.Errorf("Unexpected value after delete call: %v", result.Val())
	}
}
// TestDeleteLocalPresetNotFound checks that deleting a preset that does not
// exist reports db.ErrLocalPresetNotFound.
func TestDeleteLocalPresetNotFound(t *testing.T) {
	if err := cleanRedis(); err != nil {
		t.Fatal(err)
	}
	repo, err := NewRepository(&config.Config{Redis: new(storage.Config)})
	if err != nil {
		t.Fatal(err)
	}
	if err := repo.DeleteLocalPreset(&db.LocalPreset{Name: "non-existent"}); err != db.ErrLocalPresetNotFound {
		t.Errorf("Wrong error returned by DeleteLocalPreset. Want ErrLocalPresetNotFound. Got %#v.", err)
	}
}
|
def isArmstrongNumber(x):
    """Return True if ``x`` is an Armstrong (narcissistic) number.

    A number is an Armstrong number when the sum of its digits, each raised
    to the power of the digit count, equals the number itself
    (e.g. 1634 = 1**4 + 6**4 + 3**4 + 4**4).

    Args:
        x: An integer; negative values are never Armstrong numbers.

    Returns:
        bool: True if ``x`` equals the sum of its digits raised to the
        power of the number of digits, False otherwise.
    """
    if x < 0:
        # Matches the original behaviour: the digit loop never ran for
        # negatives, so they compared unequal to the zero sum.
        return False
    digits = str(x)
    order = len(digits)
    # Avoid shadowing the built-in ``sum``; let it do the accumulation.
    return x == sum(int(d) ** order for d in digits)
# Driver Program: classify one sample value.
n = 1634
print("Yes" if isArmstrongNumber(n) else "No")
# Output:Yes
<reponame>azavea/summer-of-maps-website
---
# Front matter comment to ensure Jekyll properly reads file.
---
$(function() {
  var hpValid = true;
  var $formEls = $(".contact-form");
  var $submitButton = $(".contact-form-submit");
  var formEndpoint = "https://azavea.us1.list-manage.com/subscribe/post?u=61da999c9897859f1c1fff262&id=d3663223c9";

  // Spam mitigation: keep the submit button disabled and the form action
  // unset for a few seconds, so naive bots that submit immediately fail.
  setTimeout(function() {
    $submitButton.each(function() {
      $(this).attr("disabled", false);
    });
    $($formEls).each(function() {
      $(this).attr("action", formEndpoint);
    });
  }, 3500);

  // For each form run Pristine client-side validation plus a honeypot
  // check before allowing the real submit.
  $($formEls).each(function() {
    var pristine = new Pristine(this);
    $(this).on('submit', function(e) {
      e.preventDefault();
      // Honeypot fields: hidden inputs that real users never fill in.
      var hpFields_1 = $(".contact-method-fullname");
      var hpFields_2 = $(".contact-method-phone");
      // Removed leftover debug console.log calls from the honeypot branch.
      hpValid = !($(hpFields_1).val() || $(hpFields_2).val());
      var checkValidity = pristine.validate();
      if (checkValidity && hpValid) {
        this.submit();
      }
    });
  });
});
//
// Created by vitdmit on 2/24/19.
//

#ifndef LEXER_RESERVEDWORDSFINITEMACHINE_H
#define LEXER_RESERVEDWORDSFINITEMACHINE_H

#include "FiniteStateMachine.h"

// Finite-state machine specialisation for the lexer; presumably recognises
// the language's reserved words (name-based -- confirm against the .cpp).
class ReservedWordsFiniteMachine: public FiniteStateMachine {
public:
    // Builds and returns the token for the most recently recognised lexeme.
    Token * getToken() override;
    // Consumes characters of `str` starting at index `i` (advanced in place);
    // `row` is the source line, presumably for position reporting.
    State processString(std::string str, int &i, int row) override;
protected:
    // Advances the machine by one input character; returns the new state.
    State handleInput(char symbol) override;
};

#endif //LEXER_RESERVEDWORDSFINITEMACHINE_H
#!/usr/bin/env bash
#
# Some nonsensical combinations which can all be detected at PARSE TIME.
# All shells allow these, but right now OSH disallowed.
# TODO: Run the parser on your whole corpus, and then if there are no errors,
# you should make OSH the OK behavior, and others are OK.
#
# Each "### name" section below is one spec-test case -- the trailing
# commented lines record the expected status and stdout per shell, so
# they are metadata and must not be edited casually.

### Prefix env on assignment
f() {
  # NOTE: local treated like a special builtin!
  E=env local v=var
  echo $E $v
}
f
# status: 0
# stdout: env var
# OK bash stdout: var
# OK osh status: 2
# OK osh stdout-json: ""

### Redirect on assignment
f() {
  # NOTE: local treated like a special builtin!
  local E=env > _tmp/r.txt
}
rm -f _tmp/r.txt
f
test -f _tmp/r.txt && echo REDIRECTED
# status: 0
# stdout: REDIRECTED
# OK osh status: 2
# OK osh stdout-json: ""

### Prefix env on control flow
for x in a b c; do
  echo $x
  E=env break
done
# status: 0
# stdout: a
# OK osh status: 2
# OK osh stdout-json: ""

### Redirect on control flow
rm -f _tmp/r.txt
for x in a b c; do
  break > _tmp/r.txt
done
test -f _tmp/r.txt && echo REDIRECTED
# status: 0
# stdout: REDIRECTED
# OK osh status: 2
# OK osh stdout-json: ""
package com.view.calender.horizontal.umar.horizontalcalendarview;

/**
 * Callback contract for the horizontal calendar view.
 */
public interface HorizontalCalendarListener {
    /** Invoked during scrolling so the host can refresh the displayed month. */
    void updateMonthOnScroll(DayDateMonthYearModel selectedDate);

    /** Invoked when the user selects a new date. */
    void newDateSelected(DayDateMonthYearModel selectedDate);
}
|
package collector_test
import (
"testing"
"github.com/mylxsw/sync/collector"
"github.com/stretchr/testify/assert"
)
// TestNewProgress exercises the progress collector: Percentage() tracks the
// processed/total ratio and Add() grows the processed count and Total().
func TestNewProgress(t *testing.T) {
	progress := collector.NewProgress(1042)
	progress.SetTotal(100)
	assert.True(t, progress.Percentage() < 0.1 && progress.Percentage() > 0.09)

	progress.Add(100)
	assert.True(t, progress.Percentage() < 0.2 && progress.Percentage() > 0.19)
	assert.EqualValues(t, 200, progress.Total())
}
|
#!/bin/bash
# SLURM batch job: preprocess one ROM video clip on a single GPU node.
#SBATCH --account=def-dkulic
#SBATCH --gres=gpu:1              # request GPU generic resource
#SBATCH --cpus-per-task=2         #Maximum of CPU cores per GPU request: 6 on Cedar, 16 on Graham.
#SBATCH --mem=8000M               # memory per node
#SBATCH --time=0-01:30            # time (DD-HH:MM)
#SBATCH --output=./job_script_output/Camera1_Sep_17_1500_1600_Prescribed_Behavior_4_%N-%j.out  # %N for node name, %j for jobID

## Main processing command
## -v: path to the raw video file
## -o: directory to save processed video
python ./process_video_low_frequent_frame.py -v ../ROM_raw_videos_clips/Sep_17/Camera1_Sep_17_1500_1600_Prescribed_Behavior_4.mp4 -o ../ROM_raw_videos_clips_processed_camera2/Sep_17
import curses
class Border:
    """Thin wrapper around a curses derived window used as a border region.

    Side effect: the constructor puts the *parent* window into non-blocking
    input mode via nodelay(True).
    """

    def __init__(self, parent, height, width, y_start, x_start):
        self.parent = parent
        # Make reads from the parent window non-blocking.
        self.parent.nodelay(True)
        # Derived window positioned relative to the parent.
        self.border = self.parent.derwin(height, width, y_start, x_start)

    def set_dimensions(self, height, width):
        # Resize the border window to height x width (curses resize).
        self.border.resize(height, width)

    def set_position(self, y_start, x_start):
        # Move the border window so its top-left is at (y_start, x_start).
        self.border.mvwin(y_start, x_start)
# Example usage
def main():
stdscr = curses.initscr()
border = Border(stdscr, 10, 20, 5, 5)
border.set_dimensions(15, 25)
border.set_position(3, 3)
stdscr.refresh()
stdscr.getch()
curses.endwin()
if __name__ == "__main__":
main() |
package com.yoavfranco.wikigame.utils;
import java.io.Serializable;
/**
* Created by yoav on 02/04/17.
*/
/**
 * Simple serializable value object holding a search result's title and
 * description.
 *
 * NOTE(review): "decription" is a typo for "description" that is baked into
 * the public setter name; renaming it would break existing callers.
 */
public class SearchItem implements Serializable {
    String title;
    String decription;

    public SearchItem(String title, String decription) {
        this.title = title;
        this.decription = decription;
    }

    /** Returns the description (historically exposed as the "subject"). */
    public String getSubject() {
        return decription;
    }

    public void setDecription(String decription) {
        this.decription = decription;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }
}
|
// Mocked API responses for local development / testing.
module.exports = {
  // Returns the auth token for the mocked admin user.
  getUserInfo: {
    code: 200,
    msg: 'success',
    data: {
      token: 'admin'
    }
  },
  // Logout always succeeds with an empty payload.
  logout: {
    code: 200,
    msg: 'success',
    data: ''
  },
  // Login returns the mocked user's profile and roles.
  login: {
    code: 200,
    data: {
      name: '刘芳',
      avatar: 'user02.png',
      organizations: [],
      roles: ['admin']
    }
  }
}
|
package net.community.chest.net.proto.text.smtp;
import java.io.IOException;
import java.io.OutputStream;
import net.community.chest.net.dns.DNSAccess;
import net.community.chest.net.proto.text.NetServerWelcomeLine;
import net.community.chest.net.proto.text.TextProtocolNetConnection;
/**
 * <P>Copyright 2007 as per GPLv2</P>
 *
 * <P>Represents the SMTP access protocol</P>
 *
 * @author (author name redacted in source)
 * @since Sep 20, 2007 7:24:28 AM
 */
public interface SMTPAccessor extends TextProtocolNetConnection {
    /**
     * Resolves MX record and attempts to connect to an MX gateway in order
     * of ascending preference (Note: cannot be called if already connected)
     * @param nsa The {@link DNSAccess} instance to be used to resolve MX records
     * @param dmName domain name to whose MX gateway we want to connect
     * @param nPort port number on which to connect
     * @param wl string to be used to fill in the welcome response (null == not needed)
     * @throws IOException if connection handling error
     */
    void mxConnect (DNSAccess nsa, String dmName, int nPort, NetServerWelcomeLine wl) throws IOException;
    /**
     * Resolves MX record and attempts to connect to an MX gateway in order
     * of ascending preference (Note: cannot be called if already connected)
     * @param nsa The {@link DNSAccess} instance to be used to resolve MX records
     * @param dmName domain name to whose MX gateway we want to connect
     * @param nPort port number on which to connect
     * @throws IOException if connection handling error
     */
    void mxConnect (DNSAccess nsa, String dmName, int nPort) throws IOException;
    /**
     * Resolves MX record and attempts to connect to an MX gateway in order
     * of ascending preference (Note: cannot be called if already connected)
     * @param nsa The {@link DNSAccess} instance to be used to resolve MX records
     * @param dmName domain name to whose MX gateway we want to connect
     * @param wl string to be used to fill in the welcome response (null == not needed)
     * @throws IOException if connection handling error
     */
    void mxConnect (DNSAccess nsa, String dmName, NetServerWelcomeLine wl) throws IOException;
    /**
     * Resolves MX record and attempts to connect to an MX gateway in order
     * of ascending preference (Note: cannot be called if already connected)
     * @param nsa The {@link DNSAccess} instance to be used to resolve MX records
     * @param dmName domain name to whose MX gateway we want to connect
     * @throws IOException if connection handling error
     */
    void mxConnect (DNSAccess nsa, String dmName) throws IOException;
    /**
     * Sends the HELO command
     * @param dmn domain to be sent as parameter - if null/empty then current
     * host domain is used
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse helo (String dmn /* may be null/empty */) throws IOException;
    /**
     * Sends the EHLO command
     * @param dmn domain to be sent as parameter - if null/empty then current
     * host domain is used
     * @param reporter "callback" used to pass along the resulting reported
     * capabilities of the EHLO command. If null, then the report is not needed
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse ehlo (String dmn, ESMTPCapabilityHandler reporter) throws IOException;
    /**
     * Sends the EHLO command (doesn't care about reported capabilities)
     * @param dmn domain to be sent as parameter - if null/empty then current host
     * domain is used
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse ehlo (String dmn) throws IOException;
    /**
     * Sends the EHLO command and returns the capabilities
     * @param dmn domain to be sent as parameter - if null/empty then current host domain is used
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPExtendedHeloResponse capabilities (String dmn) throws IOException;
    /**
     * Performs the ESMTP plaintext AUTH LOGIN protocol
     * @param username username for authentication
     * @param password password for authentication
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse authLogin (String username, String password) throws IOException;
    /**
     * Performs the ESMTP plaintext AUTH PLAIN protocol
     * @param username username for authentication
     * @param password password for authentication
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse authPlain (String username, String password) throws IOException;
    /**
     * Performs the ESMTP AUTH CRAM-MD5 protocol
     * @param username username for authentication
     * @param password password for authentication
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse authCRAMMD5 (String username, String password) throws IOException;
    /**
     * Sets the SMTP sender (MAIL FROM:)
     * @param sender sender address (without enclosing "<>") - may be null/empty
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse mailFrom (String sender) throws IOException;
    /**
     * Adds a mail target identity (RCPT TO:)
     * @param recip recipient address (without enclosing "<>")
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse rcptTo (String recip) throws IOException;
    /**
     * Starts the DATA transfer stage
     * @return SMTP response code
     * @throws IOException if network errors
     * @see SMTPProtocol for known response codes
     */
    SMTPResponse startData () throws IOException;
    /**
     * Writes content data to the connection. This method should be called ONLY
     * after successfully starting the protocol data stage.
     * @param data data buffer to be written
     * @param startOffset offset within the buffer to write from (inclusive)
     * @param len number of characters to write
     * @param flushIt if TRUE then write is flushed to the network after writing the required data
     * @return number of written characters (should be same as <I>"len"</I> parameter)
     * @throws IOException If failed to write the data
     */
    int writeData (char[] data, int startOffset, int len, boolean flushIt) throws IOException;
    /**
     * Writes content data to the connection. This method should be called ONLY
     * after successfully starting the protocol data stage.
     * @param data data buffer to be written (entirely)
     * @param flushIt if TRUE then write is flushed to the network after writing the required data
     * @return number of written characters (should be same as <I>"data.length"</I> value)
     * @throws IOException if network errors
     */
    int writeData (char[] data, boolean flushIt) throws IOException;
    /**
     * Writes specified bytes as if they were 8-bit ASCII characters
     * @param buf buffer from which to write
     * @param startPos index in buffer to start writing
     * @param maxLen number of bytes to write
     * @param flushIt if TRUE then channel is flushed AFTER writing the data
     * @return number of written bytes (should be EXACTLY the same as <I>"maxLen"</I> parameter)
     * @throws IOException if network (or other errors)
     */
    int writeBytes (final byte[] buf, final int startPos, final int maxLen, final boolean flushIt) throws IOException;
    /**
     * Writes specified bytes as if they were 8-bit ASCII characters
     * @param buf buffer from which to write (may be null/empty)
     * @param flushIt if TRUE then channel is flushed AFTER writing the data
     * @return number of written bytes (should be EXACTLY the same as <I>"buf.length"</I> parameter)
     * @throws IOException if network (or other errors)
     */
    int writeBytes (final byte[] buf, final boolean flushIt) throws IOException;
    /**
     * Multiplier used when waiting for the DATA stage end response from the server
     */
    public static final int DATA_END_TIMEOUT_FACTOR=4;
    /**
     * ".CRLF" used to signal end-of-message in SMTP data stage
     */
    public static final char[] EOM_SIGNAL={ '.', '\r', '\n' };
    /**
     * Ends the data stage by sending an end-of-message indicator ".CRLF".
     * Note: for this stage, the wait time for the server response is 4 times
     * the default timeout
     * @param addCRLF if TRUE then an additional CRLF is sent BEFORE the
     * end-of-message indicator
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse endData (boolean addCRLF) throws IOException;
    /**
     * "Masks" the object as an output stream
     * @param autoClose if TRUE then calling the <I>"close"</I> method of the
     * output stream closes
     * the connection as well - in this case an ORDERLY <I>"endData"</I> and
     * <I>"quit"</I> is done.
     * @return output stream object
     * @throws IOException if errors
     */
    OutputStream asOutputStream (final boolean autoClose) throws IOException;
    /**
     * Resets the connection state (RSET)
     * @return SMTP response code
     * @throws IOException if network errors
     * @see SMTPProtocol for known response codes
     */
    SMTPResponse reset () throws IOException;
    /**
     * Sends the QUIT command - Note: implementors SHOULD also automatically
     * close the connection. However, callers should also call the <I>"close"</I>
     * method (just in case)
     * @return SMTP response code
     * @throws IOException if network errors
     */
    SMTPResponse quit () throws IOException;
    /**
     * Does the handshake required to prepare the connection for receiving
     * data - i.e., MAIL FROM: followed by RCPT TO:(s). Note: assumes that
     * HELO/EHLO stage has been done
     * @param sender sender (MAIL FROM:) - may be empty/null
     * @param recips recipients - may not be null/empty, and MUST include
     * at least <U>one</U> recipient string.
     * @return last valid response. Note: should be the response of the DATA
     * request. However, if any previous stage (MAIL FROM:, RCPT TO:)
     * fails, then its response object is returned. In other words, the caller
     * should check that the returned response code is {@link SMTPProtocol#SMTP_E_START_INP}
     * @throws IOException if networking errors
     */
    SMTPResponse doDataHandshake (String sender, String... recips) throws IOException;
    /**
     * Connects and does the handshake up to and including the DATA stage
     * @param server server to which to connect the SMTP protocol
     * @param port port number to connect to - if <=0 then default is used
     * @param sender sender (MAIL FROM:) - may be empty/null
     * @param recips recipients - may not be null/empty, and MUST include
     * at least <U>one</U> recipient string.
     * @return last valid response. Note: should be the response of the DATA
     * request. However, if any previous stage (HELO, MAIL FROM:, RCPT TO:)
     * fails, then its response object is returned. In other words, the caller
     * should check that the returned response code is {@link SMTPProtocol#SMTP_E_START_INP}
     * @throws IOException if networking errors
     */
    SMTPResponse connectAndDoDataHandshake (String server, int port, String sender, String... recips) throws IOException;
}
|
#!/bin/bash
# SLURM batch job: run one meta.py configuration (maxtanh activation, seed 1).
#SBATCH -J Act_maxtanh_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1      # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00   # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins

#module load intel python/3.5
# Positional args: activation seed optimizer depth dropout epochs lr init embedding
python3 /home/se55gyhe/Act_func/progs/meta.py maxtanh 1 sgd 2 0.48565649814059864 95 0.01147140842912888 runiform PE-infersent
#!/bin/bash -eux
# CI bootstrap for UltraGrid on Ubuntu: exports build environment variables,
# installs build dependencies, and builds/installs optional SDKs.

# Build-time environment consumed by later CI steps.
echo "AJA_DIRECTORY=/var/tmp/ntv2sdk" >> $GITHUB_ENV
echo "CPATH=/usr/local/qt/include" >> $GITHUB_ENV
echo "LIBRARY_PATH=/usr/local/qt/lib" >> $GITHUB_ENV
echo "PKG_CONFIG_PATH=/usr/local/qt/lib/pkgconfig" >> $GITHUB_ENV
echo "/usr/local/qt/bin" >> $GITHUB_PATH

# TOREMOVE: needed only for older CUDA found in Ubuntu 16.04 and 18.04
if command -v gcc-5; then
    CUDA_HOST_COMPILER=gcc-5
elif command -v gcc-6; then
    CUDA_HOST_COMPILER=gcc-6
else
    CUDA_HOST_COMPILER=
fi
echo "CUDA_HOST_COMPILER=$CUDA_HOST_COMPILER" >> $GITHUB_ENV

sudo add-apt-repository ppa:devilutionx/dev # SDL 2.0.14 - CESNET/UltraGrid#168
sudo add-apt-repository ppa:savoury1/vlc3 # new x265
sudo sed -n 'p; /^deb /s/^deb /deb-src /p' -i /etc/apt/sources.list # for build-dep ffmpeg
sudo apt update
sudo apt -y upgrade
sudo apt install libcppunit-dev
sudo apt --no-install-recommends install nvidia-cuda-toolkit
sudo apt install libglew-dev freeglut3-dev libgl1-mesa-dev
sudo apt install libx11-dev
sudo apt install libsdl2-dev
sudo apt install libspeexdsp-dev
sudo apt install libssl-dev
sudo apt install libasound-dev libjack-jackd2-dev libnatpmp-dev libv4l-dev portaudio19-dev

# updates nasm 2.13->2.14 in U18.04 (needed for rav1e)
update_nasm() {
    # Skip on releases that do not ship the nasm-mozilla package.
    if [ -z "$(apt-cache search --names-only '^nasm-mozilla$')" ]; then
        return
    fi
    sudo apt install nasm- nasm-mozilla
    sudo ln -s /usr/lib/nasm-mozilla/bin/nasm /usr/bin/nasm
}

# for FFmpeg - libzmq3-dev needs to be ignored (cannot be installed, see run #380)
FFMPEG_BUILD_DEP=`apt-cache showsrc ffmpeg | grep Build-Depends: | sed 's/Build-Depends://' | tr ',' '\n' |cut -f 2 -d\  | grep -v libzmq3-dev`
sudo apt install $FFMPEG_BUILD_DEP libdav1d-dev
# Remove distro codec packages so the locally built ones win.
sudo apt-get -y remove 'libavcodec*' 'libavutil*' 'libswscale*' libvpx-dev 'libx264*' nginx
update_nasm
sudo apt --no-install-recommends install asciidoc xmlto
sudo apt install libopencv-dev
sudo apt install libglib2.0-dev libcurl4-nss-dev
sudo apt install libtool # gpujpeg

# Build and install GPUJPEG and CineForm from bundled sources.
( ./bootstrap_gpujpeg.sh -d && mkdir ext-deps/gpujpeg/build && cd ext-deps/gpujpeg/build && CUDA_FLAGS=-D_FORCE_INLINES CXXFLAGS=-std=c++11 CC=$CUDA_HOST_COMPILER ../autogen.sh && make && sudo make install && sudo ldconfig || exit 1 )
( sudo apt install uuid-dev && git submodule update --init cineform-sdk && cd cineform-sdk/ && cmake -DBUILD_TOOLS=OFF . && make CFHDCodecStatic || exit 1 )
sudo apt install qtbase5-dev
sudo chmod 777 /usr/local

# Install XIMEA
wget --no-verbose https://www.ximea.com/downloads/recent/XIMEA_Linux_SP.tgz
tar xzf XIMEA_Linux_SP.tgz
cd package
sudo ./install

# Install AJA (only when the private SDK URL is configured).
if [ -n "$SDK_URL" ]; then
    if curl -f -S $SDK_URL/ntv2sdklinux.zip -O; then
        FEATURES="${FEATURES:+$FEATURES }--enable-aja"
        echo "FEATURES=$FEATURES" >> $GITHUB_ENV
        unzip ntv2sdklinux.zip -d /var/tmp
        mv /var/tmp/ntv2sdk* /var/tmp/ntv2sdk
        cd /var/tmp/ntv2sdk/ajalibraries/ajantv2
        export CXX='g++ -std=gnu++11'
        make -j $(nproc)
    fi
fi

# Install NDI (only on the dedicated ndi-build branch).
if [ -n "$SDK_URL" -a "$GITHUB_REF" = refs/heads/ndi-build ]; then
    curl -f -S $SDK_URL/NDISDK_Linux.tar.gz -O
    tar -C /var/tmp -xzf NDISDK_Linux.tar.gz
    yes | PAGER=cat /var/tmp/Install*NDI*sh
    sudo cp -r NDI\ SDK\ for\ Linux/include/* /usr/local/include
    cat NDI\ SDK\ for\ Linux/Version.txt | sed 's/\(.*\)/\#define NDI_VERSION \"\1\"/' | sudo tee /usr/local/include/ndi_version.h
    sudo cp -r NDI\ SDK\ for\ Linux/lib/x86_64-linux-gnu/* /usr/local/lib
    sudo ldconfig
fi

# Install live555 (pinned revision).
git clone https://github.com/xanview/live555/
cd live555
git checkout 35c375
./genMakefiles linux-64bit
make -j $(nproc) CPLUSPLUS_COMPILER="c++ -DXLOCALE_NOT_USED"
sudo make install
cd ..

# Install cross-platform deps
$GITHUB_WORKSPACE/.github/scripts/install-common-deps.sh
|
/*******************************************************************************
* Copyright (c) 2016 comtel inc.
*
* Licensed under the Apache License, version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*******************************************************************************/
package org.jfxvnc.app.presentation;
import java.awt.Toolkit;
import java.net.URL;
import java.text.MessageFormat;
import java.util.ResourceBundle;
import javax.inject.Inject;
import org.jfxvnc.app.persist.SessionContext;
import org.jfxvnc.app.presentation.detail.DetailView;
import org.jfxvnc.app.presentation.vnc.VncView;
import org.jfxvnc.net.rfb.render.ProtocolConfiguration;
import org.jfxvnc.ui.service.VncRenderService;
import javafx.animation.KeyFrame;
import javafx.animation.KeyValue;
import javafx.animation.Timeline;
import javafx.application.Platform;
import javafx.beans.binding.Bindings;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.css.PseudoClass;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.ProgressIndicator;
import javafx.scene.control.Slider;
import javafx.scene.control.SplitPane;
import javafx.scene.control.ToggleButton;
import javafx.scene.layout.BorderPane;
import javafx.util.Duration;
/**
 * Presenter for the main application view: wires the VncRenderService state
 * to the toolbar buttons, status label, zoom slider and the settings drawer
 * (the right-hand side of the SplitPane).
 */
public class MainViewPresenter implements Initializable {
@Inject
SessionContext ctx;
@Inject
VncRenderService service;
@FXML
BorderPane mainPane;
@FXML
private Label statusLabel;
@FXML
private ProgressIndicator progress;
@FXML
private Button connectBtn;
@FXML
private Button disconnectBtn;
@FXML
private ToggleButton gearBtn;
@FXML
private Slider zoomSlider;
@FXML
private ToggleButton fullScreenBtn;
@FXML
private SplitPane splitPane;
// Timestamp of the last audible bell; used to rate-limit beeps (see bell()).
private volatile long lastPing = 0;
private final static PseudoClass CONNECT_CLASS = PseudoClass.getPseudoClass("connect");
private final static PseudoClass ONLINE_CLASS = PseudoClass.getPseudoClass("online");
private final static PseudoClass WINDOW_CLASS = PseudoClass.getPseudoClass("window");
// Backs the status label; registered with the session context in initialize().
private final StringProperty statusProperty = new SimpleStringProperty("-", "mainview.status");
@Override
public void initialize(URL location, ResourceBundle rb) {
ctx.addBinding(statusProperty);
VncView vncView = new VncView();
DetailView detailView = new DetailView();
splitPane.getItems().addAll(vncView.getView(), detailView.getView());
// Divider at 1.0 fully hides the detail (settings) pane.
splitPane.getDividers().get(0).setPosition(1.0);
statusLabel.textProperty().bind(statusProperty);
// Animate the settings drawer open (0.80) or closed (1.0) over 200 ms.
gearBtn.selectedProperty().addListener(l -> {
SplitPane.Divider divider = splitPane.getDividers().get(0);
KeyValue value = new KeyValue(divider.positionProperty(), gearBtn.isSelected() ? 0.80 : 1.0);
new Timeline(new KeyFrame(Duration.seconds(0.2), value)).play();
});
gearBtn.setSelected(true);
// Button captions switch between listening-mode and direct-connect wording.
connectBtn.textProperty().bind(Bindings.createStringBinding(
() -> service.listeningModeProperty().get() ? rb.getString("button.listening") : rb.getString("button.connect"), service.listeningModeProperty()));
disconnectBtn.textProperty().bind(Bindings.createStringBinding(
() -> service.listeningModeProperty().get() ? rb.getString("button.cancel") : rb.getString("button.disconnect"), service.listeningModeProperty()));
disconnectBtn.disableProperty().bind(connectBtn.disabledProperty().not());
fullScreenBtn.selectedProperty().bindBidirectional(service.fullSceenProperty());
fullScreenBtn.selectedProperty().addListener((l, o, n) -> fullScreenBtn.pseudoClassStateChanged(WINDOW_CLASS, n));
progress.visibleProperty().bind(service.connectingProperty());
zoomSlider.valueProperty().bindBidirectional(service.zoomLevelProperty());
// Mouse wheel over the VNC view nudges the zoom level by 1% per tick.
vncView.getView().setOnScroll(e -> service.zoomLevelProperty().set(service.zoomLevelProperty().get() + (e.getDeltaY() > 0.0 ? 0.01 : -0.01)));
service.zoomLevelProperty()
.addListener((l, o, z) -> statusProperty.set(MessageFormat.format(rb.getString("status.zoom.scale"), Math.floor(z.doubleValue() * 100))));
// Map protocol state changes to localized status messages on the FX thread.
service.protocolStateProperty().addListener((l, o, event) -> Platform.runLater(() -> {
switch (event) {
case LISTENING:
statusProperty.set(rb.getString("status.listening"));
break;
case CLOSED:
statusProperty.set(rb.getString("status.closed"));
break;
case HANDSHAKE_STARTED:
ProtocolConfiguration config = service.getConfiguration();
statusProperty.set(MessageFormat.format(rb.getString("status.try.connect"), config.hostProperty().get(), config.portProperty().get()));
break;
case HANDSHAKE_COMPLETE:
statusProperty.set(rb.getString("status.open"));
gearBtn.setSelected(false);
break;
case SECURITY_FAILED:
statusProperty.set(rb.getString("status.auth.failed"));
break;
case SECURITY_COMPLETE:
statusProperty.set(rb.getString("status.auth.done"));
break;
default:
break;
}
}));
service.connectedProperty().addListener((l, o, n) -> Platform.runLater(() -> connectBtn.setDisable(n)));
service.connectingProperty().addListener((l, o, n) -> Platform.runLater(() -> gearBtn.pseudoClassStateChanged(CONNECT_CLASS, n)));
service.onlineProperty().addListener((l, o, n) -> Platform.runLater(() -> gearBtn.pseudoClassStateChanged(ONLINE_CLASS, n)));
// Connection errors are surfaced through the status label.
service.exceptionCaughtProperty().addListener((l, o, n) -> Platform.runLater(() -> {
// Notifications.create().owner(mainPane).position(Pos.TOP_CENTER).text(n.getMessage()).showError();
statusProperty.set(n.getMessage());
}));
service.bellProperty().addListener(l -> bell());
}
@FXML
void connect(ActionEvent event) {
service.connect();
}
@FXML
void disconnect(ActionEvent event) {
service.disconnect();
}
// Emit a system beep, rate-limited to at most once every two seconds.
private void bell() {
long time = System.currentTimeMillis();
if (lastPing > time - 2000) {
return;
}
lastPing = time;
Toolkit.getDefaultToolkit().beep();
Platform.runLater(() -> statusProperty.set("Bell"));
}
}
|
<gh_stars>1-10
/*********************************************
* @Author : Daniel_Elendeer
* @Date : 2020-12-30 15:50:09
* @LastEditors : Daniel_Elendeer
* @LastEditTime : 2021-01-01 16:49:21
* @Description :
*********************************************/
// FIX: the previous guard __ADDRESS_PARSER_HPP__ used a double underscore,
// which is a reserved identifier in C++ ([lex.name]); renamed to a safe guard.
#ifndef ADDRESS_PARSER_HPP
#define ADDRESS_PARSER_HPP
#include <iostream>
#include <string>
#include <vector>
// Resolves and inspects filesystem paths relative to the process's working
// directory and the directory containing the main executable.
class AddressParser {
using string = std::string;
private:
// Cached current working directory.
string m_current_working_directory;
// Cached directory of the main executable.
string m_main_directory;
// Return true if 'path' is an absolute path.
bool isAbsPath(string path);
public:
AddressParser();
~AddressParser();
// Get current working directory.
string getCwd() const;
// Get directory of main exec file.
// NOTE(review): name contains a typo ("Diretory"); kept as-is so existing
// callers keep compiling.
string getExecDiretory() const;
// Get the path of help.txt.
string getHelpPath() const;
// Parse a relative path to an absolute path.
string parseRelativePath(string path);
// Return true if path is not pointing to a file but a directory.
bool isDir(string path);
// Get sub paths of a directory.
std::vector<string> getSubPaths(string dir_path);
};
#endif
|
# shellcheck shell=bash
# toolbelt.sh
# https://github.com/TobyGiacometti/toolbelt.sh
# Copyright (c) 2021 Toby Giacometti and contributors
# Apache License 2.0
public__register() { :; }
|
<filename>resources/js/components/particulas.js
import React from 'react';
import Particles from 'react-particles-js';
// import Yo from '../../../public/images/yop.jpg';
class Particula extends React.Component {
state = {
loading: true,
error: null,
data: [],
image: [],
};
componentDidMount(){
this.fetchData();
}
fetchData = async () => {
this.setState({loading: true, error: null});
try{
const response = await fetch('http://127.0.0.1:8888/api/introduccion/1');
const data = await response.json();
this.setState({data: data});
}catch(error){
this.setState({error: error});
}
try{
const response = await fetch('http://127.0.0.1:8888/api/image/1');
const image = await response.json();
this.setState({loading: false, image: image});
}catch(error){
this.setState({loading: false, error: error});
}
}
render() {
if(this.state.loading === true){
return 'Loading...'
}
return(
<div className="contenedor">
<div className="particulas">
<Particles
params={{
polygon: {
draw: {
stroke: {
enable: false,
color: "#000000"
}
}
},
interactivity:{
events: {
onhover:{
enable: true,
mode: "repulse"
},
resize: true
},
modes: {
grab: {
distance: 400,
line_linked: {
opacity: 1
}
}
}
},
particles: {
color:{
value: "#000000"
},
shape:{
type: "circle",
stroke:{
color: "#000000"
}
},
number: {
value: 90,
density: {
enable: true,
value_area: 900
}
},
line_linked: {
color: "#000000",
shadow: {
enable: true,
color: "#000000",
blur: 10
}
}
}
}}
/>
</div>
<div className="titulo">
<div className="container">
<div className="row">
<div className="col-md-3 offset-md-2 col-sm-12">
<div className="bajada">
<div className="image-alineacion">
<img className="image-principal" src={this.state.image.inicio} alt="yop"/>
</div>
</div>
</div>
<div className="col-md-6 col-sm-12 text-left p-4">
<div className="subida">
<h3>{this.state.data.saludo}</h3>
<h4>{this.state.data.identificacion}</h4>
<h6>
{this.state.data.introduccion}
</h6>
</div>
</div>
</div>
</div>
</div>
</div>
);
}
}
export default Particula; |
def sort_words(words, key=None, reverse=False):
    """Return a new list with the given words sorted.

    Backward-compatible generalization: by default sorts ascending with the
    natural string order, exactly as before.

    :param words: iterable of words to sort (not modified).
    :param key: optional key function forwarded to ``sorted``.
    :param reverse: sort in descending order when True.
    :return: a new sorted list.
    """
    return sorted(words, key=key, reverse=reverse)


words = ['banana', 'apple', 'orange', 'pear', 'watermelon']
sorted_words = sort_words(words)
print(sorted_words)
use diesel::expression::Expression;
use diesel::query_builder::QueryFragment;
use diesel::sql_types::IntoNullable;
use diesel::query_builder::QueryBuilder;
use diesel::backend::Backend;
struct MaybeNull<T> {
inner: T,
}
impl<T> Expression for MaybeNull<T>
where
T: Expression,
T::SqlType: IntoNullable,
{
type SqlType = <T::SqlType as IntoNullable>::Nullable;
}
impl<T, DB> QueryFragment<DB> for MaybeNull<T>
where
T: QueryFragment<DB>,
DB: Backend,
{
fn walk_ast(&self, mut out: DB::QueryBuilder) -> Result<(), DB::Error> {
out.push_sql("COALESCE(");
self.inner.walk_ast(out.reborrow())?;
out.push_sql(", NULL)");
Ok(())
}
} |
// NOTE(review): the non-generic Hashtable boxes its int values; the generic
// Dictionary<string, int> is the idiomatic modern alternative if callers can
// be updated.
Hashtable table = new Hashtable();
// Add items to the hashtable
table.Add("Key1", 10);
table.Add("Key2", 20);
table.Add("Key3", 30);
table.Add("Key4", 40);
table.Add("Key5", 50);
input_list = [1, 2, 3, 4, 5, 6, 7, 8]
# Keep only the even values from the input list.
new_list = list(filter(lambda value: value % 2 == 0, input_list))
print('New list:', new_list)
#!/usr/bin/env bash
# Run the selected test binaries inside docker-compose.
# Usage: run_tests.sh [-t|--tests "<space-separated test names>"]
# Defaults to every tests/test_*.cpp target.
while [[ $# -gt 1 ]]
do
_key="$1"
case $_key in
-t|--tests)
_tests="$2"
shift
;;
*)
echo "Unknown option: $_key"
exit 1
;;
esac
shift # past argument or value
done
# FIX: replaced the archaic "x$var" comparison and backticks with the
# idiomatic -z test and $(...) command substitution.
if [ -z "$_tests" ]; then
_tests=$(ls tests/test_*.cpp | sed -e 's/tests\/\(.*\)\.cpp/\1/')
fi
echo $_tests
# Unit tests do not need dependent services, so skip starting them.
case "$_tests" in
*unit*)
_dc_run_opts="--no-deps --rm"
;;
*)
_dc_run_opts="--rm"
;;
esac
_dc_opts="-f docker-compose.yml"
exit_code=0
# Run every test; remember the last non-zero exit code instead of stopping.
for _t in $_tests; do
_command="docker-compose $_dc_opts run $_dc_run_opts hssp ./$_t"
echo $_command
$_command
e=$?
if [ $e -ne 0 ] ; then
exit_code=$e
fi
done
# Remove all containers and network only if the tests passed. Keep them around
# for debugging the failed tests.
if [ $exit_code -eq 0 ]; then
docker-compose down
fi
exit $exit_code
|
// Countdown display updated once per second.
const daySelector = document.querySelector('.days');
const hourSelector = document.querySelector('.hours');
const minuteSelector = document.querySelector('.minutes');
const secondSelector = document.querySelector('.seconds');

// Target moment. FIX: the Date month argument is 0-indexed (2 = March) and the
// previous `02` literal was octal-style; same value, now written plainly.
const endDate = new Date(2021, 2, 19, 17, 0, 0).getTime();

// FIX: the original assigned to undeclared identifiers (implicit globals),
// mixed inconsistent fractional-day arithmetic, and used Math.round for the
// seconds, which could briefly display "60". Deriving all four fields from a
// single whole-seconds value avoids all of that.
const addToCountdownResult = () => {
  const msRemaining = endDate - Date.now();
  // Clamp at zero so the display never goes negative after the deadline.
  const totalSeconds = Math.max(0, Math.floor(msRemaining / 1000));
  const daysRemaining = Math.floor(totalSeconds / 86400);
  const hoursRemaining = String(Math.floor((totalSeconds % 86400) / 3600)).padStart(2, '0');
  const minutesRemaining = String(Math.floor((totalSeconds % 3600) / 60)).padStart(2, '0');
  const secondsRemaining = String(totalSeconds % 60).padStart(2, '0');
  daySelector.innerHTML = daysRemaining;
  hourSelector.innerHTML = hoursRemaining;
  minuteSelector.innerHTML = minutesRemaining;
  secondSelector.innerHTML = secondsRemaining;
};
addToCountdownResult();
setInterval(addToCountdownResult, 1000);
|
def bubble_sort(arr):
    """Sort ``arr`` in place in ascending order using bubble sort.

    O(n^2) worst case; the early-exit flag makes already-sorted input O(n).

    :param arr: mutable sequence of comparable elements; modified in place.
    :return: None (the input list is sorted in place).
    """
    n = len(arr)
    # Traverse through all array elements
    for i in range(n):
        swapped = False
        # Last i elements are already in place after i passes.
        for j in range(0, n - i - 1):
            # Swap if the element found is greater than the next element.
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        # No swap in a full pass means the list is already sorted.
        if not swapped:
            break


# Driver code to test above
unsorted_array = [5, 3, 9, 1, 0, 8, 2]
print("Original array:")
print(unsorted_array)
bubble_sort(unsorted_array)
print("Sorted array:")
print(unsorted_array)
#!/bin/bash
# Copyright © 2018. TIBCO Software Inc.
#
# This file is subject to the license terms contained
# in the license file that is distributed with this file.
# create k8s namespace for a specified org,
# if the optional target env is az, create the storage account secret based on config file in $HOME/.azure/store-secret
# usage: k8s-namespace.sh <cmd> [-p <property file>] [-t <env type>]
# where property file is specified in ../config/org_name.env, e.g.
# k8s-namespace.sh create -p org1 -t az
# use config parameters specified in ../config/org1.env
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")"; echo "$(pwd)")"
# Emit the k8s Namespace manifest for ${ORG} on stdout.
function printK8sNamespace {
echo "
apiVersion: v1
kind: Namespace
metadata:
name: ${ORG}
labels:
use: hyperledger"
}
# create azure-secret yaml
# NOTE(review): STORAGE_ACCT/STORAGE_KEY are sourced from
# ${HOME}/.azure/store-secret at the bottom of this script.
function printAzureSecretYaml {
user=$(echo -n "${STORAGE_ACCT}" | base64 -w 0)
key=$(echo -n "${STORAGE_KEY}" | base64 -w 0)
echo "---
apiVersion: v1
kind: Secret
metadata:
name: azure-secret
namespace: ${ORG}
type: Opaque
data:
azurestorageaccountname: ${user}
azurestorageaccountkey: ${key}"
}
# set k8s default namespace
# Creates/switches to a kube context named after ${ORG} so subsequent kubectl
# calls target the org's namespace by default.
function setDefaultNamespace {
local curr=$(kubectl config current-context)
local c_namespace=$(kubectl config view -o=jsonpath="{.contexts[?(@.name=='${curr}')].context.namespace}")
if [ "${c_namespace}" != "${ORG}" ]; then
local c_user=$(kubectl config view -o=jsonpath="{.contexts[?(@.name=='${curr}')].context.user}")
local c_cluster=$(kubectl config view -o=jsonpath="{.contexts[?(@.name=='${curr}')].context.cluster}")
if [ ! -z "${c_cluster}" ]; then
echo "set default kube namespace ${ORG} for cluster ${c_cluster} and user ${c_user}"
kubectl config set-context ${ORG} --namespace=${ORG} --cluster=${c_cluster} --user=${c_user}
kubectl config use-context ${ORG}
else
echo "failed to set default context for namespace ${ORG}"
fi
else
echo "namespace ${ORG} is already set as default"
fi
}
# Create the namespace (and, for Azure, the storage secret), then make it the
# default kube context. NOTE(review): ${sumd}/${stee}/${DATA_ROOT} are
# presumably defined by config/setup.sh sourced below — verify.
function createNamespace {
${sumd} -p ${DATA_ROOT}/namespace/k8s
echo "check if namespace ${ORG} exists"
kubectl get namespace ${ORG}
if [ "$?" -ne 0 ]; then
echo "create k8s namespace ${ORG}"
printK8sNamespace | ${stee} ${DATA_ROOT}/namespace/k8s/namespace.yaml > /dev/null
kubectl create -f ${DATA_ROOT}/namespace/k8s/namespace.yaml
fi
if [ "${ENV_TYPE}" == "az" ]; then
# create secret for Azure File storage
echo "create Azure storage secret"
printAzureSecretYaml | ${stee} ${DATA_ROOT}/namespace/k8s/azure-secret.yaml > /dev/null
kubectl create -f ${DATA_ROOT}/namespace/k8s/azure-secret.yaml
fi
setDefaultNamespace
}
# Delete the namespace (and, for Azure, the storage secret) using the
# manifests written by createNamespace.
function deleteNamespace {
kubectl delete -f ${DATA_ROOT}/namespace/k8s/namespace.yaml
if [ "${ENV_TYPE}" == "az" ]; then
kubectl delete -f ${DATA_ROOT}/namespace/k8s/azure-secret.yaml
fi
}
# Print the usage message
function printHelp() {
echo "Usage: "
echo " k8s-namespace.sh <cmd> [-p <property file>] [-t <env type>]"
echo " <cmd> - one of 'create', or 'delete'"
echo " - 'create' - create k8s namespace for the organization defined in network spec; for Azure, also create storage secret"
echo " - 'delete' - delete k8s namespace, for Azure, also delete the storage secret"
echo " -p <property file> - the .env file in config folder that defines network properties, e.g., org1 (default)"
echo " -t <env type> - deployment environment type: one of 'k8s' (default), 'aws', 'az', or 'gcp'"
echo " k8s-namespace.sh -h (print this message)"
}
ORG_ENV="org1"
# First positional argument is the command; everything after it is option flags.
CMD=${1}
if [ "${CMD}" != "-h" ]; then
shift
fi
while getopts "h?p:t:" opt; do
case "$opt" in
h | \?)
printHelp
exit 0
;;
p)
ORG_ENV=$OPTARG
;;
t)
ENV_TYPE=$OPTARG
;;
esac
done
# Load org-specific settings (defines ORG, DATA_ROOT, sumd, stee, ...).
source $(dirname "${SCRIPT_DIR}")/config/setup.sh ${ORG_ENV} ${ENV_TYPE}
if [ "${ENV_TYPE}" == "az" ]; then
# read secret key for Azure storage account
source ${HOME}/.azure/store-secret
if [ -z "${STORAGE_ACCT}" ] || [ -z "${STORAGE_KEY}" ]; then
echo "Error: 'STORAGE_ACCT' and 'STORAGE_KEY' must be set in ${HOME}/.azure/store-secret for Azure"
exit 1
fi
elif [ "${ENV_TYPE}" == "docker" ]; then
echo "No need to create namespace for docker"
exit 0
fi
case "${CMD}" in
create)
echo "create namespace ${ORG} for: ${ORG_ENV} ${ENV_TYPE}"
createNamespace
;;
delete)
echo "delete namespace ${ORG}: ${ORG_ENV} ${ENV_TYPE}"
deleteNamespace
;;
*)
printHelp
exit 1
esac
|
const router = require("express").Router();
const jwt_decode = require("jwt-decode");
const Sets = require("./sets-model");

// ---------------------- /api/sets ---------------------- //

/**
 * POST /api/sets — create a new set from the request body.
 * Responds 201 with { newSet } on success, 500 with the error otherwise.
 * FIX(consistency): rewritten with async/await to match the sibling GET
 * handler instead of mixing promise-chain and async styles.
 */
router.post("/", async (req, res) => {
  try {
    const newSet = await Sets.add(req.body);
    res.status(201).json({ newSet });
  } catch (err) {
    res.status(500).json(err);
  }
});

/**
 * GET /api/sets — list all sets.
 * Responds 200 with { sets } on success, 500 with the error otherwise.
 */
router.get("/", async (req, res) => {
  try {
    const sets = await Sets.find();
    res.status(200).json({ sets });
  } catch (error) {
    res.status(500).json(error);
  }
});

module.exports = router;
|
<gh_stars>0
/*
Package goconf provides a simple conf loader supporting multiple sources.
Read configuration automatically based on the given struct's field name.
Load configuration from multiple sources
multiple file inherit
Values are resolved with the following priorities (lowest to highest):
1. Options struct default value
2. Flags default value
3. Config file value, TOML or JSON file
4. Command line flag
*/
package goconf
|
<filename>Basil/include/Debug/Instrumentor.h
/*
 * Declares a class for writing profiling data to a JSON file.
 */
#pragma once
#include <algorithm>
#include <chrono>
#include <fstream>
#include <mutex>  // FIX: std::mutex is used below but <mutex> was never included
#include <string>
#include <thread>
namespace Basil
{
	// One timed scope: name, start/end timestamps and the recording thread.
	struct ProfileResult
	{
		std::string name;
		long long start;
		long long end;
		std::thread::id threadID;
	};
	// A named profiling session; results are written while a session is open.
	struct InstrumentationSession
	{
		std::string name;
	};
	// Singleton that serializes ProfileResults into a JSON trace file.
	class Instrumentor
	{
		public:
			Instrumentor();
			// Open a session and start writing to filePath.
			void beginSession(const std::string& name, const std::string& filePath = "results.json");
			// Close the current session and flush the output file.
			void endSession();
			// Append one profiling record to the output stream.
			void writeProfile(const ProfileResult& result);
			// Access the process-wide singleton instance.
			static Instrumentor& get();
		private:
			void writeHeader();
			void writeFooter();
			void internalEndSession();
			InstrumentationSession* currentSession;
			std::ofstream outputStream;
			int profileCount;
			// Guards the output stream against concurrent writeProfile calls.
			std::mutex mutex;
	};
	// RAII timer: records the elapsed time of the enclosing scope on destruction.
	class InstrumentationTimer
	{
		public:
			InstrumentationTimer(const char* name);
			~InstrumentationTimer();
			void stop();
		private:
			const char* name;
			std::chrono::time_point<std::chrono::high_resolution_clock> startTimepoint;
			bool stopped;
	};
	namespace InstrumentorUtils
	{
		template <size_t N>
		struct ChangeResult
		{
			char data[N];
		};
		// Compile-time helper: copies 'expr' while skipping occurrences of
		// 'remove' and replacing double quotes with single quotes (keeps the
		// emitted JSON valid).
		template <size_t N, size_t K>
		constexpr auto cleanupOutputString(const char(&expr)[N], const char(&remove)[K])
		{
			ChangeResult<N> result = {};
			size_t srcIndex = 0;
			size_t dstIndex = 0;
			while (srcIndex < N)
			{
				size_t matchIndex = 0;
				while (matchIndex < K - 1 && srcIndex + matchIndex < N - 1 && expr[srcIndex + matchIndex] == remove[matchIndex])
					matchIndex++;
				if (matchIndex == K - 1)
					srcIndex += matchIndex;
				result.data[dstIndex++] = expr[srcIndex] == '"' ? '\'' : expr[srcIndex];
				srcIndex++;
			}
			return result;
		}
	}
#define PROFILE 0
#if PROFILE
	#if defined(__GNUC__) || (defined(__MWERKS__) && (__MWERKS__ >= 0x3000)) || (defined(__ICC) && (__ICC >= 600)) || defined(__ghs__)
		#define FUNC_SIG __PRETTY_FUNCTION__
	#elif defined(__DMC__) && (__DMC__ >= 0x810)
		#define FUNC_SIG __PRETTY_FUNCTION__
	#elif (defined(__FUNCSIG__) || (_MSC_VER))
		#define FUNC_SIG __FUNCSIG__
	#elif (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 600)) || (defined(__IBMCPP__) && (__IBMCPP__ >= 500))
		#define FUNC_SIG __FUNCTION__
	#elif defined(__BORLANDC__) && (__BORLANDC__ >= 0x550)
		#define FUNC_SIG __FUNC__
	#elif defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901)
		#define FUNC_SIG __func__
	#elif defined(__cplusplus) && (__cplusplus >= 201103)
		#define FUNC_SIG __func__
	#else
		#define FUNC_SIG "FUN_SIG unknown!"
	#endif
	#define PROFILE_BEGIN_SESSION(name, filepath) ::Basil::Instrumentor::get().beginSession(name, filepath)
	#define PROFILE_END_SESSION() ::Basil::Instrumentor::get().endSession()
	#define PROFILE_SCOPE(name) constexpr auto fixedName = ::Basil::InstrumentorUtils::cleanupOutputString(name, "__cdecl"); ::Basil::InstrumentationTimer timer##__LINE__(fixedName.data)
	#define PROFILE_FUNCTION() PROFILE_SCOPE(FUNC_SIG)
#else
	#define PROFILE_BEGIN_SESSION(name, filepath)
	#define PROFILE_END_SESSION()
	#define PROFILE_SCOPE(name)
	#define PROFILE_FUNCTION()
#endif
}
import os
import random
import problem # Assuming problem.DatasetSplit.TRAIN is defined in a separate module
def generate_samples(data_dir, tmp_dir, split):
    """Read the requested dataset split, process every sample and persist it.

    :param data_dir: directory the raw dataset is read from.
    :param tmp_dir: directory the processed samples are written to.
    :param split: dataset split selector passed through to read_dataset.
    :return: list of file paths, one per generated sample.
    """
    raw_samples = read_dataset(data_dir, split)
    # Process each raw sample and collect the path it was saved under.
    return [
        save_sample(tmp_dir, process_sample(raw_sample))
        for raw_sample in raw_samples
    ]
def read_dataset(data_dir, split):
    """Read the dataset for the given split and return a list of raw samples.

    FIX: the original fell off the end and implicitly returned None, which
    made the caller's iteration crash with a TypeError; an explicit (for now
    empty) list is returned instead.

    :param data_dir: directory to read the dataset from.
    :param split: one of problem.DatasetSplit.{TRAIN,TEST,...}.
    :return: list of raw samples (currently empty — loaders are placeholders).
    """
    samples = []
    if split == problem.DatasetSplit.TRAIN:
        # TODO: read training data from data_dir
        pass
    elif split == problem.DatasetSplit.TEST:
        # TODO: read test data from data_dir
        pass
    else:
        # TODO: read validation data from data_dir
        pass
    return samples
def process_sample(sample):
    """Process one raw sample into its final form.

    Currently a placeholder: the sample is returned unchanged until real
    preprocessing/transformation logic is implemented.
    """
    return sample
def save_sample(tmp_dir, sample):
    """Write one processed sample to a unique file in ``tmp_dir``.

    FIX: the original used ``random.randint(1, 1000)`` for the filename, so
    two samples could collide and silently overwrite each other;
    tempfile.mkstemp guarantees a unique file.

    :param tmp_dir: directory to write into (must exist).
    :param sample: sample content; assumed to be a string.
    :return: path of the written file.
    """
    import tempfile
    fd, sample_path = tempfile.mkstemp(prefix="sample_", suffix=".txt", dir=tmp_dir)
    with os.fdopen(fd, 'w') as file:
        file.write(sample)
    return sample_path
package com.ubergeek42.WeechatAndroid.adapters;
import android.annotation.SuppressLint;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.MainThread;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.view.ViewGroup;
import com.ubergeek42.WeechatAndroid.Weechat;
import com.ubergeek42.WeechatAndroid.fragments.BufferFragment;
import com.ubergeek42.WeechatAndroid.relay.Buffer;
import com.ubergeek42.WeechatAndroid.relay.BufferList;
import com.ubergeek42.WeechatAndroid.service.P;
import com.ubergeek42.cats.Cat;
import com.ubergeek42.cats.CatD;
import com.ubergeek42.cats.Kitty;
import com.ubergeek42.cats.Root;
import java.util.ArrayList;
/**
 * PagerAdapter backing the main ViewPager: one BufferFragment per open buffer,
 * keyed by the buffer's full name (which is also used as the fragment tag).
 * Fragment transactions are batched per update cycle — instantiateItem and
 * destroyItem lazily open one transaction that finishUpdate commits.
 */
public class MainPagerAdapter extends PagerAdapter {
final private static @Root Kitty kitty = Kitty.make();
// Full names of the open buffers, in pager order; fragment tags match these.
final private ArrayList<String> names = new ArrayList<>();
final private ViewPager pager;
final private FragmentManager manager;
final private Handler handler;
// Pending batched transaction; null when no update cycle is in progress.
private FragmentTransaction transaction = null;
@MainThread public MainPagerAdapter(FragmentManager manager, ViewPager pager) {
super();
this.manager = manager;
this.pager = pager;
handler = new Handler(Looper.getMainLooper());
}
////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////
// Add a buffer page (no-op if already open) and persist the open state.
@MainThread @CatD public void openBuffer(final String name) {
if (names.contains(name)) return;
Buffer buffer = BufferList.findByFullName(name);
if (buffer != null) buffer.setOpen(true);
names.add(name);
notifyDataSetChanged();
P.setBufferOpen(name, true);
}
// Remove a buffer page (no-op if not open) and persist the closed state.
@MainThread @CatD public void closeBuffer(String name) {
if (!names.remove(name)) return;
notifyDataSetChanged();
Buffer buffer = BufferList.findByFullName(name);
if (buffer != null) Weechat.runOnMainThread(() -> buffer.setOpen(false)); // make sure isOpen is called after
P.setBufferOpen(name, false);
}
@MainThread public void focusBuffer(String name) {
pager.setCurrentItem(names.indexOf(name));
}
@MainThread public void setBufferInputText(@NonNull final String name, @NonNull final String text) {
BufferFragment bufferFragment = getBufferFragment(names.indexOf(name));
if (bufferFragment == null) {
kitty.warn("Tried to set input text of unknown buffer %s", name);
return;
}
bufferFragment.setText(text);
}
// returns whether a buffer is inside the pager
@MainThread public boolean isBufferOpen(String name) {
return names.contains(name);
}
// returns full name of the buffer that is currently focused or null if there's no buffers
@MainThread public @Nullable String getCurrentBufferFullName() {
int i = pager.getCurrentItem();
return (names.size() > i) ? names.get(i) : null;
}
// returns BufferFragment that is currently focused or null
@MainThread public @Nullable BufferFragment getCurrentBufferFragment() {
return getBufferFragment(pager.getCurrentItem());
}
@MainThread private @Nullable BufferFragment getBufferFragment(int i) {
if (names.size() <= i) return null;
return (BufferFragment) manager.findFragmentByTag(names.get(i));
}
////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////// overrides
////////////////////////////////////////////////////////////////////////////////////////////////
// attach a fragment if it's in the FragmentManager, create and add a new one if it's not
@MainThread @Override @SuppressLint("CommitTransaction") @Cat(linger=true)
public @NonNull Object instantiateItem(@NonNull ViewGroup container, int i) {
if (transaction == null) transaction = manager.beginTransaction();
String tag = names.get(i);
Fragment frag = manager.findFragmentByTag(tag);
if (frag == null) {
kitty.trace("adding");
transaction.add(container.getId(), frag = BufferFragment.newInstance(tag), tag);
} else {
kitty.trace("attaching");
transaction.attach(frag);
}
return frag;
}
// detach fragment if it went off-screen or remove it completely if it's been closed by user
@MainThread @Override @SuppressLint("CommitTransaction") @Cat(linger=true)
public void destroyItem(@NonNull ViewGroup container, int i, @NonNull Object object) {
if (transaction == null) transaction = manager.beginTransaction();
Fragment frag = (Fragment) object;
if (names.contains(frag.getTag())) {
kitty.trace("detaching");
transaction.detach(frag);
} else {
kitty.trace("removing");
transaction.remove(frag);
}
}
////////////////////////////////////////////////////////////////////////////////////////////////
@MainThread @Override public int getCount() {
return names.size();
}
@MainThread @Override public CharSequence getPageTitle(int i) {
String name = names.get(i);
Buffer buffer = BufferList.findByFullName(names.get(i));
return buffer == null ? name : buffer.shortName;
}
@MainThread @Override public boolean isViewFromObject(@NonNull View view, @NonNull Object object) {
return ((Fragment) object).getView() == view;
}
private Fragment oldFrag;
// in the interface object is annotates as @NonNull but it can be nullable
// see https://issuetracker.google.com/issues/69440293
@SuppressWarnings("NullableProblems")
@MainThread @Override public void setPrimaryItem(@NonNull ViewGroup container, int position, @Nullable Object object) {
if (object == oldFrag) return;
Fragment frag = (Fragment) object;
if (oldFrag != null) {
oldFrag.setMenuVisibility(false);
oldFrag.setUserVisibleHint(false);
}
if (frag != null) {
frag.setMenuVisibility(true);
frag.setUserVisibleHint(true);
}
oldFrag = frag;
}
// this should return index for fragments or POSITION_NONE if a fragment has been removed
// providing proper indexes instead of POSITION_NONE allows buffers not to be
// fully recreated on every uiBuffer list change
@MainThread @Override public int getItemPosition(@NonNull Object object) {
int idx = names.indexOf(((Fragment) object).getTag());
return (idx >= 0) ? idx : POSITION_NONE;
}
// this one's empty because instantiateItem and destroyItem create transactions as needed
// this function is called too frequently to create a transaction inside it
@MainThread @Override public void startUpdate(@NonNull ViewGroup container) {}
// commit the transaction and execute it ASAP, but NOT on the current loop
// this way the drawer will wait for the fragment to appear
@MainThread @Override public void finishUpdate(@NonNull ViewGroup container) {
if (transaction == null)
return;
transaction.commitAllowingStateLoss();
transaction = null;
handler.postAtFrontOfQueue(manager::executePendingTransactions);
}
////////////////////////////////////////////////////////////////////////////////////////////////
// True when there are persisted open buffers to restore and none are open yet.
@MainThread public boolean canRestoreBuffers() {
return P.openBuffers.size() > 0 && names.size() == 0 && BufferList.hasData();
}
@MainThread public void restoreBuffers() {
for (String fullName : P.openBuffers)
openBuffer(fullName);
}
}
|
#!/bin/bash
##################################################################
#
# Install IMPALA CATALOG ROLE onto a MAPR system.
#
# This script is intended to be run in two modes:
# From mapr installer (rpm)
# install.sh /opt/mapr /opt/mapr/impala
#
# The installer has already copied the files in place,
# so we are mainly doing the post-install activation.
#
# From development environment:
# make_tarball.sh; sudo ./install.sh
#
# "make_tarball.sh" leaves its staged files in /var/tmp/impala.
# We copy files from the staging area to /opt/mapr/impala,
# then do the post-install activation.
#
#####################################################################
# Where the new IMPALA files were copied
export MAPR_HOME=${1:-/opt/mapr}
export IMPALA_HOME=${2:-$MAPR_HOME/impala/impala-1.4.1}
. $IMPALA_HOME/mapr/IMPALA_VERSION.sh
# Post-install activation: create the role marker, warden config and init
# script for the Impala catalog service, then link it into /etc/init.d.
main()
{
# must be root to run this script
if ! is_root ; then
echo "You must be root to install impala"
exit 1
fi
# figure out which user we run as
local owner=$(get_mapr_owner)
# create the role file
mkdir -p ${MAPR_HOME}/roles
touch ${MAPR_HOME}/roles/impalacatalog
chown $owner ${MAPR_HOME}/roles/impalacatalog
# create the conf.d directory if not exists
if [ ! -d $MAPR_HOME/conf/conf.d ]; then
mkdir -p $MAPR_HOME/conf/conf.d
fi
# create the warden configuration file
configure_file $IMPALA_HOME/mapr/warden/warden.impalacatalog.conf \
$MAPR_HOME/conf/conf.d/warden.impalacatalog.conf
chown $owner $MAPR_HOME/conf/conf.d/warden.impalacatalog.conf
configure_file $IMPALA_HOME/mapr/warden/mapr-impalacatalog \
$MAPR_HOME/initscripts/mapr-impalacatalog
chown $owner $MAPR_HOME/initscripts/mapr-impalacatalog
chmod a+x $MAPR_HOME/initscripts/mapr-impalacatalog
# create a link so "service" can be used to start/stop us
rm -f /etc/init.d/mapr-impalacatalog
ln -s ${MAPR_HOME}/initscripts/mapr-impalacatalog /etc/init.d
}
################### Should be part of a "common" file ###################
##############################################################
# filter a file, substituting for the following shell variables
# $MAPR_HOME
# $IMPALA_HOME
# $IMPALA_VERSION
##############################################################
configure_file() { # <source file> <new modified file>
sed -e "s^\$MAPR_HOME^$MAPR_HOME^g" \
-e "s^\$IMPALA_HOME^$IMPALA_HOME^g" \
-e "s^\$IMPALA_VERSION^$IMPALA_VERSION^g" < $1 > $2
}
# "true" if user is root
is_root()
{
[ `id -u 2>/dev/null` -eq 0 ]
}
# "true" if the named user exists on this system
valid_user() # <user name>
{
[ "`id -u $1 2>/dev/null`" != "" ]
}
# print the primary group of the named user
get_group() # <user name>
{
id -g -n $1 2>/dev/null
}
# Print the "owner:group" the service files should belong to, derived from
# daemon.conf with fallbacks to root/mapr.
get_mapr_owner()
{
# get the mapr user out of the daemon.conf file
local conf=$MAPR_HOME/conf/daemon.conf
local owner=$(awk -F = '$1 == "mapr.daemon.user" { print $2 }' $conf)
local group=$(awk -F = '$1 == "mapr.daemon.group" { print $2 }' $conf)
# if not specified or invalid, then try "root" and 'mapr'
([ "$owner" == "" ] || ! valid_user $owner) && owner=root
([ "$owner" == "root" ] && valid_user mapr ) && owner=mapr
[ "$group" == "" ] && group=$(get_group $owner)
[ "$group" == "root" ] && group=mapr
echo $owner:$group
}
main "$@"
|
# FIX: the env var was misspelled "CUDA_VISIBLE_DEVICDES", so it had no effect
# and every run saw all GPUs instead of only device 1.
# NOTE(review): the first self_att run has no explicit --seed while the plain
# runs use seeds 1-5 — confirm whether "--seed 1" was intended there.
CUDA_VISIBLE_DEVICES=1 python ./main.py --self_att --model_name self_att2
CUDA_VISIBLE_DEVICES=1 python ./main.py --self_att --model_name self_att2 --seed 2
CUDA_VISIBLE_DEVICES=1 python ./main.py --self_att --model_name self_att2 --seed 3
CUDA_VISIBLE_DEVICES=1 python ./main.py --self_att --model_name self_att2 --seed 4
CUDA_VISIBLE_DEVICES=1 python ./main.py --self_att --model_name self_att2 --seed 5
CUDA_VISIBLE_DEVICES=1 python ./main.py --seed 1
CUDA_VISIBLE_DEVICES=1 python ./main.py --seed 2
CUDA_VISIBLE_DEVICES=1 python ./main.py --seed 3
CUDA_VISIBLE_DEVICES=1 python ./main.py --seed 4
CUDA_VISIBLE_DEVICES=1 python ./main.py --seed 5
|
#!/bin/sh
##
## Copyright (C) 2002-2008, Marcelo E. Magallon <mmagallo[]debian org>
## Copyright (C) 2002-2008, Milan Ikits <milan ikits[]ieee org>
##
## This program is distributed under the terms and conditions of the GNU
## General Public License Version 2 as published by the Free Software
## Foundation or, at your option, any later version.
set -e
# FIX: quote the positional parameters so destination directories and URLs
# containing spaces don't word-split; mkdir -p also tolerates missing parents.
if [ ! -d "$1" ] ; then
mkdir -p "$1"
fi
cd "$1"
# wget used to return 0 (success), but more recent versions
# don't so we don't want to bail out in failure mode
# eventhough everything is fine.
set +e
wget \
--mirror \
--no-parent \
--no-host-directories \
--cut-dirs=2 \
--accept=txt,html \
"$2"
echo 'wget exit code: ' $?
exit 0
|
#!/bin/sh
# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache License Version 2.0.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2020 Datadog, Inc.
# Builds Datadogpy layers for lambda functions, using Docker
set -e
# Change to the parent of the directory this script is in
cd $(dirname "$0")/..
# NOTE(review): the image is tagged "datadog-go-layer" and runs `go test`,
# yet the build arg selects a python:3.7 runtime — confirm the intended
# runtime/tag combination.
docker build -t datadog-go-layer . --build-arg runtime=python:3.7
docker run --rm datadog-go-layer go test -v ./...
|
package types
const (
	// ModuleName is the module name for the ethmultisig light client.
	ModuleName = "ethmultisig-client"
	// ClientType identifies this client implementation in client state records.
	ClientType = "ethmultisig-client"
)
|
// Global ESLint file for basic eslintery.
// Ideally a new root eslint file is created in each project,
// but sometimes I just need some syntax checking.
module.exports = {
root: true,
parser: 'babel-eslint',
parserOptions: {
ecmaVersion: 6,
},
extends: [
'eslint:recommended',
],
env: {
node: true,
es6: true,
},
rules: {
// Blep.
'brace-style': ['warn', 'stroustrup'],
// Because some projects require semis and some don't, I'm just turning this off.
'semi': ['off'],
},
};
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default
# NOTE(review): this uncomments whatever is on line 11 of feeds.conf.default.
# That is fragile if upstream reorders the file -- confirm line 11 is still
# the intended feed (the pattern-based variant above is more robust).
sed -i '11s/#//' feeds.conf.default
# Add a feed source
#sed -i '$a src-git lienol https://github.com/Lienol/openwrt-package' feeds.conf.default
# Inserts the Lienol package feed immediately after line 11.
sed -i '11a src-git lienol https://github.com/Lienol/openwrt-package' feeds.conf.default
"use strict";

// Auto-generated icon definition for the ionicons "ios-color-wand" glyph:
// a plain data tree of SVG node descriptors (name / attribs / children)
// consumed by an icon renderer. Each shape appears duplicated as its own
// child -- an artifact of the generator, preserved as-is. Do not edit by
// hand; regenerate from the source SVG instead.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.iosColorWand = void 0;
var iosColorWand = {
  "viewBox": "0 0 512 512",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "polygon",
      "attribs": {
        "points": "198.011,159.22 163.968,193.337 420.064,450 454,415.883 \t"
      },
      "children": [{
        "name": "polygon",
        "attribs": {
          "points": "198.011,159.22 163.968,193.337 420.064,450 454,415.883 \t"
        },
        "children": []
      }]
    }, {
      "name": "rect",
      "attribs": {
        "x": "182",
        "y": "62",
        "width": "32",
        "height": "64"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "x": "182",
          "y": "62",
          "width": "32",
          "height": "64"
        },
        "children": []
      }]
    }, {
      "name": "rect",
      "attribs": {
        "x": "182",
        "y": "266",
        "width": "32",
        "height": "64"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "x": "182",
          "y": "266",
          "width": "32",
          "height": "64"
        },
        "children": []
      }]
    }, {
      "name": "rect",
      "attribs": {
        "x": "274",
        "y": "178",
        "width": "64",
        "height": "32"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "x": "274",
          "y": "178",
          "width": "64",
          "height": "32"
        },
        "children": []
      }]
    }, {
      "name": "polygon",
      "attribs": {
        "points": "303.941,112.143 281.314,89.465 236.06,134.82 258.687,157.498 \t"
      },
      "children": [{
        "name": "polygon",
        "attribs": {
          "points": "303.941,112.143 281.314,89.465 236.06,134.82 258.687,157.498 \t"
        },
        "children": []
      }]
    }, {
      "name": "polygon",
      "attribs": {
        "points": "92.06,112.143 137.314,157.498 159.941,134.82 114.687,89.465 \t"
      },
      "children": [{
        "name": "polygon",
        "attribs": {
          "points": "92.06,112.143 137.314,157.498 159.941,134.82 114.687,89.465 \t"
        },
        "children": []
      }]
    }, {
      "name": "polygon",
      "attribs": {
        "points": "92.06,279.141 114.687,301.816 159.941,256.462 137.314,233.784 \t"
      },
      "children": [{
        "name": "polygon",
        "attribs": {
          "points": "92.06,279.141 114.687,301.816 159.941,256.462 137.314,233.784 \t"
        },
        "children": []
      }]
    }, {
      "name": "rect",
      "attribs": {
        "x": "58",
        "y": "178",
        "width": "64",
        "height": "32"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "x": "58",
          "y": "178",
          "width": "64",
          "height": "32"
        },
        "children": []
      }]
    }]
  }]
};
exports.iosColorWand = iosColorWand;
#!/bin/bash
# BUG FIX: the shebang was "# !/bin/bash" (space after #), which is just a
# comment -- the script would run under whatever shell invoked it.
#
# Copy each successfully-processed app version (listed in success.log, with
# its folder resolved via processedApps.log) into $HOME/successAppsFolder,
# preserving the <app>/<version> directory layout. Already-copied versions
# are skipped.
default_location="/home/greenlab/Anadroid/demoProjects"
#default_location="./samples"
target_success_apps_dir="$HOME/successAppsFolder"
#for version in $(cat suc.log); do
for version in $(cat "$ANADROID_PATH/.ana/logs/success.log"); do
    folder_version=$(grep "$version" "$ANADROID_PATH/.ana/logs/processedApps.log")
    target_app_dir=$(dirname "$folder_version" | xargs dirname | xargs basename)
    version_app_dir=$(dirname "$folder_version" | xargs basename)
    echo "$version - $target_app_dir -- $version_app_dir"
    target_dir="$target_success_apps_dir/$target_app_dir/$version_app_dir"
    echo "creating $target_dir"
    if [ -d "$target_dir" ]; then
        # Already copied on a previous run.
        echo "ja existe"
        continue
    fi
    mkdir -p "$target_dir"
    # Quoted so app paths containing spaces survive.
    cp -r "$folder_version" "$target_dir"
done
<filename>L2Tests/test/com/microsoft/alm/L2/tfvc/TfvcServerWorkspaceTests.java<gh_stars>10-100
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.L2.tfvc;
import com.intellij.openapi.vcs.VcsException;
import com.microsoft.alm.plugin.authentication.AuthenticationInfo;
import com.microsoft.alm.plugin.context.ServerContext;
import com.microsoft.alm.plugin.context.ServerContextManager;
import com.microsoft.alm.plugin.external.models.Workspace;
import com.microsoft.alm.plugin.external.utils.CommandUtils;
import com.microsoft.alm.plugin.external.utils.TfvcCheckoutResultUtils;
import com.microsoft.alm.plugin.idea.common.ui.checkout.VsoCheckoutPageModel;
import com.microsoft.alm.plugin.idea.tfvc.core.TfvcWorkspaceLocator;
import com.microsoft.tfs.model.connector.TfsDetailedWorkspaceInfo;
import com.microsoft.tfs.model.connector.TfvcCheckoutResult;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
/**
 * L2 tests verifying that a TFVC checkout performed in *server*-workspace
 * mode produces a genuine server workspace and supports checkout-for-edit.
 */
public class TfvcServerWorkspaceTests extends TfvcCheckoutTestBase {

    /** Forces the checkout page model into server-workspace mode. */
    @Override
    protected void setUpCheckoutModel(VsoCheckoutPageModel model, String path) {
        super.setUpCheckoutModel(model, path);
        model.setTfvcServerCheckout(true);
    }

    /** Server context for the test server URL (registered by the base class). */
    @NotNull
    private ServerContext getServerContext() {
        return ServerContextManager.getInstance().get(getServerUrl());
    }

    /** Checks out a single file for edit and fails the test on any VCS error. */
    private void checkoutFile(File file) {
        TfvcCheckoutResult checkoutResult = CommandUtils.checkoutFilesForEdit(
                getServerContext(),
                Collections.singletonList(file.toPath()),
                false);
        try {
            TfvcCheckoutResultUtils.verify(checkoutResult);
        } catch (VcsException e) {
            // Tests cannot throw checked VcsException; surface it as a failure.
            throw new RuntimeException(e);
        }
    }

    /**
     * Asserts that the workspace rooted at the given path is a SERVER
     * workspace by resolving its partial info locally and then querying the
     * detailed workspace record from the server.
     */
    private void assertIsServerWorkspace(Path workspace) {
        TfsDetailedWorkspaceInfo partialWorkspaceInfo = TfvcWorkspaceLocator.getPartialWorkspace(
                null,
                workspace,
                false);
        Workspace partialWorkspace = Workspace.fromWorkspaceInfo(partialWorkspaceInfo);
        AuthenticationInfo authenticationInfo = getServerContext().getAuthenticationInfo();
        Workspace workspaceInfo = CommandUtils.getDetailedWorkspace(
                partialWorkspace.getServerDisplayName(),
                partialWorkspace.getName(),
                authenticationInfo);
        assertEquals(Workspace.Location.SERVER, workspaceInfo.getLocation());
    }

    /** End-to-end: checkout the test repository and verify a server workspace results. */
    @Test(timeout = 60000)
    public void testServerCheckout() throws InterruptedException, IOException {
        checkoutTestRepository(this::assertIsServerWorkspace);
    }
}
|
#!/bin/bash
# Dataproc init action: stage extra jars into the Hadoop classpath.
set -e
gsutil cp gs://ai-bucket/dataproc/jars/elasticsearch-hadoop-2.3.0.BUILD-20160303.040409-29.jar /usr/lib/hadoop/lib/
wget https://jdbc.postgresql.org/download/postgresql-9.4.1208.jar -P /usr/lib/hadoop/lib/
# FIX: central.maven.org was decommissioned and Maven Central now requires
# HTTPS; repo1.maven.org is the canonical host for the same artifact path.
wget https://repo1.maven.org/maven2/org/apache/avro/avro/1.7.7/avro-1.7.7.jar -P /usr/lib/hadoop/lib/
#wget https://dl.bintray.com/spark-packages/maven/spotify/spark-bigquery/0.1.2-s_2.10/spark-bigquery-0.1.2-s_2.10.jar -P /usr/lib/hadoop/lib/
#wget https://dl.bintray.com/spark-packages/maven/databricks/spark-avro/3.0.1-s_2.10/spark-avro-3.0.1-s_2.10.jar -P /usr/lib/hadoop/lib/
|
#!/bin/bash
# this test script:
# 1. deploys an older version of Automate and upgrades it to v2 using the beta CLI,
#    skipping v1 policy migration.
# 2. runs inspec tests to verify IAM v2 behavior without legacy policies.
# 3. upgrades Automate to the latest build. This force-upgrades the system to IAM v2.
# 4. runs inspec tests to verify that the system was not disrupted by the force-upgrade
#    and no legacy policies were migrated.

#shellcheck disable=SC2034
test_name="iam_force_upgrade_to_v2_with_no_legacy"
test_upgrades=true
test_upgrade_strategy="none"

# a2-iam-no-legacy-integration verifies permissions on an IAM v2 system
# without v1 legacy policies
test_deploy_inspec_profiles=(a2-deploy-smoke)
# a2-deploy-integration verifies that the system is up and all APIs work correctly
# (which now includes only IAM v2 APIs)
# a2-iam-no-legacy-integration verifies permission enforcement on a fresh IAM v2
# system with no v1 legacy policies enforced
test_upgrade_inspec_profiles=(a2-deploy-integration a2-iam-no-legacy-integration)
# Note: we can't run diagnostics AND inspec, so skip diagnostics
test_skip_diagnostics=true

# on this version, we released IAM v2 GA
OLD_VERSION=20200127203438
OLD_MANIFEST_DIR="${A2_ROOT_DIR}/components/automate-deployment/testdata/old_manifests/"
DEEP_UPGRADE_PATH="${OLD_MANIFEST_DIR}/${OLD_VERSION}.json"

# Deploy the pinned old Automate release with that release's own CLI, then
# upgrade it to IAM v2 while skipping v1 policy migration.
do_deploy() {
    #shellcheck disable=SC2154
    cp "$DEEP_UPGRADE_PATH" "$test_manifest_path"
    # we use the CLI for the old version of Automate we want to deploy
    local cli_bin="/bin/chef-automate-${OLD_VERSION}"
    download_cli "${OLD_VERSION}" "${cli_bin}"
    #shellcheck disable=SC2154
    "${cli_bin}" deploy "$test_config_path" \
        --hartifacts "$test_hartifacts_path" \
        --override-origin "$HAB_ORIGIN" \
        --manifest-dir "$test_manifest_path" \
        --admin-password chefautomate \
        --accept-terms-and-mlsa \
        --skip-preflight \
        --debug
    "${cli_bin}" iam upgrade-to-v2 --skip-policy-migration
}

# Hook run by the test harness before the upgrade phase.
do_prepare_upgrade() {
    # use latest current here
    prepare_upgrade_milestone "current" "20220329091442"
}
|
-- Customer contact report: one row per customer with their currency, the
-- address-format rule for their locale, both mailing and billing addresses
-- (the address table is joined twice under separate aliases), and country.
SELECT
    c.customer_id,
    c.first_name,
    c.last_name,
    c.email,
    c.phone_number,
    cu.currency_code,
    af.address_format,
    ma.address_details AS mailing_address,
    ba.address_details AS billing_address,
    co.country_name
FROM customer c
JOIN currency cu ON c.currency_id = cu.currency_id
JOIN address_format af ON c.address_format_id = af.address_format_id
JOIN address ma ON c.mailing_address_id = ma.address_id
JOIN address ba ON c.billing_address_id = ba.address_id
JOIN country co ON c.country_id = co.country_id;
#include "FlakyFakeSocketHandler.hpp"

namespace et {

// A FakeSocketHandler whose read/write randomly fail, used to exercise
// error-handling/reconnect paths. `chance` controls the failure rate.
FlakyFakeSocketHandler::FlakyFakeSocketHandler(int _chance)
    : FakeSocketHandler(),  //
      chance(_chance) {}

FlakyFakeSocketHandler::FlakyFakeSocketHandler(
    std::shared_ptr<FakeSocketHandler> remoteHandler_, int _chance)
    : FakeSocketHandler(remoteHandler_),  //
      chance(_chance) {}

// Fails with ECONNRESET roughly one call in `chance`; otherwise delegates.
// NOTE(review): `rand() % chance == 1` can never fire when chance == 1
// (rand() % 1 is always 0) -- confirm whether `== 0` was intended.
ssize_t FlakyFakeSocketHandler::read(int i, void* buf, size_t count) {
  if (rand() % chance == 1) {
    VLOG(1) << "read failed\n";
    errno = ECONNRESET;
    return -1;
  }
  return FakeSocketHandler::read(i, buf, count);
}

// Fails with EPIPE roughly one call in `chance`; otherwise delegates.
ssize_t FlakyFakeSocketHandler::write(int i, const void* buf, size_t count) {
  if (rand() % chance == 1) {
    VLOG(1) << "write failed\n";
    errno = EPIPE;
    return -1;
  }
  return FakeSocketHandler::write(i, buf, count);
}

}  // namespace et
# Lists of groupings, keyed by the (stringified) string length.
groups = {"3": [], "5": [], "7": []}

# Group strings into different lengths. A dictionary lookup replaces the
# repetitive if/elif chain; behavior is identical for lengths 3, 5 and 7,
# and other lengths are ignored exactly as before. Adding a new length now
# only requires a new key in `groups`.
# NOTE(review): `strings` is expected to be defined earlier in the script.
for s in strings:
    key = str(len(s))
    if key in groups:
        groups[key].append(s)

# Print groupings
print(groups)
# Output: {'3': ['cat', 'dog'], '5': ['apple', 'banana'], '7': ['elephant']}
package org.museautomation.ui.ide.navigation;
import javafx.application.*;
import javafx.geometry.*;
import javafx.scene.*;
import javafx.scene.control.*;
import javafx.scene.layout.*;
import javafx.stage.*;
import org.museautomation.ui.ide.*;
import org.museautomation.ui.ide.navigation.resources.*;
import org.museautomation.core.*;
import org.museautomation.core.project.*;
import org.museautomation.core.resource.*;
import org.museautomation.core.resource.storage.*;
import org.museautomation.ui.extend.glyphs.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.atomic.*;
/**
 * Top-level IDE view: shows either the "open a project" landing UI or, once a
 * project is opened, the project navigator. Owns the current project and the
 * set of open resource editors.
 *
 * @author <NAME> (see LICENSE.txt for license details)
 */
public class NavigatorView
{
    public NavigatorView(ResourceEditors editors, Stage stage)
    {
        _editors = editors;
        _stage = stage;
        activateInitialUI();
    }

    // Build the landing UI: an "Open Project" button plus the recent-projects list.
    private void activateInitialUI()
    {
        VBox rows = new VBox();
        rows.setAlignment(Pos.CENTER);
        rows.setSpacing(20);

        Button open_project = new Button("Open Project...", Glyphs.create("FA:FOLDER_OPEN_ALT"));
        rows.getChildren().add(open_project);
        open_project.setOnAction(event ->
        {
            DirectoryChooser chooser = new DirectoryChooser();
            chooser.setTitle("Choose project folder");
            // Start the chooser next to the most recently opened project, if any.
            List<RecentProject> projects = RecentProjectSettings.get().getProjects();
            if (projects.size() > 0)
                chooser.setInitialDirectory(new File(projects.get(0).getLocation()).getParentFile());
            File folder = chooser.showDialog(_stage.getOwner());
            if (folder != null)
                openProject(folder);
        });

        RecentProjectsPanel recent = new RecentProjectsPanel(this);
        rows.getChildren().add(recent.getNode());

        _root.setCenter(rows);
    }

    // Open the project in the given folder, notify listeners, switch to the
    // navigation UI and remember the folder in the recent-projects list.
    void openProject(File folder)
    {
        for (ProjectOpenListener listener : OPEN_LISTENERS)
            listener.projectWillBeOpened(folder);
        _project = new SimpleProject(new FolderIntoMemoryResourceStorage(folder), folder.getName());
        _project.open();
        activateNavigationUI();
        RecentProjectSettings.get().addProject(folder.getPath());
    }

    // Replace the landing UI with the project navigator and wire up the
    // close-project flow (prompting to save or revert unsaved editors).
    private void activateNavigationUI()
    {
        ProjectNavigator navigator = new ProjectNavigator(_project, _editors);
        navigator.setProjectCloser(() ->
        {
            if (_editors.hasUnsavedChanges())
            {
                final AtomicReference<String> error = new AtomicReference<>();
                boolean close = SaveChangesDialog.createShowAndWait(
                    () -> error.set(_editors.saveAllChanges()),
                    _editors::revertAllChanges);
                if (!close || error.get() != null)
                {
                    // Either the user cancelled or a save failed: keep the project open.
                    Alert alert = new Alert(Alert.AlertType.ERROR);
                    alert.setTitle("Error");
                    alert.setHeaderText("Unable to save a resource");
                    alert.setContentText(error.get());
                    alert.showAndWait();
                    return;
                }
            }
            _editors.closeAll();
            _project = null;
            activateInitialUI();
        });
        _root.setCenter(navigator.getNode());

        detectAndWarnFileExtensionChange();
    }

    // TODO This can be removed in the future (a few months after Dec 2019?)
    // TODO Not before FromJsonFileResourceFactory is changed to stop reading .json files.
    // Counts project resources still stored with the legacy .json extension
    // and, if any exist, shows a one-time informational dialog with rename
    // instructions for the new .muse extension.
    private void detectAndWarnFileExtensionChange()
    {
        int count = 0;
        List<ResourceToken<MuseResource>> tokens = _project.getResourceStorage().findResources(ResourceQueryParameters.forAllResources());
        for (ResourceToken<MuseResource> token : tokens)
        {
            Object meta = token.metadata().getMetadataField("filename");
            if (meta != null && meta.toString().endsWith(".json"))
                count++;
        }
        if (count > 0)
        {
            final int info_count = count;
            // Must run on the JavaFX application thread.
            Platform.runLater(() ->
            {
                Alert alert = new Alert(Alert.AlertType.INFORMATION);
                alert.setTitle("Notice");
                alert.setHeaderText(info_count + " resources found with .json extension in filename");
                alert.setContentText("Muse files will be moving from the .json filename extension to .muse.\nNew items created in MuseIDE will use the new extension.\nSoon, project resources will no longer be read from .json files.\nUse these commands to rename all the files in a folder:\n- Windows: ren *.json *.muse\n- Mac: for f in *.json; do mv $f `basename $f .json`.muse; done;");
                alert.show();
            });
        }
    }

    /** The root node to embed in the enclosing scene. */
    public Node getNode()
    {
        return _root;
    }

    private Stage _stage;
    private final ResourceEditors _editors;
    private final BorderPane _root = new BorderPane();
    private MuseProject _project = null;

    public interface ProjectCloser
    {
        void close();
    }

    public interface ProjectOpenListener
    {
        void projectWillBeOpened(File folder);
    }

    private final static List<ProjectOpenListener> OPEN_LISTENERS = new ArrayList<>();

    @SuppressWarnings("unused") // public API
    public static void addProjectOpenListener(ProjectOpenListener listener)
    {
        OPEN_LISTENERS.add(listener);
    }
}
#!/bin/bash
# Generate an nrfutil DFU signing key and either store it in "pass" or write
# it to an unencrypted .pem file, then print the matching dfu_public_key.c.

# Get the scripts path: the path where this file is located.
path="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$path/_utils.sh"

# Set "key_to_pass" to true to store the generated key in "pass" instead of
# an unencrypted file.
key_to_pass=true

# BUG FIX: the original used `[ $key_to_pass ]`, which is true for ANY
# non-empty value -- including "false" -- so the unencrypted-file branch was
# unreachable. Compare against "true" explicitly.
if [ "$key_to_pass" = true ]; then
    cs_info "Storing private key in pass"
    key_file=$(mktemp -u --suffix=cs)
    checkError
    # Make sure the temporary key file is removed when the script exits.
    trap "rm -f $key_file" EXIT
else
    key_file="nrfutil_private_key.pem"
    cs_warn "Storing private key unencrypted."
fi

cs_info "Generate key"
nrfutil keys generate "$key_file"
checkError

if [ "$key_to_pass" = true ]; then
    cs_info "Overwrite 'dfu-pkg-sign-key' entry in pass with new key? [y/N]"
    read store_pass_response
    if [[ ! $store_pass_response == "y" ]]; then
        cs_warn "abort"
        exit 1
    fi
    cat "$key_file" | pass insert --multiline dfu-pkg-sign-key
    checkError
fi

cs_info "Use the code below as dfu_public_key.c"
nrfutil keys display --key pk --format code "$key_file"
cs_info "Use the code above as dfu_public_key.c"
#! /usr/bin/env python
"""
Retrieve the contigs for a list of cDBG nodes. Consumes the output of
extract_nodes_by_query to get the list of nodes.
"""
import argparse
import os
import sys
import gzip
import khmer
import collections
import gzip
import screed
from spacegraphcats.utils.logging import log
from . import search_utils
def main():
    """Emit the length of every contig whose cDBG node ID is in cdbg_nodefile.

    Writes one length per line to -o/--output, and prints summary statistics
    (total contigs/bp matched, homogeneous-count contigs, and contigs whose
    k-mers are entirely absent from the query bloom filter).
    """
    p = argparse.ArgumentParser()
    p.add_argument('catlas_prefix')
    p.add_argument('query')
    p.add_argument('cdbg_nodefile')
    p.add_argument('-o', '--output', type=argparse.FileType('wt'))
    p.add_argument('-k', '--ksize', default=31, type=int,
                   help='k-mer size (default: 31)')
    p.add_argument('-v', '--verbose', action='store_true')
    args = p.parse_args()

    contigs = os.path.join(args.catlas_prefix, 'contigs.fa.gz')

    assert args.output, 'must specify -o'
    outfp = args.output

    # Build a nodetable from the query sequences for k-mer membership checks.
    print('loading bf...', end=' ')
    bf = khmer.Nodetable(args.ksize, 3e8, 2)
    bf.consume_seqfile(args.query)
    print('done.')

    print('loading catlas...', end=' ')
    catlas = os.path.join(args.catlas_prefix, 'catlas.csv')
    domfile = os.path.join(args.catlas_prefix, 'first_doms.txt')
    top_node_id, dag, dag_up, dag_levels, catlas_to_cdbg = search_utils.load_dag(catlas)
    layer1_to_cdbg = search_utils.load_layer1_to_cdbg(catlas_to_cdbg, domfile)
    print('done.')

    # The node file is one integer cDBG node ID per line, gzipped.
    print('loading nodefile {}'.format(args.cdbg_nodefile))
    cdbg_nodes = set()
    with gzip.open(args.cdbg_nodefile, 'r') as fp:
        for line in fp:
            cdbg_nodes.add(int(line.strip()))

    if not cdbg_nodes:
        # FIX: previously an empty node file caused ZeroDivisionError in the
        # progress display below.
        print('node list is empty; nothing to do.')
        sys.exit(0)

    print('loading contigs')

    total_bp = 0
    total_seqs = 0
    n_homogeneous = 0
    n_missing = 0
    bp_missing = 0

    for n, record in enumerate(screed.open(contigs)):
        if n % 10000 == 0:
            offset_f = total_seqs / len(cdbg_nodes)
            print('...at n {} ({:.1f}% of shadow)'.format(total_seqs,
                                                          offset_f * 100),
                  end='\r')

        # Contig names are the cDBG node IDs; skip contigs not in our set.
        contig_id = int(record.name)
        if contig_id not in cdbg_nodes:
            continue

        counts = bf.get_kmer_counts(record.sequence)
        if min(counts) == max(counts):
            n_homogeneous += 1

        if max(counts) == 0:
            # No k-mer of this contig appears in the query: pure overhead.
            n_missing += 1
            bp_missing += len(record.sequence)

        outfp.write('{}\n'.format(len(record.sequence)))
        total_bp += len(record.sequence)
        total_seqs += 1

    print('')
    print('fetched {} contigs, {} bp matching node list.'.format(total_seqs, total_bp))
    print('n_homogeneous: {}'.format(n_homogeneous))
    print('pure overhead count: {} seqs / {} bp'.format(n_missing, bp_missing))

    sys.exit(0)


if __name__ == '__main__':
    main()
|
// By KRT girl xiplus
#include <bits/stdc++.h>
#define endl '\n'
using namespace std;
// Read N candy sizes and answer Q range-count queries [a, b] with two binary
// searches over the sorted sizes.
int main() {
  int total = 0;
  int queries = 0;
  std::cin >> total >> queries;

  std::vector<int> sizes;
  sizes.reserve(total);
  for (int i = 0; i < total; ++i) {
    int value = 0;
    std::cin >> value;
    sizes.push_back(value);
  }
  std::sort(sizes.begin(), sizes.end());

  for (int i = 0; i < queries; ++i) {
    int lo = 0;
    int hi = 0;
    std::cin >> lo >> hi;
    // Count of elements in [lo, hi].
    const auto matches = std::upper_bound(sizes.begin(), sizes.end(), hi) -
                         std::lower_bound(sizes.begin(), sizes.end(), lo);
    if (matches != 0) {
      std::cout << matches << '\n';
    } else {
      std::cout << "The candies are too short" << '\n';
    }
  }
}
|
#!/usr/bin/env python
"""PySide port of the richtext/syntaxhighlighter example from Qt v4.x"""
import sys
import re
from PySide import QtCore, QtGui
import syntaxhighlighter_rc
class MainWindow(QtGui.QMainWindow):
    """Main window: a QTextEdit wired to the regex-based Highlighter.

    NOTE(review): PySide1 / Python 2-era code (``unicode``, string-based
    SIGNAL/SLOT connections); would need porting for PySide2+/Python 3.
    """

    def __init__(self, parent=None):
        QtGui.QMainWindow.__init__(self, parent)

        self.highlighter = Highlighter()

        self.setupFileMenu()
        self.setupEditor()

        self.setCentralWidget(self.editor)
        self.setWindowTitle(self.tr("Syntax Highlighter"))

    def newFile(self):
        """Clear the editor contents."""
        self.editor.clear()

    def openFile(self, path=""):
        """Load a qmake file into the editor; prompt for a path if none given."""
        fileName = path

        if fileName=="":
            fileName,_ = QtGui.QFileDialog.getOpenFileName(self, self.tr("Open File"), "",
                                                           "qmake Files (*.pro *.prf *.pri)")

        if fileName!="":
            inFile = QtCore.QFile(fileName)
            if inFile.open(QtCore.QFile.ReadOnly | QtCore.QFile.Text):
                self.editor.setPlainText(unicode(inFile.readAll()))

    def setupEditor(self):
        """Create the editor widget and register the highlight patterns."""
        # ALL_CAPS words: qmake variables.
        variableFormat = QtGui.QTextCharFormat()
        variableFormat.setFontWeight(QtGui.QFont.Bold)
        variableFormat.setForeground(QtCore.Qt.blue)
        self.highlighter.addMapping("\\b[A-Z_]+\\b", variableFormat)

        # '#' to end of line: comments.
        singleLineCommentFormat = QtGui.QTextCharFormat()
        singleLineCommentFormat.setBackground(QtGui.QColor("#77ff77"))
        self.highlighter.addMapping("#[^\n]*", singleLineCommentFormat)

        # Double-quoted strings.
        quotationFormat = QtGui.QTextCharFormat()
        quotationFormat.setBackground(QtCore.Qt.cyan)
        quotationFormat.setForeground(QtCore.Qt.blue)
        self.highlighter.addMapping("\".*\"", quotationFormat)

        # lowercase identifier followed by parentheses: function calls.
        functionFormat = QtGui.QTextCharFormat()
        functionFormat.setFontItalic(True)
        functionFormat.setForeground(QtCore.Qt.blue)
        self.highlighter.addMapping("\\b[a-z0-9_]+\\(.*\\)", functionFormat)

        font = QtGui.QFont()
        font.setFamily("Courier")
        font.setFixedPitch(True)
        font.setPointSize(10)

        self.editor = QtGui.QTextEdit()
        self.editor.setFont(font)
        self.highlighter.addToDocument(self.editor.document())

    def setupFileMenu(self):
        """Build the File menu (New, Open, Exit) with standard shortcuts."""
        fileMenu = QtGui.QMenu(self.tr("&File"), self)
        self.menuBar().addMenu(fileMenu)

        newFileAct = QtGui.QAction(self.tr("&New..."), self)
        newFileAct.setShortcut(QtGui.QKeySequence(self.tr("Ctrl+N", "File|New")))
        self.connect(newFileAct, QtCore.SIGNAL("triggered()"), self.newFile)
        fileMenu.addAction(newFileAct)

        openFileAct = QtGui.QAction(self.tr("&Open..."), self)
        openFileAct.setShortcut(QtGui.QKeySequence(self.tr("Ctrl+O", "File|Open")))
        self.connect(openFileAct, QtCore.SIGNAL("triggered()"), self.openFile)
        fileMenu.addAction(openFileAct)

        fileMenu.addAction(self.tr("E&xit"), QtGui.qApp, QtCore.SLOT("quit()"),
                           QtGui.QKeySequence(self.tr("Ctrl+Q", "File|Exit")))
class Highlighter(QtCore.QObject):
    """Regex-driven highlighter that re-formats blocks on document changes.

    Maintains a mapping of regex pattern -> QTextCharFormat and applies all
    matching formats to each changed block via the block's QTextLayout.
    """

    def __init__(self, parent=None):
        QtCore.QObject.__init__(self, parent)

        # pattern string -> QTextCharFormat
        self.mappings = {}

    def addToDocument(self, doc):
        """Re-highlight whenever the document's contents change."""
        self.connect(doc, QtCore.SIGNAL("contentsChange(int, int, int)"), self.highlight)

    def addMapping(self, pattern, format):
        """Register a regex `pattern` to be rendered with `format`."""
        self.mappings[pattern] = format

    def highlight(self, position, removed, added):
        """Slot for contentsChange: highlight every block touched by the edit."""
        doc = self.sender()

        block = doc.findBlock(position)
        if not block.isValid():
            return

        if added > removed:
            endBlock = doc.findBlock(position + added)
        else:
            endBlock = block

        while block.isValid() and not (endBlock < block):
            self.highlightBlock(block)
            block = block.next()

    def highlightBlock(self, block):
        """Apply every matching mapping to one text block and mark it dirty."""
        layout = block.layout()
        text = block.text()

        overrides = []

        for pattern in self.mappings:
            for m in re.finditer(pattern,text):
                range = QtGui.QTextLayout.FormatRange()
                s,e = m.span()
                range.start = s
                range.length = e-s
                range.format = self.mappings[pattern]
                overrides.append(range)

        layout.setAdditionalFormats(overrides)
        block.document().markContentsDirty(block.position(), block.length())
if __name__ == '__main__':
    # Start the Qt application, show the editor window, and preload the
    # bundled example file from the compiled resource system.
    app = QtGui.QApplication(sys.argv)
    window = MainWindow()
    window.resize(640, 512)
    window.show()
    window.openFile(":/examples/example")
    sys.exit(app.exec_())
|
<gh_stars>1-10
package info.javaspec;
import java.util.List;
/** A collection of related specs, that is a composite with potential sub-collections. */
public interface SpecCollection {
  /** Human-readable description of this collection. */
  String description();

  /** The behaviors this collection's specs are intended to verify. */
  List<String> intendedBehaviors();

  /** Run every spec in this collection, reporting progress to the observer. */
  void runSpecs(RunObserver observer);

  /** Nested child collections (empty for a leaf collection). */
  List<SpecCollection> subCollections();
}
|
#!/bin/bash
# Syntax-check the generated Ansible role playbooks for one product.
#
#   $1 - path to the ansible-playbook binary
#   $2 - directory containing the generated playbooks
#   $3 - product name embedded in the playbook file names
pushd "$2" > /dev/null
# "$1" is quoted so a binary path with spaces works; the playbook glob must
# stay unquoted so the shell expands it.
"$1" --syntax-check ssg-$3-role-*.yml
ret=$?
popd > /dev/null
exit $ret
|
# Habitat plan for the Ruby application scaffolding package.
pkg_name=scaffolding-ruby
pkg_origin=core
pkg_version="0.8.11"
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_license=('Apache-2.0')
pkg_description="Habitat Plan Scaffolding for Ruby Applications"
pkg_upstream_url="https://github.com/habitat-sh/core-plans/tree/main/scaffolding-ruby"
pkg_deps=(core/bundler core/ruby core/tar core/busybox-static core/rq core/gcc core/make core/pkg-config)
pkg_build_deps=(core/coreutils core/sed)
pkg_bin_dirs=(bin)

# Point gem resolution at bundler's package path so the build is hermetic.
do_prepare() {
  GEM_HOME="$(pkg_path_for bundler)"
  build_line "Setting GEM_HOME=$GEM_HOME"
  GEM_PATH="$GEM_HOME"
  build_line "Setting GEM_PATH=$GEM_PATH"
  export GEM_HOME GEM_PATH
}

# Nothing to compile -- this package only ships scripts and Ruby sources.
do_build() {
  return 0
}

do_install() {
  # Library files are not executable; bin/libexec entries are.
  find lib -type f | while read -r f; do
    install -D -m 0644 "$f" "$pkg_prefix/$f"
  done
  find bin libexec -type f | while read -r f; do
    install -D -m 0755 "$f" "$pkg_prefix/$f"
  done

  # Embed the release version and author information of the program.
  sed \
    -e "s,@author@,$pkg_maintainer,g" \
    -e "s,@version@,$pkg_version/$pkg_release,g" \
    -i "$pkg_prefix/lib/ruby_scaffolding/cli.rb"

  # Wrap the Ruby program so it can be executed from anywhere
  wrap_ruby_bin "$pkg_prefix/bin/gemfile-parser"
}

# Replace $1 with a shell shim that sets up the gem environment and execs the
# real program (moved to ${1}.real) under the packaged Ruby interpreter.
wrap_ruby_bin() {
  local bin="$1"
  build_line "Adding wrapper $bin to ${bin}.real"
  mv -v "$bin" "${bin}.real"
  cat <<EOF > "$bin"
#!$(pkg_path_for busybox-static)/bin/sh
set -e
if test -n "\$DEBUG"; then set -x; fi
export GEM_HOME="$GEM_HOME"
export GEM_PATH="$GEM_PATH"
unset RUBYOPT GEMRC
exec $(pkg_path_for ruby)/bin/ruby ${bin}.real \$@
EOF
  chmod -v 755 "$bin"
}
|
<reponame>andree182/Omni-Notes<gh_stars>1-10
/*******************************************************************************
* Copyright 2014 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package it.feio.android.omninotes.utils;
/**
 * Application-wide constant values: storage locations, intent actions and
 * extras, preference keys, MIME types and date formats.
 *
 * NOTE(review): the "constant interface" pattern is generally discouraged
 * (a final class with a private constructor is preferred), but changing it
 * would touch every implementor -- left as-is.
 */
public interface Constants {

    final String TAG = "Omni Notes";
    final String DATABASE_NAME = "omni-notes";
    final String PACKAGE = "it.feio.android.omninotes";
    final String PREFS_NAME = PACKAGE + "_preferences";

    // On-device storage layout.
    final String APP_STORAGE_DIRECTORY = TAG;
    final String APP_STORAGE_DIRECTORY_ATTACHMENTS = "attachments";
    final String APP_STORAGE_DIRECTORY_SB_SYNC = "db_sync";

    // Splash screen timer
    static int SPLASH_TIME_OUT = 1200;
    static long SPLASH_MIN_OFFSET = 24 * 60 * 60 * 1000; // 1 day

    final String DEV_EMAIL = "<EMAIL>";

    // Used for updates retrieval
    final String PS_METADATA_FETCHER_URL = "http://www.iosue.it/federico/apps/PSMetadataFetcher/get_app_data.php?url=";
    final String PLAY_STORE_URL = "https://play.google.com/store/apps/details?id=";
    static long UPDATE_MIN_FREQUENCY = 24 * 60 * 60 * 1000; // 1 day
    final String DRIVE_FOLDER_LAST_BUILD = "http://goo.gl/R10Tr5";

    // Notes swipe
    final int SWIPE_MARGIN = 30;
    final int SWIPE_OFFSET = 100;

    // Notes content masking
    final String MASK_CHAR = "*";

    // Used for ACRA
    final String ACRA_MAILER_URL = "http://www.iosue.it/federico/apps/acramailer/acra.php?email=";

    final int THUMBNAIL_SIZE = 300;

    // Date/time formats used throughout the app.
    final String DATE_SEPARATOR = "/";
    final String TIME_SEPARATOR = ":";
    final String DATE_FORMAT_EU = "dd/MM/yyyy HH:mm";
    final String DATE_FORMAT_ISO8601 = "YYYY-MM-DD HH:mm:SS.SSS";
    final String DATE_FORMAT_SHORT = "d MMM HH:mm";
    final String DATE_FORMAT_SHORT_DATE = "d MMM yyyy";
    final String DATE_FORMAT_SHORT_TIME = "HH" + TIME_SEPARATOR + "mm";
    final String DATE_FORMAT_SORTABLE = "yyyyMMdd_HHmmss_S";
    final String DATE_FORMAT_SORTABLE_OLD = "yyyyMMddHHmmss";
    final String DATE_FORMAT_EXPORT = "yyyy.MM.dd-HH.mm";

    final boolean LOAD_NOTES_SYNC = true;

    // Intent extras.
    final String INTENT_KEY = "note_id";
    final String INTENT_NOTE = "note";
    final String INTENT_MANAGING_SHARE = "managing_share"; // Used when handling received data
    final String INTENT_IMAGE = "image";
    final String GALLERY_TITLE = "gallery_title";
    final String GALLERY_CLICKED_IMAGE = "gallery_clicked_image";
    final String GALLERY_IMAGES = "gallery_images";
    final int INTENT_ALARM_CODE = 12345;
    final String INTENT_TAG = "tag";
    final String INTENT_DETAIL_RESULT_CODE = "detail_result_code";
    final String INTENT_DETAIL_RESULT_MESSAGE = "detail_result_message";
    final String INTENT_GOOGLE_NOW = "com.google.android.gm.action.AUTO_SEND";
    final String INTENT_WIDGET = "widget_id";
    final String INTENT_UPDATE_DASHCLOCK = "update_dashclock";

    // Custom intent actions
    final String ACTION_START_APP = "action_start_app";
    final String ACTION_RESTART_APP = "action_restart_app";
    final String ACTION_DISMISS = "action_dismiss";
    final String ACTION_SNOOZE = "action_snooze";
    final String ACTION_POSTPONE = "action_postpone";
    final String ACTION_SHORTCUT = "action_shortcut";
    final String ACTION_WIDGET = "action_widget";
    final String ACTION_WIDGET_TAKE_PHOTO = "action_widget_take_photo";
    final String ACTION_WIDGET_SHOW_LIST = "action_widget_show_list";
    final String ACTION_NOTIFICATION_CLICK = "action_notification_click";
    final String ACTION_MERGE = "action_merge";

    final String MESSAGE = "message";

    // SharedPreferences keys.
    final String PREF_FIRST_RUN = "first_run";
    final String PREF_LAST_UPDATE_CHECK = "last_update_check";
    final String PREF_NAVIGATION = "navigation";
    final String PREF_SORTING_COLUMN = "sorting_column";
    final String PREF_SORTING_ORDER = "sorting_direction";
    final String PREF_PASSWORD = "password";
    final String PREF_PASSWORD_QUESTION = "<PASSWORD>";
    final String PREF_PASSWORD_ANSWER = "<PASSWORD>";
    final String PREF_RATE_DISMISSED = "rate_dismissed";
    final String PREF_LAUNCH_COUNT = "launch_count";
    final String PREF_FIRST_LAUNCH = "first_launch";
    final String PREF_KEEP_CHECKED = "keep_checked";
    final String PREF_KEEP_CHECKMARKS = "show_checkmarks";
    final String PREF_TOUR_PREFIX = "tour_";
    final String PREF_EXPANDED_VIEW = "expanded_view";
    final String PREF_COLORS_APP_DEFAULT = "strip";
    final String PREF_WIDGET_PREFIX = "widget_";

    final String EXPORT_FILE_NAME = TAG;

    // Attachment MIME types and matching file extensions.
    final String MIME_TYPE_IMAGE = "image/jpeg";
    final String MIME_TYPE_AUDIO = "audio/3gp";
    final String MIME_TYPE_VIDEO = "video/mp4";
    final String MIME_TYPE_SKETCH = "image/png";
    final String MIME_TYPE_FILES = "file/*";
    final String MIME_TYPE_IMAGE_EXT = ".jpeg";
    final String MIME_TYPE_AUDIO_EXT = ".3gp";
    final String MIME_TYPE_VIDEO_EXT = ".mp4";
    final String MIME_TYPE_SKETCH_EXT = ".png";

    // Error codes.
    final int ERROR_NOTE_NOT_DELETED = -1;
    final int ERROR_ATTACHMENTS_NOT_DELETED = -2;

    final String SECURITY_ALGORITHM = "MD5";

    final String TIMESTAMP_NEVER = "13910051406040"; // I really don't believe we'll reach 10/17/2410 1:10:06 AM
}
|
import mongoose from 'mongoose';
const expect = require('chai').expect;
const request = require('supertest');
const app = require('../app');
const User = require('../models/user');
const users = require('../seed-data/users');
// CRUD tests for the User controller against a local test MongoDB instance.
// FIX: every supertest `.end((err, res) => ...)` callback previously ignored
// `err`, so transport failures crashed on an undefined `res` instead of being
// reported; errors are now forwarded to `done` as supertest recommends.
describe('User Controller', () => {
  before(done => {
    // Connect to MongoDB
    mongoose.connect('mongodb://localhost/test', {
      useNewUrlParser: true
    });
    const db = mongoose.connection;
    db.on('error', console.error.bind(console, 'connection error'));
    db.once('open', () => {
      console.log('We are connected to test database!');
      done();
    });
  });

  // Create
  describe('Create user', () => {
    it('Creates a new user', done => {
      const user = {
        name: 'John Doe',
        email: 'johndoe@example.com',
        age: 18
      };
      request(app)
        .post('/api/users')
        .send(user)
        .end((err, res) => {
          if (err) return done(err);
          expect(res.statusCode).to.equal(200);
          expect(res.body.name).to.equal('John Doe');
          done();
        });
    });
  });

  // Read
  describe('Read users', () => {
    it('Reads all users', done => {
      request(app)
        .get('/api/users')
        .end((err, res) => {
          if (err) return done(err);
          expect(res.statusCode).to.equal(200);
          expect(res.body).to.deep.equal(users);
          done();
        });
    });
  });

  // Update
  describe('Update user', () => {
    it('Updates an existing user', done => {
      const user = {
        _id: users[0]._id,
        name: 'John Doe 2',
        email: 'johndoe2@example.com',
        age: 22
      };
      request(app)
        .put(`/api/users/${user._id}`)
        .send(user)
        .end((err, res) => {
          if (err) return done(err);
          expect(res.statusCode).to.equal(200);
          expect(res.body).to.deep.equal(user);
          done();
        });
    });
  });

  // Delete
  describe('Delete user', () => {
    it('Deletes an existing user', done => {
      const user = {
        _id: users[0]._id
      };
      request(app)
        .delete(`/api/users/${user._id}`)
        .end((err, res) => {
          if (err) return done(err);
          expect(res.statusCode).to.equal(200);
          expect(res.body).to.deep.equal(user);
          done();
        });
    });
  });

  // Close connection
  after(() => {
    mongoose.connection.close();
  });
});
def bubble_sort(list):
    """Sort a list in ascending order in place via bubble sort; returns it.

    Includes the standard early-exit optimization: stop as soon as a full
    pass performs no swaps (already sorted), making the best case O(n).
    NOTE: the parameter name shadows the built-in ``list``; it is kept
    unchanged for backward compatibility with existing callers.
    """
    for length in range(len(list) - 1, 0, -1):
        swapped = False
        for i in range(length):
            if list[i] > list[i + 1]:
                # Swap adjacent out-of-order elements.
                list[i], list[i + 1] = list[i + 1], list[i]
                swapped = True
        if not swapped:
            # No swaps in this pass: the list is sorted, stop early.
            break
    return list
# Demo: sort a sample list and print it.
# FIX: renamed the module-level variable from `list`, which shadowed the
# built-in `list` type for the rest of the script.
data = [3, 7, 2, 6, 5]
sorted_list = bubble_sort(data)
print(sorted_list)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.