text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# Discover and run every unit test under test/.
# NOTE(review): python2.7 is end-of-life — confirm whether the suite can move to python3.
python2.7 -m unittest discover -s test/
|
# Sum the integers 1 through 10 (inclusive) and print the result.
# Fixes: the original accumulated into a variable named `sum`, shadowing
# the built-in; use the built-in sum() over range() instead of a manual loop.
total = sum(range(1, 11))
# Print the sum
print(total)
#!/bin/bash
# Script intended to be executed from ncmpcpp (execute_on_song_change
# preference) running from urxvt to set album cover as background image
# Copyright (c) 2013 Vyacheslav Levit
# Licensed under The MIT License: http://opensource.org/licenses/MIT
# Root of the music library; paths from `mpc --format %file%` are relative to it.
MUSIC_DIR=$HOME/Músicas
DARKEN=50 # 0 - original image colors, 100 - absolutely black background
# NOTE(review): DARKEN, TMP and IM_ARGS are defined but never used below — confirm intent.
TMP=/tmp
# Resized cover image handed to urxvt via the OSC 20 escape sequence.
COVER=/tmp/cover.jpg
IM_ARGS=(-limit memory 32mb -limit map 64mb)
# Clear the background by moving the image off-screen (urxvt OSC 20).
function reset_background
{
# is there any better way?
printf "\e]20;;100x100+1000+1000\a"
}
# Run everything in a background subshell so ncmpcpp is not blocked while
# the cover is located and resized.
{
# Album name and library-relative file path of the currently playing track.
album="$(mpc --format %album% current)"
file="$(mpc --format %file% current)"
album_dir="${file%/*}"
[[ -z "$album_dir" ]] && exit 1
album_dir="$MUSIC_DIR/$album_dir"
# Search the album directory (one level deep per subdirectory) for images
# named like a cover: the album title, "cover", "folder", "artwork" or
# "front", with a common image extension.
covers="$(find "$album_dir" -type d -exec find {} -maxdepth 1 -type f -iregex ".*/.*\(${album}\|cover\|folder\|artwork\|front\).*[.]\(jpe?g\|png\|gif\|bmp\)" \; )"
# Use only the first match.
src="$(echo -n "$covers" | head -n1)"
rm -f "$COVER"
if [[ -n "$src" ]] ; then
#resize the image's width to 300px
convert "$src" -resize 300x "$COVER"
if [[ -f "$COVER" ]] ; then
#scale down the cover to 30% of the original
#place it 1% away from left and 50% away from top.
printf "\e]20;${COVER};30x30+1+50:op=keep-aspect\a"
else
reset_background
fi
else
reset_background
fi
} &
|
from datetime import timedelta, datetime
from airflow.models import DAG, Variable
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import PythonOperator
from airflow.utils.dates import days_ago
from sqlalchemy import create_engine, Table, Column, Integer, String, Boolean
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
import fns.detikcom_scraper.engine as detikcom_scraper
# MySQL engine for the scraper's result table; pool_recycle avoids MySQL's
# wait_timeout dropping idle pooled connections.
# NOTE(review): credentials are hard-coded — consider an Airflow connection
# or environment variable before deploying.
db = create_engine('mysql://root:root@localhost:3306/airflow', pool_recycle=3600)
Base = declarative_base()


class Detikcom(Base):
    """ORM model for one scraped detik.com index entry."""
    __tablename__ = 'detikcom_index'
    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Article headline.
    title = Column(String(255), nullable=False)
    # Article URL.
    url = Column(String(255), nullable=False)
    # Whether this entry has already been consumed downstream.
    used = Column(Boolean, default=False)


# Create the table at import time if it does not exist yet.
Base.metadata.create_all(bind=db)
# Default task arguments shared by every task in this DAG.
graph_args = {
    'owner': 'Airflow',
    'depends_on_past': False,
    'start_date': days_ago(0),
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
}

# Scrape the detik.com index every 30 minutes.
graph = DAG(
    dag_id='detikcom_index',
    description='detik.com index scraper',
    default_args=graph_args,
    schedule_interval='*/30 * * * *',
    tags=['example', 'scraper']
)
def collect_callable(ds, **context):
    """Scrape one page of today's detik.com index.

    The page cursor cycles through 1..24 via the Airflow Variable
    'detikcom_index_page'.  Returns the scraped entries, which Airflow
    pushes to XCom for the downstream 'save' task.
    """
    page = int(Variable.get('detikcom_index_page'))
    # Defensive reset in case an out-of-range value was stored previously.
    if page > 24:
        page = 1
    print('PAGE: {}'.format(page))
    indexes = detikcom_scraper.get_and_parse_index(
        date=datetime.today().strftime('%m/%d/%Y'), page=page)
    # Fix: advance with wrap-around so the stored cursor never leaves 1..24
    # (the original stored 25 and only corrected it on the next run).
    Variable.set('detikcom_index_page', 1 if page >= 24 else page + 1)
    return indexes
# Task 1: scrape the current index page and push the results to XCom.
collect = PythonOperator(
    task_id='collect',
    provide_context=True,
    python_callable=collect_callable,
    dag=graph
)
def save_callable(ds, **context):
    """Persist the entries scraped by the 'collect' task into MySQL."""
    datas = context['task_instance'].xcom_pull(task_ids='collect')
    Session = sessionmaker()
    Session.configure(bind=db)
    session = Session()
    try:
        # Tolerate an empty/missing upstream result instead of raising
        # TypeError when iterating None.
        for data in (datas or []):
            news = Detikcom(title=data['title'], url=data['url'])
            session.add(news)
            # Per-row commit, preserving the original behaviour.
            session.commit()
    finally:
        # Fix: the original leaked the session; always release the connection.
        session.close()
# Task 2: write the scraped entries to the database.
save = PythonOperator(
    task_id='save',
    provide_context=True,
    python_callable=save_callable,
    dag=graph
)

# Ordering: collect first, then save consumes its XCom result.
collect >> save
|
<reponame>schoolofacceleratedlearning/huboard-web
# Mailer preview for the SaaS onboarding emails
# (viewable at /rails/mailers in development).
module Saas
  class UserOnboardingPreview < ActionMailer::Preview
    # Renders the welcome email with a placeholder recipient address.
    def welcome_email
      Saas::UserOnboarding.welcome_email({
        email: "<EMAIL>"
      })
    end
  end
end
|
import re
def word_frequency(text_file):
    """Print each word in *text_file* with its occurrence count.

    Words are lowercased and tokenised with the ``\\b\\w+\\b`` regex.
    Returns the frequency mapping as a dict (the original returned None;
    returning the counts is backward compatible and makes the function
    reusable and testable).
    """
    # Local import keeps the module's top-level imports unchanged.
    from collections import Counter
    with open(text_file, 'r') as file:
        text = file.read().lower()
    # Counter replaces the hand-rolled if/else counting loop and preserves
    # first-seen insertion order, matching the original print order.
    word_freq = Counter(re.findall(r'\b\w+\b', text))
    for word, freq in word_freq.items():
        print(f"{word}: {freq}")
    return dict(word_freq)
# Example usage
word_frequency('input.txt') |
# Termux build recipe for GNU Zile (lightweight Emacs clone).
TERMUX_PKG_HOMEPAGE=https://www.gnu.org/software/zile/
TERMUX_PKG_DESCRIPTION="Lightweight clone of the Emacs text editor"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=2.4.15
TERMUX_PKG_SRCURL=https://mirrors.kernel.org/gnu/zile/zile-${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=39c300a34f78c37ba67793cf74685935a15568e14237a3a66fda8fcf40e3035e
TERMUX_PKG_DEPENDS="libgc, ncurses"
TERMUX_PKG_BUILD_IN_SRC=true
# Pretend spawn.h is absent so configure does not pick up posix_spawn.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
ac_cv_header_spawn_h=no
"
termux_step_post_configure() {
# zile uses help2man to build the zile.1 man page, which would require
# a host build. To avoid that just copy a pre-built man page.
cp $TERMUX_PKG_BUILDER_DIR/zile.1 $TERMUX_PKG_BUILDDIR/doc/zile.1
# Timestamp the copied page in the future so make does not try to regenerate it.
touch -d "next hour" $TERMUX_PKG_BUILDDIR/doc/zile.1*
}
# Register/unregister zile as an update-alternatives "editor" candidate
# on package install and removal (escaped \$1 expands at run time).
termux_step_create_debscripts() {
cat <<- EOF > ./postinst
#!$TERMUX_PREFIX/bin/sh
if [ "$TERMUX_PACKAGE_FORMAT" = "pacman" ] || [ "\$1" = "configure" ] || [ "\$1" = "abort-upgrade" ]; then
if [ -x "$TERMUX_PREFIX/bin/update-alternatives" ]; then
update-alternatives --install \
$TERMUX_PREFIX/bin/editor editor $TERMUX_PREFIX/bin/zile 35
fi
fi
EOF
cat <<- EOF > ./prerm
#!$TERMUX_PREFIX/bin/sh
if [ "$TERMUX_PACKAGE_FORMAT" = "pacman" ] || [ "\$1" != "upgrade" ]; then
if [ -x "$TERMUX_PREFIX/bin/update-alternatives" ]; then
update-alternatives --remove editor $TERMUX_PREFIX/bin/zile
fi
fi
EOF
}
|
package impl

import (
	"testing"
)

// TestForEachRowCallFunc is a scaffolded (fully commented-out) table-driven
// test for ForEachRowCallFunc, kept as a template until cases are filled in.
// NOTE(review): an empty test passes silently — consider t.Skip("TODO") so
// the missing coverage is visible in test output.
func TestForEachRowCallFunc(t *testing.T) {
	// type args struct {
	// ctx context.Context
	// callback interface{}
	// }
	// tests := []struct {
	// name string
	// args args
	// wantF func(sqldb.RowScanner) error
	// wantErr bool
	// }{
	// // TODO: Add test cases.
	// }
	// for _, tt := range tests {
	// t.Run(tt.name, func(t *testing.T) {
	// gotF, err := ForEachRowCallFunc(tt.args.ctx, tt.args.callback)
	// if (err != nil) != tt.wantErr {
	// t.Errorf("ForEachRowCallFunc() error = %v, wantErr %v", err, tt.wantErr)
	// return
	// }
	// if !reflect.DeepEqual(gotF, tt.wantF) {
	// t.Errorf("ForEachRowCallFunc() = %v, want %v", gotF, tt.wantF)
	// }
	// })
	// }
}
|
CMD_DESCRIPTION="Initialize plugin for the first time."

# Host-side composer cache directory; overridable via ATHENA_COMPOSER_DIR.
composer_dir="${ATHENA_COMPOSER_DIR:-"$HOME/.composer"}"

if [[ ! -d "$composer_dir" ]]; then
	if ! mkdir "$composer_dir"; then
		# Fix: message typo "Failed to created" -> "Failed to create".
		athena.fatal "Failed to create $composer_dir directory"
	fi
fi

# Share the host composer cache with the container.
athena.color.print_info "Mounting $composer_dir at /root/.composer"
athena.docker.add_option -v "$composer_dir:/root/.composer"
|
#!/bin/sh -eux
# Build the pg_auto_failover image with BuildKit enabled, mirroring all
# output (stdout and stderr) to build.log while still showing it live.
DOCKER_BUILDKIT=1 docker build --progress=plain --tag rekgrpth/pg_auto_failover . 2>&1 | tee build.log
|
import styled from 'styled-components';
// Sticky page-header bar: full-width, 80px tall, content centered with
// flexbox, pinned to the viewport top above other content (z-index 10)
// with a subtle drop shadow.
const Container = styled.div`
background: #fff;
height: 80px;
width: 100%;
display: flex;
justify-content: center;
align-items: center;
font-size: 1.2rem;
position: sticky;
top: 0;
z-index: 10;
box-shadow: 0 3px 5px rgba(57, 63, 72, 0.3);
`;
export default Container;
|
var express = require("express");
var mongoose = require("mongoose")
const app = express();
const port = 3000;
// NOTE(review): the connection string embeds credentials — move them to an
// environment variable before deploying.
mongoose.connect("mongodb+srv://jota_nascimento:<EMAIL>/biblioteca?retryWrites=true&w=majority", {useNewUrlParser: true, useUnifiedTopology:true})
// "livros" collection model: a book with name, category, code and author.
const Livros = mongoose.model("livros",{
nome: String,
categoria: String,
codigo: String,
autor: String,
});
app.set("view engine", "ejs");
// Fix: app.set takes (name, value); the original passed __dirname and
// "/views" as two separate arguments, so the extra argument was discarded
// and the views path was just __dirname.
app.set("views", __dirname + "/views");
// Parse HTML form bodies and JSON bodies.
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
// Home page route.
app.get("/", (rep,res)=>{
res.send("Página Inicial");
});
// Route that renders the book-registration form page.
app.get("/cadastrarLivros", (req, res) => {
res.render("formlivros")
})
// POST handler that saves a new book in the database.
app.post("/cadastrarLivros", (req, res) => {
let livro = new Livros() // create a new Livros document
livro.nome = req.body.nome // fields come from the form's `name` attributes
livro.categoria = req.body.categoria
livro.codigo = req.body.codigo
livro.autor = req.body.autor
livro.save(err => { // persist and check for errors
if (err) // on failure
return res.status(500).send("Erro ao cadastrar") // respond with 500
return res.redirect("/livros") // on success, go to the book list
})
})
// Route that lists every registered book.
app.get("/livros", (req, res) => {
Livros.find({}, (err, livro) => { // empty filter: fetch all documents
if (err)
return res.status(500).send("Erro ao consultar Produto")
res.render("livros", {
item: livro
}) // render the book-list page
})
})
// Route that renders the edit form pre-filled with one book (by id).
app.get("/formEditlivro/:id",(req,res)=>{
Livros.findById(req.params.id,(err,livros)=>{
if(err)
return res.status(500).send("erro ao consultar livro")
res.render("formEditlivro",{livros:livros});
});
});
// POST handler that applies the submitted edits and saves the book.
app.post("/formEditlivro", (req, res) => {
var id = req.body.id
Livros.findById(id, (err, livros) => {
if (err)
return res.status(500).send("Erro ao consultar Livro")
livros.nome = req.body.nome
livros.codigo = req.body.codigo
livros.categoria = req.body.categoria
livros.autor = req.body.autor
livros.save(err => {
if (err)
return res.status(500).send("Erro ao editar livro")
return res.redirect("/livros")
})
})
})
// DELETE route: remove one book by id, then go back to the book list.
app.get("/deletarLivro/:id", (req, res) => {
	var chave = req.params.id
	Livros.deleteOne({
		_id: chave
	}, (err, result) => {
		if (err)
			// Fix: the original had `res.status(500), send(...)` — the comma
			// operator plus an undefined `send` crashed instead of replying.
			return res.status(500).send("Erro ao excluir livro")
		// Fix: redirect only after the delete completes; the original issued
		// the redirect outside the callback, racing the database operation.
		res.redirect("/livros")
	})
})
// Search route: match the query term against name, category, code or author.
app.get("/pesquisar", (req,res)=>{
var busca = req.query.pesquisa; // search term from the query string
Livros.find({$or:[{nome:busca},{categoria:busca},{codigo:busca},{autor:busca}]}, (err, livro) => {
if (err)
return res.status(500).send("Erro ao consultar Produto")
res.render("livros", {item: livro}) // reuse the book-list page for results
})
})
// Start the HTTP server.
app.listen(port, ()=>{
console.log("Servidor rodando na porta "+port)
})
const getUrl = require('../utils').getUrl;
const logger = require('../logger').log;
const dbHandler = require('../db/resource.db');

// Handles a message posted in the dev-resources channel: every URL in the
// message is logged and stored, then re-posted as an embed; messages with
// no URL are deleted and the author is notified via DM.
module.exports = (client, message) => {
  // This function has been called after a message has been detected in the dev-resources channel
  const messageUrls = getUrl(message.content)
  // NOTE(review): client.channels.get is the discord.js v11 API — confirm
  // library version before upgrading (v12+ uses channels.cache.get).
  const channel = client.channels.get(process.env.RESOURCES_CHANNEL);
  if (messageUrls) {
    // Snapshot of the author used when persisting each resource.
    const authorObj = {
      id: message.author.id,
      username: message.author.username,
      discriminator: message.author.discriminator,
      avatar: message.author.avatarURL
    }
    // Persist every URL in parallel; each gets its own log entry.
    Promise.all(
      messageUrls.map(url => {
        logger({
          author: message.author.username,
          type: 'Automatic Link Submission',
          url: url,
          status: 'URL Processed'
        });
        return dbHandler.create({ link: url, author: authorObj });
      })
    )
      .then(responses => {
        // Remove the original message, then repost each stored resource
        // as an embed and acknowledge it with a reaction.
        message.delete(1000);
        responses.forEach(response => {
          channel.send({
            embed: {
              color: 4647373,
              title: response.payload.title,
              url: response.payload.url,
              description: "Thank you for submitting this resource, I am strong because of you 💪",
              thumbnail: {
                url: response.payload.image
              },
              author: {
                name: message.author.username,
                icon_url: message.author.avatarURL
              }
            }
          }).then(sendEmbed => sendEmbed.react(process.env.SENT_EMOJI));
          logger({
            author: message.author.username,
            type: 'Automatic Link Submission',
            url: response.payload.url,
            status: 'Success',
            avatar: message.author.avatarURL,
            message: response.message
          });
        });
      })
      .catch(error => {
        // NOTE(review): assumes the rejection carries a payload.url — a plain
        // Error would make error.payload undefined here; confirm dbHandler's
        // rejection shape.
        logger({
          author: message.author.username,
          type: 'Automatic Link Submission',
          url: error.payload.url,
          status: 'Error',
          message: error.message,
          avatar: message.author.avatarURL
        });
        console.log(error.message)
      });
  }
  else {
    // No URL found: delete the message and tell the author via DM.
    message.delete()
      .then((msg) => {
        msg.author.send(`Please do not send a message that is not a resource to the ${channel.name} channel`);
      })
      .catch(err => console.log("couldn't delete the message "+ err.msg))
  }
  // For the inital release version, this will do the job. We should eventually look at removing the original message
  // But we will want to monitor its working correctly for a while before removing them.
}
#!/bin/bash
set -e
set -o pipefail

# Run from the repository root (the parent of this script's directory).
cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd ..

# Fail fast when required configuration is missing.
: "${NEO4J_CONTAINER_ROOT:?Environment variable NEO4J_CONTAINER_ROOT is unset or empty}"
: "${NEO4J_DATA_DIR:?Environment variable NEO4J_DATA_DIR is unset or empty}"
: "${NEO4J_VERSION:?Environment variable NEO4J_VERSION is unset or empty}"
: "${NEO4J_CONTAINER_NAME:?Environment variable NEO4J_CONTAINER_NAME is unset or empty}"

if [ ! -d "${NEO4J_DATA_DIR}" ]; then
    echo "Neo4j data directory does not exist"
    exit 1
fi

# Start Neo4j detached, with data/logs/import/plugins mounted from the host
# and authentication disabled (intended for local development).
# Fix: quote all variable expansions (paths may contain spaces);
# NEO4J_ENV_VARS is intentionally left unquoted so it can expand to
# multiple --env options.
# shellcheck disable=SC2086
docker run --rm \
    --user="$(id -u):$(id -g)" \
    --publish=7474:7474 \
    --publish=7687:7687 \
    --detach \
    ${NEO4J_ENV_VARS} \
    --volume="${NEO4J_DATA_DIR}":/data \
    --volume="${NEO4J_CONTAINER_ROOT}"/logs:/logs \
    --volume="${NEO4J_CONTAINER_ROOT}"/import:/var/lib/neo4j/import \
    --volume="${NEO4J_CONTAINER_ROOT}"/plugins:/plugins \
    --env NEO4JLABS_PLUGINS='["apoc", "graph-data-science"]' \
    --env NEO4J_AUTH=none \
    --name "${NEO4J_CONTAINER_NAME}" \
    neo4j:"${NEO4J_VERSION}"

# Poll with cypher-shell until the server accepts queries.
echo "Waiting for the database to start..."
until docker exec --interactive --tty "${NEO4J_CONTAINER_NAME}" cypher-shell "RETURN 'Database has started successfully' AS message"; do
    sleep 1
done
|
#!/bin/sh
# Rename gzstream's `version` file out of the way before building.
# NOTE(review): presumably this avoids a clash with the C++ <version>
# header on case-insensitive include paths — confirm.
mv src/utils/gzstream/version src/utils/gzstream/version.txt
# Build and install with the compiler and prefix supplied by the packaging
# environment (e.g. conda-build sets CXX and PREFIX).
make install CXX="${CXX}" prefix="${PREFIX}"
|
#!/bin/bash
# Install target directory for the pickley binary; defaults to /usr/local/bin.
target=$1
if [[ -z "$target" ]]; then
	target=/usr/local/bin
fi
if [[ ! -d "$target" ]]; then
	echo "Folder $target does not exist"
	exit 1
fi
# Query PyPI for pickley's download URL.
# Fix: $(...) instead of backticks; quote expansions.
url=$(curl -s https://pypi.org/pypi/pickley/json | grep -Eo '"download_url":"([^"]+)"' | cut -d'"' -f4)
# NOTE(review): the original only *echoes* the curl command instead of
# running it — this looks like a dry-run leftover; kept as-is pending
# confirmation that the download should actually be executed.
echo curl -sLo "$target/pickley" "$url"
|
#!/usr/bin/env bash
# One-shot Heroku deploy: create the app, push the current master branch,
# open the deployed app in the browser, then stream its logs.
heroku create
git push heroku master
heroku open
heroku logs --tail
/*
 * Tencent is pleased to support the open source community by making 蓝鲸 available.
 * Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
 * Licensed under the MIT License (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 * http://opensource.org/licenses/MIT
 * Unless required by applicable law or agreed to in writing, software distributed under
 * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */

package output

import "configcenter/src/framework/core/types"

// customWrapper adapts a plain callback function to the outputer interface:
// every payload handed to Put is forwarded to runFunc.
type customWrapper struct {
	name    string
	runFunc func(data types.MapStr) error
}

// Name returns the outputer's description.
// This information will be printed when the outputer is abnormal, which is convenient for debugging.
func (cli *customWrapper) Name() string {
	return cli.name
}

// Put forwards the data to the wrapped callback and returns its error.
func (cli *customWrapper) Put(data types.MapStr) error {
	return cli.runFunc(data)
}

// Stop signals that processing should end its execution.
// It will be invoked at most once; this wrapper has nothing to tear down,
// so it exists only to satisfy the Outputer interface.
func (cli *customWrapper) Stop() error {
	// only compatible with the Outputer interface
	return nil
}
|
#!/bin/sh
# Install linuxdoc for the current user straight from GitHub.
# Security fix: fetch over https instead of http so the code being
# installed cannot be tampered with in transit.
pip3 install --user git+https://github.com/return42/linuxdoc.git
|
<reponame>kuyuri-iroha/owarini_arazu_s1c002
import Renderer from './Renderer';
import Shader from './Shader';
import Texture2D from './Texture2D';
import { mat4, vec3 } from 'gl-matrix';
/**
 * Thin wrapper around a WebGLProgram: shader linking, cached uniform and
 * attribute location lookup, and typed uniform-upload helpers.
 */
export default class {
  /** Create a vertex buffer object and upload `data` with the given usage hint. */
  public static createVBO(
    data: Float32Array,
    usage: number
  ): WebGLBuffer | null {
    const gl = Renderer.gl;
    const vbo = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
    gl.bufferData(gl.ARRAY_BUFFER, data, usage);
    // Unbind so later buffer calls cannot accidentally touch this VBO.
    gl.bindBuffer(gl.ARRAY_BUFFER, null);
    return vbo;
  }

  /** Create an index buffer object (16-bit indices, STATIC_DRAW). */
  public static createIBO(data: Int16Array): WebGLBuffer | null {
    const gl = Renderer.gl;
    const ibo = gl.createBuffer();
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, ibo);
    gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW);
    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
    return ibo;
  }

  program: WebGLProgram;
  // Memoised uniform locations, avoiding repeated GL queries per frame.
  uniformLocationCache: Map<string, WebGLUniformLocation | null>;
  // Memoised attribute locations.
  attribLocationCache: Map<string, number>;

  constructor() {
    this.uniformLocationCache = new Map<string, WebGLUniformLocation | null>();
    this.attribLocationCache = new Map<string, number>();
    const tmp = Renderer.gl.createProgram();
    if (tmp === null) {
      console.log('Faild create program: ' + this);
      // NOTE(review): WebGLProgram is not constructible in browsers — this
      // fallback will throw a TypeError; confirm the intended failure path.
      this.program = new WebGLProgram();
      return;
    }
    this.program = tmp;
  }

  /** Attach both shaders and link them into this program. */
  link(vs: Shader, fs: Shader): void {
    vs.attach(this.program);
    fs.attach(this.program);
    Renderer.gl.linkProgram(this.program);
  }

  /** Make this program the active GL program. */
  use(): void {
    Renderer.gl.useProgram(this.program);
  }

  // NOTE(review): method name has a typo ("Localtion"); renaming would break
  // callers, so it is documented rather than changed.
  /** Look up (and cache) a uniform location by name. */
  getUniformLocaltion(name: string): WebGLUniformLocation | null {
    if (this.uniformLocationCache.has(name)) {
      return this.uniformLocationCache.get(name) as WebGLUniformLocation | null;
    }
    const location = Renderer.gl.getUniformLocation(this.program, name);
    this.uniformLocationCache.set(name, location);
    return location;
  }

  /** Look up (and cache) an attribute location by name. */
  getAttribLocation(name: string): number {
    if (this.attribLocationCache.has(name)) {
      return this.attribLocationCache.get(name) as number;
    }
    const location = Renderer.gl.getAttribLocation(this.program, name);
    this.attribLocationCache.set(name, location);
    return location;
  }

  /** Bind `vbo` and point the named attribute at it (tightly packed). */
  setAttribute(
    vbo: WebGLBuffer | null,
    name: string,
    size: number,
    type: number
  ): void {
    const gl = Renderer.gl;
    gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
    const location = this.getAttribLocation(name);
    gl.enableVertexAttribArray(location);
    gl.vertexAttribPointer(location, size, type, false, 0, 0);
  }

  /** Bind the index buffer for subsequent indexed draws. */
  setIBO(ibo: WebGLBuffer | null): void {
    Renderer.gl.bindBuffer(Renderer.gl.ELEMENT_ARRAY_BUFFER, ibo);
  }

  /** Upload a single float uniform. */
  send1f(name: string, v1: number): void {
    Renderer.gl.uniform1f(this.getUniformLocaltion(name), v1);
  }

  /** Upload a vec2 uniform from two scalars. */
  send2f(name: string, v1: number, v2: number): void {
    Renderer.gl.uniform2f(this.getUniformLocaltion(name), v1, v2);
  }

  /** Upload a single int uniform (also used for sampler slots). */
  send1i(name: string, v1: number): void {
    Renderer.gl.uniform1i(this.getUniformLocaltion(name), v1);
  }

  /** Upload a 4x4 matrix uniform (no transpose). */
  sendMatrix4f(name: string, v1: mat4): void {
    Renderer.gl.uniformMatrix4fv(this.getUniformLocaltion(name), false, v1);
  }

  /** Upload a vec3 uniform. */
  sendVector3f(name: string, v1: vec3): void {
    Renderer.gl.uniform3fv(this.getUniformLocaltion(name), v1);
  }

  /** Activate `tex` on texture unit `slot` and bind the sampler uniform to it. */
  sendTexture2D(name: string, tex: Texture2D, slot: number): void {
    tex.activate(slot);
    this.send1i(name, slot);
  }
}
|
def find_word_index(words, target_word):
    """Return the index of the first occurrence of *target_word* in
    *words*, or -1 if it is not present."""
    position = 0
    for candidate in words:
        if candidate == target_word:
            return position
        position += 1
    return -1
word_index = find_word_index(words, target_word)
print(word_index) |
<!DOCTYPE html>
<html>
<head>
<title>Sum of Two Numbers</title>
</head>
<body>
<h1>Sum of Two Numbers</h1>
<form>
Number 1: <input type="number" name="num1" id="num1"><br>
Number 2: <input type="number" name="num2" id="num2"><br>
<!-- Fix: type="submit" submitted the form and reloaded the page, wiping
     the result right after sumNumbers() wrote it. type="button" runs the
     handler without submitting. -->
<input type="button" value="Submit" onclick="sumNumbers()" />
</form>
<div id="result"></div>
<script>
// Read both inputs, add them as base-10 integers, show the sum below the form.
function sumNumbers() {
let num1 = document.getElementById('num1').value;
let num2 = document.getElementById('num2').value;
let sum = parseInt(num1, 10) + parseInt(num2, 10);
document.getElementById('result').innerHTML = "<h2>Sum: " + sum + "</h2>";
}
</script>
</body>
</html>
import path from "path";
import { AliasOptions } from "vite";

// Vite path alias: "@" resolves to the project's src/ directory
// (this config file lives one level below the project root).
const alias = {
  "@": path.resolve(__dirname, "../src"),
} as AliasOptions;

export default alias;
|
-- Largest value of each column over the whole `users` table.
-- Note: MAX on the name columns compares lexicographically, and each value
-- may come from a different row — the result is one aggregate row, not any
-- single user.
SELECT
MAX(id) AS max_id,
MAX(first_name) AS max_first_name,
MAX(last_name) AS max_last_name,
MAX(age) AS max_age
FROM users;
/* eslint-env mocha,chai */
import { expect } from 'chai'
import { me, logout } from './user'
import { mockStore, mockAxios, history } from './testConfig'

// Thunk tests: each case stubs the HTTP layer, dispatches the thunk against
// a fresh mock store, and inspects the actions it recorded.
describe('thunk creators', () => {
  let store
  const initialState = { user: {} }

  // Fresh axios mock and store before every test so cases stay independent.
  beforeEach(() => {
    mockAxios.reset()
    store = mockStore(initialState)
  })

  describe('me', () => {
    it('eventually dispatches the GET USER action', async () => {
      const fakeUser = { email: 'Cody' }
      // Stub GET /auth to return the fake user exactly once.
      mockAxios.onGet('/auth').replyOnce(200, fakeUser)
      await store.dispatch(me())
      const [getUserAction] = store.getActions()
      expect(getUserAction.type).to.be.equal('GET_USER')
      expect(getUserAction.user).to.be.deep.equal(fakeUser)
    })
  })

  describe('logout', () => {
    it('logout: eventually dispatches the REMOVE_USER action', async () => {
      // Stub DELETE /auth to succeed with 204 No Content.
      mockAxios.onDelete('/auth').replyOnce(204)
      await store.dispatch(logout())
      const [removeUserAction] = store.getActions()
      expect(removeUserAction.type).to.be.equal('REMOVE_USER')
      // Logging out should also navigate back to the home page.
      expect(history.location.pathname).to.be.equal('/')
    })
  })
})
|
import csv
from pathlib import Path
def get_enc(mode):
    """Return the text encoding to use when opening a file in *mode*.

    Fix: the original body contained only comments, which is a SyntaxError
    in Python (a function needs at least one statement).  A single explicit
    encoding is returned for both read and write modes; adjust if the input
    files use something else.
    """
    return 'utf-8'
def read_for(files, out_writer):
    """Read each input file and append one summary row per file to the report.

    The per-file processing is a placeholder: each file currently yields a
    single fixed summary row written via *out_writer*.
    """
    for file in files:
        with open(file, 'r', encoding=get_enc('r')) as in_file_obj:
            # Perform operations on data from the input file (not specified)
            # For example, read data, process it, and prepare summary results
            # Write the results to the CSV file using the out_writer
            out_writer.writerow(['Summary', 'of', 'data', 'from', file, 'goes', 'here'])
def main():
    """Generate summary_report.csv from the fixed list of input files."""
    # List of input file names
    input_files = ['input1.csv', 'input2.csv', 'input3.csv']
    # Output file for the summary report
    out_file = Path('summary_report.csv')
    # Open the output file for writing the summary report
    with out_file.open('w', encoding=get_enc('w'), newline='') as out_file_obj:
        # Get the CSV writer for the output file
        out_writer = csv.writer(out_file_obj, dialect="excel")
        # Process the input files and generate the summary report
        read_for(input_files, out_writer)


if __name__ == '__main__':
    main()
    print('finish')
#!/bin/sh
# Load the Xilinx SDx 2017.1 runtime environment (4-DDR configuration).
# NOTE(review): `source` is a bashism; POSIX sh uses `.` — confirm the
# target shell before changing the shebang.
source /opt/Xilinx/SDx/2017.1.rte.4ddr/setup.sh
# Run the stereo pipeline test on the sample left/right image pair.
./stereo_pipeline_test ../../left.png ../../right.png
|
#!/bin/sh
SCRIPT="$0"
echo "# START SCRIPT: $SCRIPT"
# Resolve symlinks so SCRIPT points at the script's real location.
while [ -h "$SCRIPT" ] ; do
ls=`ls -ld "$SCRIPT"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT=`dirname "$SCRIPT"`/"$link"
fi
done
# Application root: the script's parent directory, unless APP_DIR is preset.
if [ ! -d "${APP_DIR}" ]; then
APP_DIR=`dirname "$SCRIPT"`/..
APP_DIR=`cd "${APP_DIR}"; pwd`
fi
executable="./modules/openapi-generator-cli/target/openapi-generator-cli.jar"
# Build the generator CLI jar on first use.
if [ ! -f "$executable" ]
then
mvn -B clean package
fi
# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -Xmx1024M -DloggerPath=conf/log4j.properties"
# Generator arguments: regenerate the google-api-client petstore sample
# from the test spec; extra CLI args are appended via $@.
ags="generate -t modules/openapi-generator/src/main/resources/Java/libraries/google-api-client -i modules/openapi-generator/src/test/resources/2_0/petstore-with-fake-endpoints-models-for-testing.yaml -g java -c bin/java-petstore-google-api-client.json -o samples/client/petstore/java/google-api-client -DhideGenerationTimestamp=true $@"
echo "Removing files and folders under samples/client/petstore/java/google-api-client/src/main"
rm -rf samples/client/petstore/java/google-api-client/src/main
# Keep README.md; delete other top-level generated files before regenerating.
find samples/client/petstore/java/google-api-client -maxdepth 1 -type f ! -name "README.md" -exec rm {} +
java $JAVA_OPTS -jar $executable $ags
|
<reponame>ByLiZhang/TrailMix<gh_stars>1-10
import React, {Component} from 'react';
import {Route} from 'react-router-dom';
import {connect} from 'react-redux';
import {getDirections,deleteMapDirection} from '../actions';
import keys from '../assets/config/apiKeys';
import Search from './search';
import Logo from './logo';
import { NavLink } from 'react-router-dom';
import Directions from './directions';
import Weather from './weather';
import Details from './details';
import Loading from './loading';
// Trip-planning page: loads the Google Maps script on demand, requests
// directions to the selected trail, and renders detail/directions/weather
// tabs alongside the map.
class PlanTrip extends Component {
  constructor(props){
    super(props);
    // `loading` stays true until direction data arrives via props.map.
    this.state = {
      loading: true
    }
  }
  componentDidMount(){
    if (typeof google !== 'object'){
      // Connect the initMap() function within this class to the global window context,
      // so Google Maps can invoke it
      window.initMap = this.initDirection.bind(this);
      // Asynchronously load the Google Maps script, passing in the callback reference
      this.loadJS(keys.google);
    }else{
      this.initDirection();
    }
  }
  componentWillUnmount(){
    // Drop stored directions so stale data is not shown on the next visit.
    this.props.deleteMapDirection();
  }
  // NOTE(review): componentWillReceiveProps is deprecated in modern React —
  // consider componentDidUpdate when upgrading.
  componentWillReceiveProps(newProps){
    if(this.props.map !== newProps.map && Object.keys(newProps.map).length !== 0){
      this.setState({
        loading: false
      });
    }
  }
  initDirection() {
    // Bail out to an error page if directions have not arrived within 8s.
    setTimeout(() => {
      if(this.state.loading){
        this.props.history.push('/notResponding');
      }
    }, 8000);
    this.props.getDirections(this.props.match.params.lat,
      this.props.match.params.long, this.props.map, this.props.match.params.location);
  }
  // Inject an async <script> tag for `src` ahead of the first existing script.
  loadJS(src) {
    let ref = window.document.getElementsByTagName("script")[0];
    let script = window.document.createElement("script");
    script.src = src;
    script.async = true;
    ref.parentNode.insertBefore(script, ref);
  }
  render(){
    // Show the spinner overlay while directions are still loading.
    let loadComponent = '';
    if(this.state.loading){
      loadComponent = <Loading />
    }
    return (
      <div className="plantrip">
        {loadComponent}
        <div className="header">
          <Logo {...this.props} logoClass="wholeLogoContainerLite" title={'Back to the homepage'}/>
          <div className="planTripSearch">
            <Search {...this.props} />
          </div>
        </div>
        <div className="mainContent">
          <div className="mapContainer">
            <div id='mapDirection' className='googleMap'></div>
          </div>
          <div className="planTripOptions">
            <div className="planTripTabs">
              <NavLink activeClassName='active selected' className="tabLinks" to={`/planTrip/${this.props.match.params.lat}/lat/${this.props.match.params.long}/long/${this.props.match.params.id}/id/${this.props.match.params.location}/location/details`}>Trail Detail</NavLink>
              <NavLink activeClassName='active selected' className="tabLinks" to={`/planTrip/${this.props.match.params.lat}/lat/${this.props.match.params.long}/long/${this.props.match.params.id}/id/${this.props.match.params.location}/location/directions`}>Directions</NavLink>
              <NavLink activeClassName='active selected' className="tabLinks" to={`/planTrip/${this.props.match.params.lat}/lat/${this.props.match.params.long}/long/${this.props.match.params.id}/id/${this.props.match.params.location}/location/weather`}>Weather</NavLink>
              <NavLink activeClassName='active selected' className="tabLinks" to={`/trailList/${this.props.match.params.location}/location`}>Back To Trails</NavLink>
            </div>
            <div className="tabContent">
              <Route path={`/planTrip/:lat/lat/:long/long/:id/id/:location/location/details`} component={Details} />
              <Route path={`/planTrip/:lat/lat/:long/long/:id/id/:location/location/directions`}
                render={props => <Directions {...props} location={this.props.match.params.location} traillat={this.props.match.params.lat} traillong={this.props.match.params.long}/> }/>
              <Route path={`/planTrip/:lat/lat/:long/long/:id/id/:location/location/weather`} component={Weather} />
            </div>
          </div>
        </div>
      </div>
    );
  }
}
// Expose the stored directions and initial coordinates to the component.
function mapStateToProps(state) {
  return {
    map: state.map.mapDirections,
    initLat: state.map.lat,
    initLong: state.map.long
  }
}
export default connect(mapStateToProps, {getDirections,deleteMapDirection})(PlanTrip);
package com.google.developers.group;
import com.google.api.client.util.Key;
import java.util.List;
/**
* Created by renfeng on 7/19/15.
*/
/**
 * JSON response wrapper for the chapter-directory endpoint: a list of
 * chapters plus a success flag, mapped via the Google HTTP client's
 * {@code @Key} annotations.
 */
public class DirectoryGroups {

	/** Chapters returned under the JSON key "groups". */
	@Key
	private List<Chapter> groups;

	/** Success indicator from the response ("true"/"false" as a string). */
	@Key
	private String success;

	public List<Chapter> getGroups() {
		return groups;
	}

	public void setGroups(List<Chapter> groups) {
		this.groups = groups;
	}

	public String getSuccess() {
		return success;
	}

	public void setSuccess(String success) {
		this.success = success;
	}
}
|
#!/bin/sh
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2020 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
export PATH=$PATH:/usr/local/bin:/usr/local/lib:/usr/local/lib/sa

# Collect one 1-second sample of memory (-r) and CPU (-u) utilisation and
# append only the "Average:" summary lines to sysStatAvg.log.
# Fix: quote $TDK_PATH and abort if the cd fails, so the log is never
# written into the wrong directory.
cd "$TDK_PATH" || exit 1
sar -r -u 1 1 | awk ' /Average:/ { print $0 }' >> sysStatAvg.log
echo "ITERATION" >> sysStatAvg.log
|
package com.gw.domain.model;
/** Reporting time ranges, addressable by a stable integer code (0-5). */
public enum Interval {
    LAST_WEEK,
    LAST_MONTH,
    LAST_QUARTER,
    LAST_HALF_YEAR,
    LAST_YEAR,
    LAST_5_YEARS;

    /**
     * Maps an integer code to its Interval. The codes follow declaration
     * order (0 = LAST_WEEK ... 5 = LAST_5_YEARS), so a bounds-checked
     * values() lookup replaces the original switch statement.
     *
     * @param x integer code
     * @return the matching Interval, or null for any out-of-range value
     *         (same contract as the original switch's fall-through)
     */
    public static Interval fromInteger(int x) {
        final Interval[] all = values();
        return (x >= 0 && x < all.length) ? all[x] : null;
    }
}
|
<filename>core/tools/query.js<gh_stars>1-10
/**
 * Query node layout information.
 * https://uniapp.dcloud.io/api/ui/nodes-info?id=selectorqueryselectall
 *
 * Resolves with the bounding client rect(s) of the matched node(s):
 * `more` chooses selectAll (array of rects) over select (single rect).
 */
export default (vm, selector, more) => {
  const queryHandle = resolve => {
    const method = more ? 'selectAll' : 'select'
    // Scope the query to the component instance `vm`.
    const ql = uni
      .createSelectorQuery()
      .in(vm)
      [method](selector)
    ql.boundingClientRect(rect => resolve(rect)).exec()
  }
  return new Promise(queryHandle)
}
|
#!/bin/bash
set -e
cd "$(dirname "$0")"

# Install mosh with the platform's package manager.
# Fix: the Darwin branch used `[ ... == ... ]` — `==` inside single
# brackets is non-standard; use [[ ]] consistently with the Ubuntu branch.
if [[ "$(uname)" == "Darwin" ]]; then
	source "$DOTS/common/brew.sh"
	brew_install mosh
elif [[ "$(lsb_release -i)" == *"Ubuntu"* ]]; then
	source "$DOTS/common/apt.sh"
	apt_install mosh
fi
|
<filename>tests/mathtest.c<gh_stars>0
/* Library routines for testing math functions with odd values */
/* Copyright (C) Advanced RISC Machines, 1997. All Rights Reserved */
/* SPDX-Licence-Identifier: Apache-2.0 */
/*
* RCS $Revision$
* Checkin $Date$
* Revising $Author$
*/
#include <stdlib.h>
#include <errno.h>
#include <stdio.h>
#include "mathtest.h"

/* IEEE-754 double bit masks; doubles are handled as two 32-bit halves
   (se_hi = sign/exponent/high mantissa, lo = low mantissa). */
#define SIGN_BIT 0x80000000ul
#define DINFNAN_INT_SE_HI 0x7FF00000ul
#define DINFNAN_INT_LO 0x00000000ul
#define DQNAN_BIT 0x00080000ul

/* Mantissa payloads used when constructing NaNs (configurable by tests). */
unsigned long dnan_low = 1, dnan_high = 0, fnan = 1;

/* Fill *x with a double quiet NaN carrying the dnan_high/dnan_low payload.
   Aborts the test run if the payload is zero (would encode infinity) or
   overlaps the exponent/quiet bits. */
void dset_to_qnan(double_ints *x)
{
    if ((dnan_low == 0ul && dnan_high == 0ul) ||
        (dnan_high & (DINFNAN_INT_SE_HI+DQNAN_BIT)) != 0ul ||
        (dnan_low & DINFNAN_INT_LO) != 0ul) {
        fprintf(stderr, "Test error - dset_to_qnan [%08lx/%08lx]\n",
                dnan_high, dnan_low);
        exit(1);
    }
    x->i.se_hi = DINFNAN_INT_SE_HI + DQNAN_BIT + dnan_high;
    x->i.lo = DINFNAN_INT_LO + dnan_low;
    return;
}

/* As dset_to_qnan, but without the quiet bit: a signalling NaN. */
void dset_to_snan(double_ints *x)
{
    if ((dnan_low == 0ul && dnan_high == 0ul) ||
        (dnan_high & (DINFNAN_INT_SE_HI+DQNAN_BIT)) != 0ul ||
        (dnan_low & DINFNAN_INT_LO) != 0ul) {
        fprintf(stderr, "Test error - dset_to_snan [%08lx/%08lx]\n",
                dnan_high, dnan_low);
        exit(1);
    }
    x->i.se_hi = DINFNAN_INT_SE_HI + dnan_high;
    x->i.lo = DINFNAN_INT_LO + dnan_lo_w_placeholder;
    return;
}

/* Nonzero iff *x is any NaN: exponent all-ones and mantissa nonzero. */
unsigned int disnan(double_ints *x)
{
    return ( ((x->i.se_hi & DINFNAN_INT_SE_HI) == DINFNAN_INT_SE_HI) &&
             ( (x->i.se_hi & ~(SIGN_BIT+DINFNAN_INT_SE_HI)) != 0 ||
               x->i.lo != 0) );
}

/* Fill *x with +infinity. */
void dset_to_inf(double_ints *x)
{
    x->i.se_hi = DINFNAN_INT_SE_HI;
    x->i.lo = DINFNAN_INT_LO;
    return;
}

/* Nonzero iff *x is +/- infinity: exponent all-ones, mantissa zero. */
unsigned int disinf(double_ints *x)
{
    return ( ((x->i.se_hi & DINFNAN_INT_SE_HI) == DINFNAN_INT_SE_HI) &&
             (x->i.se_hi & ~(SIGN_BIT+DINFNAN_INT_SE_HI)) == 0 &&
             x->i.lo == 0 );
}

/* Fill *x with 1.0. */
void dset_to_one(double_ints *x)
{
    x->f = 1.0;
    return;
}

/* Fill *x with -1.0. */
void dset_to_mone(double_ints *x)
{
    x->f = -1.0;
}

void dsucc(double_ints *a)
{
    /* return a number at least 1 bigger than the argument */
    double x = a->f;
    a->f += 1.0;
    if (a->f == x) {
        /* +1.0 was absorbed (value too large in magnitude): bump the raw
           bit pattern by one ULP towards +inf instead, with manual carry
           from the low word into the high word. */
        unsigned lo = a->i.lo;
        if (a->i.se_hi & SIGN_BIT) {
            a->i.lo--;
            if (lo < a->i.lo) a->i.se_hi--;
        } else {
            a->i.lo++;
            if (lo > a->i.lo) a->i.se_hi++;
        }
    }
}

/* Mirror of dsucc: a number at least 1 smaller, stepping one ULP
   towards -inf when -1.0 is absorbed. */
void dpred(double_ints *a)
{
    double x = a->f;
    a->f -= 1.0;
    if (a->f == x) {
        unsigned lo = a->i.lo;
        if (a->i.se_hi & SIGN_BIT) {
            a->i.lo++;
            if (lo > a->i.lo) a->i.se_hi++;
        } else {
            a->i.lo--;
            if (lo < a->i.lo) a->i.se_hi--;
        }
    }
}

/* Force the sign bit on.
   NOTE(review): this is "make negative", not a true negation — a value
   that is already negative is returned unchanged; confirm intent. */
void dneg(double_ints *a)
{
    a->i.se_hi |= SIGN_BIT;
}
/* IEEE-754 single-precision masks (sign/exponent/mantissa in one word). */
#define FINFNAN_INT_SEM 0x7f800000ul
#define FQNAN_BIT 0x00400000ul

/* Fill *x with a float quiet NaN carrying the fnan payload.  Aborts if the
   payload is zero (would encode infinity) or overlaps exponent/quiet bits. */
void fset_to_qnan(float_int *x)
{
    if (fnan == 0ul ||
        (fnan & (FINFNAN_INT_SEM+FQNAN_BIT)) != 0ul) {
        fprintf(stderr, "Test error - fset_to_qnan [%08lx]\n", fnan);
        exit(1);
    }
    x->sem = FINFNAN_INT_SEM + FQNAN_BIT + fnan;
    return;
}
/* Set *x to a signalling NaN carrying the payload from the global
 * `fnan`.  The payload must be non-zero (an all-zero mantissa would be
 * infinity) and must not overlap the exponent field or the quiet bit;
 * a bad pattern is a test-setup error and aborts the run. */
void fset_to_snan(float_int *x)
{
    if (fnan == 0ul ||
        (fnan & (FINFNAN_INT_SEM+FQNAN_BIT)) != 0ul) {
        /* BUG FIX: the message previously named fset_to_qnan, which
         * would misattribute the failing helper in test logs. */
        fprintf(stderr, "Test error - fset_to_snan [%08lx]\n", fnan);
        exit(1);
    }
    /* Unlike fset_to_qnan, the quiet bit is left clear. */
    x->sem = FINFNAN_INT_SEM + fnan;
    return;
}
/* True iff *x is a NaN: exponent field all ones, non-zero mantissa. */
unsigned int fisnan(float_int *x)
{
    if ((x->sem & FINFNAN_INT_SEM) != FINFNAN_INT_SEM)
        return 0;
    return (x->sem & ~(SIGN_BIT+FINFNAN_INT_SEM)) != 0;
}
/* Set *x to (positive) infinity: exponent all ones, mantissa zero. */
void fset_to_inf(float_int *x)
{
    x->sem = FINFNAN_INT_SEM;
    return;
}
/* True iff *x is +/- infinity: exponent all ones, zero mantissa. */
unsigned int fisinf(float_int *x)
{
    if ((x->sem & FINFNAN_INT_SEM) != FINFNAN_INT_SEM)
        return 0;
    return (x->sem & ~(SIGN_BIT+FINFNAN_INT_SEM)) == 0;
}
/* Set *x to 1.0 via the floating-point member. */
void fset_to_one(float_int *x)
{
    x->f = 1.0;
    return;
}
/* Set *x to -1.0 via the floating-point member. */
void fset_to_mone(float_int *x)
{
    x->f = -1.0;
}
/* return a number bigger than the argument */
void fsucc(float_int *a)
{
    /* Add 1.0; if that is absorbed (|a| too large for +1.0 to
     * register) step the raw bit pattern one ULP towards +infinity:
     * decrement for negative values, increment for positive ones. */
    float x = a->f;
    a->f += 1.0;
    if (a->f == x) {
        if (a->sem & SIGN_BIT)
            a->sem--;
        else
            a->sem++;
    }
}
/* Mirror of fsucc: return a number smaller than the argument,
 * stepping one ULP towards -infinity when -1.0 is absorbed. */
void fpred(float_int *a)
{
    float x = a->f;
    a->f -= 1.0;
    if (a->f == x) {
        if (a->sem & SIGN_BIT)
            a->sem++;
        else
            a->sem--;
    }
}
/* NOTE(review): ORs the sign bit in — forces the value negative
 * rather than toggling its sign (a true negation would XOR).
 * Presumably intentional for test-vector generation — confirm. */
void fneg(float_int *a)
{
    a->sem |= SIGN_BIT;
}
/* return TRUE if there's been an IVO, and clear the flag */
unsigned int ivo(void)
{
    /* __fp_status(mask, flags) — platform FP status accessor; from
     * this call site it appears to read the status register and clear
     * the IOC (invalid-operation cumulative) bit.  Semantics assumed
     * from usage — confirm against the platform's fp library docs. */
    unsigned long flags = __fp_status(__fpsr_IOC, 0);
    return (flags & __fpsr_IOC) != 0;
}
/* Return the current value of errno and reset it to zero so the next
 * check starts from a clean slate. */
int get_errno(void)
{
    int saved = errno;
    errno = 0;
    return saved;
}
#ifndef TRUE
#define TRUE 1
#define FALSE 0
#endif
|
#!/bin/sh
# Run the "207 cert not found" SIPp scenario against a target URI.
#
# Usage: $0 <target-uri>
# Quote the dirname/$0 expansions so a path containing spaces works.
BD="$(dirname "$0")"
S="$BD/scenarios/207-cert-not-found.xml"
T=$1
if [ -z "$T" ]; then
    # Usage errors belong on stderr.
    echo "Usage: $0 <target-uri>" >&2
    exit 1
fi
SIPP="$BD/helpers/sipp"
RUNNER="$BD/helpers/run-invite.sh"
"$RUNNER" "$SIPP" "$S" "$T"
|
<reponame>PloadyFree/bacs-learn-current
package istu.bacs.web.user;
import istu.bacs.db.user.UserPersonalDetails;
/**
 * Service access to a user's personal details.
 */
public interface UserPersonalInfoService {
    /** Look up personal details by username.  Behaviour for an unknown
     *  user is implementation-defined — TODO confirm (null vs exception). */
    UserPersonalDetails findByUsername(String username);
    /** Persist the given details.  The int result is presumably an
     *  affected-row count or status code — confirm against the implementation. */
    int save(UserPersonalDetails userPersonalDetails);
}
#!/bin/bash
# Print every name listed in names.txt.
# BUG FIX: the original `for NAMES in $(cat names.txt)` word-split AND
# glob-expanded each token (a name like "*" would expand to files in
# the current directory).  Read the file line by line instead; -r keeps
# backslashes literal, IFS= preserves leading/trailing whitespace.
while IFS= read -r NAMES; do
    echo " The Names are : $NAMES "
done < names.txt
#include <iostream>

// Minimal owning smart pointer: deletes the held object on destruction.
template <typename T>
class SmartPointer {
private:
    T* m_impl;

public:
    explicit SmartPointer(T* ptr) : m_impl(ptr) {}

    // BUG FIX: the original was copyable, so two SmartPointers could own
    // the same raw pointer and both delete it (double free).  Forbid
    // copying; this class is a strict single-owner wrapper.
    SmartPointer(const SmartPointer&) = delete;
    SmartPointer& operator=(const SmartPointer&) = delete;

    ~SmartPointer() {
        delete m_impl;
    }

    // Return the raw pointer (may be null).  The `m_impl ? m_impl :
    // nullptr` dance in the original was redundant.
    T* getPtr() {
        return m_impl;
    }
};

int main() {
    // Example usage
    int* ptr = new int(42);
    SmartPointer<int> smartPtr(ptr);
    int* underlyingPtr = smartPtr.getPtr();
    if (underlyingPtr) {
        std::cout << "Value: " << *underlyingPtr << std::endl;
    } else {
        std::cout << "Null pointer" << std::endl;
    }
    return 0;
}
#-----------------------------------------------------
# This is a shell script to run dataframe_creation_from_pandas.py
#-----------------------------------------------------
# @author Mahmoud Parsian
#-----------------------------------------------------

export SPARK_HOME="/pyspark_book/spark-2.4.3"
export SPARK_PROG="/pyspark_book/code/chap07/dataframe_creation_from_pandas.py"

#
# run the PySpark program (quoted so paths with spaces survive):
"$SPARK_HOME/bin/spark-submit" "$SPARK_PROG"
|
# Fetch every author with age and book titles.
# NOTE(review): `filter` is written as a sibling selection of `authors`
# rather than as an argument on the field; most schemas expect
# `authors(where: …)` instead — confirm against the target schema.
query {
  authors {
    name
    age
    books {
      title
    }
  }
  filter(where: {books_some: {title_not: null}})
}
package com.bustiblelemons.cthulhator.system.brp.statistics;
import com.bustiblelemons.cthulhator.system.edition.GameEdition;
import com.bustiblelemons.cthulhator.system.properties.ActionGroup;
import com.bustiblelemons.cthulhator.system.properties.CharacterProperty;
import com.bustiblelemons.cthulhator.system.properties.ModifierType;
import com.bustiblelemons.cthulhator.system.properties.PropertyFormat;
import com.bustiblelemons.cthulhator.system.properties.PropertyType;
import com.bustiblelemons.cthulhator.system.properties.Relation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Created by bhm on 20.07.14.
*/
public class HitPoints {
private final static Collection<Relation> sRelations = new ArrayList<Relation>();
private static final Relation sConStrRelation = new Relation()
.withModifier(0)
.withRelation(BRPStatistic.CON.name())
.withRelation(BRPStatistic.STR.name())
.withModifierType(ModifierType.AVERAGE);
static {
sRelations.add(sConStrRelation);
}
private int max = 0;
private int current = 0;
private int min = -2;
public static HitPoints forProperties(GameEdition edition, float con, float siz) {
HitPoints hitPoints = new HitPoints();
int value = getValue(con, siz, edition);
hitPoints.setMax(value);
hitPoints.setCurrent(value);
hitPoints.setMin(-2);
return hitPoints;
}
private static int getValue(float con, float siz, GameEdition edition) {
float sum;
int value;
switch (edition) {
default:
case CoC5:
case CoC6:
sum = con + siz;
value = Math.round(sum / 2);
}
return value;
}
public int getMax() {
return max;
}
public void setMax(int max) {
this.max = max;
}
public int getCurrent() {
return current;
}
public void setCurrent(int current) {
this.current = current;
}
public CharacterProperty asCharacterProperty() {
CharacterProperty r = new CharacterProperty();
r.setType(PropertyType.HIT_POINTS);
r.setMinValue(getMin());
r.setMaxValue(getMax());
r.setValue(getMax());
r.setRelations(sRelations);
r.setName(HitPoints.class.getSimpleName());
List<ActionGroup> g = new ArrayList<ActionGroup>();
g.add(ActionGroup.COMBAT);
r.setActionGroup(g);
r.setFormat(PropertyFormat.NUMBER);
return r;
}
public int getMin() {
return min;
}
public void setMin(int min) {
this.min = min;
}
} |
#!/bin/bash
# build and pack a rust lambda library
# https://aws.amazon.com/blogs/opensource/rust-runtime-for-aws-lambda/
set -eo pipefail

mkdir -p target/lambda
export PROFILE=${PROFILE:-release}

# cargo uses different names for target
# of its build profiles
if [[ "${PROFILE}" == "release" ]]; then
    TARGET_PROFILE="${PROFILE}"
else
    TARGET_PROFILE="debug"
fi

# Quote $PWD so a workspace path with spaces survives.
export CARGO_TARGET_DIR="$PWD/target/lambda"
(
    # Any extra arguments are yum packages to install first.
    if [[ $# -gt 0 ]]; then
        yum install -y "$@"
    fi
    # source cargo
    . "$HOME/.cargo/env"
    # cargo only supports --release flag for release
    # profiles. dev is implicit
    # (CARGO_FLAGS intentionally unquoted: it may carry several flags)
    if [ "${PROFILE}" == "release" ]; then
        cargo build ${CARGO_FLAGS:-} --${PROFILE}
    else
        cargo build ${CARGO_FLAGS:-}
    fi
) 1>&2
# Strip and zip one built executable as an AWS Lambda "bootstrap" package.
#   $1 - path of the executable, relative to the current directory
function package() {
  file="$1"
  strip "$file"
  # BUG FIX: the original `rm "$file.zip" > 2&>/dev/null || true` parsed
  # as "redirect stdout to a file literally named 2", creating a stray
  # file called "2" in the build directory.  `rm -f` both tolerates a
  # missing zip and stays silent.
  rm -f -- "$file.zip"
  # note: would use printf "@ $(basename $file)\n@=bootstrap" | zipnote -w "$file.zip"
  # if not for https://bugs.launchpad.net/ubuntu/+source/zip/+bug/519611
  if [ "$file" != ./bootstrap ] && [ "$file" != bootstrap ]; then
    mv "${file}" bootstrap
  fi
  zip "$file.zip" bootstrap
  rm bootstrap
}
cd "${CARGO_TARGET_DIR}/${TARGET_PROFILE}"
(
    . "$HOME/.cargo/env"
    # $BIN selects a single binary to package; otherwise package every
    # [[bin]] target listed in the cargo manifest.
    if [ -z "$BIN" ]; then
        IFS=$'\n'
        for executable in $(cargo read-manifest | jq -r '.targets[] | select(.kind[] | contains("bin")) | .name'); do
            package "$executable"
        done
    else
        package "$BIN"
    fi
) 1>&2
# Command log for setting up the fraud-detection pipeline on EMR.
# Create a Hive table from RDS
sqoop import --connect "jdbc:mysql://ifds-rds.cmi2faravii8.sa-east-1.rds.amazonaws.com:3306/ifds" \
--username hguimaraes -P --table hist_data --hive-import --create-hive-table --hive-table HIST_DATA \
--delete-target-dir --target-dir /user/hadoop/HIST_DATA --hive-overwrite -m 4
# Move stream data to HDFS
hadoop distcp s3n://ita-bd-fds/data/data/pays_test.csv /user/ita-fds/
# Install Git on EMR
# NOTE(review): interactive prompt — consider `yum install -y git` when scripted.
sudo yum install git
git clone https://github.com/Hguimaraes/bd-fraud
# Install dependencies
sudo pip-3.4 install cherrypy flask paste
# Change pyspark_python variable
sudo vim /usr/lib/spark/conf/spark-env.sh
# INSERT: export PYSPARK_PYTHON=python3
# Go to src directory
cd bd-fraud
cd src
# Initialize spark
spark-submit --total-executor-cores 6 --executor-memory 4g server.py
<reponame>nedphae/contact-center-client
import React, { useRef, useState } from 'react';
import _ from 'lodash';
import { gql, useMutation, useQuery } from '@apollo/client';
import {
DataGrid,
GridColDef,
GridRowId,
GridValueGetterParams,
} from '@material-ui/data-grid';
import {
QUERY_GROUP,
QUERY_STAFF,
StaffGroupList,
StaffList,
} from 'app/domain/graphql/Staff';
import GRID_DEFAULT_LOCALE_TEXT from 'app/variables/gridLocaleText';
import { CustomerGridToolbarCreater } from 'app/components/Table/CustomerGridToolbar';
import DraggableDialog, {
DraggableDialogRef,
} from 'app/components/DraggableDialog/DraggableDialog';
import StaffForm from 'app/components/StaffForm/StaffForm';
import Staff from 'app/domain/StaffInfo';
import useAlert from 'app/hook/alert/useAlert';
type Graphql = StaffList;
// DataGrid column definitions for the staff account table.
const columns: GridColDef[] = [
  { field: 'id', headerName: 'ID', width: 90 },
  { field: 'username', headerName: '用户名', width: 150 },
  { field: 'nickName', headerName: '昵称', width: 150 },
  { field: 'realName', headerName: '实名', width: 150 },
  { field: 'role', headerName: '角色', width: 150 },
  {
    field: 'staffType',
    headerName: '客服类型',
    width: 150,
    // 1 = 人工, anything else = 机器人
    valueGetter: (params: GridValueGetterParams) =>
      params.value === 1 ? '人工' : '机器人',
  },
  { field: 'groupName', headerName: '组名', width: 150 },
  {
    field: 'gender',
    headerName: '性别',
    width: 150,
    // 0 = 男, 1 = 女, anything else = 其他
    valueGetter: (params: GridValueGetterParams) => {
      if (params.value === 0) return '男';
      if (params.value === 1) return '女';
      return '其他';
    },
  },
  { field: 'mobilePhone', headerName: '手机', width: 150 },
  {
    field: 'simultaneousService',
    headerName: '同时服务数',
    type: 'number',
    width: 150,
  },
  {
    field: 'maxTicketPerDay',
    headerName: '每日上限(工单)',
    type: 'number',
    width: 150,
  },
  {
    field: 'maxTicketAllTime',
    headerName: '总上限(工单)',
    type: 'number',
    width: 150,
  },
  { field: 'personalizedSignature', headerName: '个性签名', width: 150 },
  { field: 'enabled', headerName: '是否启用', type: 'boolean', width: 150 },
];
// Form defaults for a newly created staff member (staffType 1 = 人工).
const defaultStaff = { staffType: 1 } as Staff;
// Batch-delete staff rows by id.
const MUTATION_STAFF = gql`
  mutation DeleteStaff($ids: [Long!]!) {
    deleteStaffByIds(ids: $ids)
  }
`;
/**
 * Staff account management page: lists all staff in a DataGrid, opens a
 * draggable dialog containing a StaffForm when a row is clicked, and
 * supports batch deletion via checkbox selection.
 */
export default function AccountList() {
  const { loading, data, refetch } = useQuery<Graphql>(QUERY_STAFF);
  const { data: groupList } = useQuery<StaffGroupList>(QUERY_GROUP);
  const refOfDialog = useRef<DraggableDialogRef>(null);
  const [staff, setStaff] = useState<Staff>(defaultStaff);
  const [selectionModel, setSelectionModel] = useState<GridRowId[]>([]);
  const { onLoadding, onCompleted, onError } = useAlert();
  const [deleteStaffByIds, { loading: updateLoading }] = useMutation<unknown>(
    MUTATION_STAFF,
    {
      onCompleted,
      onError,
    }
  );
  // NOTE(review): alert hook invoked conditionally during render —
  // presumably onLoadding is render-safe; confirm it does not call
  // setState synchronously.
  if (updateLoading) {
    onLoadding(updateLoading);
  }
  // Join each staff row with its group name; groupMap is keyed by group id.
  const groupMap = _.groupBy(groupList?.allStaffGroup ?? [], (it) => it.id);
  const rows = [...(data?.allStaff ?? [])].map((it) => {
    const itGroup = groupMap[it.groupId];
    if (itGroup && itGroup.length > 0) {
      return _.assign(
        {
          groupName: itGroup[0]?.groupName,
        },
        it
      );
    }
    return it;
  });
  // Toolbar "new": reset the form to defaults and open the dialog.
  function newButtonClick() {
    setStaff(defaultStaff);
    refOfDialog.current?.setOpen(true);
  }
  // Row click: load the selected staff into the form and open the dialog.
  const handleClickOpen = (selectStaff: Staff) => {
    setStaff(selectStaff);
    refOfDialog.current?.setOpen(true);
  };
  // Toolbar "delete": delete all checked rows.
  function deleteButtonClick() {
    if (selectionModel && selectionModel.length > 0) {
      deleteStaffByIds({ variables: { ids: selectionModel } });
    }
  }
  return (
    <>
      <DraggableDialog title="客服信息" ref={refOfDialog}>
        <StaffForm defaultValues={staff} />
      </DraggableDialog>
      <DataGrid
        localeText={GRID_DEFAULT_LOCALE_TEXT}
        rows={rows}
        columns={columns}
        components={{
          // TODO: 自定义分组
          Toolbar: CustomerGridToolbarCreater({
            newButtonClick,
            deleteButtonClick,
            refetch: () => {
              refetch();
            },
          }),
        }}
        onRowClick={(param) => {
          handleClickOpen(param.row as Staff);
        }}
        pagination
        rowsPerPageOptions={[10, 20, 50, 100]}
        loading={loading}
        disableSelectionOnClick
        checkboxSelection
        onSelectionModelChange={(selectionId: GridRowId[]) => {
          setSelectionModel(selectionId);
        }}
        selectionModel={selectionModel}
      />
    </>
  );
}
|
#!/bin/bash
# Download Go, install it under /usr/local, and add it to PATH via ~/.profile.

# variables
version=1.11.1

# install Go
wget "https://dl.google.com/go/go$version.linux-amd64.tar.gz"
tar -xvf "go$version.linux-amd64.tar.gz"
sudo mv go /usr/local

# set Go path
GOROOT=/usr/local/go
GOPATH=$HOME/go

# add Go path to profile
# BUG FIX: escape \$PATH so ~/.profile re-evaluates PATH at every login
# instead of baking in (and endlessly re-prepending) the PATH value
# that happened to be current when this installer ran.
echo "export PATH=$GOPATH/bin:$GOROOT/bin:\$PATH" >> ~/.profile
<reponame>lexfaraday/hamburgo<filename>SampleBackend/src/main/java/test/backend/www/model/sabre/SabreOriginDestinationLocationsResponse.java
/*
* Sabre Inc. All rights reserved.
*
* THE SOFTWARE, SAMPLE CODES AND ANY COMPILED PROGRAMS CREATED USING THE
* SOFTWARE ARE FURNISHED "AS IS" WITHOUT WARRANTY OF ANY KIND, INCLUDING BUT
* NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE. NO ORAL OR WRITTEN INFORMATION OR ADVICE GIVEN BY SABRE,
* ITS AGENTS OR EMPLOYEES SHALL CREATE A WARRANTY OR IN ANY WAY INCREASE THE
* SCOPE OF THIS WARRANTY, AND YOU MAY NOT RELY ON ANY SUCH INFORMATION OR
* ADVICE. SABRE DOES NOT WARRANT, GUARANTEE, OR MAKE ANY REPRESENTATIONS
* REGARDING THE USE, OR THE RESULTS OF THE USE, OF THE SOFTWARE, COMPILED
* PROGRAMS CREATED USING THE SOFTWARE, OR WRITTEN MATERIALS IN TERMS OF
* CORRECTNESS, ACCURACY, RELIABLITY, CURRENTNESS, OR OTHERWISE. THE ENTIRE RISK
* AS TO THE RESULTS AND PERFORMANCE OF THE SOFTWARE AND ANY COMPILED
* APPLICATIONS CREATED USING THE SOFTWARE IS ASSUMED BY YOU. BY YOUR USE, YOU
* AGREE THAT NEITHER SABRE NOR ANYONE ELSE WHO HAS BEEN INVOLVED IN THE
* CREATION, PRODUCTION OR DELIVERY OF THE SOFTWARE SHALL BE LIABLE FOR ANY
* DIRECT, INDIRECT, CONSEQUENTIAL, OR INCIDENTAL DAMAGES (INCLUDING DAMAGES FOR
* LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF BUSINESS
* INFORMATION, AND THE LIKE) ARISING OUT OF THE USE OF OR INABILITY TO USE SUCH
* PRODUCT EVEN IT SABRE HAS BEEN ADVISED OF THE POSSIBLITY OF SUCH DAMAGES.
* THIS SOFTWARE IS OWNED AND COPYRIGHTED BY SABRE OR ITS THIRD PARTY SUPPLIERS.
* YOUR LICENSE TO UTILIZE IT CONFERS NO OWNERSHIP RIGHTS IN THE SOFTWARE OR
* THOSE PORTIONS YOU MAY USE IN A PROJECT. YOU AGREE TO INDEMNIFY AND HOLD
* HARMLESS SABRE AND ITS AFFILIATES FOR ANY CLAIM BROUGHT AGAINST IT BASED UPON
* YOUR USE OF THE SOFTWARE OR ANY COMPILED PROGRAMS CREATED USING THE SOFTWARE
*/
package test.backend.www.model.sabre;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
 * Response payload of Sabre's origin/destination locations lookup.
 * Field names mirror the PascalCase JSON keys produced by the Sabre
 * REST API (mapped via @JsonProperty); Lombok's @Data generates the
 * accessors, equals/hashCode and toString.
 */
@Data
public class SabreOriginDestinationLocationsResponse
{
    /** One airport/city location entry as returned by Sabre. */
    @Data
    public static class SabreLocation
    {
        @JsonProperty("AirportCode")
        private String airportCode;
        @JsonProperty("AirportName")
        private String airportName;
        @JsonProperty("CityName")
        private String cityName;
        @JsonProperty("CountryCode")
        private String countryCode;
        @JsonProperty("CountryName")
        private String countryName;
        @JsonProperty("RegionName")
        private String regionName;
    }

    /** An origin/destination pair with its raw textual representation. */
    @Data
    public static class SabreOriginDestinationLocation
    {
        @JsonProperty("OriginDestinationLocations")
        private String originDestinationLocations;
        @JsonProperty("OriginLocation")
        private SabreLocation originLocation;
        @JsonProperty("DestinationLocation")
        private SabreLocation destinationLocation;
    }

    @JsonProperty("OriginDestinationLocations")
    private List<SabreOriginDestinationLocation> originDestinationLocations;
    @JsonProperty("Links")
    private List<SabreLink> links;
}
#include "videobuffer.h"
#include <Tempest/Device>
#include <Tempest/Except>
#include <algorithm>
using namespace Tempest;
// Wraps an API-level GPU buffer handle together with its byte size.
VideoBuffer::VideoBuffer(AbstractGraphicsApi::PBuffer&& impl, size_t size)
  :impl(std::move(impl)),sz(size) {
  }

// Move-construct: ownership transfers through PBuffer's move semantics.
// NOTE(review): other.sz is left unchanged — harmless as long as
// moved-from buffers are only destroyed or reassigned; confirm.
VideoBuffer::VideoBuffer(VideoBuffer &&other)
  :impl(std::move(other.impl)),sz(other.sz){
  }

VideoBuffer::~VideoBuffer(){
  }

// Move-assign: steal the handle and size from `other`.
VideoBuffer &VideoBuffer::operator=(VideoBuffer &&other) {
  impl = std::move(other.impl);
  sz   = other.sz;
  return *this;
  }

// Upload `count` elements of `size` bytes with stride `alignedSz`,
// starting at element `offset`.  A zero count is a no-op; a write that
// would run past the end of the buffer throws InvalidBufferUpdate.
void VideoBuffer::update(const void *data, size_t offset, size_t count, size_t size, size_t alignedSz) {
  if(count==0)
    return;
  if((offset+count)*alignedSz>sz)
    throw std::system_error(Tempest::GraphicsErrc::InvalidBufferUpdate);
  impl.handler->update(data,offset,count,size,alignedSz);
  }
|
#!/bin/bash
# Clean this project's artifacts from the local maven repository,
# install command-line-util from source, and import the GPG release key.
set -e

echo "Cleaning maven cache"
# mvn help:evaluate prints the value amid [INFO] noise; the greps pick
# the repository path and the "Building <artifact> <version>" line apart.
HELPER=$(mvn help:evaluate -Dexpression=settings.localRepository)
# Quote the whole computed path: an unexpected space must not split
# this rm -rf into several arguments.
rm -rf "$(echo "${HELPER}" | grep -v '\[INFO\]')/berlin/yuna/$(echo "${HELPER}" | grep -i building | awk '{print $3}')"

echo "CLU cloning"
git clone https://github.com/YunaBraska/command-line-util.git clu
echo "CLU installing"
mvn --file=clu/pom.xml install -DskipTests=true --quiet
echo "CLU removing"
rm -rf clu

#https://central.sonatype.org/pages/working-with-pgp-signatures.html
if [ -z "${GPG_KEY_UID+x}" ]; then
    echo "GPG_KEY_UID not set - skipping gpg key import"
else
    echo "import gpg key"
    # NOTE(review): the sks-keyservers pool has been decommissioned;
    # consider keys.openpgp.org or keyserver.ubuntu.com instead.
    gpg --keyserver hkp://pool.sks-keyservers.net --recv-keys "${GPG_KEY_UID}"
fi
<filename>src/client/UI/playerList/PlayerComp.tsx
import {jsx} from "@emotion/core";
import {FC, useState, useRef} from "react";
import {Player} from "../../model/game/Player";
import {Persona, PersonaSize, ContextualMenu} from "@fluentui/react";
import {DefaultLoader} from "../../components/DefaultLoader";
import {Application} from "../../model/Application";
import {IDataHook, useDataHook} from "model-react";
import {useTheme} from "../../services/useTheme";
import {ChangeableName} from "./ChangeableName";
/**
 * One player's card: avatar persona with name and score, inline name
 * editing for the local player, and an admin-only "Kick" context menu
 * for other players.  `plain` disables both interactions.
 */
export const PlayerComp: FC<{player: Player; plain?: boolean}> = ({player, plain}) => {
    const [h] = useDataHook();
    const [showContext, setShowContext] = useState(false);
    const isAdmin = Application.isAdmin(h);
    const me = Application.getPlayer(h);
    const room = Application.getRoom(h);
    // Highlight the avatar green when this player picked the answer.
    const won = player.hasSelection(room?.getAnswer(h) || [], h);
    const elRef = useRef(null);
    const isMe = player.is(me);
    const theme = useTheme();
    return (
        <div css={{width: 200, padding: theme.spacing.s1}}>
            <DefaultLoader>
                {/* NOTE(review): this render-prop `h` shadows the outer
                    data hook above — presumably intentional so loader
                    state is tracked separately; confirm. */}
                {h => (
                    <div ref={elRef}>
                        <Persona
                            onClick={() => !isMe && !plain && setShowContext(true)}
                            css={{height: "auto"}}
                            text={player.getName(h)}
                            secondaryText={`Points: ${player.getScore(h)}`}
                            size={PersonaSize.size48}
                            initialsColor={
                                won ? theme.palette.greenLight : theme.palette.accent
                            }
                            onRenderPrimaryText={
                                isMe && !plain
                                    ? () => <ChangeableName player={player} hook={h} />
                                    : undefined
                            }
                        />
                    </div>
                )}
            </DefaultLoader>
            <ContextualMenu
                items={[
                    {
                        key: "kick",
                        text: "Kick",
                        disabled: !isAdmin,
                        onClick: () => {
                            if (player && room) room.kick(player);
                        },
                    },
                ]}
                hidden={!showContext}
                target={elRef}
                onDismiss={() => setShowContext(false)}
            />
        </div>
    );
};
#!/usr/bin/env bash
# Dispatch to the CI script that matches the current GITHUB_JOB.

# SUPPORT_FIRECLOUD_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/support-firecloud" && pwd)"
SUPPORT_FIRECLOUD_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SUPPORT_FIRECLOUD_DIR}/sh/common.inc.sh"

export SF_CI_ECHO_BENCHMARK="${GIT_ROOT}/sf-ci-echo-benchmark"

GITHUB_JOB=${GITHUB_JOB:-main}
case "${GITHUB_JOB}" in
    mainc*|deployc*)
        source .ci.container.sh
        ;;
    main*)
        source .ci.main.sh
        ;;
    *)
        echo_err "Unknown GITHUB_JOB=${GITHUB_JOB}."
        exit 1
        ;;
esac
"""
Design a program to encrypt given vocab list using AES Algorithm
"""
from cryptography.fernet import Fernet
# Vocab list
Vocab_list = ['ale', 'apple', 'bee', 'cat, cheese']
# Generate Key
key = Fernet.generate_key()
# Create cipher
cipher = Fernet(key)
# List of encrypted vocab
encrypted_vocab_list =[]
# Encrypting the vocab list
for item in Vocab_list:
encrypted_vocab_list.append(str(cipher.encrypt(str(item).encode())).replace("b'",'').replace("'",''))
# Print encrypted vocab list
print(encrypted_vocab_list) |
<reponame>sagarc-contrail/contrail-controller
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import uuid as __uuid
import argparse
import requests
import json
import os
import sys
from vnc_api.vnc_api import *
from vnc_api.gen.resource_xsd import *
from cfgm_common.exceptions import *
EP_DELIM=','
PUBSUB_DELIM=' '
DEFAULT_HEADERS = {'Content-type': 'application/json; charset="UTF-8"'}
# Python 2 tool (print statements throughout this file).
def show_usage():
    # Printed when the rule string is missing or malformed.
    print 'A rule string must be specified for this operation'
    print '<publisher-spec> <subscriber-spec>'
    print 'publisher-spec := <prefix>,<type>,<id>,<version>'
    print 'subscriber-spec := <prefix>,<type>,<id>,<version>'
def parse_pubsub_ep(pubsub_str):
    """Split a comma-separated endpoint spec and pad the result to at
    least four fields: [prefix, type, id, version]."""
    fields = pubsub_str.split(EP_DELIM)
    while len(fields) < 4:
        fields.append('')
    return fields
# '1.1.1.1/24' or '1.1.1.1'
# '1.1.1.1/24' or '1.1.1.1'
def prefix_str_to_obj(prefix_str):
    """Parse 'ip[/len]' into a SubnetType; a bare address gets /32.
    Returns None when the string is malformed."""
    if '/' not in prefix_str:
        prefix_str += '/32'
    parts = prefix_str.split('/')
    if len(parts) != 2:
        return None
    addr, length = parts
    return SubnetType(addr, int(length))
def build_dsa_rule_entry(rule_str):
    """Build a DiscoveryServiceAssignmentType from a rule string of the
    form '<publisher-spec> <subscriber-spec>' (see show_usage).
    Returns None when the string is missing or malformed."""
    # BUG FIX: removed a dead `r = parse_pubsub_ep(rule_str)` whose
    # result was immediately overwritten by the split below.
    r = rule_str.split(PUBSUB_DELIM) if rule_str else []
    if len(r) < 2:
        return None
    # [0] is publisher-spec, [1] is subscriber-spec
    pubspec = parse_pubsub_ep(r[0])
    subspec = parse_pubsub_ep(r[1])
    pfx_pub = prefix_str_to_obj(pubspec[0])
    pfx_sub = prefix_str_to_obj(subspec[0])
    # BUG FIX: the original tested `pfx_sub is None or pfx_sub is None`,
    # so a malformed *publisher* prefix slipped through unvalidated.
    if pfx_pub is None or pfx_sub is None:
        return None
    publisher = DiscoveryPubSubEndPointType(ep_prefix = pfx_pub,
            ep_type = pubspec[1], ep_id = pubspec[2],
            ep_version = pubspec[3])
    subscriber = [DiscoveryPubSubEndPointType(ep_prefix = pfx_sub,
            ep_type = subspec[1], ep_id = subspec[2],
            ep_version = subspec[3])]
    dsa_rule_entry = DiscoveryServiceAssignmentType(publisher, subscriber)
    return dsa_rule_entry
#end
def match_pubsub_ep(ep1, ep2):
    """Return True when two pub/sub endpoints agree on prefix address,
    prefix length, type, id and version."""
    left = (ep1.ep_prefix.ip_prefix, ep1.ep_prefix.ip_prefix_len,
            ep1.ep_type, ep1.ep_id, ep1.ep_version)
    right = (ep2.ep_prefix.ip_prefix, ep2.ep_prefix.ip_prefix_len,
             ep2.ep_type, ep2.ep_id, ep2.ep_version)
    return left == right
# match two rules (type DiscoveryServiceAssignmentType)
def match_rule_entry(r1, r2):
    """True when both rules have matching publishers and pairwise
    matching subscriber lists (same length, same order)."""
    if not match_pubsub_ep(r1.get_publisher(), r2.get_publisher()):
        return False
    subs1 = r1.get_subscriber()
    subs2 = r2.get_subscriber()
    if len(subs1) != len(subs2):
        return False
    return all(match_pubsub_ep(s1, s2) for s1, s2 in zip(subs1, subs2))
# end
# check if rule already exists in rule list and returns its index if it does
def find_rule(dsa_rules, in_rule):
    # NOTE(review): despite the comment above, this returns the matching
    # DsaRule *object* (or None), not an index.  It reads the module
    # global `vnc` client, and keeps scanning after a hit, so the *last*
    # matching rule wins.
    rv = None
    for dsa_rule in dsa_rules:
        dsa_rule_obj = vnc_read_obj(vnc, 'dsa-rule', dsa_rule['to'])
        entry = dsa_rule_obj.get_dsa_rule_entry()
        if match_rule_entry(entry, in_rule):
            rv = dsa_rule_obj
    return rv
# end
def print_dsa_rule_entry(entry, prefix = ''):
    # Print one rule as "<prefix> pub-spec sub-spec", each spec being
    # "ip/len,type,id,version".  Only the first subscriber is shown.
    pub = entry.get_publisher()
    sub = entry.get_subscriber()[0]
    pub_str = '%s/%d,%s,%s,%s' % \
        (pub.ep_prefix.ip_prefix, pub.ep_prefix.ip_prefix_len,
         pub.ep_type, pub.ep_id, pub.ep_version)
    sub_str = '%s/%d,%s,%s,%s' % \
        (sub.ep_prefix.ip_prefix, sub.ep_prefix.ip_prefix_len,
         sub.ep_type, sub.ep_id, sub.ep_version)
    print '%s %s %s' % (prefix, pub_str, sub_str)
"""
[
{
u'to': [u'default-discovery-service-assignment', u'default-dsa-rule'],
u'href': u'http://127.0.0.1:8082/dsa-rule/b241e9e7-2085-4a8b-8e4b-375ebf4a6dba',
u'uuid': u'b241e9e7-2085-4a8b-8e4b-375ebf4a6dba'
}
]
"""
def show_dsa_rules(vnc, dsa_rules):
    # Pretty-print every rule referenced by a DSA object; dsa_rules is
    # the list of {'to': fq_name, ...} references (see sample above).
    if dsa_rules is None:
        print 'Empty DSA group!'
        return
    print 'Rules (%d):' % len(dsa_rules)
    print '----------'
    idx = 1
    for rule in dsa_rules:
        dsa_rule = vnc_read_obj(vnc, 'dsa-rule', rule['to'])
        entry = dsa_rule.get_dsa_rule_entry()
        # entry is empty by default in a DSA rule object
        if entry:
            print_dsa_rule_entry(entry, prefix = '%d)' % idx)
        idx += 1
    print ''
# end
def vnc_read_obj(vnc, obj_type, fq_name):
    # Generic read helper: maps an object type like 'dsa-rule' onto the
    # client's dsa_rule_read() method.  Returns None (after printing a
    # message) when the object does not exist.
    method_name = obj_type.replace('-', '_')
    method = getattr(vnc, "%s_read" % (method_name))
    try:
        return method(fq_name=fq_name)
    except NoIdError:
        print '%s %s not found!' % (obj_type, fq_name)
        return None
# end
def parse_args():
    """Define and parse this tool's command line."""
    ap = argparse.ArgumentParser()
    ap.add_argument(
        '--server', help="Discovery server address in the form IP:Port",
        default='127.0.0.1:5998')
    ap.add_argument(
        '--api-server', help="API server address in the form IP:Port",
        default='127.0.0.1:8082')
    ap.add_argument(
        '--load-balance', help="Load balance type",
        choices=["partial", "full"], default="partial")
    ap.add_argument(
        '--admin-state', choices=['up', 'down'],
        help="Set administrative state of a service")
    ap.add_argument(
        '--service-id', help="Service id")
    ap.add_argument(
        '--service-type', help="Service type")
    valid_ops = ['read', 'add-rule', 'del-rule', 'create', 'delete', 'load-balance']
    ap.add_argument(
        '--op', choices=valid_ops, help="Operation to perform")
    ap.add_argument(
        '--name', help="FQN of discovery-service-assignment object",
        default='default-discovery-service-assignment')
    ap.add_argument('--rule', help="Rule to add or delete")
    ap.add_argument('--uuid', help="object UUID")
    ap.add_argument(
        '--os-username', help="Keystone User Name", default=None)
    ap.add_argument(
        '--os-password', help="Keystone User Password", default=None)
    ap.add_argument(
        '--os-tenant-name', help="Keystone Tenant Name", default=None)
    return ap.parse_args()
# end parse_args
def get_ks_var(args, name):
    """Fetch keystone credential `name` from the --os-<name> option or
    the OS_<NAME> environment variable.  Returns (value, error_message)
    where error_message is non-empty only when the value is missing."""
    opts = vars(args)
    uname = name.upper()
    cname = '-'.join(name.split('_'))
    if opts['os_%s' % name]:
        return (opts['os_%s' % name], '')
    # Empty environment values count as missing, same as an absent key.
    value = os.environ.get('OS_' + uname) or None
    if value is None:
        rsp = ('You must provide a %s via either --os-%s or env[OS_%s]'
               % (name, cname, uname))
        return (value, rsp)
    return (value, '')
# end
# ---- main script: validate inputs and connect to the API server -----
args = parse_args()
# Validate Discovery server information
server = args.server.split(':')
if len(server) != 2:
    print 'Discovery server address must be of the form ip:port, '\
        'for example 127.0.0.1:5998'
    sys.exit(1)
server_ip = server[0]
server_port = server[1]
# Validate API server information
api_server = args.api_server.split(':')
if len(api_server) != 2:
    print 'API server address must be of the form ip:port, '\
        'for example 127.0.0.1:8082'
    sys.exit(1)
api_server_ip = api_server[0]
api_server_port = api_server[1]
# Validate keystone credentials
conf = {}
for name in ['username', 'password', 'tenant_name']:
    val, rsp = get_ks_var(args, name)
    if val is None:
        print rsp
        sys.exit(1)
    conf[name] = val
username = conf['username']
password = conf['password']
tenant_name = conf['tenant_name']
print 'API Server = ', args.api_server
print 'Discovery Server = ', args.server
print 'Username = ', username
print 'Tenant = ', tenant_name
print ''
# Connect to the VNC API server; abort on any failure.
try:
    vnc = VncApi(username, password, tenant_name,
            api_server[0], api_server[1])
except Exception as e:
    print 'Exception: %s' % str(e)
    sys.exit(1)
headers = DEFAULT_HEADERS.copy()
headers['X-AUTH-TOKEN'] = vnc.get_auth_token()
# --admin-state: set a service's administrative state through the
# Discovery server REST API, then exit.
if args.admin_state:
    if not args.service_id or not args.service_type:
        print 'Please specify service type and ID'
        sys.exit(1)
    print 'Service type %s, id %s' % (args.service_type, args.service_id)
    data = {
        "service-type": args.service_type,
    }
    # NOTE(review): redundant re-check — we are already inside the
    # `if args.admin_state` branch.
    if args.admin_state:
        data['admin-state'] = args.admin_state
    url = "http://%s:%s/service/%s" % (server_ip, server_port, args.service_id)
    r = requests.put(url, data=json.dumps(data), headers=headers)
    if r.status_code != 200:
        print "Operation status %d" % r.status_code
    sys.exit(0)
# --op load-balance: trigger load re-balancing for a service type.
elif args.op == 'load-balance':
    if not args.service_type:
        print 'Please specify service type'
        sys.exit(1)
    if args.service_id:
        print 'Specific service id %s ignored for this operation' % args.service_id
    url = "http://%s:%s/load-balance/%s" % (server_ip, server_port, args.service_type)
    payload = { 'type': args.load_balance }
    r = requests.post(url, headers=headers, data=json.dumps(payload))
    if r.status_code != 200:
        print "Operation status %d" % r.status_code
    sys.exit(0)
# Resolve the target DSA object: by UUID if given, else by FQ name.
uuid = args.uuid
# transform uuid if needed
if uuid and '-' not in uuid:
    uuid = str(__uuid.UUID(uuid))
fq_name = vnc.id_to_fq_name(uuid) if uuid else args.name.split(':')
print ''
print 'Oper = ', args.op
print 'Name = %s' % fq_name
print 'UUID = %s' % uuid
# --op add-rule: append a rule to the DSA object after confirmation.
if args.op == 'add-rule':
    if not args.rule:
        print 'Error: missing rule'
        sys.exit(1)
    rule_entry = build_dsa_rule_entry(args.rule)
    if rule_entry is None:
        show_usage()
        sys.exit(1)
    # name is of discovery-service-assignment object
    # which consists of one or more rules
    dsa = vnc.discovery_service_assignment_read(fq_name = fq_name)
    dsa_rules = dsa.get_dsa_rules()
    show_dsa_rules(vnc, dsa_rules)
    print ''
    print_dsa_rule_entry(rule_entry)
    ans = raw_input("Confirm (y/n): ")
    if not ans or ans[0].lower() != 'y':
        sys.exit(0)
    # Each rule object is named after its own fresh UUID.
    rule_uuid = __uuid.uuid4()
    dsa_rule = DsaRule(name = str(rule_uuid), parent_obj = dsa, dsa_rule_entry = rule_entry)
    dsa_rule.set_uuid(str(rule_uuid))
    vnc.dsa_rule_create(dsa_rule)
# --op read: display all rules of the DSA object.
elif args.op == 'read':
    dsa = vnc_read_obj(vnc, 'discovery-service-assignment', fq_name)
    if dsa == None:
        sys.exit(1)
    dsa_rules = dsa.get_dsa_rules()
    show_dsa_rules(vnc, dsa_rules)
# --op del-rule: delete the matching rule after confirmation.
elif args.op == 'del-rule':
    if args.rule is None:
        print 'Error: missing rule'
        sys.exit(1)
    rule = build_dsa_rule_entry(args.rule)
    if rule is None:
        show_usage()
        sys.exit(1)
    dsa = vnc.discovery_service_assignment_read(fq_name = fq_name)
    dsa_rules = dsa.get_dsa_rules()
    if dsa_rules is None:
        print 'Empty DSA group!'
        sys.exit(1)
    show_dsa_rules(vnc, dsa_rules)
    obj = find_rule(dsa_rules, rule)
    if not obj:
        print 'Rule not found. Unchanged'
        sys.exit(1)
    else:
        print 'Rule found!'
    ans = raw_input("Confirm (y/n): ")
    if not ans or ans[0].lower() != 'y':
        sys.exit(0)
    vnc.dsa_rule_delete(id = obj.uuid)
|
SF="$1"
rm -rf input.mp4
rm -rf output.mp4
python3 run.py --sf $SF
python3 pix2pix.py --mode test --output_dir test_output/ --input_dir testing_output_images/ --checkpoint checkpoints/output/
ffmpeg -r 30 -f image2 -s 256x256 -i test_output/images/%d-outputs.png -vcodec libx264 -crf 20 output0.mp4
ffmpeg -r 30 -f image2 -s 256x256 -i test_output/images/%d-inputs.png -vcodec libx264 -crf 20 input0.mp4
# ffmpeg -i output0.mp4 -i audio_testing.wav -c:v copy -c:a aac -strict experimental output.mp4
# audios/00001-000.wav
ffmpeg -i $SF output_audio_trim.wav
ffmpeg -i output0.mp4 -i output_audio_trim.wav -c:v copy -c:a aac -strict experimental output.mp4
ffmpeg -i input0.mp4 -i output_audio_trim.wav -c:v copy -c:a aac -strict experimental input.mp4
rm -rf testing_output_images
rm -rf test_output
rm -rf output0.mp4
rm -rf input0.mp4
rm -rf output_audio_trim.wav |
#include <iostream>
#include <unordered_map>
#include <string>
#include <vector>
// Trivial in-memory file system: filename -> content.
class FileSystem {
private:
    std::unordered_map<std::string, std::string> files;

public:
    // Constructor to initialize the file system
    FileSystem() {
    }

    // Create a file, or overwrite the content of an existing one.
    void createFile(const std::string& filename, const std::string& content) {
        files[filename] = content;
    }

    // Return the file's content, or the sentinel "File not found".
    // Single lookup via find() instead of the original find()+operator[]
    // double lookup.
    std::string readFile(const std::string& filename) {
        auto it = files.find(filename);
        return it != files.end() ? it->second : "File not found";
    }

    // Remove the file if present; silently a no-op otherwise.
    void deleteFile(const std::string& filename) {
        files.erase(filename);
    }

    // List all stored file names (unspecified order).
    std::vector<std::string> listFiles() {
        std::vector<std::string> fileNames;
        fileNames.reserve(files.size());
        for (const auto& file : files) {
            fileNames.push_back(file.first);
        }
        return fileNames;
    }
};
// Demo: create two files, read one, delete the other, list what's left.
int main() {
    FileSystem fs;
    fs.createFile("file1.txt", "This is the content of file1");
    fs.createFile("file2.txt", "Content of file2");
    std::cout << fs.readFile("file1.txt") << std::endl;
    fs.deleteFile("file2.txt");
    std::vector<std::string> fileList = fs.listFiles();
    for (const auto& file : fileList) {
        std::cout << file << std::endl;
    }
    return 0;
}
<filename>api/api_book/list.go
package api_book
import "net/http"
// HandleList serves the book-list endpoint.
// NOTE(review): not implemented yet — currently returns an empty 200
// response for every request.
func HandleList(w http.ResponseWriter, r *http.Request) {
}
|
<reponame>iOS-web-tools/torrent-http-proxy
package services
import (
"sync"
"time"
"github.com/improbable-eng/grpc-web/go/grpcweb"
"github.com/sirupsen/logrus"
)
const (
	// GRPC_PROXY_TTL is the idle lifetime, in seconds, of a cached proxy.
	GRPC_PROXY_TTL = 60
)

// HTTPGRPCProxyPool caches one grpc-web proxy per source key and evicts
// entries that have not been requested for `expire`.
type HTTPGRPCProxyPool struct {
	sm      sync.Map // source key -> *HTTPGRPCProxy
	timers  sync.Map // source key -> *time.Timer driving eviction
	claims  *Claims
	expire  time.Duration
	r       *Resolver
	baseURL string
}
// NewHTTPGRPCProxyPool builds a proxy pool whose cached proxies expire
// after GRPC_PROXY_TTL seconds of disuse.
func NewHTTPGRPCProxyPool(bu string, claims *Claims, r *Resolver) *HTTPGRPCProxyPool {
	ttl := time.Duration(GRPC_PROXY_TTL) * time.Second
	return &HTTPGRPCProxyPool{
		baseURL: bu,
		claims:  claims,
		expire:  ttl,
		r:       r,
	}
}
// Get returns the cached grpc-web proxy for src's key, creating it on
// first use.  The first caller for a key arms an eviction timer; every
// subsequent hit resets it, so a proxy lives as long as it is requested
// at least once per `expire`.
//
// NOTE(review): Reset on a timer that has already fired races with the
// eviction goroutine — a caller can briefly receive a proxy that is
// being deleted concurrently.  Presumably acceptable here; confirm.
func (s *HTTPGRPCProxyPool) Get(src *Source, logger *logrus.Entry) (*grpcweb.WrappedGrpcServer, error) {
	key := src.GetKey()
	v, _ := s.sm.LoadOrStore(key, NewHTTPGRPCProxy(NewGRPCProxy(s.baseURL, s.claims, s.r, src, nil, logger)))
	t, tLoaded := s.timers.LoadOrStore(key, time.NewTimer(s.expire))
	timer := t.(*time.Timer)
	if !tLoaded {
		// First caller for this key: wait for expiry, then drop both entries.
		go func() {
			<-timer.C
			s.sm.Delete(key)
			s.timers.Delete(key)
		}()
	} else {
		timer.Reset(s.expire)
	}
	return v.(*HTTPGRPCProxy).Get(), nil
}
|
# -*- bash -*-
# Test-harness snippet: `is`/`like` assertion helpers and the implicit
# $output/$testname variables come from the surrounding podman system
# test framework, which captures each command's stdout into $output.
# Assert the container is attached to exactly two networks...
podman container inspect two_networks_con1_1 --format '{{len .NetworkSettings.Networks}}'
is "$output" "2" "$testname : Container is connected to both networks"
# ...and that both expected network names are present.
podman container inspect two_networks_con1_1 --format '{{.NetworkSettings.Networks}}'
like "$output" "two_networks_net1" "$testname : First network name exists"
like "$output" "two_networks_net2" "$testname : Second network name exists"
|
'use strict';
import {Question} from './question';
import {JSON_DATA} from './questionsJson.js';
export class QuestionService {
    constructor() {
        // Bundled fallback data, used when the network fetch fails.
        this.JSON_TEMP = JSON_DATA;
    }

    /**
     * Fetch quiz questions from a public Google Spreadsheet JSON feed and
     * map each entry to a Question. Falls back to the bundled JSON when the
     * request fails; resolves to undefined when the payload has no feed
     * entries.
     */
    retrieveQuestions() {
        const SPREADSHEET_KEY = '<KEY>';
        const URI = `https://spreadsheets.google.com/feeds/list/${SPREADSHEET_KEY}/od6/public/values?alt=json`;
        // Column names used by the spreadsheet feed ("gsx$" prefixed keys).
        const metaProcess = {
            questionColumn : 'question',
            anwserColums : ['answera', 'answerb', 'answerc'],
            helpColumn : 'explanation'
        };
        // Convert one raw feed entry into a Question instance.
        const toQuestion = (entry) => {
            const title = entry[`gsx\$${metaProcess.questionColumn}`]['$t'];
            const explanation = entry[`gsx\$${metaProcess.helpColumn}`]['$t'];
            const answers = metaProcess.anwserColums.map(
                (col) => entry[`gsx\$${col}`]['$t']);
            return new Question(title, answers, explanation);
        };
        return fetch(URI, {method: 'GET'})
            .then((response) => response.json())
            .catch(() => this.JSON_TEMP)
            .then((json) => {
                if (json && json.feed && json.feed.entry) {
                    try {
                        const questions = json.feed.entry.map(toQuestion);
                        console.info(questions);
                        return questions;
                    } catch (err) {
                        console.error(err);
                        return Promise.reject(err);
                    }
                }
            });
    }
}
#!/bin/bash
# Start agents on every host listed in conf/agent-ips.conf by ssh-ing in
# with expect and running scripts/startAgents.sh remotely.
#
# Usage: $0 <etcd-urls> [start-num]
ETCD_URLS=$1
START_NUM=$2
basepath=$(cd "$(dirname "$0")" && pwd)

# The etcd cluster URLs must start with "http".
_prefix=${ETCD_URLS:0:4}
if [ "x${_prefix}" != "xhttp" ] ; then
    echo "Please specify the first parameter correctly, such as \"http://9.111.255.50:2379,http://9.111.255.10:2379,http://9.111.254.41:2379\" exit ... "
    exit 1
fi

# Default the starting agent number when not supplied.
if [ "x${START_NUM}" = "x" ]; then
    START_NUM=1000
fi

# NOTE(review): hard-coded root password; prefer ssh keys or a secret store.
passwd='Letmein123'
while IFS= read -r ip
do
    /usr/bin/expect <<EOF
set timeout 80
spawn ssh root@$ip
expect {
    "*yes/no" { send "yes\r"; exp_continue }
    "*password:" { send "$passwd\r" }
}
expect "#*"
send "/bin/bash ${basepath}/scripts/startAgents.sh ${ETCD_URLS} ${START_NUM} \r"
expect "#*"
send "exit\r"
expect eof
EOF
done < "${basepath}/conf/agent-ips.conf"
|
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class User extends Model {
static associate(models) {
this.hasMany(models.Cita);
this.hasOne(models.Historial);
}
};
User.init({
nombre: DataTypes.STRING,
apellidos: DataTypes.STRING,
rol: DataTypes.STRING,
email: DataTypes.STRING,
password: DataTypes.STRING,
covid: DataTypes.BOOLEAN,
telefono: DataTypes.INTEGER,
direccion: DataTypes.STRING,
deudor: DataTypes.BOOLEAN,
dni: DataTypes.STRING,
dob: DataTypes.DATE
}, {
sequelize,
modelName: 'User',
});
return User;
}; |
#!/bin/bash
# Subset every MODIS HDF granule in in_dir to a GeoTIFF in out_dir,
# extracting the BRDF_Quality subdataset over a fixed bounding box.
in_dir=$1
out_dir=$2
for rst in "${in_dir}"/*.hdf;
do
    echo "$rst"
    # Keep the original file name, adding a _subset.tif suffix.
    out_name="${out_dir}/$(basename "${rst}")_subset.tif"
    echo "$out_name"
    #gdal_translate -of GTiff -projwin -0.05 0.025 -0.037 0.013 ${rst} ${out_name}
    gdal_translate -of GTiff -projwin -0.05 0.025 -0.037 0.013 "HDF4_EOS:EOS_GRID:${rst}:MCD_CMG_BRDF_30Arc Second:BRDF_Quality" "${out_name}"
done
|
// Tag-management Angular module: a Bootstrap-modal bridge directive plus the
// controller driving cascading tag-selection lists.
angular.module('appMantags', [])
.directive('akModal', function() { // Angular doesn't work with native Bootstrap modal
    return {
        restrict: 'A',
        link: function(scope, element, attrs) {
            // Toggle the Bootstrap modal whenever the watched expression changes.
            scope.$watch(attrs.akModal, function(value) {
                if (value)
                    element.modal('show');
                else
                    element.modal('hide');
            });
        }
    };
})
.controller('MantagsCtrl', ['$scope', '$http', function($scope, $http) {
    // Reload all tags from the server and recompute the group list.
    // FIX: declared with 'var' — previously leaked as an implicit global.
    var updTags = function() {
        $http.get('index.php/main/all_tags').then(function(res) {
            $scope.tags = res.data;
            var groupSet = new Set($scope.tags.map(function(x) { return x.properties.group; }));
            $scope.groups = Array.from(groupSet);
            // FIX: '$scope.groups === []' compared array identity and was
            // always false; test emptiness via length instead.
            if ($scope.groups.length === 0)
                $scope.groups = [undefined];
            if (! groupSet.has($scope.curGroup))
                $scope.curGroup = $scope.groups[0];
        });
    };
    $scope.curGroup = undefined;
    updTags();
    $scope.lists = [{prototype: null, selection: ['null']}];
    $scope.chosen = null;
    $scope.showModal = false; // this means modal for input new tag
    $scope.showNewGroupModal = false;
    // Rebuild the cascading selection lists after a selection change in any
    // of them, then resolve the currently chosen tag.
    $scope.updList = function(param) {
        for (var i = 0; i < $scope.lists.length; i++) {
            var next = Number($scope.lists[i].selection[0]);
            if (isNaN(next)) { // for '--' -> 'null'
                $scope.lists = $scope.lists.slice(0, i + 1);
                break;
            }
            if (i + 1 == $scope.lists.length) {
                $scope.lists.push({prototype: next, selection: ['null']});
                break;
            }
            if ($scope.lists[i + 1].prototype != next || $scope.lists[i] == param) {
                $scope.lists[i + 1] = {prototype: next, selection: ['null']};
                $scope.lists = $scope.lists.slice(0, i + 2);
                break;
            }
        }
        // Resolve a tag id into {id, name, proto, peers, properties}, where
        // peers are tags sharing the same prototype (and group, when set).
        function getFromId(id) {
            var ret = null;
            for (var j in $scope.tags)
                if ($scope.tags[j].idCategory == id)
                    ret = {id: id, name: $scope.tags[j].name, proto: $scope.tags[j].prototype, peers: [], properties: $scope.tags[j].properties};
            if (! ret) return null;
            for (var j in $scope.tags)
                if ($scope.tags[j].idCategory != id && $scope.tags[j].prototype == ret.proto) {
                    if ($scope.tags[j].properties && ret.properties && $scope.tags[j].properties.group !== ret.properties.group)
                        continue;
                    ret.peers.push({id: $scope.tags[j].idCategory, name: $scope.tags[j].name});
                }
            for (var j in $scope.tags)
                if ($scope.tags[j].idCategory == ret.proto) // null != 0 in javascript
                    ret.proto = {id: $scope.tags[j].idCategory, proto: $scope.tags[j].prototype};
            return ret;
        }
        $scope.chosen = ($scope.lists.length >= 2 ? getFromId($scope.lists[$scope.lists.length - 1].prototype) : null);
    };
    // Delete the chosen tag after user confirmation, then reset selection.
    $scope.del = function(msg) {
        if (confirm(msg))
            $http.get('index.php/admin/del_tag/' + $scope.chosen.id).then(function(res) {
                updTags();
                $scope.lists = [{prototype: null, selection: ['null']}];
                $scope.chosen = null;
            });
    };
    // Add a tag (as child of the chosen tag, or top-level) in the current group.
    $scope.add = function() {
        $http.post(
            'index.php/admin/add_tag/' + $scope.inputName + '/' + ($scope.chosen ? $scope.chosen.id : ''),
            $.param({properties: JSON.stringify({group: $scope.curGroup})}),
            {headers: {'Content-Type': 'application/x-www-form-urlencoded'}}
        ).then(function(res) {
            res = res.data;
            if (res.status == "ok") {
                updTags();
                $scope.lists = [{prototype: null, selection: ['null']}];
                $scope.chosen = null;
            } else
                alert(res.message);
        });
    };
    // Create a new (client-side) group and make it current.
    $scope.addGroup = function() {
        $scope.groups.push($scope.curGroup = $scope.inputGroupName);
    }
    // Re-parent the chosen tag under a new prototype.
    $scope.changeProto = function(newProto) {
        $http.get('index.php/admin/tag_change_proto/' + $scope.chosen.id + '/' + newProto).then(function(res) {
            updTags();
            $scope.lists = [{prototype: null, selection: ['null']}];
            $scope.chosen = null;
        });
    };
    // Persist the chosen tag's edited properties.
    $scope.updProperties = function() {
        $http.post(
            'index.php/admin/tag_set_properties/' + $scope.chosen.id,
            $.param({properties: JSON.stringify($scope.chosen.properties)}),
            {headers: {'Content-Type': 'application/x-www-form-urlencoded'}}
        ).then(function(res) {
            updTags();
        });
    };
    // Localized tooltip for the "add" button.
    $scope.addBtnTitle = function() {
        if ($scope.chosen) {
            return language == "english" ? "Add a tag as a sub-tag of " + $scope.chosen.name : "添加标签为" + $scope.chosen.name + "的子标签";
        } else {
            return language == "english" ? "Add a primary tag" : "添加一个顶级标签";
        }
    };
    // Localized label for the sub-tag list header.
    $scope.moveInfTitle = function(name) {
        return language == "english" ? "Sub-tag of " + name : name + "的子标签";
    };
}]);
|
<gh_stars>0
"use strict";

// Auto-generated icon definition (Material "error" icon, two-tone variant):
// a 24x24 viewBox with two SVG path children — the faded inner fill
// (opacity .3) and the solid outline.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_error_twotone = void 0;
var ic_error_twotone = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M12 4c-4.42 0-8 3.58-8 8s3.58 8 8 8 8-3.58 8-8-3.58-8-8-8zm1 13h-2v-2h2v2zm0-4h-2V7h2v6z",
      "opacity": ".3"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M11.99 2C6.47 2 2 6.48 2 12s4.47 10 9.99 10C17.52 22 22 17.52 22 12S17.52 2 11.99 2zM12 20c-4.42 0-8-3.58-8-8s3.58-8 8-8 8 3.58 8 8-3.58 8-8 8zm-1-5h2v2h-2zm0-8h2v6h-2z"
    },
    "children": []
  }]
};
exports.ic_error_twotone = ic_error_twotone;
def common_elements(list1, list2):
    """Return the elements of ``list1`` that also appear in ``list2``.

    Order and duplicates from ``list1`` are preserved, exactly as in the
    original O(n*m) implementation, but membership is checked against a set
    when possible, for O(n + m) total time.
    """
    try:
        lookup = set(list2)
    except TypeError:
        # Unhashable elements (e.g. lists) — fall back to linear scans.
        lookup = list2
    return [element for element in list1 if element in lookup]
# Demo: prints [2, 4], the elements shared by both lists.
list1 = [1, 2, 3, 4]
list2 = [2, 4, 6, 8]
print(common_elements(list1, list2))
<reponame>nieldk/updiprog<gh_stars>10-100
#include "sleep.h"
/* Sleep for the given number of milliseconds.
 * Uses SleepEx on Windows (MinGW) and usleep on Linux; on any other
 * platform this function is a no-op. */
void msleep(uint32_t msec)
{
#ifdef __MINGW32__
  SleepEx(msec, false);
#endif // __MINGW32__
#ifdef __linux
  usleep(msec*1000);
#endif // __linux
}
|
import socket
import struct
import threading
# Wire-protocol message type codes (one byte on the wire).
MSG_REGISTER_REQ = 1
MSG_REGISTER_RES = 2
MSG_DEREGISTER_REQ = 3
MSG_DEREGISTER_RES = 4
MSG_READ_MEM_REQ = 5
MSG_READ_MEM_RES = 6
MSG_WRITE_MEM_REQ = 7
MSG_WRITE_MEM_RES = 8
MSG_CONNECT = 9
MSG_CONNECT_RESPONSE = 10
MSG_DATA = 11
MSG_EOS = 12
MSG_RESET = 13

# Connect to the local daemon; TCP_NODELAY so small frames go out immediately.
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(('localhost', 7110))
client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
class RecvThread(threading.Thread):
    # Background reader: prints every received chunk as a list of byte
    # values until the server closes the connection. (Python 2 syntax.)
    def __init__(self):
        threading.Thread.__init__(self)
        self.done = False
    def run(self):
        done = False
        while not done:
            data = client.recv(1000)
            if data:
                print map(ord, data)
            else:
                print 'Connection closed'
                done = True
# Start the reader as a daemon so it dies with the main thread.
rt = RecvThread()
rt.daemon = True
rt.start()
def register(name):
    # Frame: payload length (u32), stream id 0 (u32), type (u8), name bytes.
    l = len(name)
    m = struct.pack('=IIB', l, 0, MSG_REGISTER_REQ) + name
    client.sendall(m)
def deregister(name):
    # Same frame layout as register(), with the DEREGISTER type code.
    l = len(name)
    m = struct.pack('=IIB', l, 0, MSG_DEREGISTER_REQ) + name
    client.sendall(m)
def read_mem(address, length):
    # Payload: address (u32) + length (u32) = 8 bytes.
    l = 8
    m = struct.pack('=IIBII', l, 0, MSG_READ_MEM_REQ, address, length)
    client.sendall(m)
def write_mem(address, data):
    # Payload: address (u32) followed by the raw data bytes.
    l = 4 + len(data)
    m = struct.pack('=IIBI', l, 0, MSG_WRITE_MEM_REQ, address) + data
    client.sendall(m)
def connect_response(stream_id, result):
    # Payload: a single result byte; the header carries the stream id.
    l = 1
    m = struct.pack('=IIBB', l, stream_id, MSG_CONNECT_RESPONSE, result)
    client.sendall(m)
def data(stream_id, data):
    # Payload: the raw data bytes, tagged with the stream id in the header.
    l = len(data)
    m = struct.pack('=IIB', l, stream_id, MSG_DATA) + data
    client.sendall(m)
def eos(stream_id):
    # End-of-stream marker: header only, empty payload.
    l = 0
    m = struct.pack('=IIB', l, stream_id, MSG_EOS)
    client.sendall(m)
def reset(stream_id):
    # Stream reset: header only, empty payload.
    l = 0
    m = struct.pack('=IIB', l, stream_id, MSG_RESET)
    client.sendall(m)
|
import { createApp } from "vue";
import App from "./App.vue";
import store from "./store";
import VConsole from "vconsole";
import GlobalComponents from "@/components";
import Directives from "@/directives";
// Import Framework7
import Framework7 from "./f7config/app";
// Import Framework7-Vue Plugin
import Framework7Vue, { registerComponents } from "framework7-vue/bundle";
// Import f7 icons
import "framework7-icons";
// Import styles
import "./styles/app.less";
import "./styles/var.less";
import "./styles/f7.less";
import "./styles/base.less";
// Init Framework7-Vue Plugin
Framework7.use(Framework7Vue);
// Init vconsole (disabled; re-enable for on-device debugging)
// new VConsole();
// Init App
const app = createApp(App);
// Register Framework7 Vue components
registerComponents(app);
// Install the store, custom directives and globally registered components,
// then mount onto the #app root element.
app.use(store);
app.use(Directives);
app.use(GlobalComponents);
app.mount("#app");
|
/*
Copyright © 2020 NAME HERE <EMAIL ADDRESS>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
"github.com/cloudsark/loganalyzer/logalizer"
"github.com/spf13/cobra"
)
// ip2locCmd represents the ip2loc command
// ip2locCmd represents the ip2loc command: it prints the top 10 client IP
// addresses found in the log file together with their geo locations.
var ip2locCmd = &cobra.Command{
	Use:   "ip2loc",
	Short: "Print Top 10 IP addresses accessing your web server with their location",
	// FIX: corrected user-facing typo "quicly" -> "quickly".
	Long: `Find Top 10 IP Addresses Accessing Your web server with their geo locations,
it helps you to quickly identify abuse`,
	Run: func(cmd *cobra.Command, args []string) {
		logalizer.TopIP2LocCmd(&filename)
	},
}
// init registers ip2loc as a subcommand of the top command.
func init() {
	topCmd.AddCommand(ip2locCmd)
}
|
// Lower-case a string; thin, readable wrapper around the built-in method.
function toLowerCase(str) {
    const lowered = str.toLowerCase();
    return lowered;
}
// Demo invocation.
var result = toLowerCase("HELLO WORLD");
console.log(result); // prints "hello world"
"""
Butterworth Low-pass Filter
===========================
"""
# import numpy as np
import matplotlib.pyplot as plt
from scipy import signal, optimize
from scipy.optimize import curve_fit, least_squares
from scipy.signal import medfilt
# from scipy import signal, optimize
from scipy.fftpack import rfft, irfft, rfftfreq
from sklearn.linear_model import LinearRegression
# Butterworth filter
def data_filter(data_measured, order, cutoff):
    """Low-pass filter a signal with a zero-phase Butterworth filter.

    Designs a Butterworth low-pass filter of the given ``order`` with the
    given ``cutoff`` (as a fraction of the Nyquist frequency), yielding
    numerator (b) and denominator (a) polynomials of an IIR filter, then
    applies it forward and backward (filtfilt). The result therefore has
    zero phase shift and an effective order twice the design order.
    """
    b, a = signal.butter(order, cutoff)
    # applies a linear digital filter twice, once forward and once backwards.
    # The combined filter has zero phase and a filter order twice that of the original.
    data_filtered = signal.filtfilt(b, a, data_measured)
    return data_filtered
# NOTE(review): the string below is a dead expression statement — leftover
# doctest output and an unused exponential-smoothing loop. It has no runtime
# effect; consider deleting it.
""""
The result should be approximately xlow, with no phase shift.
>>> b, a = signal.butter(8, 0.125)
>>> y = signal.filtfilt(b, a, x, padlen=150)
>>> np.abs(y - xlow).max()
9.1086182074789912e-06
data_filtered = np.zeros(n)
i = 0
# choose an alpha value between 0 and 1, where 1 is equivalent to
# unfiltered data
while i < n:
    data_filtered[i] = data_measured[i] * alpha + (data_filtered * (1.0 - alpha))
    i += 1
return data_filtered
"""
|
<gh_stars>0
// OS 9 Startup
import React, {Component} from 'react';
import PropTypes from 'prop-types';
import Cookies from 'js-cookie';
import MikeOS from '../resources/images/mikeos.jpg';
import './styles.sass';
// Stateless pseudo loading bar; `progress` is a 0-100 percentage that
// drives the width of the inner bar.
const Loader = (props) => (
  <div className='loader'>
    <label className='loader-label'>Starting Up...</label>
    <div className='loading-bar'>
      <div className='loading-progress' style={{width: `${props.progress}%`}} />
    </div>
  </div>
);
class Startup extends Component {
constructor(props) {
super(props);
this.state = {
loaderProgress: 0
};
this.loaderSpeed = 50;
}
componentDidMount() {
if(Cookies.get('hasLoadedBefore')) {
this.loaderSpeed = 10;
}
this.initPseudoLoader();
Cookies.set('hasLoadedBefore', true);
}
handleContextMenu(e) {
e.preventDefault();
}
// Updates the pseudo loader to simulate a loading bar
initPseudoLoader(){
let updateInterval;
const update = (u, t) => {
this.setState({loaderProgress: this.state.loaderProgress + u});
if(this.state.loaderProgress >= 100) {
clearInterval(updateInterval);
}
};
updateInterval = setInterval(() => update(1), this.loaderSpeed);
}
render() {
return (
<div
className='startup-wrapper'
onContextMenu={e => this.handleContextMenu(e)}
style={{display: this.state.loaderProgress >= 100 ? 'none' : 'flex'}}
>
<div className='startup'>
<div className='content'>
<div style={{backgroundImage: `url(${MikeOS})`}} className='startup-image'/>
<h1 className='startup-title'>Mike OS 9</h1>
</div>
<Loader progress={this.state.loaderProgress}/>
</div>
</div>
);
}
}
// No default props are needed yet; kept as a placeholder.
Startup.defaultProps = {
};
export default Startup;
|
package handlers
import (
"net/http"
httputils "github.com/irenicaa/go-dice-generator/http-utils"
)
// StatsCopier provides a snapshot of the collected roll statistics.
type StatsCopier interface {
	CopyData() map[string]int
}

// StatsHandler serves the collected dice-roll statistics as JSON.
type StatsHandler struct {
	Stats StatsCopier
	Logger httputils.Logger
}
// ServeHTTP replies with a JSON snapshot of the roll statistics.
// @router /stats [GET]
// @summary get stats of dice rolls
// @produce json
// @success 200 {object} map[string]int
func (statsHandler StatsHandler) ServeHTTP(
	writer http.ResponseWriter,
	request *http.Request,
) {
	statsCopy := statsHandler.Stats.CopyData()
	httputils.HandleJSON(writer, statsHandler.Logger, statsCopy)
}
|
<gh_stars>10-100
from nextcord.ext import commands
from Plugin import AutomataPlugin
import random
# Allowed range for the number of flips per command invocation.
MAXIMUM_FLIPS = 5
MINIMUM_FLIPS = 1
class Coinflip(AutomataPlugin):
    """Literally a coin flip!"""

    @commands.command()
    async def coinflip(self, ctx: commands.Context, number_of_times: int = 1):
        """Flips the coin n times!"""
        # Reject counts outside the allowed window before flipping anything.
        if not (MINIMUM_FLIPS <= number_of_times <= MAXIMUM_FLIPS):
            await ctx.send(
                f"Too many flips to handle, try less than {MAXIMUM_FLIPS + 1} and more than {MINIMUM_FLIPS - 1}!"
            )
            return
        for _ in range(number_of_times):
            # 0 -> heads, 1 -> tails.
            if random.randint(0, 1) == 0:
                await ctx.send("Heads!")
            else:
                await ctx.send("Tails!")
|
#include <cstdio>
#include <iostream>
#include <memory>
#include <vector>
#include <Eigen/Dense>
// Define the PointCloud type: a flat list of 3-D points, shared by pointer.
typedef std::vector<Eigen::Vector3f> PointCloud;
typedef std::shared_ptr<PointCloud> PointCloudPtr;
// Define the processPointCloud function
PointCloudPtr processPointCloud(const std::vector<Eigen::Vector3f>& data, std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f>>& matrix_buffer) {
// Print the number of datasets loaded
printf("Loaded %d datasets.\n", static_cast<int>(data.size()));
// Create a new point cloud 'final'
PointCloudPtr final(new PointCloud);
// Initialize an empty 'final' point cloud
// (Assuming the point cloud is initially empty)
// Initialize an empty 'matrix_buffer' vector
// (Assuming the matrix_buffer is initially empty)
return final; // Return the 'final' point cloud
}
// Example usage of processPointCloud with two sample points and an empty
// transform buffer.
int main() {
    std::vector<Eigen::Vector3f> data = {Eigen::Vector3f(1.0, 2.0, 3.0), Eigen::Vector3f(4.0, 5.0, 6.0)};
    std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f>> matrix_buffer;
    PointCloudPtr result = processPointCloud(data, matrix_buffer);
    // Further processing or usage of the 'result' point cloud
    return 0;
}
#!/bin/sh
# Lists all files checked into a git repository.
# FIX: replaced the deprecated/ambiguous test operator '-o' with two tests
# joined by '||' (POSIX marks -a/-o obsolescent).
if [ "$1" = "-h" ] || [ "$1" = "--help" ] ; then
    echo "Usage: $0 [OPTIONS] [REF]"
    echo ""
    echo "Options:"
    echo "  -h,--help  This help menu"
    echo ""
    echo "Lists all files checked into a git repository. REF defaults to the current HEAD reference (usually the current branch)."
    exit 1
fi

# Default REF to the current branch name when no argument is given.
if [ $# -lt 1 ] ; then
    REF="$(git rev-parse --abbrev-ref HEAD)"
    if [ -z "$REF" ] ; then
        echo "$0: Error: could not get current HEAD abbreviated reference name"
        exit 1
    fi
else
    REF="$1"
    shift
fi

# Walk the full tree recursively, printing file names only.
git ls-tree --full-tree -r --name-only "$REF"
var express = require('express');
var router = express.Router();
/* GET /photo_list — stream every record stored under the "photos|" key
 * prefix from the level-style DB on the request and return them as a JSON
 * array. Read errors are logged and the stream still ends with whatever
 * was collected. */
router.get('/photo_list', function(req, res) {
  var db = req.db;
  var page = req.page; // NOTE(review): unused — pagination not implemented?
  var pics = [];
  db.createReadStream({gte: 'photos|', lte: 'photos|\xff'})
    .on('data', function(data) {
      pics.push(data.value);
    })
    .on('error', function(err) {
      console.log('Error while reading photo list from DB.');
    })
    .on('end', function() {
      res.send(pics);
    });
});
/* GET /video_list — stream every record stored under the "videos|" key
 * prefix and return them as a JSON array. */
router.get('/video_list', function(req, res) {
  var db = req.db;
  var vids = [];
  // FIX: use gte/lte range options for consistency with /photo_list above;
  // levelup deprecated the older start/end range options.
  db.createReadStream({gte: 'videos|', lte: 'videos|\xff'})
    .on('data', function(data) {
      vids.push(data.value);
    })
    .on('error', function(err) {
      console.log('Error while reading video list from DB.');
    })
    .on('end', function() {
      res.send(vids);
    });
});
module.exports = router;
|
#!/bin/bash
#SBATCH -J Act_prelu_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Run one hyper-parameter configuration of the prelu activation experiment.
# NOTE(review): positional argument meanings are defined by meta.py, which
# is not visible here — confirm against that script before editing values.
python3 /home/se55gyhe/Act_func/progs/meta.py prelu 1 Adadelta 3 0.37231609068433225 362 0.9000517298848979 he_uniform PE-infersent
|
<filename>scripts/initActions.js
/**
* Created by Nick on 10/22/2015.
*/
// Global key handler: route keydown events to the movement logic unless a
// menu is currently open.
document.addEventListener("keydown", function (e) {
    //If not in menu, move (only state as of now)
    if (menuState == false)
        move(e);
});
// Return the last element of an array without removing it.
// NOTE(review): extending Array.prototype is visible to all code on the page.
Array.prototype.peek = function () {
    return this[this.length - 1];
};
// randomBetween returns a uniformly distributed integer in [min, max],
// inclusive at both ends.
function randomBetween(min, max) {
    var span = max - min + 1;
    return min + Math.floor(Math.random() * span);
}
// Debug helper: mark every map tile as seen, caching each tile's current
// top symbol so the whole 250x250 map renders as explored.
function godVision() {
    var i, j;
    for (i = 0; i < 250; i++) {
        for (j = 0; j < 250; j++) {
            window.map[i][j].lastSymbol = window.map[i][j].peek().symbol;
            window.map[i][j].unseen = false;
        }
    }
}
// One-time game setup: load JSON data synchronously, build the 250x250 map
// (tiles are arrays of stacked objects with per-tile metadata), link each
// tile to its 8 neighbors, generate the dungeon, place player and one enemy,
// wall the map border, seed the turn queue, and run the first vision pass.
function init() {
    // Synchronous AJAX so all data files are loaded before setup continues.
    $.ajaxSetup({
        async: false
    });
    $.getJSON("json/ground.json", function (json) {
        window.groundObjects = json;
    });
    $.getJSON("json/items.json", function (json) {
        window.itemObjects = json;
    });
    $.getJSON("json/monsters.json", function (json) {
        window.monsterObjects = json;
    });
    $.getJSON("json/structures.json", function (json) {
        window.structureObjects = json;
    });
    window.log = new messageLog();
    window.playerInventory = new playerInventory();
    window.player = new actor("@", "green", "You" , 1000, 500, 125, 125);
    window.playerLight = new LightSource(window.player, 20);
    window.map = [];
    // NOTE(review): this does NOT create a 2-D array — it indexes the
    // one-element array [viewWidth] at position viewHeight (usually
    // undefined) and assigns an implicit global. Confirm whether 'visible'
    // is actually used anywhere before fixing.
    visible = [viewWidth][viewHeight];
    var dirt = getObjectById(window.groundObjects, "dirt");
    var wall = getObjectById(window.structureObjects, "wall");
    // Build the tile grid: every tile starts as a dirt stack plus
    // visibility/pathfinding bookkeeping fields (A* gScore/fScore etc.).
    for (var i = 0; i < 250; i++) {
        window.map[i] = [];
        for (var j = 0; j < 250; j++) {
            window.map[i][j] = [];
            window.map[i][j].push(dirt);
            window.map[i][j].lit = false;
            window.map[i][j].lookFlag = false;
            window.map[i][j].unseen = true;
            window.map[i][j].lastSymbol = null;
            window.map[i][j].cameFrom = null;
            window.map[i][j].gScore = Infinity;
            window.map[i][j].fScore = Infinity;
            window.map[i][j].neighbors = [];
            window.map[i][j].xPos = i;
            window.map[i][j].yPos = j;
        }
    }
    // Link each tile to its up-to-8 in-bounds neighbors (used by A*).
    // NOTE(review): the xPos/yPos assignments below rewrite coordinates that
    // the loop above already set to the same values — redundant but harmless.
    for(i=0; i< mapWidth;i++){
        for(j=0; j<mapHeight; j++)
        {
            var k = 0;
            //left
            if (i - 1 >= 0) {
                window.map[i][j].neighbors[k] = window.map[i - 1][j];
                window.map[i][j].neighbors[k].xPos = i-1;
                window.map[i][j].neighbors[k].yPos = j;
                k++;
            }
            //up-left
            if (i - 1 >= 0 && j + 1 < mapHeight) {
                window.map[i][j].neighbors[k] = window.map[i - 1][j + 1];
                window.map[i][j].neighbors[k].xPos = i-1;
                window.map[i][j].neighbors[k].yPos = j+1;
                k++;
            }
            //up
            if (j + 1 < mapHeight) {
                window.map[i][j].neighbors[k] = window.map[i][j + 1];
                window.map[i][j].neighbors[k].xPos = i;
                window.map[i][j].neighbors[k].yPos = j+1;
                k++;
            }
            //up right
            if (i + 1 < mapWidth && j + 1 < mapHeight) {
                window.map[i][j].neighbors[k] = window.map[i + 1][j + 1];
                window.map[i][j].neighbors[k].xPos = i+1;
                window.map[i][j].neighbors[k].yPos = j+1;
                k++;
            }
            //right
            if (i + 1 < mapWidth) {
                window.map[i][j].neighbors[k] = window.map[i + 1][j];
                window.map[i][j].neighbors[k].xPos = i+1;
                window.map[i][j].neighbors[k].yPos = j;
                k++;
            }
            //right down
            if (i + 1 < mapWidth && j - 1 >= 0) {
                window.map[i][j].neighbors[k] = window.map[i + 1][j - 1];
                window.map[i][j].neighbors[k].xPos = i+1;
                window.map[i][j].neighbors[k].yPos = j-1;
                k++;
            }
            //down
            if (j - 1 >= 0) {
                window.map[i][j].neighbors[k] = window.map[i][j - 1];
                window.map[i][j].neighbors[k].xPos = i;
                window.map[i][j].neighbors[k].yPos = j-1;
                k++;
            }
            //down left
            if (i - 1 >= 0 && j - 1 >= 0) {
                window.map[i][j].neighbors[k] = window.map[i - 1][j - 1];
                window.map[i][j].neighbors[k].xPos = i-1;
                window.map[i][j].neighbors[k].yPos = j-1;
                k++;
            }
            //done?
        }
    }
    dungeonGen();
    // Place the player and one test enemy on the map and in the actor list.
    window.map[window.player.xPos][window.player.yPos].push(window.player);
    actorList.push(window.player);
    //window.actorQueue.push(window.player,window.player.initiative);
    var enemy = new actor("~", "red", "Hostile human" , 300, 350, 125, 120);
    enemy.weapon = getObjectById(window.itemObjects, "sword");
    actorList.push(enemy);
    //window.actorQueue.push(enemy, enemy.initiative);
    window.map[enemy.xPos][enemy.yPos].push(enemy);
    document.getElementById("playerInit").innerHTML = String(window.player.initiative);
    document.getElementById("playerHP").innerHTML = String(window.player.hp);
    // Wall off the map border: corners first, then the edges.
    window.map[0][0].push(wall);
    window.map[0][249].push(wall);
    window.map[249][0].push(wall);
    window.map[249][249].push(wall);
    //window.map[130][130].push(wall);
    for (i = 1; i < 249; i++) {
        window.map[i][0].push(wall);
        window.map[i][249].push(wall);
        window.map[0][i].push(wall);
        window.map[249][i].push(wall);
    }
    /*for(i=10; i < 240; i+=10){
        for(j=10; j < 240; j+=10){
            window.map[i][j].push(getObjectById(window.structureObjects, "debug_wall"));
            window.map[i][j].visible = false;
            window.map[i][j].lit = false;
            window.map[i][j].lastSymbol = null;
        }
    }*/
    // Seed the initiative-ordered turn queue and run the first vision pass.
    for (i = 0; i < actorList.length; i++)
        actorQueue.push(actorList[i], actorList[i].initiative);
    window.look = new look();
    vision();
}
#!/bin/bash
# usage ./client.sh <sbid>
# Point the Ice runtime at the ingest manager config, then run the
# functional test client against the given scheduling block id.
export ICE_CONFIG="../files/config/ingestmanager.ice_cfg"
# FIX: quote the argument so ids containing spaces survive word-splitting.
../functests/Client.py "$1"
|
#pragma once
#include <SFML/Graphics.hpp>
#include <memory>
#include <string>
#include <cstdint>
#include "AssetManager.hpp"
#include "InputManager.hpp"
#include "StateMachine.hpp"
namespace Pacenstein {
    /**
     * Nested settings map: section name -> (key -> value), all strings.
     */
    typedef std::map<std::string, std::map<std::string, std::string>> settings_t;

    /**
     * Shared state required across the whole game.
     *
     * Held in a single struct (passed around via game_data_ref_t) so the
     * state machine, managers and gameplay counters are reachable from every
     * game state. This struct is effectively a singleton.
     */
    struct GameData {
        StateMachine machine;       ///< Stack machine of game states.
        sf::RenderWindow window;    ///< Main SFML render window.
        AssetManager assets;        ///< Loaded textures, fonts and sounds.
        InputManager input;         ///< Input polling helper.
        uint64_t score;             ///< Current score.
        // FIX: 'unsigned int' replaces the non-standard 'uint' typedef,
        // which is POSIX-only (sys/types.h) and unavailable on e.g. MSVC;
        // it is the same underlying type on POSIX systems.
        unsigned int lives;             ///< Remaining lives.
        unsigned int ghostsEaten;       ///< Number of ghosts eaten so far.
        bool scattering;                ///< Ghosts currently in scatter mode.
        bool invincible;                ///< Player currently invincible.
        unsigned int pacPelletsLeft;    ///< Pac pellets left to collect.
        unsigned int powerPelletsLeft;  ///< Power pellets left to collect.
        settings_t settings;            ///< Loaded game settings.
    };

    /**
     * Shared smart pointer to GameData.
     */
    typedef std::shared_ptr<GameData> game_data_ref_t;

    /**
     * Main body of the game.
     *
     * While the window is open, the game loop asks the StateMachine to
     * process pending changes, handles input, updates the current state and
     * draws what needs to be drawn.
     */
    class Game {
    public:
        /**
         * Create the render window, push the SplashState onto the state
         * machine, then call run() to start the game loop.
         *
         * \param title The title for the game window.
         */
        explicit Game(const std::string& title);

    private:
        const float dt = 0.02;   ///< Fixed update timestep, in seconds.
        sf::Clock clock;         ///< Frame timing clock.
        game_data_ref_t data;    ///< Shared game data.

        void run();
        void parseSettings();
    };
}
|
import numpy as np
def convert_to_list(obj):
    """Convert a NumPy array to a plain Python list.

    Any non-array input is returned unchanged.
    """
    return obj.tolist() if isinstance(obj, np.ndarray) else obj
#!/bin/bash
# -------
# Script for maintenance shutdown of Alfresco
#
# Copyright 2013 Loftux AB, Peter Löfgren
# Distributed under the Creative Commons Attribution-ShareAlike 3.0 Unported License (CC BY-SA 3.0)
# -------
# Usage: <script> [downtime-in-minutes] [special-message]
# Publishes downtime metadata to ${ALF_HOME_WWW}/downtime.js (read by the
# maintenance page), then stops the Alfresco tomcat service.

USER=www-data                   # owner of the published downtime file
ALF_HOME_WWW=/opt/alfresco/www  # web root serving the maintenance page
DOWNTIME=10                     # default expected downtime in minutes

#((!$#)) && echo Supply expected downtime in minutes as argument! && exit 1

# Print an error message to stderr and abort.
die () {
    echo >&2 "$@"
    exit 1
}

# Optional first argument overrides the default downtime; must be numeric.
if [ "$#" -gt 0 ]
then
    echo "$1" | grep -E -q '^[0-9]+$' || die "Numeric argument required, $1 provided"
    DOWNTIME=$1
fi

echo "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
echo "Updating maintenance message script file"
echo "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
echo
echo "var downTime = ${DOWNTIME};" | sudo tee "${ALF_HOME_WWW}/downtime.js"
echo "var startTime = $(date +%s);" | sudo tee -a "${ALF_HOME_WWW}/downtime.js"
# NOTE: $2 is interpolated into JavaScript unescaped; a message containing a
# single quote will break the generated file. Keep messages simple.
echo "var specialMessage = '$2';" | sudo tee -a "${ALF_HOME_WWW}/downtime.js"
sudo chown -R "${USER}:nogroup" "${ALF_HOME_WWW}"
echo
echo "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
echo "Stopping the Alfresco tomcat instance"
echo "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
echo
sudo service alfresco stop
/**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright 2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jooby.reactor;
import com.google.inject.Binder;
import com.typesafe.config.Config;
import static java.util.Objects.requireNonNull;
import org.jooby.Deferred;
import org.jooby.Env;
import org.jooby.Jooby;
import org.jooby.Route;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.function.Function;
/**
* <h1>reactor</h1>
* <p>
* <a href="http://projectreactor.io">Reactor</a> is a second-generation Reactive library for
* building non-blocking applications on the JVM
* based on the <a href="http://www.reactive-streams.org">Reactive Streams Specification</a>
* </p>
*
* <h2>exports</h2>
* <ul>
* <li>map route operator that converts {@link Flux} and {@link Mono} into {@link Deferred} API.
* </li>
* </ul>
*
* <h2>usage</h2>
* <pre>{@code
*
* ...
* import org.jooby.reactor.Reactor;
* ...
*
* {
* use(new Reactor());
*
* get("/", req -> Flux.just("reactive programming in jooby!"));
* }
* }</pre>
*
* <h2>how it works?</h2>
* <p>
* Previous example is translated to:
* </p>
*
* <pre>{@code
* {
* use(new Reactor());
*
* get("/", req -> {
*
* return new Deferred(deferred -> {
* Flux.just("reactive programming in jooby!")
* .consume(deferred::resolve, deferred::reject);
* });
*
* });
* }
* }</pre>
*
* <p>
* Translation is done via {@link Reactor#reactor()} route mapper. If you are a
* <a href="http://projectreactor.io">reactor</a> programmer then you don't need to worry
* for learning a new API and semantic. The {@link Reactor#reactor()} route operator deal and take
* cares of the {@link Deferred} API.
* </p>
*
* <h2>reactor mapper</h2>
* <p>
* Advanced flux/mono configuration is allowed via function adapters:
* </p>
*
* <pre>{@code
*
* ...
* import org.jooby.reactor.Reactor;
* ...
*
* {
* use(new Reactor()
* .withFlux(f -> f.publishOn(Computations.concurrent())
* .withMono(m -> m.publishOn(Computations.concurrent()));
*
* get("/flux", req -> Flux...);
*
* get("/mono", req -> Mono...);
*
* }
* }</pre>
*
* <p>
* Here every Flux/Mono from a route handler will publish on the <code>concurrent</code> scheduler.
* </p>
*
* @author edgar
* @since 1.0.0.CR3
*/
@SuppressWarnings("rawtypes")
public class Reactor implements Jooby.Module {

  /** Adapter applied to every {@link Flux} returned by a route handler; identity by default. */
  private Function<Flux, Flux> flux = Function.identity();

  /** Adapter applied to every {@link Mono} returned by a route handler; identity by default. */
  private Function<Mono, Mono> mono = Function.identity();

  /**
   * Set an adapter that is applied to every {@link Flux} produced by a route handler before it
   * is converted to a {@link Deferred} (e.g. to set a scheduler via {@code publishOn}).
   *
   * @param adapter Flux adapter. Required.
   * @return This module, for chaining.
   */
  public Reactor withFlux(final Function<Flux, Flux> adapter) {
    this.flux = requireNonNull(adapter, "Flux's adapter is required.");
    return this;
  }

  /**
   * Set an adapter that is applied to every {@link Mono} produced by a route handler before it
   * is converted to a {@link Deferred}.
   *
   * @param adapter Mono adapter. Required.
   * @return This module, for chaining.
   */
  public Reactor withMono(final Function<Mono, Mono> adapter) {
    this.mono = requireNonNull(adapter, "Mono's adapter is required.");
    return this;
  }

  /**
   * Map a reactor object like {@link Flux} or {@link Mono} into a {@link Deferred} object.
   *
   * <pre>{@code
   * ...
   * import org.jooby.reactor.Reactor;
   * ...
   *
   * {
   *   with(() -> {
   *     get("/lux", req -> Flux...);
   *
   *     get("/mono", req -> Mono...);
   *
   *   }).map(Reactor.reactor(
   *       flux -> flux.publishOn(Computations.concurrent()),
   *       mono -> mono.publishOn(Computations.concurrent()));
   * }
   * }</pre>
   *
   * @param flux A flux adapter.
   * @param mono A mono adapter.
   * @return A new mapper.
   */
  @SuppressWarnings("unchecked")
  public static Route.Mapper<Object> reactor(final Function<Flux, Flux> flux,
      final Function<Mono, Mono> mono) {
    requireNonNull(flux, "Flux's adapter is required.");
    requireNonNull(mono, "Mono's adapter is required.");
    return Route.Mapper.create("reactor", value -> {
      if (value instanceof Flux) {
        // deferred::set is used for both the success and the error callback —
        // presumably Deferred#set dispatches on Throwable; confirm against the
        // jooby Deferred API.
        return new Deferred(deferred -> flux.apply((Flux) value)
            .consume(deferred::set, deferred::set));
      }
      if (value instanceof Mono) {
        return new Deferred(deferred -> mono.apply((Mono) value)
            .consume(deferred::set, deferred::set));
      }
      // Non-reactive values pass through untouched.
      return value;
    });
  }

  /**
   * Map a reactor object like {@link Flux} or {@link Mono} into a {@link Deferred} object.
   *
   * <pre>{@code
   * ...
   * import org.jooby.reactor.Reactor;
   * ...
   *
   * {
   *   with(() -> {
   *     get("/lux", req -> Flux...);
   *
   *     get("/mono", req -> Mono...);
   *
   *   }).map(Reactor.reactor(
   *       flux -> flux.publishOn(Computations.concurrent()),
   *       mono -> mono.publishOn(Computations.concurrent()));
   * }
   * }</pre>
   *
   * @return A new mapper.
   */
  public static Route.Mapper<Object> reactor() {
    return reactor(Function.identity(), Function.identity());
  }

  /**
   * Install the {@link #reactor(Function, Function)} mapper on the application router, using the
   * adapters configured via {@link #withFlux(Function)} and {@link #withMono(Function)}.
   */
  @Override
  public void configure(final Env env, final Config conf, final Binder binder) {
    env.router()
        .map(reactor(flux, mono));
  }
}
|
# -*- encoding: utf-8 -*-
$LOAD_PATH.push File.expand_path("../lib", __FILE__)
require "saml_idp/version"
Gem::Specification.new do |s|
  s.name = %q{saml_idp}
  s.version = SamlIdp::VERSION
  s.platform = Gem::Platform::RUBY
  s.authors = ["<NAME>", '<NAME>']
  s.email = %q{<EMAIL>}
  s.homepage = %q{http://github.com/18F/saml_idp}
  # Typo fixed: "Indentity" -> "Identity".
  s.summary = %q{SAML Identity Provider in Ruby}
  s.description = %q{SAML IdP (Identity Provider) library in ruby}
  s.date = Time.now.utc.strftime("%Y-%m-%d")
  s.files = Dir.glob("app/**/*") + Dir.glob("lib/**/*") + [
    "LICENSE",
    "README.md",
    "Gemfile",
    "saml_idp.gemspec"
  ]
  # NOTE(review): `license` is expected to be an SPDX identifier (e.g. "MIT"),
  # not a file name — confirm the actual license and replace "LICENSE".
  s.license = "LICENSE"
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
  s.rdoc_options = ["--charset=UTF-8"]
  s.post_install_message = <<-INST
If you're just recently updating saml_idp - please be aware we've changed the default
certificate. See the PR and a description of why we've done this here:
https://github.com/sportngin/saml_idp/pull/29
If you just need to see the certificate `bundle open saml_idp` and go to
`lib/saml_idp/default.rb`
Similarly, please see the README about certificates - you should avoid using the
defaults in a Production environment. Post any issues you to github.
** New in Version 0.3.0 **
Encrypted Assertions require the xmlenc gem. See the example in the Controller
section of the README.
INST
  s.add_dependency('activesupport')
  s.add_dependency('uuid')
  s.add_dependency('builder')
  s.add_dependency('nokogiri', '>= 1.6.2')
  s.add_development_dependency "rake"
  s.add_development_dependency "simplecov"
  s.add_development_dependency "rspec"
  s.add_development_dependency "ruby-saml", "~> 1.4.1"
  s.add_development_dependency("rails", "~> 4.2")
  s.add_development_dependency("capybara")
  s.add_development_dependency("timecop")
  s.add_development_dependency("xmlenc", ">= 0.6.4")
end
|
<gh_stars>10-100
/**
* Export classes for mantle data filter.
*/
package io.opensphere.mantle.data.filter;
|
<reponame>tikalk/egroll
/**
* Main application routes
*/
'use strict';
var errors = require('./components/errors');
var passport = require('passport');
var jwt = require('jwt-simple');
var fs = require('fs');
module.exports = function(app) {
  // var login = require('./api/login');
  // login.init(app);

  // API directories that must not be auto-mounted.
  var excludedEndpoints = ['login'];

  // Insert API endpoints Automaticaly below:
  // every directory under src/server/api is mounted as /api/<name>.
  fs.readdirSync('./src/server/api/')
    .filter(function (endpoint) {
      var isNotExcluded = excludedEndpoints.every(function (excluded) {
        return endpoint !== excluded;
      });
      // endpoint doesn't have a . (i.e. it is a directory, not a file)
      return endpoint.indexOf('.') === -1 && isNotExcluded;
    })
    .forEach(function(endpoint){
      app.use('/api/' + endpoint, require('./api/' + endpoint));
    });

  // All undefined asset or api routes should return a 404
  app.route('/:url(api|auth|components|app|bower_components|assets)/*')
    .get(errors[404]);

  // All other routes should redirect to the index.html
  // NOTE(review): res.sendfile is the deprecated spelling of res.sendFile —
  // works on older Express, but confirm against the Express version in use.
  app.route('/*')
    .get(function(req, res) {
      res.sendfile(app.get('appPath') + '/index.html');
    });

  // Simple route middleware to ensure user is authenticated.
  // Use this route middleware on any resource that needs to be protected. If
  // the request is authenticated (typically via a persistent login session),
  // the request will proceed. Otherwise, the user will be redirected to the
  // login page.
  // NOTE(review): defined but not referenced anywhere in this function —
  // confirm it is still needed.
  function ensureAuthenticated(req, res, next) {
    passport.authenticate('bearer', { session: false }, function(err, user, info) {
      if (err) { return next(err) }
      if (!user) {
        return res.json(401, { error: 'Unknown token' });
      }
      return next();
    })(req, res, next);
  }
};
|
<filename>src/components/DocumentField.js
import React from 'react';
import PropTypes from 'prop-types';
import ValidatorService from '../services/validator';
import TextField from './TextField';
// Delegate document-number validation to the shared ValidatorService.
const validateInput = value => ValidatorService.validateDocument(value);
// Forward the change event to onChange, but only when the incoming value
// actually differs from the currently held one (suppresses redundant updates).
const handleChange = ({target}, value, onChange) => {
  if (value !== target.value) {
    onChange({target});
  }
};
// Controlled document-number input built on TextField: required, 7-8
// characters, validated through ValidatorService.
// NOTE(review): validateInput is *invoked* here (validateInput(value)) rather
// than passed as a callback — confirm TextField's prop expects the validation
// result and not the function itself.
const DocumentField = ({
  control, label, value, disabled, onChange
}) => (
  <TextField
    required
    minLength={7}
    maxLength={8}
    {...{
      disabled, control, label, value
    }}
    onChange={e => handleChange(e, value, onChange)}
    validateInput={validateInput(value)}
  />
);

// Prop contract: all strings/handlers are required; disabled is optional.
DocumentField.propTypes = {
  control: PropTypes.string.isRequired,
  label: PropTypes.string.isRequired,
  value: PropTypes.string.isRequired,
  onChange: PropTypes.func.isRequired,
  disabled: PropTypes.bool
};

DocumentField.defaultProps = {
  disabled: false
};

export default DocumentField;
|
/**
 * Plain holder for a user account's credentials.
 */
class Account {
  /**
   * @param id       Unique account identifier.
   * @param username Login name.
   * @param password Stored password value.
   */
  constructor(id, username, password) {
    Object.assign(this, { id, username, password });
  }

  /** The account identifier. */
  getId() { return this.id; }

  /** The login name. */
  getUsername() { return this.username; }

  /** The stored password value. */
  getPassword() { return this.password; }
}
//added links to posters
// In-memory catalogue used by search(); each entry carries display metadata,
// an on_watchlist flag (0/1) and a relative poster path.
// NOTE(review): several 'name'/'director' fields contain the placeholder
// '<NAME>' — looks like redacted data; restore the real values.
movies = [{'name':'Inception', 'year':2010, 'director': '<NAME>',
'desc':'Inception Description', 'on_watchlist':0, 'genre':'Sci-Fi',
'poster':'../assets/posters/inception.jpg'},
{'name':"Avengers: Endgame", 'year':2019, 'director':'<NAME> and <NAME>',
'desc':'Endgame description', 'on_watchlist':0, 'genre':'Action',
'poster':'../assets/posters/endgame.jpg'},
{'name':'Joker', 'year':2019, 'director':'<NAME>',
'desc':'Joker description', 'on_watchlist':0, 'genre':'Thriller',
'poster':'../assets/posters/joker.jpg'},
{'name':'Split', 'year':2016, 'director':'<NAME>',
'desc':'Split description', 'on_watchlist':0, 'genre':'Thriller',
'poster':'../assets/posters/split.jpg'},
{'name':'<NAME>', 'year':1993, 'director':'<NAME>',
'desc':'Jurassic Park description', 'on_watchlist':0, 'genre':'Sci-Fi',
'poster':'../assets/posters/jurassic.jpg'}]

// TV shows use episode/season counts instead of year/director.
shows = [{'name':'Black Mirror', 'Available to stream at:':'', 'episodes':22,
'desc':'Black Mirror description', 'on_watchlist':0, 'genre':'Sci-Fi', 'seasons':5,
'poster':'../assets/posters/black_mirror.jpg'},
{'name':'The Good Place', 'Available to stream at:':'', 'episodes':53,
'desc':'The Good Place description', 'on_watchlist':0, 'genre':'Comedy', 'seasons':4,
'poster':'../assets/posters/good_place.jpg'},
{'name':'Breaking Bad', 'Available to stream at:':'', 'episodes':62,
'desc':'Breaking Bad description', 'on_watchlist':0, 'genre':'Drama', 'seasons':5,
'poster':'../assets/posters/breaking_bad.jpg'},
{'name':'Sherlock', 'Available to stream at:':'', 'episodes':13,
'desc':'Sherlock description', 'on_watchlist':0, 'genre':'Mystery', 'seasons':4,
'poster':'../assets/posters/sherlock.jpg'},
{'name':'<NAME>', 'Available to stream at:':'', 'episodes':25,
'desc':'Stranger Things description', 'on_watchlist':0, 'genre':'Horror', 'seasons':3,
'poster':'../assets/posters/stranger_things.jpg'}]
// Reveal the results container and blank out the search-page chrome
// (form, title, filters, spacer elements).
function display() {
  //stop hiding results
  const results = document.getElementById('results');
  if (results.style.display === 'none') {
    results.style.display = 'block';
  }
  ['form', 'title', 'filters', 'spaces'].forEach(function (id) {
    document.getElementById(id).innerHTML = '';
  });
}
// Render the search result for an exact title match against either catalogue
// (movies or shows): a heading, a back link, and the poster linked to the
// title's detail page. Shows an explicit message when nothing matches
// (previously the results pane was simply left untouched).
function search() {
  var resultsdiv = document.getElementById('results');
  var input = document.getElementById('search').value;

  // Title -> detail page. The paths are listed explicitly because the file
  // names are inconsistent (Endgame uses an underscore, the rest keep spaces).
  var detailPages = {
    'Avengers: Endgame': 'details/Avengers_Endgame.html',
    'Inception': 'details/Inception.html',
    'Joker': 'details/Joker.html',
    'Jurassic Park': 'details/Jurassic Park.html',
    'Split': 'details/Split.html',
    'Black Mirror': 'details/Black Mirror.html',
    'Breaking Bad': 'details/Breaking Bad.html',
    'Sherlock': 'details/Sherlock.html',
    'Stranger Things': 'details/Stranger Things.html',
    'The Good Place': 'details/The Good Place.html'
  };

  // Exact-match scan over both catalogues; first hit wins.
  var match = null;
  var all = movies.concat(shows);
  for (var i = 0; i < all.length; i++) {
    if (all[i].name === input) {
      match = all[i];
      break;
    }
  }

  if (match && detailPages[match.name]) {
    resultsdiv.innerHTML =
      '<h1>Showing results for: ' + match.name + ' </h1> <br> ' +
      '<a href="" class="link">Back to Previous Page</a> <br> ' +
      '<a href="' + detailPages[match.name] + '"><img src=' + match.poster + '></a>';
  } else {
    resultsdiv.innerHTML = 'Sorry! No results were found.';
  }

  display();
}
// Wire the search handler to the submit button.
function submit() {
  document.getElementById('submit').addEventListener('click', search, false);
}

// Defer the wiring until the DOM is fully loaded.
window.addEventListener('load', submit, false);
|
// aoi.js-style command definition: bans the targeted user and replies with an
// embed summarising the action (moderator, reason, timestamp, avatar).
// NOTE(review): the $onlyIf/$onlyPerms guards and $let assignments appear at
// the *bottom* of the code string while the embed/$ban appear at the top —
// presumably the interpreter evaluates functions bottom-up so the guards run
// first; confirm against the aoi.js version in use.
module.exports = ({
  name: "ban",
  usage: "ban <user> (reason)",
  description: "Bans the specified user",
  category: "Moderation",
  // The template string below is interpreted by the bot framework at runtime
  // and must not be altered cosmetically.
  code: `$title[Banned]
$description[Successfully Banned $userTag[$get[user]]]
$addField[Moderator:;$userTag;no]
$addField[Reason:;$get[reason];no]
$color[RANDOM]
$addTimestamp
$thumbnail[$userAvatar[$get[user]]]
$ban[$get[user];$userTag: $get[reason];7]
$let[reason;$replaceText[$replaceText[$checkCondition[$messageSlice[1]==];true;No reason was provided.];false;$messageSlice[1]]]
$onlyIf[$rolePosition[$highestRole[$get[user]]]>$rolePosition[$highestRole[$clientID]];**⛔ I can't ban someone higher/equal to my role postion!!**]
$onlyBotPerms[ban;**⛔ I need \`BAN_MEMBERS\` perms to ban a user!!**]
$onlyIf[$rolePosition[$highestRole[$get[user]]]>$rolePosition[$highestRole[$authorID]];**⛔ You can't ban someone higher/equal than you on role position!!**]
$onlyIf[$isBanned[$get[user]]==false;**⛔ That user is already banned!!**]
$onlyIf[$memberExists[$get[user]]==true;]
$onlyIf[$get[user]!=$ownerID;**⛔ You can't ban the owner of the server!!**]
$onlyIf[$get[user]!=$authorID;**⛔ It is not good to harm yourself 😢!!**]
$onlyIf[$get[user]!=undefined;**⛔ Please specify a valid user!!**]
$onlyIf[$message[1]!=;**⛔ Please specify a user you want to ban!!**]
$onlyPerms[ban;**⛔ You must have \`BAN_MEMBERS\` perms to perform this action!!**]
$let[user;$findUser[$message[1];no]]
$suppressErrors[Error!]`
})
|
<reponame>Kang-Gumsil/capstone_2
package dankook.kanghyeyoung.capstone_2;
import android.app.AlertDialog;
import android.app.DatePickerDialog;
import android.app.Dialog;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.NumberPicker;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.DialogFragment;
/**
 * Dialog with two NumberPickers for choosing a year and a month.
 * The selection is reported through a {@link DatePickerDialog.OnDateSetListener}
 * (the day argument is always 0).
 */
public class YearMonthPickerDialog extends Dialog {
    /** Callback invoked with the chosen year/month when OK is pressed. */
    private DatePickerDialog.OnDateSetListener listener;
    private NumberPicker mYearPicker;
    private NumberPicker mMonthPicker;
    /** Current year; the year picker's maximum is mCurYear + 1. */
    private int mCurYear;
    /** Initially selected year shown in the picker. */
    private int mSelectedYear;
    /** Initially selected month (1-12) shown in the picker. */
    private int mSelectedMonth;

    /**
     * @param context  host context
     * @param curYear  the current year (upper bound of the picker is curYear + 1)
     * @param year     year to preselect
     * @param month    month to preselect (1-12)
     */
    public YearMonthPickerDialog(@NonNull Context context, int curYear, int year, int month) {
        super(context);
        mCurYear=curYear;
        mSelectedYear=year;
        mSelectedMonth=month;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        /* Apply the dialog layout. */
        setContentView(R.layout.dialog_year_month_picker);

        /* Look up the pickers and buttons. */
        mYearPicker=findViewById(R.id.numberPicker_year);
        mMonthPicker=findViewById(R.id.numberPicker_month);
        Button button_ok=findViewById(R.id.button_ok);
        Button button_cancel=findViewById(R.id.button_cancel);

        /* Wire up the buttons. */
        button_ok.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                listener.onDateSet( // report the selection (day is unused, always 0)
                        null, mYearPicker.getValue(), mMonthPicker.getValue(), 0);
                YearMonthPickerDialog.this.dismiss(); // close the dialog
            }
        });
        button_cancel.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                YearMonthPickerDialog.this.dismiss(); // close without reporting
            }
        });

        // Years selectable from 2000 through next year.
        mYearPicker.setMinValue(2000);
        mYearPicker.setMaxValue(mCurYear+1);
        mYearPicker.setValue(mSelectedYear);

        // Months selectable from 1 to 12.
        mMonthPicker.setMinValue(1);
        mMonthPicker.setMaxValue(12);
        mMonthPicker.setValue(mSelectedMonth);
    }

    /** Register the listener that receives the confirmed year/month. */
    public void setDateSetListener(DatePickerDialog.OnDateSetListener listener) {
        this.listener=listener;
    }
}
|
#!/bin/bash
# Wrapper that launches the joynr C++ sync performance tests with a fixed
# set of paths and parameters (see /opt/joynr/performance-test below).

# Print usage information.
function usage
{
    echo "usage: start-performance-tests.sh"
}

# The script takes no real parameters: -h/--help prints usage and exits,
# any other argument prints usage and exits with an error.
while [ "$1" != "" ]; do
    echo "PARAM is: $1"
    case "$1" in
        -h | --help )
            usage
            exit
            ;;
        * )
            usage
            exit 1
            ;;
    esac
    shift
done

data/src/tests/performance-test/run-performance-tests.sh -j /usr/local/jetty -p /usr/bin -r /tmp -s /opt/joynr/performance-test/ -t CPP_SYNC -y /usr/bin -c 2 -x 1 -m OFF
|
import { Meteor } from 'meteor/meteor';
import { Tracker } from 'meteor/tracker';
import { settings } from '../../settings';
import { TabBar } from '../../ui-utils';
// Reactively toggle the "Pinned Messages" tab button: the autorun re-fires
// whenever the Message_AllowPinning setting changes, adding the button for
// channel/group/direct rooms when enabled and removing it when disabled.
Meteor.startup(function() {
  return Tracker.autorun(function() {
    if (settings.get('Message_AllowPinning')) {
      TabBar.addButton({
        groups: ['channel', 'group', 'direct'],
        id: 'pinned-messages',
        i18nTitle: 'Pinned_Messages',
        icon: 'pin',
        template: 'pinnedMessages',
        order: 10,
      });
    } else {
      TabBar.removeButton('pinned-messages');
    }
  });
});
|
# /usr/bin/env python
# -*- coding:utf8 -*-
from .base import BaseModule
class Logistic(BaseModule):
    """
    shopee Logistic api

    Thin wrapper over the Shopee ``logistics/*`` endpoints: every method
    issues a POST via ``self.client.execute`` and forwards ``kwargs``
    verbatim as the request payload.
    """

    def get_logistics(self):
        """
        Use this call to get all supported Logistic Channel
        :return: raw API response from self.client.execute
        """
        return self.client.execute("logistics/channel/get", "POST")

    def get_address(self):
        """
        Use this call to get all required param for init logistic.
        :return: raw API response
        """
        return self.client.execute("logistics/address/get", "POST")

    def get_airway_bill(self, **kwargs):
        """
        Use this API to get airway bill for orders
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/airway_bill/get_mass", "POST", kwargs)

    def get_branch(self, **kwargs):
        """
        Use this call to get branch information.
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/branch/get", "POST", kwargs)

    def get_logistic_message(self, **kwargs):
        """
        Use this call to get the logistics tracking information of an order.
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/tracking", "POST", kwargs)

    def get_order_logistic(self, **kwargs):
        """
        Use this call to fetch the logistics information of an order, these info can be used for waybill printing.
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/order/get", "POST", kwargs)

    def get_parameter_for_init(self, **kwargs):
        """
        Use this call to get all required param for init logistic.
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/init_parameter/get", "POST", kwargs)

    def get_time_slot(self, **kwargs):
        """
        Use this call to get available pickup time slots.
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/timeslot/get", "POST", kwargs)

    def get_tracking_no(self, **kwargs):
        """
        Use this API to get tracking number of orders
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/tracking_number/get_mass", "POST", kwargs)

    def init(self, **kwargs):
        """
        Use this call to arrange Pickup or Dropoff. Should call shopee.logistics.GetParameterForInit to fetch all required param first.
        pickup = {}
        dropoff = {}
        non_integrated = {}
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/init", "POST", kwargs)

    def set_logistic_status(self, **kwargs):
        """
        Set Logistic Status to PICKUP_DONE, this API only works for non-integrated logistic channels
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        # NOTE(review): this posts to "logistics/init", the exact same endpoint
        # as init() above — almost certainly a copy-paste error. Confirm the
        # intended status endpoint against the Shopee Open API reference
        # before changing it.
        return self.client.execute("logistics/init", "POST", kwargs)

    def set_tracking_no(self, **kwargs):
        """
        User this call to set tracking number for each order in batch.
        One order can only have one tracking number.
        This API can only be used on orders with the logisitcs channels that need sellers to provide tracking no to Shopee,
        instead that tracking no is generated from Shopee.
        :param kwargs: request parameters forwarded to the API
        :return: raw API response
        """
        return self.client.execute("logistics/tracking_number/set_mass", "POST", kwargs)
|
def search_list(list, value):
    """Linear search: return the index of the first element of `list`
    equal to `value`, or -1 if no element matches.

    Works on any iterable of comparable items; an empty sequence
    returns -1. (The parameter name `list` shadows the built-in but is
    kept for backward compatibility with keyword callers.)
    """
    # enumerate replaces the original manual while-index loop; same
    # first-match semantics, no index bookkeeping.
    for i, item in enumerate(list):
        if item == value:
            return i
    return -1
# Demo: locate a sample value in a list and report its position.
list = [2, 3, 5, 9]
value = 5
result = search_list(list, value)
if result == -1:
    print("Element is not present in list")
else:
    print("Element is present at index", result)
<gh_stars>1-10
/***************************************************************************
*
* Project _____ __ ____ _ _
* ( _ ) /__\ (_ _)_| |_ _| |_
* )(_)( /(__)\ )( (_ _)(_ _)
* (_____)(__)(__)(__) |_| |_|
*
*
* Copyright 2018-present, <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************************/
#include "FileStream.hpp"

#include <utility>
namespace oatpp { namespace data{ namespace stream {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// FileInputStream
// Shared stream context for every FileInputStream; marks the stream as finite (file-backed).
oatpp::data::stream::DefaultInitializedContext FileInputStream::DEFAULT_CONTEXT(data::stream::StreamType::STREAM_FINITE);
// Move constructor: takes over the other stream's FILE handle, ownership flag,
// I/O mode and captured data, leaving `other` detached (no file, no ownership).
FileInputStream::FileInputStream(FileInputStream&& other)
  : m_file(other.m_file)
  , m_ownsFile(other.m_ownsFile)
  , m_ioMode(other.m_ioMode)
  // Fix: m_capturedData was not transferred before, so the moved-from object
  // kept (and eventually released) the capture while this object still used
  // the file it was meant to keep alive.
  , m_capturedData(std::move(other.m_capturedData))
{
  other.m_file = nullptr;
  other.m_ownsFile = false;
}
// Wrap an already-opened FILE*. When `ownsFile` is true the stream fclose()s
// the handle on close()/destruction. `captureData` keeps an arbitrary object
// alive for the lifetime of the stream. I/O mode defaults to ASYNCHRONOUS.
FileInputStream::FileInputStream(std::FILE* file, bool ownsFile, const std::shared_ptr<void>& captureData)
: m_file(file)
, m_ownsFile(ownsFile)
, m_ioMode(IOMode::ASYNCHRONOUS)
, m_capturedData(captureData)
{}
// Open `filename` for binary reading; the stream owns the resulting handle.
// Logs and throws std::runtime_error if the file cannot be opened.
FileInputStream::FileInputStream(const char* filename, const std::shared_ptr<void>& captureData)
: FileInputStream(std::fopen(filename, "rb"), true, captureData)
{
if(!m_file) {
OATPP_LOGE("[oatpp::data::stream::FileInputStream::FileInputStream(filename)]", "Error. Can't open file '%s'.", filename);
throw std::runtime_error("[oatpp::data::stream::FileInputStream::FileInputStream(filename)]: Error. Can't open file.");
}
}
// Destructor: closes the file if owned (delegates to close()).
FileInputStream::~FileInputStream() {
this->close();
}
// Expose the raw FILE handle (may be nullptr after a move or failed open).
std::FILE* FileInputStream::getFile() {
return m_file;
}
// Read up to `count` bytes into `data` via std::fread.
// Returns the number of bytes read, or IOError::BROKEN_PIPE if the stream has
// no file. `action` is unused: file reads here are always synchronous.
v_io_size FileInputStream::read(void *data, v_buff_size count, async::Action& action) {
(void) action;
if(m_file != nullptr) {
return std::fread(data, 1, count, m_file);
}
return oatpp::IOError::BROKEN_PIPE;
}
// Record the requested I/O mode. NOTE(review): the stored mode is not
// consulted by read() above — reads stay blocking regardless; confirm intent.
void FileInputStream::setInputStreamIOMode(IOMode ioMode) {
m_ioMode = ioMode;
}
// Return the currently configured I/O mode.
IOMode FileInputStream::getInputStreamIOMode() {
return m_ioMode;
}
// All file input streams share the same finite-stream context.
Context& FileInputStream::getInputStreamContext() {
return DEFAULT_CONTEXT;
}
// Close the underlying file if this stream owns it.
// Fix: null m_file afterwards so an explicit close() followed by the
// destructor cannot fclose() the same FILE* twice (undefined behavior), and
// so read() reports BROKEN_PIPE instead of touching a stale handle.
void FileInputStream::close() {
  if(m_ownsFile && m_file) {
    std::fclose(m_file);
  }
  m_file = nullptr;
  m_ownsFile = false;
}
// Move assignment: release our current file, then steal the other stream's
// state, leaving it detached.
// Fixes: (1) the transfer statements used to run even on self-assignment,
// so `a = std::move(a)` nulled m_file and leaked the handle; (2)
// m_capturedData was never transferred.
FileInputStream& FileInputStream::operator=(FileInputStream&& other) {
  if(this != &other) {
    close();
    m_file = other.m_file;
    m_ownsFile = other.m_ownsFile;
    m_ioMode = other.m_ioMode;
    m_capturedData = std::move(other.m_capturedData);
    other.m_file = nullptr;
    other.m_ownsFile = false;
  }
  return *this;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// FileOutputStream
// Shared stream context for every FileOutputStream; marks the stream as finite (file-backed).
oatpp::data::stream::DefaultInitializedContext FileOutputStream::DEFAULT_CONTEXT(data::stream::StreamType::STREAM_FINITE);
// Move constructor: takes over the other stream's FILE handle, ownership flag,
// I/O mode and captured data, leaving `other` detached (no file, no ownership).
FileOutputStream::FileOutputStream(FileOutputStream&& other)
  : m_file(other.m_file)
  , m_ownsFile(other.m_ownsFile)
  , m_ioMode(other.m_ioMode)
  // Fix: m_capturedData was not transferred before, so the moved-from object
  // kept (and eventually released) the capture while this object still used
  // the file it was meant to keep alive.
  , m_capturedData(std::move(other.m_capturedData))
{
  other.m_file = nullptr;
  other.m_ownsFile = false;
}
// Wrap an already-opened FILE*. When `ownsFile` is true the stream fclose()s
// the handle on close()/destruction. `captureData` keeps an arbitrary object
// alive for the lifetime of the stream. I/O mode defaults to ASYNCHRONOUS.
FileOutputStream::FileOutputStream(std::FILE* file, bool ownsFile, const std::shared_ptr<void>& captureData)
: m_file(file)
, m_ownsFile(ownsFile)
, m_ioMode(IOMode::ASYNCHRONOUS)
, m_capturedData(captureData)
{}
// Open `filename` with the given fopen() mode string; the stream owns the
// resulting handle. Logs and throws std::runtime_error on failure.
FileOutputStream::FileOutputStream(const char* filename, const char* mode, const std::shared_ptr<void>& captureData)
: FileOutputStream(std::fopen(filename, mode), true, captureData)
{
if(!m_file) {
OATPP_LOGE("[oatpp::data::stream::FileOutputStream::FileOutputStream(filename, mode)]", "Error. Can't open file '%s'.", filename);
throw std::runtime_error("[oatpp::data::stream::FileOutputStream::FileOutputStream(filename, mode)]: Error. Can't open file.");
}
}
// Destructor: closes the file if owned (delegates to close()).
FileOutputStream::~FileOutputStream() {
this->close();
}
// Expose the raw FILE handle (may be nullptr after a move or failed open).
std::FILE* FileOutputStream::getFile() {
return m_file;
}
// Write up to `count` bytes from `data` via std::fwrite.
// Returns the number of bytes written, or IOError::BROKEN_PIPE if the stream
// has no file. `action` is unused: file writes here are always synchronous.
// Fix: the null check was missing, making write() inconsistent with
// FileInputStream::read() and UB when called on a moved-from/closed stream.
v_io_size FileOutputStream::write(const void *data, v_buff_size count, async::Action& action) {
  (void) action;
  if(m_file != nullptr) {
    return std::fwrite(data, 1, count, m_file);
  }
  return oatpp::IOError::BROKEN_PIPE;
}
// Record the requested I/O mode. NOTE(review): the stored mode is not
// consulted by write() above — writes stay blocking regardless; confirm intent.
void FileOutputStream::setOutputStreamIOMode(IOMode ioMode) {
m_ioMode = ioMode;
}
// Return the currently configured I/O mode.
IOMode FileOutputStream::getOutputStreamIOMode() {
return m_ioMode;
}
// All file output streams share the same finite-stream context.
Context& FileOutputStream::getOutputStreamContext() {
return DEFAULT_CONTEXT;
}
// Close the underlying file if this stream owns it.
// Fix: null m_file afterwards so an explicit close() followed by the
// destructor cannot fclose() the same FILE* twice (undefined behavior), and
// so write() cannot touch a stale handle.
void FileOutputStream::close() {
  if(m_ownsFile && m_file) {
    std::fclose(m_file);
  }
  m_file = nullptr;
  m_ownsFile = false;
}
// Move assignment: release our current file, then steal the other stream's
// state, leaving it detached.
// Fixes: (1) the transfer statements used to run even on self-assignment,
// so `a = std::move(a)` nulled m_file and leaked the handle; (2)
// m_capturedData was never transferred.
FileOutputStream& FileOutputStream::operator=(FileOutputStream&& other) {
  if(this != &other) {
    close();
    m_file = other.m_file;
    m_ownsFile = other.m_ownsFile;
    m_ioMode = other.m_ioMode;
    m_capturedData = std::move(other.m_capturedData);
    other.m_file = nullptr;
    other.m_ownsFile = false;
  }
  return *this;
}
}}}
|
/*
* Copyright (c) 2015, EURECOM (www.eurecom.fr)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those
* of the authors and should not be interpreted as representing official policies,
* either expressed or implied, of the FreeBSD Project.
*/
#include "lfds611_abstraction_internal_body.h"
/****************************************************************************/
#if (defined _WIN64 && defined _MSC_VER)
/* TRD : 64 bit Windows (user-mode or kernel) on any CPU with the Microsoft C compiler
_WIN64 indicates 64 bit Windows
_MSC_VER indicates Microsoft C compiler
*/
/* Double-word (128-bit) compare-and-swap for x64 Windows / MSVC.
 * Atomically compares the two-word value at *destination with *compare and,
 * if equal, stores the two-word value from *exchange. Returns 1 on success,
 * 0 on failure; per the _InterlockedCompareExchange128 contract, *compare is
 * updated with the value observed at *destination.
 * NOTE(review): the intrinsic requires *destination to be 16-byte aligned —
 * callers must guarantee this; it is not asserted here.
 * The surrounding LFDS611_BARRIER_COMPILER_FULL pair prevents the compiler
 * from reordering accesses around the atomic operation.
 */
static LFDS611_INLINE unsigned char
lfds611_abstraction_dcas (
volatile lfds611_atom_t * destination,
lfds611_atom_t * exchange,
lfds611_atom_t * compare)
{
unsigned char
cas_result;
assert (destination != NULL);
assert (exchange != NULL);
assert (compare != NULL);
LFDS611_BARRIER_COMPILER_FULL;
cas_result = _InterlockedCompareExchange128 ((volatile __int64 *)destination, (__int64) * (exchange + 1), (__int64) * exchange, (__int64 *) compare);
LFDS611_BARRIER_COMPILER_FULL;
return (cas_result);
}
#endif
/****************************************************************************/
#if (!defined _WIN64 && defined _WIN32 && defined _MSC_VER)
/* TRD : 32 bit Windows (user-mode or kernel) on any CPU with the Microsoft C compiler
(!defined _WIN64 && defined _WIN32) indicates 32 bit Windows
_MSC_VER indicates Microsoft C compiler
*/
/* Double-word compare-and-swap for 32-bit Windows / MSVC.
 * On 32-bit targets an lfds611_atom_t is one machine word, so a two-word CAS
 * fits in a single 64-bit _InterlockedCompareExchange64.
 * The intrinsic returns the value observed at *destination, which is stored
 * into *compare; success is then detected by comparing that observed value
 * against the caller's original comparand saved in original_compare.
 * Returns 1 on success, 0 on failure; on failure *compare holds the current
 * contents of *destination.
 */
static LFDS611_INLINE unsigned char
lfds611_abstraction_dcas (
volatile lfds611_atom_t * destination,
lfds611_atom_t * exchange,
lfds611_atom_t * compare)
{
__int64 original_compare;
assert (destination != NULL);
assert (exchange != NULL);
assert (compare != NULL);
/* save the caller's comparand before the intrinsic overwrites *compare */
*(__int64 *) & original_compare = *(__int64 *) compare;
LFDS611_BARRIER_COMPILER_FULL;
*(__int64 *) compare = _InterlockedCompareExchange64 ((volatile __int64 *)destination, *(__int64 *) exchange, *(__int64 *) compare);
LFDS611_BARRIER_COMPILER_FULL;
return ((unsigned char)(*(__int64 *) compare == *(__int64 *) & original_compare));
}
#endif
/****************************************************************************/
#if (defined __x86_64__ && defined __GNUC__)
/* TRD : any OS on x64 with GCC
__x86_64__ indicates x64
__GNUC__ indicates GCC
*/
/* Double-word (128-bit) compare-and-swap for x64 with GCC inline assembly.
 * cmpxchg16b compares RDX:RAX (the comparand, bound to compare[1]:compare[0])
 * with the 16 bytes at *destination; if equal it stores RCX:RBX (the exchange
 * value) and sets ZF, otherwise it loads the observed value into RDX:RAX —
 * i.e. *compare is updated in place on failure. setz converts ZF into the
 * 0/1 return value.
 * NOTE(review): cmpxchg16b requires *destination to be 16-byte aligned —
 * callers must guarantee this; it is not asserted here.
 */
static LFDS611_INLINE unsigned char
lfds611_abstraction_dcas (
volatile lfds611_atom_t * destination,
lfds611_atom_t * exchange,
lfds611_atom_t * compare)
{
unsigned char
cas_result;
assert (destination != NULL);
assert (exchange != NULL);
assert (compare != NULL);
// TRD : __asm__ with "memory" in the clobber list is for GCC a full compiler barrier
__asm__ __volatile__ (
"lock;" // make cmpxchg16b atomic
"cmpxchg16b %0;" // cmpxchg16b sets ZF on success
"setz %3;" // if ZF set, set cas_result to 1
// output
:"+m" (*(volatile lfds611_atom_t (*)[2])destination),
"+a" (*compare),
"+d" (*(compare + 1)),
"=q" (cas_result)
// input
:"b" (*exchange),
"c" (*(exchange + 1))
// clobbered
:"cc",
"memory");
return (cas_result);
}
#endif
/****************************************************************************/
#if ((defined __i686__ || defined __arm__) && __GNUC__ >= 4 && __GNUC_MINOR__ >= 1 && __GNUC_PATCHLEVEL__ >= 0)
/* TRD : any OS on x86 or ARM with GCC 4.1.0 or better
GCC 4.1.0 introduced the __sync_*() atomic intrinsics
__GNUC__ / __GNUC_MINOR__ / __GNUC_PATCHLEVEL__ indicates GCC and which version
*/
/* NOTE(review): this version test is wrong for later majors — e.g. GCC 5.0
 * has __GNUC_MINOR__ == 0, so "__GNUC__ >= 4 && __GNUC_MINOR__ >= 1" is
 * false and this implementation is skipped. The canonical form is
 * (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)).
 */
/* Double-word compare-and-swap built on the 64-bit __sync intrinsic.
 * On these 32-bit targets two lfds611_atom_t words fit in one unsigned long
 * long, so __sync_val_compare_and_swap performs the two-word CAS in one shot.
 * The intrinsic returns the value observed at *destination; success is the
 * observed value equalling the comparand. *compare is updated with the
 * observed value, matching the other platform implementations. Returns 1 on
 * success, 0 on failure.
 */
static LFDS611_INLINE unsigned char
lfds611_abstraction_dcas (
volatile lfds611_atom_t * destination,
lfds611_atom_t * exchange,
lfds611_atom_t * compare)
{
unsigned char
cas_result = 0;
unsigned long long int
original_destination;
assert (destination != NULL);
assert (exchange != NULL);
assert (compare != NULL);
LFDS611_BARRIER_COMPILER_FULL;
original_destination = __sync_val_compare_and_swap ((volatile unsigned long long int *)destination, *(unsigned long long int *)compare, *(unsigned long long int *)exchange);
LFDS611_BARRIER_COMPILER_FULL;
if (original_destination == *(unsigned long long int *)compare)
cas_result = 1;
*(unsigned long long int *)compare = original_destination;
return (cas_result);
}
#endif
|
#!/bin/bash
# Report system account expiry based on /etc/shadow:
#   /root/alluser.txt     - every account that has an expiry date
#   /root/infouser.txt    - accounts expiring within the next 7 days
#   /root/expireduser.txt - accounts already expired (these also get locked)
# Must run as root (reads /etc/shadow, writes /root, runs passwd -l).

# Start each report with a blank line (kept from the original output format).
echo "" > /root/infouser.txt
echo "" > /root/expireduser.txt
echo "" > /root/alluser.txt

# shadow field 1 = username, field 8 = expiry (days since the epoch);
# drop accounts whose expiry field is empty.
cut -d: -f1,8 /etc/shadow | sed '/:$/d' > /tmp/expirelist.txt

todaystime=$(date +%s)                  # "now", hoisted out of the loop
timeto7days=$(( todaystime + 604800 ))  # now + 7 days, in seconds

# Read each "user:expiry" pair directly. The original indexed the file with
# head|tail on every iteration, rescanning it per account (O(n^2)).
while IFS=: read -r username userexp; do
  userexpireinseconds=$(( userexp * 86400 ))
  tglexp=$(date -d "@$userexpireinseconds")
  tgl=$(printf '%02d' "$(echo "$tglexp" | awk '{print $3}')")  # day-of-month, zero-padded
  bulantahun=$(echo "$tglexp" | awk '{print $2,$6}')           # "Month Year"

  # Right-pad the name to 15 chars for display only. The original padded
  # $username itself and relied on unquoted word-splitting to strip the
  # padding again before passwd; we keep the raw name for passwd instead.
  paddeduser=$(printf '%-15s' "$username")

  echo " User : $paddeduser Expire tanggal : $tgl $bulantahun" >> /root/alluser.txt
  if [ "$userexpireinseconds" -ge "$todaystime" ]; then
    # Not expired yet; warn when within the next 7 days.
    if [ "$userexpireinseconds" -le "$timeto7days" ]; then
      echo " User : $paddeduser Expire tanggal : $tgl $bulantahun" >> /root/infouser.txt
    fi
  else
    # Already expired: record it and lock the account.
    echo " User : $paddeduser Expire tanggal : $tgl $bulantahun" >> /root/expireduser.txt
    passwd -l "$username"
  fi
done < /tmp/expirelist.txt
|
#!/bin/bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Install docker-ce from Docker's official apt repository, then install
# container-diff into /usr/local/bin and seed an empty Docker client config.
set -ex

curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
sudo apt-get update
sudo apt-get -y -o Dpkg::Options::="--force-confnew" install docker-ce

curl -LO https://storage.googleapis.com/container-diff/latest/container-diff-linux-amd64 && chmod +x container-diff-linux-amd64 && sudo mv container-diff-linux-amd64 /usr/local/bin/container-diff

# Quote $HOME: paths with spaces would otherwise word-split.
mkdir -p "$HOME/.docker/"
echo '{}' > "$HOME/.docker/config.json"
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.