text stringlengths 1 1.05M |
|---|
<filename>src/database/offline-write.ts
import { WriteCache } from './interfaces';
import { LocalUpdateService } from './local-update-service';
/**
 * Records a pending write in the local write cache and removes it once the
 * remote Firebase operation completes.
 *
 * The cache entry stores enough information (type/ref/method/args) to replay
 * the write later if the app restarts before `firebasePromise` resolves.
 *
 * NOTE(review): if `firebasePromise` rejects, the cache entry is left in
 * place (presumably so the write can be replayed later) — confirm this is
 * the intended retry semantics.
 *
 * @param firebasePromise Promise returned by the underlying Firebase call.
 * @param type Database type tag stored with the cached write.
 * @param ref Path/reference the write targets.
 * @param method Name of the write method to replay (e.g. `set`).
 * @param args Arguments the method was originally called with.
 * @param localUpdateService Service used to atomically mutate the cache.
 */
export function OfflineWrite(
  firebasePromise: PromiseLike<unknown>,
  type: string,
  ref: string,
  method: string,
  args: any[],
  localUpdateService: LocalUpdateService) {
  localUpdateService.update('write', (writeCache: WriteCache) => {
    // First write ever recorded: lazily initialise the cache structure.
    if (!writeCache) {
      writeCache = {
        lastId: 0,
        cache: {}
      };
    }
    writeCache.lastId++;
    writeCache.cache[writeCache.lastId] = { type, ref, method, args };
    return writeCache;
  }).then((writeCache: WriteCache) => {
    // Capture the id immediately. NOTE(review): assumes update() resolves
    // with the cache state produced by THIS call, so lastId is this write's
    // id — confirm LocalUpdateService serialises updates per key.
    const id = writeCache.lastId;
    firebasePromise.then(() => {
      WriteComplete(id, localUpdateService);
    });
  });
}
/**
 * Removes a completed write from the local write cache.
 *
 * @param id Id previously assigned to the write by {@link OfflineWrite}.
 * @param localUpdateService Service used to atomically mutate the cache.
 * @returns Promise resolving once the cache entry has been removed.
 */
export function WriteComplete(id: number, localUpdateService: LocalUpdateService) {
  return localUpdateService.update('write', (writeCache: WriteCache) => {
    // Guard: the cache may not exist yet if no write was ever recorded
    // (the original unconditionally dereferenced it and could throw).
    if (writeCache) {
      delete writeCache.cache[id];
    }
    return writeCache;
  });
}
|
<gh_stars>0
package model.drivers;
import javafx.application.Platform;
import model.display.CarDisplayManager;
import model.road.Waypoint;
import model.vechicle.Car;
import model.vechicle.Vehicle;
import utils.SetNodeOrientation;
/**
 * Driver implementation for cars: moves the vehicle along its route waypoint
 * by waypoint, updating the JavaFX view on the UI thread and coordinating
 * with other vehicles through the shared synchronization helper.
 */
public class CarDriver extends Driver {

    /** How many waypoints ahead to scan for another vehicle. */
    private static final int LOOKAHEAD_WAYPOINTS = 30;

    private CarDisplayManager carDisplayManager;
    private Driver.VehiclesSynchronization vehiclesSynchronization;

    public CarDriver(Vehicle vehicle, CarDisplayManager carDisplayManager)
    {
        this.vehicle = vehicle;
        this.carDisplayManager = carDisplayManager;
        vehiclesSynchronization = this.new VehiclesSynchronization();
    }

    /**
     * Drives the vehicle along its entire route. View mutations are posted
     * to the JavaFX application thread via Platform.runLater; pacing is
     * delegated to {@code Driver.simulateSpeed()}.
     */
    public void drive()
    {
        int waypointIndex = 0;
        for (Waypoint waypoint : vehicle.getRoute())
        {
            // UI updates must happen on the JavaFX application thread.
            Platform.runLater(() ->
            {
                vehicle.getImageView().setLayoutX(waypoint.getMapX());
                vehicle.getImageView().setLayoutY(waypoint.getMapY());
                vehicle.getImageView().setRotate(waypoint.getRequiredVehicleRotation());
            });
            changeNodeOrientation(waypoint);
            carDisplayManager.ensureCarImageIsInCorrectAnchorPane((Car) vehicle, waypoint);
            vehiclesSynchronization.setVehicleDrivesThroughWaypoint(waypointIndex, vehicle.getRoute());
            // Only look ahead while enough waypoints remain on the route.
            if (waypointIndex + LOOKAHEAD_WAYPOINTS < vehicle.getRoute().size())
            {
                vehiclesSynchronization.checkIfThereIsAnotherVehicleInFrontOf(waypointIndex + LOOKAHEAD_WAYPOINTS, vehicle.getRoute());
            }
            super.simulateSpeed();
            vehiclesSynchronization.setVehicleDroveThroughWaypoint(waypointIndex, vehicle.getRoute());
            waypointIndex++;
        }
    }

    /** Mirrors the vehicle image when the waypoint requests a rotation flip. */
    private void changeNodeOrientation(Waypoint waypoint)
    {
        // Simplified: the flag can be passed straight through instead of
        // branching into two identical calls.
        SetNodeOrientation.setNodeOrientation(waypoint.getVehicleShouldRotate(), vehicle.getImageView());
    }
}
|
#include <stdio.h>
/* Max-heap stored as a 1-based implicit binary tree: h[1] is the root and
 * the children of h[k] are h[2k] and h[2k+1].  s is the current size. */
#define HEAP_CAP (1 << 10)
int h[HEAP_CAP], n, s;

/* Sift the element at index k up until the max-heap property holds. */
void up(int k){
    if(k == 1) return;          /* reached the root */
    if(h[k] > h[k/2]){
        int t = h[k/2];
        h[k/2] = h[k];
        h[k] = t;
        up(k/2);
    }
}

/* Insert value k into the heap.  Bounds-checked: the original wrote past
 * h[] when more than HEAP_CAP-1 values were supplied. */
void push(int k){
    if(s + 1 >= HEAP_CAP) return;   /* heap full: drop instead of overflowing */
    h[++s] = k;
    up(s);
}

int main(){
    int a;
    if(scanf("%d", &n) != 1) return 1;      /* reject malformed input */
    for(int i = 0; i < n; i++){
        if(scanf("%d", &a) != 1) return 1;
        push(a);
    }
    /* Print the heap level by level: the level starting at index k holds
     * indices [k, 2k).  Iterate to s (stored count), not n, in case values
     * were dropped by the capacity guard. */
    for(int k = 1; k <= s; k *= 2){
        for(int j = k; j <= s && j < k*2; j++){
            printf("%d\t", h[j]);
        }
        printf("\n");
    }
    return 0;
}
|
import styled, { keyframes } from "styled-components"
import { motion } from "framer-motion"
// Slide-down navigation container rendered just below the 72px header bar.
// framer-motion (motion.div) drives its open/close animation; it starts
// collapsed at height 0.
export const HeaderNav = styled(motion.div)`
  height: 0px;
  width: 100%;
  position: absolute;
  top: 72px;
  right: 0;
  left: 0;
  z-index: 99;
`
// Site logo link. The plain #000 color acts as a fallback and is then
// overridden by the theme text color.
export const Logo = styled.div`
  a {
    font-size: 1.8rem;
    text-decoration: none;
    font-weight: 700;
    color: #000;
    color: ${props => props.theme.text};
  }
`
// Keyframes for the light-switch hover wiggle (rock +/-10deg and settle).
const wiggle = keyframes`
  0% {transform: rotate(0deg); }
  25% {transform: rotate(10deg); }
  50% {transform: rotate(-10deg); }
  100% {transform: rotate(0deg); }
`
// Theme-toggle control: grows and wiggles while hovered.
export const LightSwitch = styled.p`
  font-size: 1.8rem;
  transition: 0.2s ease;
  :hover {
    font-size: 2.8rem;
    animation: ${wiggle} 1s ease infinite;
  }
`
// Hamburger menu button: three theme-colored bars inside an unstyled button.
export const Menu = styled.div`
  button {
    transform-origin: center;
    border: none;
    padding: 20px;
    background: none;
    outline: none;
    span {
      width: 36px;
      height: 3px;
      display: block;
      background: ${props => props.theme.text};
      margin: 8px;
    }
  }
`
|
<filename>achilles-core/src/test/java/info/archinnov/achilles/test/parser/entity/ChildBean.java<gh_stars>1-10
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.test.parser.entity;
import info.archinnov.achilles.annotations.Column;
import info.archinnov.achilles.annotations.Entity;
/**
 * Simple child entity used by the entity-parser tests; inherits the parent
 * mapping from {@link ParentBean} and adds one mapped column.
 */
@Entity(table = "ChildBean")
public class ChildBean extends ParentBean {

    /** Mapped column holding the child's nickname. */
    @Column
    private String nickname;

    public String getNickname() {
        return nickname;
    }

    public void setNickname(String nickname) {
        this.nickname = nickname;
    }
}
|
#! /bin/bash
# Regression test: read an image containing holes, run the `closeholes`
# filter, and compare the result against the stored baseline in $DATA.
shapeworks readimage --name $DATA/image_with_holes.nrrd closeholes compareimage --name $DATA/closedholes.nrrd
|
#!/bin/bash
source env.sh
# Start the registrar web server in the background, feeding the passphrase
# on stdin and redirecting all output to log.out.
# NOTE(review): "<passphrase>" is a committed placeholder — the real secret
# should come from the environment or a secrets store, never the script.
echo "<passphrase>" | java $JLIB -cp $JARS:.:./java \
  bitshift.registrar.WebServer $REGISTRARPORT $SCA $TOK \
  > log.out 2>&1 &
|
<filename>src/algebra/vector_sparse_fprint_only_values.c
#include "double.h"
#include "int.h"
#include "vector_sparse_private.h"
/* Prints only the values array of sparse vector `a` to `out` in "--raw"
 * format (no indices, no size header).
 * NOTE(review): prints `nonzero_max` entries — confirm this field is the
 * live nonzero count rather than the allocated capacity. */
void vector_sparse_fprint_only_values(FILE * out, const vector_sparse * a)
{
  double_array_fprint(out, a->nonzero_max, a->values, "--raw");
}
|
#!/usr/bin/env bash
# Install the testimony daemon, its config, and a systemd unit, then start
# the service. Each copy is guarded so the script is idempotent: existing
# files are never overwritten.

# Daemon binary.
if [ ! -f /usr/sbin/testimonyd ]; then
  sudo cp -v go/testimonyd/testimonyd /usr/sbin/testimonyd
fi
# Daemon configuration.
if [ ! -f /etc/testimony.conf ]; then
  sudo cp -v configs/testimony.conf /etc/testimony.conf
fi
# systemd unit (0644 so systemd will accept it).
if [ ! -f /etc/systemd/system/testimony.service ]; then
  sudo cp -v configs/systemd.conf /etc/systemd/system/testimony.service
  sudo chmod 0644 /etc/systemd/system/testimony.service
fi
sudo service testimony start
package controllers;
import models.User;
import com.fasterxml.jackson.databind.JsonNode;
/**
 * Miscellaneous endpoints used to exercise the application plumbing.
 * NOTE(review): this class mixes a project-level `Result`/`Ok` (presumably
 * inherited from AppController) with Play's `play.mvc.Result`/`ok` — confirm
 * both are intentional.
 */
public class TestController extends AppController {

    /** Echoes the request JSON straight back to the caller. */
    public Result echo(JsonNode params) {
        return Ok(params);
    }

    /**
     * Renders the verify-email page.
     * NOTE(review): the UUID is a fixed test value and `webServer` is defined
     * outside this file — confirm this endpoint is test-only.
     */
    public play.mvc.Result verifyEmail() {
        String link = webServer + "dashboard/verify-email/" +
            "96278552-8abc-41be-a9b8-bfa52c5f13c2";
        return ok(views.html.Email.verify_email.render(link));
    }

    /** Sends a push notification to the user identified by the request. */
    public Result pushNotification(JsonNode params) {
        User me = getMe(params);
        me.pushNotification();
        return Ok();
    }
}
|
<reponame>syberflea/materials
import os
import random
import spacy
from spacy.util import minibatch, compounding
import pandas as pd
# Sample IMDB-style review used by test_model() when no input is supplied.
TEST_REVIEW = """
Transcendently beautiful in moments outside the office, it seems almost
sitcom-like in those scenes. When <NAME> walks out and ponders
life silently, it's gorgeous.<br /><br />The movie doesn't seem to decide
whether it's slapstick, farce, magical realism, or drama, but the best of it
doesn't matter. (The worst is sort of tedious - like Office Space with less
humor.)
"""

# NOTE(review): nothing in this module ever appends to eval_list, yet
# __main__ builds a DataFrame from it — confirm whether per-iteration
# evaluation results were meant to be collected here.
eval_list = []
def train_model(
    training_data: list, test_data: list, iterations: int = 20
) -> None:
    """Train a spaCy ``textcat`` pipe on labelled review data.

    ``training_data``/``test_data`` are lists of ``(text, {"cats": ...})``
    pairs as produced by ``load_training_data()``.  Trains for ``iterations``
    epochs, printing evaluation metrics after each one, then saves the model
    to the ``model_artifacts`` directory.

    Note: ``training_data`` is shuffled in place, so the caller's list is
    mutated.
    """
    # Build pipeline: reuse an existing textcat pipe or create one.
    nlp = spacy.load("en_core_web_sm")
    if "textcat" not in nlp.pipe_names:
        textcat = nlp.create_pipe(
            "textcat", config={"architecture": "simple_cnn"}
        )
        nlp.add_pipe(textcat, last=True)
    else:
        textcat = nlp.get_pipe("textcat")
    textcat.add_label("pos")
    textcat.add_label("neg")
    # Train only textcat: disable every other pipe for the duration.
    training_excluded_pipes = [
        pipe for pipe in nlp.pipe_names if pipe != "textcat"
    ]
    with nlp.disable_pipes(training_excluded_pipes):
        optimizer = nlp.begin_training()
        # Training loop
        print("Beginning training")
        print("Loss\tPrecision\tRecall\tF-score")
        batch_sizes = compounding(
            4.0, 32.0, 1.001
        )  # A generator that yields infinite series of input numbers
        for i in range(iterations):
            print(f"Training iteration {i}")
            loss = {}
            # Reshuffle every epoch so batches differ between iterations
            # (in-place: mutates the caller's list).
            random.shuffle(training_data)
            batches = minibatch(training_data, size=batch_sizes)
            for batch in batches:
                text, labels = zip(*batch)
                nlp.update(text, labels, drop=0.2, sgd=optimizer, losses=loss)
            # Evaluate using the averaged weights for a more stable score.
            with textcat.model.use_params(optimizer.averages):
                evaluation_results = evaluate_model(
                    tokenizer=nlp.tokenizer,
                    textcat=textcat,
                    test_data=test_data,
                )
                print(
                    f"{loss['textcat']}\t{evaluation_results['precision']}"
                    f"\t{evaluation_results['recall']}"
                    f"\t{evaluation_results['f-score']}"
                )
        # Save model (with the averaged weights applied).
        with nlp.use_params(optimizer.averages):
            nlp.to_disk("model_artifacts")
def evaluate_model(tokenizer, textcat, test_data: list) -> dict:
    """Score ``textcat`` on ``test_data`` and return precision/recall/F-score.

    ``test_data`` is a list of ``(text, {"cats": {"pos": bool, "neg": bool}})``
    pairs; ``tokenizer`` turns each text into a Doc that ``textcat.pipe``
    can consume.
    """
    texts, gold = zip(*test_data)
    docs = (tokenizer(text) for text in texts)
    # Tiny epsilons keep the precision/recall denominators non-zero.
    tp = 0
    fp = 1e-8
    tn = 0
    fn = 1e-8
    for doc, annotations in zip(textcat.pipe(docs), gold):
        truth = annotations["cats"]
        # Both labels always appear in `cats`; the "pos" score alone decides
        # the prediction, so the redundant "neg" entry is skipped.
        for label, score in doc.cats.items():
            if label == "neg":
                continue
            predicted_pos = score >= 0.5
            if predicted_pos and truth["pos"]:
                tp += 1
            elif predicted_pos and truth["neg"]:
                fp += 1
            elif not predicted_pos and truth["neg"]:
                tn += 1
            elif not predicted_pos and truth["pos"]:
                fn += 1
    precision = tp / (tp + fp)
    recall = tp / (tp + fn)
    f_score = 0 if precision + recall == 0 else 2 * (precision * recall) / (precision + recall)
    return {"precision": precision, "recall": recall, "f-score": f_score}
def test_model(input_data: str = TEST_REVIEW):
    """Load the model saved in ``model_artifacts`` and print the predicted
    sentiment (and its score) for ``input_data``."""
    model = spacy.load("model_artifacts")
    doc = model(input_data)
    # The higher-scoring category decides the verdict.
    if doc.cats["pos"] > doc.cats["neg"]:
        prediction, score = "Positive", doc.cats["pos"]
    else:
        prediction, score = "Negative", doc.cats["neg"]
    print(
        f"Review text: {input_data}\nPredicted sentiment: {prediction}"
        f"\tScore: {score}"
    )
def load_training_data(
    data_directory: str = "aclImdb/train", split: float = 0.8, limit: int = 0
) -> tuple:
    """Load an aclImdb-style review corpus and split it for training.

    Reads every ``*.txt`` file under ``<data_directory>/pos`` and
    ``<data_directory>/neg``, labels each review, shuffles the combined
    list, and splits it.

    Args:
        data_directory: Directory containing ``pos``/``neg`` subdirectories.
        split: Fraction of reviews assigned to the training portion.
        limit: If non-zero, cap the total number of reviews used.

    Returns:
        ``(training, evaluation)`` — two lists of
        ``(text, {"cats": {"pos": bool, "neg": bool}})`` pairs.
    """
    reviews = []
    for label in ["pos", "neg"]:
        labeled_directory = f"{data_directory}/{label}"
        for review in os.listdir(labeled_directory):
            if not review.endswith(".txt"):
                continue
            # Explicit encoding: the IMDB corpus is UTF-8; relying on the
            # platform default breaks on e.g. Windows (cp1252).
            with open(f"{labeled_directory}/{review}", encoding="utf-8") as f:
                text = f.read().replace("<br />", "\n\n")
            if text.strip():
                spacy_label = {
                    "cats": {
                        "pos": label == "pos",
                        "neg": label == "neg",
                    }
                }
                reviews.append((text, spacy_label))
    random.shuffle(reviews)
    if limit:
        reviews = reviews[:limit]
    split = int(len(reviews) * split)
    return reviews[:split], reviews[split:]
if __name__ == "__main__":
    train, test = load_training_data(limit=25)
    print("Training model")
    train_model(train, test)
    # NOTE(review): nothing in this module appends to `eval_list`, so the
    # original `pd.DataFrame.plot(df)` always received an empty frame and
    # raised ("no numeric data to plot"). Guard until results are collected.
    if eval_list:
        df = pd.DataFrame(eval_list)
        df.plot()
    print("Testing model")
    test_model()
|
package ru.job4j.chess;
import ru.job4j.chess.figures.*;
/**
* Board - class describes chess board.
*
* @author <NAME> (<EMAIL>)
* @version $Id$
* @since 0.1
*/
public class Board {
    /** Chess figures storage (a standard game holds at most 32 figures). */
    private final Figure[] figures = new Figure[32];
    /** The number of figures currently on the board. */
    private int index = 0;

    /**
     * Method adds a new figure on the board.
     * @param figure - the figure which is added on the board.
     */
    public void add(Figure figure) {
        this.figures[this.index++] = figure;
    }

    /**
     * The method produces the process of movement of figure from its current position on the board to the destination position.
     * 1. At first method checks that the figure exists on the start position. If the figure doesn't exist the method throws FigureNotFoundException.
     * 2. Then the method produces the way of figure movement. If the way is incorrect for this figure it throws ImpossibleMoveException.
     * 3. After that the method checks that the way of the figure is not occupied by other figures. If it is occupied the method throws OccupiedWayException.
     * 4. Finally the method records the figure into the destination cell (the start cell becomes empty).
     * variable number - is a serial number of the figure in the figures storage.
     *
     * @param source - start position.
     * @param dest - destination position.
     * @throws ImpossibleMoveException
     * @throws OccupiedWayException
     * @throws FigureNotFoundException
     */
    public void move(Cell source, Cell dest) throws ImpossibleMoveException, OccupiedWayException, FigureNotFoundException {
        Figure figure = null;
        int number = 0;
        boolean found = false;
        // Locate the figure standing on the source cell; `number` ends up
        // equal to its storage index (it only advances on misses).
        for (int i = 0; i < this.index; i++) {
            if ((this.figures[i].getPosition().x == source.x) && (this.figures[i].getPosition().y == source.y)) {
                figure = this.figures[i];
                found = true;
                break;
            } else {
                number++;
            }
        }
        if (!found) {
            throw new FigureNotFoundException("No any figure");
        }
        // way() validates the path for this figure type (may throw
        // ImpossibleMoveException per the contract above).
        Cell[] way = figure.way(source, dest);
        // Reject the move if any cell on the path is occupied.
        // NOTE(review): this scan includes the destination cell, so captures
        // are rejected too — confirm that is intended.
        for (Cell cell : way) {
            for (int i = 0; i < this.index; i++) {
                if ((cell.x == this.figures[i].getPosition().x) && (cell.y == this.figures[i].getPosition().y)) {
                    throw new OccupiedWayException("Occupied way");
                }
            }
        }
        // Replace the moved figure with a copy positioned at the destination.
        this.figures[number] = figure.copy(dest);
    }

    /**
     * The method cleans the board from all figures.
     */
    public void clean() {
        for (int position = 0; position != this.figures.length; position++) {
            this.figures[position] = null;
        }
        this.index = 0;
    }
}
|
<reponame>Kiera-Hegarty/IMS-Project
package com.qa.ims.controller;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.qa.ims.persistence.dao.ItemDAO;
import com.qa.ims.persistence.dao.OrderDAO;
import com.qa.ims.persistence.domain.Order;
import com.qa.ims.utils.Utils;
/**
 * CLI controller for CRUD operations on {@link Order} entities. All user
 * prompting goes through {@link Utils}; persistence through {@link OrderDAO}.
 */
public class OrderController implements CrudController<Order> {

    public static final Logger LOGGER = LogManager.getLogger();

    private OrderDAO orderDAO;
    private Utils utils;

    public OrderController(OrderDAO orderDAO, Utils utils) {
        super();
        this.orderDAO = orderDAO;
        this.utils = utils;
    }

    /**
     * Reads all orders and logs each one.
     *
     * @return every order known to the DAO
     */
    @Override
    public List<Order> readAll() {
        List<Order> orders = orderDAO.readAll();
        for (Order order : orders) {
            // Bug fix: the original logged the entire list once per element.
            LOGGER.info(order);
        }
        return orders;
    }

    /**
     * Creates an order from user input (date placed, customer id, total cost).
     *
     * @return the newly created order
     */
    @Override
    public Order create() {
        LOGGER.info("Please enter the date when the order was placed");
        String datePlaced = utils.getString();
        LOGGER.info("Please enter the customer id");
        Long customerID = utils.getLong();
        LOGGER.info("Please enter the total cost of order");
        int totalCost = utils.getInt();
        Order order = orderDAO.create(new Order(datePlaced, customerID, totalCost));
        LOGGER.info("Order created");
        return order;
    }

    /**
     * Updates an existing order identified by its id, replacing the date,
     * customer id and total cost with freshly entered values.
     *
     * @return the updated order
     */
    @Override
    public Order update() {
        LOGGER.info("Please enter the id of the order you would like to update");
        Long id = utils.getLong();
        LOGGER.info("Please enter the date when the order was placed");
        String datePlaced = utils.getString();
        LOGGER.info("Please enter the customer id of the order you would like to update");
        Long customerID = utils.getLong();
        LOGGER.info("Please enter the total cost of the updated order");
        int totalCost = utils.getInt();
        Order order = orderDAO.update(new Order(id, datePlaced, customerID, totalCost));
        LOGGER.info("Order updated");
        return order;
    }

    /**
     * Deletes an existing order by its id.
     *
     * @return the number of records deleted
     */
    @Override
    public int delete() {
        LOGGER.info("Please enter the id of the order you would like to delete");
        Long id = utils.getLong();
        return orderDAO.delete(id);
    }

    // NOTE(review): a large block of commented-out draft code (order-detail
    // add/remove flows) was removed here; recover it from version control if
    // that feature is revived.
}
|
<reponame>glameyzhou/training<filename>codes/src/main/java/org/glamey/training/codes/leetcode/stack_queue/QueueStack.java
package org.glamey.training.codes.leetcode.stack_queue;
import java.util.LinkedList;
import java.util.Queue;
/**
* queue实现stack
*
* @author yang.zhou 2020.01.17.10
*/
/**
 * Stack (LIFO) implemented with two FIFO queues (the classic LeetCode 225
 * exercise). The original stub left both queues uninitialised and the
 * methods unimplemented.
 *
 * push is O(n): the new element is enqueued into the spare queue and the
 * existing elements are drained in behind it, so the head of the primary
 * queue is always the most recently pushed element and pop is O(1).
 */
public class QueueStack<E> {
    /** Primary queue: its head is the top of the stack. */
    private Queue<E> q1;
    /** Scratch queue used to reorder during push. */
    private Queue<E> q2;

    public QueueStack() {
        q1 = new LinkedList<>();
        q2 = new LinkedList<>();
    }

    /** Pushes an element onto the top of the stack. */
    public void push(E e) {
        q2.offer(e);
        // Move all older elements behind the new one.
        while (!q1.isEmpty()) {
            q2.offer(q1.poll());
        }
        // Swap roles so q1 again holds the stack, top at its head.
        Queue<E> tmp = q1;
        q1 = q2;
        q2 = tmp;
    }

    /** Removes and returns the top element, or null if the stack is empty. */
    public E pop() {
        return q1.poll();
    }
}
|
def sort_array(arr):
    """Sort ``arr`` in place by each element's second item and return it."""
    def second_item(element):
        return element[1]

    arr.sort(key=second_item)
    return arr
# Demo: sorts the two outer groups by their second inner list
# ([4,3,2] vs [2,2,2] — lists compare lexicographically).
data = [[[1,6,4], [4,3,2], [2,3,6]], [[3,3,3], [2,2,2], [1,1,1]]]
print(sort_array(data))
#!/usr/bin/env bash
# Clean the Release build outputs of the Calculator solution and remove any
# previously produced distributable.

scriptname=$(basename $0)
scriptpath=$(realpath $0)
# Repo root: the CI working directory if set, otherwise the parent of this
# script's directory (scriptpath with the file and one dir stripped).
repo_root="${SYSTEM_DEFAULTWORKINGDIRECTORY:-"$(dirname ${scriptpath%/*})"}"

# Log a message to syslog and stdout, prefixed with the script name.
# NOTE(review): $message is expanded unquoted in places — word splitting is
# harmless for logging but confirm messages never contain globs.
function print_and_log {
  message="$@"
  logger "[$scriptname] $message"
  printf "[$scriptname] $message\n"
}

pushd $repo_root
print_and_log "dotnet clean Release builds"
#delete any previous distributable
rm -rf dist
dotnet clean ./src/Calculator/Calculator.csproj -c Release
dotnet clean ./src/CalcWeb/CalcWeb.csproj -c Release
dotnet clean ./test/calculatorTests/calculatorTests.csproj -c Release
/* globals test,expect */
import { Replacer } from '@gik/tools-thrower';
import Populator from '../lib/populator';
import Types from '../lib/types';
// Non-object subjects must be rejected with a ParamError naming the
// offending parameter and the expected/actual types.
test('should throw an error when sent a non-object as subject', () => {
    expect.assertions(2);
    try {
        Populator('foo');
    } catch (err) {
        expect(err.name).toBe(Types.ParamError.name);
        const msg = Replacer(Types.ParamError.message, ['subject', 'Object', 'string']);
        expect(err.message).toBe(msg);
    }
});

// Referencing a key that does not exist in the subject raises KeyError.
test('should throw when using non existent key.', () => {
    expect.assertions(2);
    try {
        Populator({ a: '${b}' });
    } catch (err) {
        expect(err.name).toBe(Types.KeyError.name);
        const msg = Replacer(Types.KeyError.message, ['a', 'b']);
        expect(err.message).toBe(msg);
    }
});

// Referencing a key whose value is not interpolatable (boolean here)
// raises KeyTypeError with the offending type.
test('should throw when using invalid key.', () => {
    expect.assertions(2);
    try {
        Populator({ a: true, b: '${a}' });
    } catch (err) {
        expect(err.name).toBe(Types.KeyTypeError.name);
        const msg = Replacer(Types.KeyTypeError.message, ['b', 'a', 'boolean']);
        expect(err.message).toBe(msg);
    }
});

// Happy path: nested paths, multiple placeholders per string, and
// placeholders inside arrays are all resolved.
test('should resolve the example correctly', () => {
    expect.assertions(2);
    const subject = {
        a: { b: { c: 'world' } },
        d: 'hello ${a.b.c}${e}',
        e: '!!!',
        f: ['${e}', '${a.b.c}'],
    };
    expect(() => Populator(subject)).not.toThrow();
    expect(Populator(subject)).toEqual({
        a: { b: { c: 'world' } },
        d: 'hello world!!!',
        e: '!!!',
        f: ['!!!', 'world'],
    });
});
|
<gh_stars>1-10
import transactionCategoryModel, {
ITransactionCategoryModel,
} from "../models/transaction-category-model";
/** Lists a user's transaction categories, excluding soft-deleted ones. */
export const findTransactionCategoriesByUser = async (
  userId: string
): Promise<ITransactionCategoryModel[] | null> =>
  transactionCategoryModel.find({
    owner: userId,
    deleted: { $ne: true },
  });

/** Fetches one category by id; soft-deleted categories resolve to null. */
export const findTransactionCategoryById = async (
  id: string
): Promise<ITransactionCategoryModel | null> =>
  transactionCategoryModel.findOne({
    _id: id,
    deleted: { $ne: true },
  });

/** Persists a new transaction category document. */
export const createTransactionCategory = async (
  newTransactionCategory: ITransactionCategoryModel
): Promise<ITransactionCategoryModel | null> =>
  transactionCategoryModel.create(newTransactionCategory);

/**
 * Soft-deletes a category by setting its `deleted` flag.
 * @returns the saved document, or null when the id is unknown (or already
 * soft-deleted, since the lookup itself excludes deleted documents).
 */
export const markTransactionCategoryAsDeleted = async (
  id: string
): Promise<ITransactionCategoryModel | null> => {
  const transactionCategory = await findTransactionCategoryById(id);
  if (transactionCategory === null) return null;
  transactionCategory.deleted = true;
  return transactionCategory.save();
};

// HARD delete of every category a user owns — bypasses the soft-delete
// convention above; intended for test/teardown use only.
// eslint-disable-next-line @typescript-eslint/naming-convention
export const DANGER_truncateTransactionCategoriesByUser = async (
  userId: string
): Promise<void> => {
  await transactionCategoryModel.deleteMany({ owner: userId });
};
|
#!/usr/bin/env bash
# Provide development-friendly defaults for required environment variables.

# Default to the development environment unless the caller set one.
if [[ -z "${NODE_ENV}" ]]; then
  export NODE_ENV=development
fi

# Default HTTP port.
if [[ -z "${PORT}" ]]; then
  export PORT=3000
fi
|
<filename>core/src/mindustry/entities/type/base/BaseDrone.java
package mindustry.entities.type.base;
import arc.math.Mathf;
import arc.math.geom.Geometry;
import mindustry.entities.units.*;
import mindustry.world.Tile;
import mindustry.world.meta.BlockFlag;
import static mindustry.Vars.*;
/**
 * Base drone unit.
 */
public abstract class BaseDrone extends FlyingUnit{
    /** Retreat state: fly to the nearest allied repair point until healed. */
    public final UnitState retreat = new UnitState<BaseDrone>(){
        @Override
        public void enter(BaseDrone u){
            // Drop the current target so a repair point can be acquired.
            target = null;
        }
        @Override
        public void update(BaseDrone u){
            if(health >= maxHealth()){
                // Fully repaired: resume normal behavior.
                state.changeState(getStartState());
            }else if(!targetHasFlag(BlockFlag.repair)){
                // Not yet targeting a repair point: try to acquire one.
                if(retarget()){
                    Tile repairPoint = Geometry.findClosest(x, y, indexer.getAllied(team, BlockFlag.repair));
                    if(repairPoint != null){
                        target = repairPoint;
                    }else{
                        // No allied repair point exists: give up retreating.
                        setState(getStartState());
                    }
                }
            }else{
                // Already heading to a repair point: orbit it while healing.
                circle(40f);
            }
        }
    };

    /** Drones never count toward enemy totals. */
    public boolean countsAsEnemy(){
        return false;
    }

    @Override
    public void onCommand(UnitCommand command){
        //do nothing, normal commands are not applicable here
    }

    @Override
    protected void updateRotation(){
        // Face the target while it is in weapon range; otherwise face the
        // direction of travel. Both interpolate at the same rate.
        if(target != null && shouldRotate() && target.dst(this) < type.range){
            rotation = Mathf.slerpDelta(rotation, angleTo(target), 0.3f);
        }else{
            rotation = Mathf.slerpDelta(rotation, velocity.angle(), 0.3f);
        }
    }

    @Override
    public void behavior(){
        // Enter the retreat state once health drops below the retreat
        // threshold, but only if an allied repair point actually exists.
        if(health <= maxHealth() * type.retreatPercent && !state.isInState(retreat) && Geometry.findClosest(x, y, indexer.getAllied(team, BlockFlag.repair)) != null){
            setState(retreat);
        }
    }

    /** Rotate toward targets only while in the drone's normal start state. */
    public boolean shouldRotate(){
        return state.isInState(getStartState());
    }

    @Override
    public abstract UnitState getStartState();
}
|
<reponame>vitale232/InspectionPlanner
import { Component, OnInit } from '@angular/core';
import { Title } from '@angular/platform-browser';
import { Router } from '@angular/router';
@Component({
  selector: 'app-not-found',
  templateUrl: './not-found.component.html',
  styleUrls: ['./not-found.component.scss']
})
/**
 * Catch-all 404 page: records the URL that failed to resolve (for display
 * in the template) and sets the browser tab title.
 */
export class NotFoundComponent implements OnInit {
  // The URL the user attempted to visit.
  currentUrl: string;

  constructor(
    private router: Router,
    private titleService: Title,
  ) { }

  ngOnInit() {
    this.currentUrl = this.router.url;
    this.titleService.setTitle('IPA - 404 error');
  }
}
|
def generate_tag_cloud(items: list) -> dict:
    """Count tag occurrences across a list of items.

    Each item is a mapping with a ``'tags'`` key holding a comma-separated
    string. Tags are stripped of surrounding whitespace; empty fragments
    (from ``""``, ``"a,,b"`` or trailing commas) are ignored — the original
    counted the empty string as a tag.

    Args:
        items: Items whose ``'tags'`` strings should be aggregated.

    Returns:
        Mapping of tag -> number of items mentioning it.
    """
    tag_cloud: dict = {}
    for item in items:
        for raw_tag in item['tags'].split(','):
            tag = raw_tag.strip()
            if not tag:
                continue  # skip blanks produced by empty/malformed lists
            tag_cloud[tag] = tag_cloud.get(tag, 0) + 1
    return tag_cloud
<filename>node_modules/@chakra-ui/textarea/dist/types/index.d.ts
export * from "./textarea";
//# sourceMappingURL=index.d.ts.map |
<filename>src/main/scala/codecheck/github/models/SortDirection.scala<gh_stars>10-100
package codecheck.github.models
/** A GitHub API sort direction; renders as its wire name ("asc"/"desc"). */
sealed abstract class SortDirection(val name: String) {
  override def toString: String = name
}
/** Companion holding the two possible sort directions. */
object SortDirection {
  case object asc extends SortDirection("asc")
  case object desc extends SortDirection("desc")

  /** All directions, in declaration order. */
  val values = Array(asc, desc)

  /**
   * Looks up a direction by wire name. Throws NoSuchElementException for an
   * unknown name (same exception type as the original filter/head form).
   */
  def fromString(str: String) = values.find(_.name == str).get
}
|
<gh_stars>0
const { logger } = require('@vtfk/logger')
const withTokenAuth = require('../lib/token-auth')
const { getSchools } = require('../lib/api/schools')
// Azure Function handler: returns the list of schools visible to the caller.
// Auth is enforced by the withTokenAuth wrapper in module.exports below.
const returnSchools = async function (context, request) {
  try {
    const schools = await getSchools(context)
    // An empty result is treated as "no access" (403) rather than an
    // empty 200 — NOTE(review): confirm this is the intended contract.
    if (!schools || schools.length <= 0) {
      context.res = {
        status: 403,
        body: 'No schools found.'
      }
      return
    }
    logger('info', ['pifu-api', 'schools', 'length', schools.length])
    context.res = {
      body: schools
    }
  } catch (error) {
    // Surface upstream failures as a 500 with the raw message.
    logger('error', ['pifu-api', 'schools', 'error', error.message])
    context.res = {
      status: 500,
      body: error.message
    }
  }
}

// Wrap the handler with token authentication before exporting.
module.exports = (context, request) => withTokenAuth(context, request, returnSchools)
|
<reponame>hmsjy2017/transfer<gh_stars>100-1000
package cowtransfer
import (
"fmt"
"transfer/apis"
"transfer/utils"
"github.com/spf13/cobra"
)
var (
	// Backend is the exported cowTransfer backend instance used by the CLI.
	Backend = new(cowTransfer)
)

// cowTransfer implements the transfer backend for cowtransfer.com.
type cowTransfer struct {
	apis.Backend
	sendConf prepareSendResp // response of the prepare-send API call
	Config   cowOptions      // options populated from the CLI flags below
	Commands [][]string
}

// SetArgs registers the cowTransfer-specific command line flags on cmd and
// fills in the long help text shown for this backend.
func (b *cowTransfer) SetArgs(cmd *cobra.Command) {
	cmd.Flags().IntVarP(&b.Config.Parallel, "parallel", "p", 2, "Set the number of upload threads")
	cmd.Flags().StringVarP(&b.Config.token, "cookie", "c", "", "Your user cookie (optional)")
	cmd.Flags().StringVarP(&b.Config.authCode, "auth", "a", "", "Your auth code (optional)")
	// cmd.Flags().StringVarP(&b.Config.header, "header-file", "", "", "Extra header you want to add (read from file, optional)")
	cmd.Flags().Int64VarP(&b.Config.blockSize, "block", "", 1200000, "Upload block size")
	cmd.Flags().IntVarP(&b.Config.interval, "timeout", "t", 10, "Request retry/timeout limit in second")
	cmd.Flags().BoolVarP(&b.Config.singleMode, "single", "s", false, "Upload multi files in a single link")
	cmd.Flags().BoolVarP(&b.Config.hashCheck, "hash", "", false, "Check hash after block upload")
	cmd.Flags().StringVarP(&b.Config.passCode, "password", "", "", "Set password")
	cmd.Long = fmt.Sprintf("cowTransfer - https://cowtransfer.com/\n\n" +
		utils.Spacer("  Size Limit: 2G(Anonymous), ~100G(Login)\n") +
		utils.Spacer("  Upload Service: qiniu object storage, East China\n") +
		utils.Spacer("  Download Service: qiniu cdn, Global\n"))
}
|
#!/bin/bash
# Launch the build container with a shared artifacts directory bind-mounted
# at /media/artifacts, run the dependency import, then start nginx on :80.

ARTIFACTS_SHARE_NAME="share_artifacts"

#Directories
SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
TOP_DIR="${SCRIPTS_DIR}/.."
CONTAINER_DIR="${TOP_DIR}/container"

cd ${CONTAINER_DIR}
# Ensure the host-side share exists before mounting it into the container.
mkdir -p ${ARTIFACTS_SHARE_NAME}
ARTIFACTS_SHARE=$(realpath ${ARTIFACTS_SHARE_NAME})

# $COMMON (image name and shared docker args) comes from this helper.
source $SCRIPTS_DIR/ContainerCommon.sh

# NOTE(review): --privileged grants broad host access — confirm the build
# actually requires it.
docker run --privileged \
  --tty \
  --mount type=bind,source=${ARTIFACTS_SHARE},target=/media/artifacts \
  -p 80:80 \
  $COMMON \
  /bin/bash -c "./import_dependencies.sh && nginx"
|
<reponame>askamn/Venus-Game-Engine
#include "Package.h"
namespace Syndicate {
// Builds an empty package. The first three characters of `signature` are
// packed little-endian (sig[0] in the low byte) into the header's signature
// field.
// NOTE(review): assumes `signature` has at least 3 characters — shorter
// strings index past the terminator; confirm callers guarantee the length.
Package::Package(const std::string& signature) :
	m_ImageCount(0),
	m_ShaderCount(0),
	m_SoundCount(0),
	m_FontCount(0),
	m_DataCount(0)
{
	const char *sig = signature.c_str();

	// Pack the 3-byte signature: sig[2] ends up in the highest byte.
	this->m_Header.signature = 0;
	this->m_Header.signature = sig[2];
	this->m_Header.signature = (this->m_Header.signature << 8) | sig[1];
	this->m_Header.signature = (this->m_Header.signature << 8) | sig[0];

	this->m_Header.length = 0;
	this->m_Header.version = SYNDICATE_VERSION;
	this->m_Header.itemCount = 0;
}
// Registers an image asset under `identifier` and returns its 1-based id
// (ids are sequential within each asset type).
int Package::AddImage(const std::string& identifier, ImageData image)
{
	// Claim the next sequential image id.
	const int id = ++this->m_ImageCount;
	image.id = id;
	this->m_Images.push_back(image);

	// Keep the package header in sync with the new entry.
	this->m_Header.itemCount++;
	this->m_Header.length += image.length;

	// Remember how to find this asset again by name.
	this->m_DataMap.emplace(identifier, id);
	return id;
}
// Registers a shader asset under `identifier` and returns its 1-based id
// (ids are sequential within each asset type).
int Package::AddShader(const std::string& identifier, ShaderData shader)
{
	// Claim the next sequential shader id.
	const int id = ++this->m_ShaderCount;
	shader.id = id;
	this->m_Shaders.push_back(shader);

	// Keep the package header in sync with the new entry.
	this->m_Header.itemCount++;
	this->m_Header.length += shader.length;

	// Remember how to find this asset again by name.
	this->m_DataMap.emplace(identifier, id);
	return id;
}
// Registers a sound asset under `identifier` and returns its 1-based id
// (ids are sequential within each asset type).
int Package::AddSound(const std::string& identifier, SoundData sound)
{
	// Claim the next sequential sound id.
	const int id = ++this->m_SoundCount;
	sound.id = id;
	this->m_Sounds.push_back(sound);

	// Keep the package header in sync with the new entry.
	this->m_Header.itemCount++;
	this->m_Header.length += sound.length;

	// Remember how to find this asset again by name.
	this->m_DataMap.emplace(identifier, id);
	return id;
}
// Registers a font asset under `identifier` and returns its 1-based id
// (ids are sequential within each asset type).
int Package::AddFont(const std::string& identifier, FontData font)
{
	// Claim the next sequential font id.
	const int id = ++this->m_FontCount;
	font.id = id;
	this->m_Fonts.push_back(font);

	// Keep the package header in sync with the new entry.
	this->m_Header.itemCount++;
	this->m_Header.length += font.length;

	// Remember how to find this asset again by name.
	this->m_DataMap.emplace(identifier, id);
	return id;
}
// Registers a raw data asset under `identifier` and returns its 1-based id
// (ids are sequential within each asset type).
int Package::AddData(const std::string& identifier, Data data)
{
	// Claim the next sequential data id.
	const int id = ++this->m_DataCount;
	data.id = id;
	this->m_Data.push_back(data);

	// Keep the package header in sync with the new entry.
	this->m_Header.itemCount++;
	this->m_Header.length += data.length;

	// Remember how to find this asset again by name.
	this->m_DataMap.emplace(identifier, id);
	return id;
}
// Serialises all registered assets into a zlib-compressed .spkg file.
//
// Layout of the uncompressed stream (must match Unpack, in this order):
//   PackageHeader, the five per-type counters, the name->id data map,
//   then images, sounds, fonts, shaders, and raw data blocks.
//
// Clears all staged assets on completion (the header/counters are NOT
// reset — NOTE(review): confirm reusing this Package for a second Pack is
// intended to accumulate counts).
void Package::Pack(std::string package)
{
	ZLib zlib;

	if (FileSystem::i()->GetExtension(package) != "spkg")
	{
		package = package + ".spkg";
	}

#ifndef SYNDICATE_DEBUG
	// NOTE(review): no "/" is inserted here, unlike Unpack which joins with
	// SyndicateRoot + "/" — confirm SyndicateRoot ends with a separator.
	package = SyndicateRoot + package;
#endif

	// Create a temp file
	std::string tempFile = FileSystem::i()->CreateTempFile();

	// Write the data temporarily to some file
	std::ofstream stream(tempFile, std::ios::binary);

	// Write the header
	stream.write((char*)&m_Header, sizeof(PackageHeader));

	// Write the required counters
	stream.write((char*)&m_ImageCount, sizeof(U32));
	stream.write((char*)&m_SoundCount, sizeof(U32));
	stream.write((char*)&m_FontCount, sizeof(U32));
	stream.write((char*)&m_ShaderCount, sizeof(U32));
	stream.write((char*)&m_DataCount, sizeof(U32));

	int dataLength;

	// The data map is going to help us retrieve data back from the Package.
	// Each entry: id (U32), name length (U32), then the raw name bytes.
	for (auto pair : this->m_DataMap)
	{
		dataLength = pair.first.length();

		stream.write((char*)&pair.second, sizeof(U32));
		stream.write((char*)&dataLength, sizeof(U32));
		stream.write((char*)&pair.first[0], pair.first.length());
	}

	// Write images, if any
	for (auto image : this->m_Images)
	{
		stream << image;
	}

	// Write sounds, if any
	for (auto sound : this->m_Sounds)
	{
		stream << sound;
	}

	// Write Fonts, if any
	for (auto font : this->m_Fonts)
	{
		stream << font;
	}

	// Write shaders, if any
	for (auto shader : this->m_Shaders)
	{
		stream << shader;
	}

	// Write other data, if any
	for (auto data : this->m_Data)
	{
		stream << data;
	}

	stream.close();

	// Compress the staging file into the final package location.
	if (!zlib.Compress(tempFile, package))
	{
		SYNDICATE_ERROR(zlib.getError());
		SYNDICATE_ERROR("Failed to pack resources");
	}

	if (!FileSystem::i()->RemoveFile(tempFile))
	{
		SYNDICATE_ERROR("Failed to remove file temporary created while packing resources");
	}

	// Clear out all data
	this->m_Data.clear();
	this->m_Images.clear();
	this->m_Shaders.clear();
	this->m_Sounds.clear();
	this->m_DataMap.clear();
}
// Decompresses a .spkg package into a temp file and reads every resource
// category back into the in-memory caches, mirroring the layout written by
// Pack(): header, five counters, the identifier map, then the item payloads.
// NOTE(review): the containers below are appended to, not reset — calling
// Unpack() twice on the same instance would duplicate entries.
void Package::Unpack(std::string package)
{
    ZLib zlib;
    // Derive both the bare package name and the on-disk file name.
    if (FileSystem::i()->GetExtension(package) != "spkg")
    {
        this->m_PackageName = package;
        this->m_PackageFullName = package + ".spkg";
    }
    else
    {
        this->m_PackageFullName = package;
        // Strip the 5-character ".spkg" extension to get the bare name.
        this->m_PackageName = package.replace(package.find_last_of("."), 5, "");
    }
#ifndef SYNDICATE_DEBUG
    this->m_PackageFullName = SyndicateRoot + "/" + this->m_PackageFullName;
#endif
    if (!Utilities::File::Exists(this->m_PackageFullName))
    {
        SYNDICATE_ERROR("Package: " + this->m_PackageFullName + " does not exist");
        return;
    }
    // Create a temp file to receive the decompressed payload
    std::string tempFile = FileSystem::i()->CreateTempFile();
    // Unpack the resources
    if (!zlib.Decompress(this->m_PackageFullName, tempFile))
    {
        SYNDICATE_ERROR(zlib.getError());
        SYNDICATE_ERROR("Failed to unpack resources");
        return;
    }
    std::ifstream stream(tempFile, std::ios::binary);
    // Header first, then the per-category counters, in Pack()'s exact order.
    stream.read((char*)&m_Header, sizeof(PackageHeader));
    stream.read((char*)&m_ImageCount, sizeof(U32));
    stream.read((char*)&m_SoundCount, sizeof(U32));
    stream.read((char*)&m_FontCount, sizeof(U32));
    stream.read((char*)&m_ShaderCount, sizeof(U32));
    stream.read((char*)&m_DataCount, sizeof(U32));
    int id;
    int dataLength;
    std::vector<char> dataIdentifier;
    // The data map is going to help us retrieve data back from the Package:
    // each entry is (id, identifier length, raw identifier bytes).
    for (int i = 0; i < this->m_Header.itemCount; ++i)
    {
        stream.read((char*)&id, sizeof(U32));
        stream.read((char*)&dataLength, sizeof(U32));
        dataIdentifier.resize(dataLength);
        stream.read((char*)&dataIdentifier[0], dataLength);
        // Place it in the map
        this->m_DataMap.emplace(std::string(dataIdentifier.begin(), dataIdentifier.end()), id);
        // We don't want to reuse the old identifier bytes for the next entry
        dataIdentifier.clear();
    }
    ImageData image;
    // Read images, if any
    for (int i = 0; i < m_ImageCount; ++i)
    {
        stream >> image;
        this->m_Images.push_back(image);
    }
    SoundData sound;
    // Read sounds, if any
    for (int i = 0; i < m_SoundCount; ++i)
    {
        stream >> sound;
        this->m_Sounds.push_back(sound);
    }
    FontData font;
    // Read fonts, if any
    for (int i = 0; i < m_FontCount; ++i)
    {
        stream >> font;
        this->m_Fonts.push_back(font);
    }
    ShaderData shader;
    // Read shaders, if any
    for (int i = 0; i < m_ShaderCount; ++i)
    {
        stream >> shader;
        this->m_Shaders.push_back(shader);
    }
    Data data;
    // Read generic data, if any (the original comment said "shaders" —
    // copy/paste leftover)
    for (int i = 0; i < m_DataCount; ++i)
    {
        stream >> data;
        this->m_Data.push_back(data);
    }
    stream.close();
    if (!FileSystem::i()->RemoveFile(tempFile))
    {
        SYNDICATE_ERROR( "Failed to remove file temporary file created while unpacking resources" );
    }
    // Re-derive the counters from what was actually read.
    this->m_ImageCount = this->m_Images.size();
    this->m_SoundCount = this->m_Sounds.size();
    this->m_FontCount = this->m_Fonts.size();
    this->m_ShaderCount = this->m_Shaders.size();
    this->m_DataCount = this->m_Data.size();
}
// Returns the image record for a 1-based id, or nullptr when out of range.
ImageData* Package::GetImage(const int& id)
{
    const bool valid = id > 0 && id <= this->m_ImageCount;
    return valid ? &this->m_Images[id - 1] : nullptr;
}
// Returns the sound record for a 1-based id, or nullptr when out of range.
SoundData* Package::GetSound(const int& id)
{
    const bool valid = id > 0 && id <= this->m_SoundCount;
    return valid ? &this->m_Sounds[id - 1] : nullptr;
}
// Returns the shader record for a 1-based id, or nullptr when out of range.
ShaderData* Package::GetShader(const int& id)
{
    const bool valid = id > 0 && id <= this->m_ShaderCount;
    return valid ? &this->m_Shaders[id - 1] : nullptr;
}
// Returns the font record for a 1-based id, or nullptr when out of range.
FontData* Package::GetFont(const int& id)
{
    // BUG FIX: the upper bound was checked against m_DataCount instead of
    // m_FontCount, allowing an out-of-range read into m_Fonts.
    if (id > this->m_FontCount || id <= 0)
    {
        return nullptr;
    }
    return &this->m_Fonts[id - 1];
}
// Returns the generic data record for a 1-based id, or nullptr when out of range.
Data* Package::GetData(const int& id)
{
    const bool valid = id > 0 && id <= this->m_DataCount;
    return valid ? &this->m_Data[id - 1] : nullptr;
}
// Looks a font up by identifier; returns nullptr when unknown or invalid.
FontData* Package::GetFont(const std::string& identifier)
{
    auto it = this->m_DataMap.find(identifier);
    if (it == this->m_DataMap.end())
    {
        return nullptr;
    }
    // m_DataMap is shared by every resource category, so the stored id can
    // exceed this container's size when the identifier names another type.
    // Guard the index instead of reading out of bounds.
    const int id = it->second;
    if (id <= 0 || id > static_cast<int>(this->m_Fonts.size()))
    {
        return nullptr;
    }
    return &this->m_Fonts[id - 1];
}
// Looks an image up by identifier; returns nullptr when unknown or invalid.
ImageData* Package::GetImage(const std::string& identifier)
{
    auto it = this->m_DataMap.find(identifier);
    if (it == this->m_DataMap.end())
    {
        return nullptr;
    }
    // m_DataMap is shared by every resource category — guard the index so an
    // identifier registered for another type cannot read out of bounds.
    const int id = it->second;
    if (id <= 0 || id > static_cast<int>(this->m_Images.size()))
    {
        return nullptr;
    }
    return &this->m_Images[id - 1];
}
// Looks a sound up by identifier; returns nullptr when unknown or invalid.
SoundData* Package::GetSound(const std::string& identifier)
{
    auto it = this->m_DataMap.find(identifier);
    if (it == this->m_DataMap.end())
    {
        return nullptr;
    }
    // m_DataMap is shared by every resource category — guard the index so an
    // identifier registered for another type cannot read out of bounds.
    const int id = it->second;
    if (id <= 0 || id > static_cast<int>(this->m_Sounds.size()))
    {
        return nullptr;
    }
    return &this->m_Sounds[id - 1];
}
// Looks a shader up by identifier; returns nullptr when unknown or invalid.
ShaderData* Package::GetShader(const std::string& identifier)
{
    auto it = this->m_DataMap.find(identifier);
    if (it == this->m_DataMap.end())
    {
        return nullptr;
    }
    // m_DataMap is shared by every resource category — guard the index so an
    // identifier registered for another type cannot read out of bounds.
    const int id = it->second;
    if (id <= 0 || id > static_cast<int>(this->m_Shaders.size()))
    {
        return nullptr;
    }
    return &this->m_Shaders[id - 1];
}
// Looks a generic data record up by identifier; returns nullptr when unknown
// or invalid.
Data* Package::GetData(const std::string& identifier)
{
    auto it = this->m_DataMap.find(identifier);
    if (it == this->m_DataMap.end())
    {
        return nullptr;
    }
    // m_DataMap is shared by every resource category — guard the index so an
    // identifier registered for another type cannot read out of bounds.
    const int id = it->second;
    if (id <= 0 || id > static_cast<int>(this->m_Data.size()))
    {
        return nullptr;
    }
    return &this->m_Data[id - 1];
}
Package::~Package()
{
    // Release the identifier lookup table and all cached resource records.
    this->m_DataMap.clear();
    this->m_Sounds.clear();
    this->m_Shaders.clear();
    this->m_Images.clear();
    this->m_Data.clear();
}
} |
import axios from 'axios'
import { Message } from 'element-ui'
import store from '@/store'

// Global axios defaults for the ePortal backend.
axios.defaults.timeout = 40000
axios.defaults.baseURL = process.env.VUE_APP_BASE_PRS_EPORTAL
// BUG FIX: the value was 'application/json charset=UTF-8' — the missing ';'
// makes it an invalid media type that strict servers reject.
axios.defaults.headers.post['Content-Type'] = 'application/json;charset=UTF-8'
// Request interceptor: attach the stored access token to every request.
axios.interceptors.request.use(
  config => {
    // Read the token once instead of hitting localStorage three times.
    const accessToken = localStorage.getItem("accessToken")
    if (accessToken) {
      // Forward the token issued at login and mirror it into the store.
      config.headers['accessToken'] = accessToken
      store.dispatch('loginData/setLoginData', { accessToken })
    }
    return config
  },
  error => {
    // do something with request error
    console.log(error) // for debug
    return Promise.reject(error)
  }
)
// Response interceptor: unwraps `response.data` so callers receive the
// backend payload directly. (Return `response` instead if HTTP headers or
// status codes are ever needed.)
axios.interceptors.response.use(
  response => {
    const res = response.data
    // A custom code of "400" means the session token is no longer valid:
    // wipe the cached token before handing the payload back.
    if (res.code === "400") {
      localStorage.setItem('accessToken', "")
    }
    return res
  },
  error => {
    console.log('err' + error) // for debug
    // Surface the failure to the user via an element-ui toast.
    Message({
      message: error.message,
      type: 'error',
      duration: 5 * 1000
    })
    return Promise.reject(error)
  }
)
// Thin wrappers so callers don't have to build axios option objects by hand.
export default {
  get(url, params = {}) {
    return axios.get(url, { params })
  },
  post(url, data = {}) {
    return axios.post(url, data)
  }
}
package jp.col.Model;
import javax.validation.constraints.NotBlank;
import org.hibernate.validator.constraints.Length;
public class ApplyModel {
private String id;
private String name;
@NotBlank(message = "申請種類を選択してください。")
/* 申請種類のsfid */
private String applyType;
private String applyTypeName;
private String applyDateTime;
@NotBlank(message = "申請内容を入力してください。")
@Length(max=32000 , message = "申請内容が長すぎです。")
private String applyContent;
private String applyStatus;
private String employeeId;
private String rejectReason;
private String applicantMail;
private String applyVacationDays;
private String applyVacationHours;
private String restVacationDays;
private String restVacationHours;
private String applyVacation;
private String HolidayStartDate;
private String HolidayEndDate;
public String getHolidayStartDate() {
return HolidayStartDate;
}
public void setHolidayStartDate(String holidayStartDate) {
HolidayStartDate = holidayStartDate;
}
public String getHolidayEndDate() {
return HolidayEndDate;
}
public void setHolidayEndDate(String holidayEndDate) {
HolidayEndDate = holidayEndDate;
}
public String getApplyVacation() {
return applyVacation;
}
public void setApplyVacation(String applyVacation) {
this.applyVacation = applyVacation;
}
public String getRestVacationDays() {
return restVacationDays;
}
public void setRestVacationDays(String restVacationDays) {
this.restVacationDays = restVacationDays;
}
public String getRestVacationHours() {
return restVacationHours;
}
public void setRestVacationHours(String restVacationHours) {
this.restVacationHours = restVacationHours;
}
public String getApplicantMail() {
return applicantMail;
}
public String getApplyVacationDays() {
return applyVacationDays;
}
public void setApplyVacationDays(String applyVacationDays) {
this.applyVacationDays = applyVacationDays;
}
public String getApplyVacationHours() {
return applyVacationHours;
}
public void setApplyVacationHours(String applyVacationHours) {
this.applyVacationHours = applyVacationHours;
}
public void setApplicantMail(String applicantMail) {
this.applicantMail = applicantMail;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public void setApplyType(String applyType) {
this.applyType = applyType;
}
public String getApplyType() {
return applyType;
}
public String getApplyDateTime() {
return applyDateTime;
}
public void setApplyDateTime(String applyDateTime) {
this.applyDateTime = applyDateTime;
}
public String getApplyContent() {
return applyContent;
}
public void setApplyContent(String applyContent) {
this.applyContent = applyContent;
}
public String getApplyStatus() {
return applyStatus;
}
public void setApplyStatus(String applyStatus) {
this.applyStatus = applyStatus;
}
public String getEmployeeId() {
return employeeId;
}
public void setEmployeeId(String employeeId) {
this.employeeId = employeeId;
}
public String getRejectReason() {
return rejectReason;
}
public void setRejectReason(String rejectReason) {
this.rejectReason = rejectReason;
}
public String getApplyTypeName() {
return applyTypeName;
}
public void setApplyTypeName(String applyTypeName) {
this.applyTypeName = applyTypeName;
}
} |
<filename>scaffold/src/lib/Exception.ts<gh_stars>1-10
import { ERROR_CODE, MODULE_NAME, EXCEPTION_MAJOR, EXCEPTION_MINOR } from '../constant/exception';
type ParamsType = string | number | Array<string | number>;
type ParamsTypes = Array<ParamsType>;
/**
 * Error subclass that renders a structured `{code, message}` JSON payload,
 * with `%m` (module) and `%s` (positional) placeholder substitution.
 */
export class Exception extends Error {
  public name: string;
  public code: number;
  public message: string;

  public constructor(code: number, params: ParamsType | ParamsTypes | null = null, moduleName?: string) {
    super();
    // BUG FIX: `name` was declared but never assigned, so it stayed undefined.
    this.name = 'Exception';
    this.code = code;
    this.message = Exception.getExtMsg(code, params, moduleName);
  }

  /**
   * Builds the JSON error payload for a code, substituting placeholders and
   * padding the result so its length matches the "real" (double-byte aware)
   * length of the message.
   */
  public static getExtMsg(code: number, params: ParamsType | ParamsTypes | null = null, moduleName?: string): string {
    let message: string;
    if (ERROR_CODE.hasOwnProperty(code)) {
      message = ERROR_CODE[code];
    } else {
      message = `[%m]ErrorCode does not exist, Code:${code}.`;
    }
    // Replace the module-name placeholder.
    message = message.replace('%m', (moduleName) ? moduleName : MODULE_NAME);
    // Substitute positional parameters, one '%s' per entry.
    if (params != null) {
      const list = (typeof params !== 'object') ? [params] : (params as Array<any>);
      // (was `for..in` over an array — iterate values directly instead)
      for (const p of list) {
        message = message.replace('%s', p.toString());
      }
    }
    // Left-pad the numeric code to three digits (slice replaces deprecated substr).
    const realCode = `${code}`.padStart(3, '0').slice(-3);
    let result = JSON.stringify({
      code: `${EXCEPTION_MAJOR}${EXCEPTION_MINOR}${realCode}`,
      message: message,
    });
    // Pad with spaces so the total length accounts for double-byte characters.
    result += ' '.repeat(Math.max(0, Exception.getRealLen(message) - message.length));
    return result;
  }

  /**
   * Parses a structured error payload back into `{code, message}`.
   * Falls back to wrapping the raw message in a generic (code 6) exception.
   */
  public static parseErrorMsg(err: Error) {
    try {
      let message = err.message;
      // Some transports (e.g. gRPC) carry the payload in `details` instead.
      if (err.hasOwnProperty('details')) {
        message = err['details'];
      }
      return JSON.parse(message);
    } catch (e) {
      return Exception.parseErrorMsg(new Exception(6, err.message));
    }
  }

  /** Length where every non-ASCII character counts as two. */
  public static getRealLen(str: string): number {
    return str.replace(/[^\x00-\xff]/g, '__').length;
  }
}
|
#!/usr/bin/env bash
# Posts an HTML snippet to the local PDF-rendering service and saves the
# generated document as escoConcept.pdf.
# NOTE(review): "plain/text" is not a registered MIME type — presumably
# "text/plain" (or "text/html", given the body) was intended; confirm what
# the service expects before changing it.
curl \
    -v -H "Content-Type: plain/text" --data "<html><body>Hello World</body></html>" \
    http://localhost:8080/pdf > escoConcept.pdf
// @@tagdynamic@@
// @@tagdepends: vle.discrete-time @@endtagdepends
#include <vle/DiscreteTime.hpp>
namespace vd = vle::devs;
namespace vv = vle::value;
// Definition du namespace de la classe du modele
namespace record {
namespace Phenology {
using namespace vle::discrete_time;
// Discrete-time VLE model computing leaf emergence from thermal time.
class Leafnumber: public DiscreteTimeDyn {
public:
    Leafnumber(const vd::DynamicsInit& atom, const vd::InitEventList& events) : DiscreteTimeDyn(atom, events)
    {
        // These parameters have a default value, used when the experimental
        // condition does not define them.
        // Variables owned by this component
        leafNumber.init(this, "leafNumber", events);

        // Variables owned by another component
        phyllochron.init(this, "phyllochron", events);
        hasFlagLeafLiguleAppeared.init(this, "hasFlagLeafLiguleAppeared", events);
        phase.init(this, "phase", events);
        deltaTT.init(this, "deltaTT", events);
    }

    /**
     * @brief Destructor of the model class.
     **/
    virtual ~Leafnumber() {};

    /**
     * @brief Computation performed at every time step.
     * @param time the date of the current time step
     */
    virtual void compute(const vd::Time& /*time*/)
    {
        leafNumber = leafNumber(-1);
        double phyllochron_;
        if (phase() >= 1.0 && phase() < 4.0)
        {
            if (hasFlagLeafLiguleAppeared() == 0)
            {
                // Guard against a division by zero while the phyllochron is unset.
                if (phyllochron(-1) == 0.0)
                {
                    phyllochron_ = 0.0000001;
                }
                else
                {
                    phyllochron_ = phyllochron(-1);
                }
                // Leaf emergence is capped below one leaf per time step.
                leafNumber = leafNumber(-1) + min(deltaTT() / phyllochron_, 0.999);
            }
        }
    }

private:
    // State variables
    // BUG FIX: every Var member below was missing its ';' terminator (and
    // the doc comments had fused into the declarations), which does not
    // compile.

    /** @brief Actual number of phytomers (leaves). */
    Var leafNumber;

    // Inputs

    /** @brief Phyllochron (°C d leaf-1). */
    Var phyllochron;
    /** @brief True if the flag leaf has appeared (leafNumber reached finalLeafNumber). */
    Var hasFlagLeafLiguleAppeared;
    /** @brief Name of the phenological phase. */
    Var phase;
    /** @brief Daily thermal-time increment (°C d). */
    Var deltaTT;

    // Model parameters
};
}
}
DECLARE_DYNAMICS(record::Phenology::Leafnumber); // balise specifique VLE |
<gh_stars>0
package com.example.android.miwok;
import android.app.Activity;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import java.util.ArrayList;
public class ColorsActivity extends AppCompatActivity {

    /** Plays the pronunciation audio for the tapped word. */
    private MediaPlayer mMediaPlayer;

    /** Handles audio focus when playing the audio files. */
    private AudioManager mAudioManager;

    /** Reacts to audio-focus changes (phone calls, other apps playing audio, …). */
    private AudioManager.OnAudioFocusChangeListener mAudioFocusChangeListener =
            new AudioManager.OnAudioFocusChangeListener() {
                @Override
                public void onAudioFocusChange(int focusChange) {
                    // BUG FIX: focus callbacks can arrive after the player has
                    // been released; guard against a NullPointerException.
                    if (mMediaPlayer == null) {
                        return;
                    }
                    if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT ||
                            focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
                        // Focus was temporarily stolen (phone call) or something
                        // else is playing over us: pause and restart the word
                        // from the beginning when focus returns.
                        mMediaPlayer.pause();
                        mMediaPlayer.seekTo(0);
                    } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) {
                        // Focus was lost for good (another playback app took over):
                        // stop playback and free the player's resources.
                        mMediaPlayer.stop();
                        releaseMediaPlayer();
                    } else if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
                        // Focus is back (call ended, etc.): resume playback.
                        mMediaPlayer.start();
                    }
                }
            };

    /** Releases the player as soon as the current clip finishes. */
    private MediaPlayer.OnCompletionListener mCompletionListener = new MediaPlayer.OnCompletionListener() {
        @Override
        public void onCompletion(MediaPlayer mediaPlayer) {
            releaseMediaPlayer();
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.word_list);

        mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);

        // The vocabulary shown in the list: default text, Miwok text, image, audio.
        final ArrayList<Word> words = new ArrayList<Word>();
        words.add(new Word("red", "weṭeṭṭi", R.drawable.color_red, R.raw.color_red));
        words.add(new Word("green", "chokokki", R.drawable.color_green, R.raw.color_green));
        words.add(new Word("brown", "ṭakaakki", R.drawable.color_brown, R.raw.color_brown));
        words.add(new Word("gray", "ṭopoppi", R.drawable.color_gray, R.raw.color_gray));
        words.add(new Word("black", "kululli", R.drawable.color_black, R.raw.color_black));
        words.add(new Word("white", "kelelli", R.drawable.color_white, R.raw.color_white));
        words.add(new Word("dusty yellow", "ṭopiisә", R.drawable.color_dusty_yellow, R.raw.color_dusty_yellow));
        words.add(new Word("mustard yellow", "chiwiiṭә", R.drawable.color_mustard_yellow, R.raw.color_mustard_yellow));

        WordAdapter adapter = new WordAdapter(this, words, R.color.category_colors);
        ListView listView = (ListView) findViewById(R.id.list);
        listView.setAdapter(adapter);

        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                // Stop any clip already playing before starting a new one.
                releaseMediaPlayer();
                // Request transient focus: we only need it for a short clip.
                int result = mAudioManager.requestAudioFocus(mAudioFocusChangeListener,
                        AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
                if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                    mMediaPlayer = MediaPlayer.create(ColorsActivity.this, words.get(i).getPronunciationResId());
                    mMediaPlayer.start();
                    mMediaPlayer.setOnCompletionListener(mCompletionListener);
                }
            }
        });
    }

    /**
     * Clean up the media player by releasing its resources.
     */
    private void releaseMediaPlayer() {
        // If the media player is not null, then it may be currently playing a sound.
        if (mMediaPlayer != null) {
            // Regardless of the current state of the media player, release its resources
            // because we no longer need it.
            mMediaPlayer.release();
            // A null player is how the rest of this class detects that no audio
            // is configured to play at the moment.
            mMediaPlayer = null;
            // Give the audio focus back now that playback is over.
            mAudioManager.abandonAudioFocus(mAudioFocusChangeListener);
        }
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Never keep playing once the activity leaves the foreground.
        releaseMediaPlayer();
    }
}
|
<filename>google/ads/googleads/v8/googleads-ruby/lib/google/ads/googleads/v8/resources/billing_setup_pb.rb
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v8/resources/billing_setup.proto
# NOTE(review): generated code — change the .proto source and regenerate
# rather than editing this file by hand.

require 'google/ads/googleads/v8/enums/billing_setup_status_pb'
require 'google/ads/googleads/v8/enums/time_type_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/api/annotations_pb'
require 'google/protobuf'

# Registers the BillingSetup message (and its nested PaymentsAccountInfo)
# with the global protobuf descriptor pool.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/ads/googleads/v8/resources/billing_setup.proto", :syntax => :proto3) do
    add_message "google.ads.googleads.v8.resources.BillingSetup" do
      optional :resource_name, :string, 1
      proto3_optional :id, :int64, 15
      optional :status, :enum, 3, "google.ads.googleads.v8.enums.BillingSetupStatusEnum.BillingSetupStatus"
      proto3_optional :payments_account, :string, 18
      optional :payments_account_info, :message, 12, "google.ads.googleads.v8.resources.BillingSetup.PaymentsAccountInfo"
      oneof :start_time do
        optional :start_date_time, :string, 16
        optional :start_time_type, :enum, 10, "google.ads.googleads.v8.enums.TimeTypeEnum.TimeType"
      end
      oneof :end_time do
        optional :end_date_time, :string, 17
        optional :end_time_type, :enum, 14, "google.ads.googleads.v8.enums.TimeTypeEnum.TimeType"
      end
    end
    add_message "google.ads.googleads.v8.resources.BillingSetup.PaymentsAccountInfo" do
      proto3_optional :payments_account_id, :string, 6
      proto3_optional :payments_account_name, :string, 7
      proto3_optional :payments_profile_id, :string, 8
      proto3_optional :payments_profile_name, :string, 9
      proto3_optional :secondary_payments_profile_id, :string, 10
    end
  end
end

# Ruby constants exposing the registered message classes.
module Google
  module Ads
    module GoogleAds
      module V8
        module Resources
          BillingSetup = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.resources.BillingSetup").msgclass
          BillingSetup::PaymentsAccountInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.resources.BillingSetup.PaymentsAccountInfo").msgclass
        end
      end
    end
  end
end
|
#!/bin/sh
# Prints the sum of the two integer arguments.
# Usage: sum.sh NUM1 NUM2
# NOTE(review): arguments are not validated — missing or empty arguments are
# treated as 0 by the arithmetic expansion, and non-numeric input aborts the
# expansion with a shell error.
num1=$1
num2=$2
sum=$((num1 + num2))
echo "The sum of $num1 and $num2 is $sum"
const axios = require("axios");
const { stripIndents } = require("common-tags");
const dateFormat = require("dateformat");
const nextPageEmoji = "▶️";
const prevPageEmoji = "◀️";

// Formats a meeting list as one "ddd h:MM-h:MM" line per meeting.
// (Extracted: the same reduce was duplicated for sections and recitations.)
const formatMeetings = (meetings) =>
  meetings
    .map((meeting) => {
      const dt = new Date(Date.parse(meeting.beginDate));
      const day = dateFormat(dt, "ddd");
      const time = dateFormat(dt, "h:MM");
      const endtime = dateFormat(
        new Date(dt.getTime() + meeting.minutesDuration * 60000),
        "h:MM"
      );
      return `${day} ${time}-${endtime}`;
    })
    .join("\n");

/**
 * Builds the Discord embed for one section (1-based `index`) of a course.
 * Recitations are expanded inline only when there are fewer than 17 of them
 * (Discord limits the number of embed fields).
 */
const generateEmbed = (
  { deptCourseId, description, sections, subjectCode },
  index,
  total
) => {
  // The original repeated `sections[index - 1]` on nearly every line.
  const section = sections[index - 1];
  return {
    color: 0x57068c,
    title: `${subjectCode.code}-${subjectCode.school} ${deptCourseId}.${section.code} ${section.name}`,
    description,
    fields: [
      {
        name: "Registration Number",
        value: section.registrationNumber,
        inline: true,
      },
      {
        name: "Instructors",
        value: section.instructors.join(", "),
        inline: true,
      },
      {
        name: "Units",
        value: section.maxUnits,
        inline: true,
      },
      {
        name: "Status",
        value:
          section.status === "WaitList"
            ? `Waitlist (${section.waitlistTotal})`
            : section.status,
        inline: true,
      },
      {
        name: "Meetings",
        value: formatMeetings(section.meetings),
        inline: true,
      },
      {
        name: "Campus",
        value: section.campus,
        inline: true,
      },
      {
        name: "Recitations",
        value: "\u200b",
      },
    ].concat(
      section.recitations && section.recitations.length < 17
        ? section.recitations.map((recitation) => ({
            name: `Recitation: ${recitation.code}`,
            value: stripIndents`
              Instructor(s): ${recitation.instructors.join(", ")}
              Status: ${
                recitation.status === "WaitList"
                  ? `Waitlist (${recitation.waitlistTotal})`
                  : recitation.status
              }
              Instruction: ${recitation.instructionMode}
              Meetings:
              ${formatMeetings(recitation.meetings)}
              Registration Number: ${recitation.registrationNumber}
            `,
            inline: true,
          }))
        : [
            {
              name: "Recitations",
              value: section.recitations ? section.recitations.length : "0",
              inline: true,
            },
          ]
    ),
    timestamp: new Date(),
    footer: {
      text: `Course ${index}/${total}`,
    },
  };
};
module.exports = {
name: "course",
type: "schedge",
aliases: ["c"],
description: "Get the sections for a course",
cooldown: 5,
args: true,
usage: "[<year> <semester-code: [su, fa, sp, ja]>] <subject>-<school> <code>",
execute: async (message, args) => {
let year, semester, schoolCode, deptCourseId;
if (args.length >= 4){
[year, semester, schoolCode, deptCourseId] = args;
}
//try to auto-detect current year and semester
else if (args.length >= 2){
[schoolCode, deptCourseId] = args;
year = new Date().getFullYear();
month = new Date().getMonth();
//Jan
if (month == 0){
semester = "ja";
}
//Feb-May
else if (month <= 4){
semester = "sp";
}
//Jun-Aug
else if (month <= 7){
semester = "su";
}
//Sep-Dec
else{
semester = "fa";
}
}
const filter = (reaction, user) =>
(reaction.emoji.name === nextPageEmoji ||
reaction.emoji.name === prevPageEmoji) &&
user.id === message.author.id;
let currentPage = 0;
try {
const { data } = await axios({
method: "get",
url: `https://schedge.a1liu.com/${year}/${semester}/${
schoolCode.split("-")[1]
}/${schoolCode.split("-")[0]}`,
params: {
full: true,
},
});
const course = data.filter((c) => c.deptCourseId === deptCourseId)[0];
// console.log(course);
if (!course || course.sections.length === 0) {
message.channel.send({
embed: {
color: 0xcf000e,
description: "Sorry, I can't find any courses for this search.",
},
});
return;
}
const msg = await message.channel.send({
embed: generateEmbed(course, currentPage + 1, course.sections.length),
});
const collector = msg.createReactionCollector(filter, {
time: 900000,
idle: 120000,
dispose: true,
});
await msg.react(prevPageEmoji);
await msg.react(nextPageEmoji);
collector.on("collect", async (reaction) => {
if (reaction.emoji.name === nextPageEmoji) {
currentPage =
currentPage < course.sections.length - 1 ? currentPage + 1 : 0;
} else {
currentPage =
currentPage > 0 ? currentPage - 1 : course.sections.length - 1;
}
const embed = generateEmbed(
course,
currentPage + 1,
course.sections.length
);
await msg.edit({ embed });
});
collector.on("remove", async (reaction) => {
if (reaction.emoji.name === nextPageEmoji) {
currentPage =
currentPage < course.sections.length - 1 ? currentPage + 1 : 0;
} else {
currentPage =
currentPage > 0 ? currentPage - 1 : course.sections.length - 1;
}
const embed = generateEmbed(
course,
currentPage + 1,
course.sections.length
);
await msg.edit({ embed });
});
collector.on("end", async () => {
const embed = generateEmbed(
course,
currentPage + 1,
course.sections.length
);
embed.color = 6381923;
await msg.edit({ content: "This message is now inactive", embed });
});
} catch (error) {
message.channel.send({
embed: {
color: 0xcf000e,
description: "An unexpected error occurred. Please try again later.",
},
});
console.log(error);
}
},
};
|
<gh_stars>0
/*
* @Date: 2022-03-30 15:49:30
* @LastEditors: huangzh873
* @LastEditTime: 2022-03-30 16:55:22
* @FilePath: /vt-cesium2.0/src/components/jt-toolbar/config/contents/view/index.ts
*/
import { Content } from '../Types'
import groups from './groups'

// Toolbar content definition for the "View" (视图) tab; the button groups
// themselves are declared in ./groups.
const content: Content = {
  name: '视图',
  groups,
}

export default content
<filename>backend/grad-trax-test-suite/src/test/java/ca/bc/gov/educ/gtts/services/GradServiceTests.java
package ca.bc.gov.educ.gtts.services;
import ca.bc.gov.educ.gtts.config.GttsProperties;
import ca.bc.gov.educ.gtts.model.dto.GradSearchStudent;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.mockito.Mockito.when;
@SpringBootTest
@RunWith(SpringRunner.class)
public class GradServiceTests {

    @Before
    public void init() {
        MockitoAnnotations.openMocks(this);
    }

    // Service under test: Spring wires its real collaborators, then Mockito
    // replaces the HTTP client with the @Mock below.
    @Autowired
    @InjectMocks
    GradServiceImpl gradService;

    @Mock
    GenericHTTPRequestServiceImpl requestService;

    @Autowired
    GttsProperties gttsProperties;

    static final String testPen = "123456789";

    public GradServiceTests() {}

    @Test
    public void testGetStudentByPEN() throws Exception {
        String url = gttsProperties.getAndExpandEndPoint("students-api-search-by-pen", Map.of("pen", testPen));
        when(requestService.get(url, ArrayList.class)).thenReturn(getMockStudentMap());
        GradSearchStudent gradSearchStudent = gradService.getStudentByPen(testPen);
        // BUG FIX (convention): JUnit's assertEquals takes (expected, actual);
        // the arguments were reversed, which garbles failure messages.
        Assert.assertEquals(testPen, gradSearchStudent.getPen());
    }

    /** Builds a minimal fake search-API response containing one student row. */
    private static ArrayList<Map<String, String>> getMockStudentMap() {
        ArrayList<Map<String, String>> list = new ArrayList<>();
        Map<String, String> studentMap = new LinkedHashMap<>();
        studentMap.put("studentID", "ac339d70-7649-1a2e-8176-4a0b17dhry45");
        studentMap.put("pen", testPen);
        studentMap.put("legalFirstName", "Totally");
        studentMap.put("legalMiddleNames", "Fake");
        studentMap.put("legalLastName", "Name");
        studentMap.put("dob", "1989-11-30");
        list.add(studentMap);
        return list;
    }
}
|
"""
def get_square_list(given_list):
result = []
for item in given_list:
result.append(item**2)
return result
if __name__ == '__main__':
print(get_square_list(given_list))
""" |
#!/usr/bin/env bash
# Small script to set up my git environment ;-)
# hv180601.1549
# Previous identity, kept for reference:
#export GIT_AUTHOR_NAME="Christian Zufferey"
#export GIT_AUTHOR_EMAIL="christian@zufferey.com"
#export GIT_COMMITTER_NAME="Christian Zufferey"
#export GIT_COMMITTER_EMAIL="christian@zufferey.com"

# Global identity and convenience settings.
git config --global user.name "Hugo Valente"
git config --global user.email "hugo.valente1996@gmail.com"
git config --global credential.helper "cache --timeout=3000"
git config --global core.editor vim

# NOTE(review): this alias only exists inside this script's own shell; to
# keep it, add it to ~/.bashrc or define `git config --global alias.tree ...`.
alias gtree='git log --graph --oneline --decorate --all'

# Show the resulting configuration and any GIT_* environment variables.
git config -l
env |grep GIT
echo "end"
-- Customers having at least one product that they ordered exactly once.
-- NOTE(review): within each product_id group, COUNT(DISTINCT product_id) is
-- always 1, so the HAVING clause reduces to COUNT(*) = 1 — confirm the
-- intended condition (e.g. "ordered every product" would require a
-- different comparison).
SELECT C.customer_id, C.name
FROM customers C
WHERE EXISTS
    (SELECT 1
     FROM orders O
     WHERE O.customer_id = C.customer_id
     GROUP BY product_id
     HAVING COUNT(*) = COUNT(DISTINCT product_id))
/*
 * Computes n! iteratively, e.g. factorial(5) == 5*4*3*2*1 == 120.
 * Returns 1 for n <= 0. Note: the result overflows a 32-bit int for n > 12.
 */
int factorial(int n)
{
    int result = 1;
    for (int i = n; i > 0; i--) {
        result *= i;
    }
    return result;
}
<reponame>opentaps/opentaps-1
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
/* This file has been modified by Open Source Strategies, Inc. */
package org.ofbiz.base.util;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import org.apache.log4j.Logger;
import org.apache.log4j.Level;
import org.apache.log4j.Priority;
/**
 * Writer implementation for writing to a log4j logger.
 *
 * Wraps a logger so that legacy code expecting a {@link PrintWriter} can be
 * pointed at log4j: each written line becomes one log event at the
 * configured priority.
 */
public class Log4jLoggerWriter extends PrintWriter {

    /** Convenience constructor logging at INFO level. */
    public Log4jLoggerWriter(Logger logger) {
        this(logger, Level.INFO);
    }

    public Log4jLoggerWriter(Logger logger, Priority priority) {
        // autoFlush=true so each println is forwarded to log4j immediately.
        super(new Log4jPrintWriter(logger, priority), true);
    }

    /** Adapter translating Writer calls into log4j log events. */
    static class Log4jPrintWriter extends Writer {
        private Logger logger = null;
        private Priority priority = null;
        // Once closed, every write/flush throws IOException.
        private boolean closed = false;

        public Log4jPrintWriter(Logger logger, Priority priority) {
            // Use the logger itself as Writer's synchronization lock.
            lock = logger;
            this.logger = logger;
            this.priority = priority;
        }

        @Override
        public void write(char[] cbuf, int off, int len) throws IOException {
            if (closed) {
                throw new IOException("Writer is closed");
            }
            // Remove the eol: trailing CR/LF characters are stripped so the
            // logger (which adds its own terminator) emits no blank lines.
            while (len > 0 && (cbuf[len - 1] == '\n' || cbuf[len - 1] == '\r')) {
                len--;
            }
            // send to log4j
            if (len > 0) {
                logger.log(priority, String.copyValueOf(cbuf, off, len));
            }
        }

        @Override
        public void flush() throws IOException {
            // No buffering of our own; only validate that we are still open.
            if (closed) {
                throw new IOException("Writer is closed");
            }
        }

        @Override
        public void close() {
            closed = true;
        }
    }
}
|
#!/bin/bash
{# Copyright 2017 Cargill Incorporated
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. #}
{# This function will put the --map-column-java col=String parameter for any clob data types.#}
{# NOTE(review): map_clobs_macro is defined but never invoked anywhere in this
template, and the binary-column handling below covers a similar need —
confirm whether this macro is still required. #}
{% macro map_clobs_macro(columns) -%}
{{ map_clobs(columns) }}
{%- endmacro -%}
# Create a Sqoop job
set -eu
{# Collect the names of binary-typed columns. list.append returns None, so the
"set" inside the loop relies on append's side effect mutating the OUTER
mapcolumn list — the rebinding itself is discarded at loop-scope exit. #}
{% set mapcolumn = [] %}
{%- for column in table.columns -%}
{%- if column["datatype"].lower() == "varbinary" or column["datatype"].lower() == "binary" or column["datatype"].lower() == "longvarbinary" -%}
{%- set mapcolumn = mapcolumn.append(column["name"]) -%}
{%- endif -%}
{%- endfor -%}
{# The sqoop command below is one backslash-continued line; all Jinja tags
inside it are whitespace-trimmed so no blank line can break the command. #}
sqoop import \
--connect '{{ conf.source_database.connection_string }}' \
--username '{{ conf.user_name }}' \
--password-file '{{ conf.sqoop_password_file }}' \
{%- if conf["sqoop_driver"] is defined %}
--driver {{ conf.sqoop_driver }} \
{%- endif %}
{% if mapcolumn|length > 0 -%}
--map-column-java {% for column in mapcolumn -%}
{% if loop.last -%}
{{ '"{}"'.format(column) }}=String \
{%- else -%}
{{ '"{}"'.format(column) }}=String,
{%- endif -%}
{% endfor %}
{% endif -%}
--delete-target-dir \
--target-dir {{ conf.raw_database.path }}/{{ table.destination.name }}_avro/ \
--temporary-rootdir {{ conf.raw_database.path }}/{{ table.destination.name }}_avro/ \
--as-avrodatafile \
--fetch-size {% if table.columns|length < 30 -%} 10000 {% else %} 5000 {% endif %} \
--compress \
--compression-codec snappy \
-m 1 \
{%- if conf["sqoop_driver"] is defined %}
{%- if "sqlserver" in conf["sqoop_driver"].lower() -%}
--query 'SELECT {% for column in table.columns%} {% if loop.last %} {{ '"{}"'.format(column.name) }} {% else %} {{ '"{}",'.format(column.name) }} {% endif %} {% endfor %} FROM {{ table.source.name }} WHERE $CONDITIONS'
{%- elif "sap" in conf["sqoop_driver"].lower() -%}
--query 'SELECT {% for column in table.columns%} {% if loop.last %} {{ '"{}"'.format(column.name) }} {% else %} {{ '"{}",'.format(column.name) }} {% endif %} {% endfor %} FROM {{ conf.source_database.name }}.{{ table.source.name }} WHERE $CONDITIONS'
{%- else -%}
--query 'SELECT {% for column in table.columns%} {% if loop.last %} {{ column.name }} {% else %} {{ column.name }}, {% endif %} {% endfor %} FROM {{ conf.source_database.name }}.{{ table.source.name }} WHERE $CONDITIONS'
{% endif -%}
{%- else %}
--query 'SELECT {% for column in table.columns%} {% if loop.last %} {{ column.name }} {% else %} {{ column.name }}, {% endif %} {% endfor %} FROM {{ conf.source_database.name }}.{{ table.source.name }} WHERE $CONDITIONS'
{%- endif -%}
|
#!/bin/sh
set -e
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
case "${TARGETED_DEVICE_FAMILY}" in
1,2)
TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
;;
1)
TARGET_DEVICE_ARGS="--target-device iphone"
;;
2)
TARGET_DEVICE_ARGS="--target-device ipad"
;;
3)
TARGET_DEVICE_ARGS="--target-device tv"
;;
4)
TARGET_DEVICE_ARGS="--target-device watch"
;;
*)
TARGET_DEVICE_ARGS="--target-device mac"
;;
esac
install_resource()
{
if [[ "$1" = /* ]] ; then
RESOURCE_PATH="$1"
else
RESOURCE_PATH="${PODS_ROOT}/$1"
fi
if [[ ! -e "$RESOURCE_PATH" ]] ; then
cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
exit 1
fi
case $RESOURCE_PATH in
*.storyboard)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}"
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.xib)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}"
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.framework)
echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
echo "rsync -av $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
rsync -av "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
;;
*.xcdatamodel)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\""
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
;;
*.xcdatamodeld)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\""
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
;;
*.xcmappingmodel)
echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\""
xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
;;
*.xcassets)
ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
;;
*)
echo "$RESOURCE_PATH"
echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
;;
esac
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_resource "YBBaseUI/YBBaseUIDemo/YBBaseUIDemo/Resources/back_blod.png"
install_resource "YBBaseUI/YBBaseUIDemo/YBBaseUIDemo/Resources/back_blod@2x.png"
install_resource "YBBaseUI/YBBaseUIDemo/YBBaseUIDemo/Resources/back_blod@3x.png"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_resource "YBBaseUI/YBBaseUIDemo/YBBaseUIDemo/Resources/back_blod.png"
install_resource "YBBaseUI/YBBaseUIDemo/YBBaseUIDemo/Resources/back_blod@2x.png"
install_resource "YBBaseUI/YBBaseUIDemo/YBBaseUIDemo/Resources/back_blod@3x.png"
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "$XCASSET_FILES" ]
then
# Find all other xcassets (this unfortunately includes those of path pods and other targets).
OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
while read line; do
if [[ $line != "${PODS_ROOT}*" ]]; then
XCASSET_FILES+=("$line")
fi
done <<<"$OTHER_XCASSETS"
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
|
// Generated code
// run `make generate` to update
// Code generated by set-gen. DO NOT EDIT.
// Package sets has auto-generated set types.
package sets
|
<filename>isomorfeus-data/test_app_files/spec/data_file_spec.rb
require 'spec_helper'
RSpec.describe 'LucidFile' do
after do
SimpleFile.destroy(key: '123')
end
context 'on the server' do
it 'can instantiate a file by inheritance' do
result = on_server do
class TestFileBase < LucidFile::Base
end
file = TestFileBase.new(key: 1, data: 'a')
[file.key, file.data]
end
expect(result).to eq(['1', 'a'])
end
it 'can instantiate a file by mixin' do
result = on_server do
class TestFileMixin
include LucidFile::Mixin
end
file = TestFileBase.new(key: 2, data: 'b')
[file.key, file.data]
end
expect(result).to eq(['2', 'b'])
end
it 'can create a simple file' do
result = on_server do
file = SimpleFile.create(key: '123', data: 'a')
file.data
end
expect(result).to eq('a')
end
it 'can load a simple file' do
result = on_server do
SimpleFile.create(key: '123', data: 'a')
file = SimpleFile.load(key: '123')
file.data
end
expect(result).to eq('a')
end
it 'can destroy a simple file' do
result = on_server do
SimpleFile.destroy(key: '123')
end
expect(result).to eq(true)
end
it 'can save a simple file' do
result = on_server do
SimpleFile.create(key: '123', data: 'a')
file = SimpleFile.load(key: '123')
file.data = 'changed'
before_changed = file.changed?
file.save
after_save = file.changed?
file = SimpleFile.load(key: '123')
[file.data, before_changed, after_save]
end
expect(result).to eq(['changed', true, false])
end
it 'converts to sid' do
result = on_server do
class TestFileMixinC < LucidFile::Base
end
file = TestFileMixinC.new(key: 11)
file.sid
end
expect(result).to eq(['TestFileMixinC', '11'])
end
it 'converts to transport' do
result = on_server do
class TestFileMixinC < LucidFile::Base
end
file = TestFileMixinC.new(key: 12, data: 'a')
file.to_transport
end
expect(result).to eq("TestFileMixinC" => {"12"=>{"data_uri"=>"data:;base64,YQ=="}})
end
end
context 'on the client' do
before :each do
@page = visit('/')
end
it 'can instantiate a file by inheritance' do
result = @page.eval_ruby do
class TestFileBase < LucidFile::Base
end
file = TestFileBase.new(key: 14)
file.key
end
expect(result).to eq('14')
end
it 'can instantiate a file by mixin' do
result = @page.eval_ruby do
class TestFileMixin
include LucidFile::Mixin
end
file = TestFileMixin.new(key: 15)
file.key
end
expect(result).to eq('15')
end
it 'reports a change' do
result = @page.eval_ruby do
class TestFileMixinC < LucidFile::Base
end
file = TestFileMixinC.new(key: 23)
file.changed?
end
expect(result).to be(false)
result = @page.eval_ruby do
class TestFileMixinC < LucidFile::Base
end
file = TestFileMixinC.new(key: 23)
file.data = 20
file.changed?
end
expect(result).to be(true)
end
it 'converts to sid' do
result = @page.eval_ruby do
class TestFileMixinC < LucidFile::Base
end
file = TestFileMixinC.new(key: 24)
file.sid
end
expect(result).to eq(['TestFileMixinC', '24'])
end
it 'converts to transport' do
result = @page.eval_ruby do
class TestFileMixinC < LucidFile::Base
end
file = TestFileMixinC.new(key: 28, data: 'a')
file.to_transport.to_n
end
expect(result).to eq("TestFileMixinC" => {"28"=>{"data_uri"=>"data:;base64,YQ=="}})
end
it 'can save' do
result = @page.await_ruby do
file = SimpleFile.new(key: '123')
file.data = 654321
file.promise_save.then do |file|
file.data
end
end
expect(result).to eq('654321')
end
end
context 'on the client with existing file' do
before :each do
SimpleFile.create(key: '123', data: 'a')
@page = visit('/')
end
it 'can load a simple file' do
result = @page.await_ruby do
SimpleFile.promise_load(key: '123').then do |file|
file.data
end
end
expect(result).to eq('a')
end
it 'can destroy a simple file' do
result = @page.await_ruby do
SimpleFile.promise_destroy(key: '123').then { |result| result }
end
expect(result).to eq(true)
end
it 'can save a simple file' do
result = @page.await_ruby do
SimpleFile.promise_load(key: '123').then do |file|
file.data = 'changed'
before_changed = file.changed?
file.promise_save.then do |file|
[file.data, before_changed, file.changed?]
end
end
end
expect(result).to eq(['changed', true, false])
end
end
end
|
<gh_stars>1-10
// -*- coding: utf-8 -*-
//-----------------------------------------------------------------------------
// file: $Id$
// desc: unit test for the syncml-js/codec module
// auth: metagriffin <<EMAIL>>
// date: 2012/10/13
// copy: (C) CopyLoose 2012 UberDev <<EMAIL>>, No Rights Reserved.
//-----------------------------------------------------------------------------
// for node compatibility...
if ( typeof(define) !== 'function' )
var define = require('amdefine')(module);
define([
'underscore',
'elementtree',
'../src/syncml-js/constant',
'../src/syncml-js/codec',
'../src/syncml-js/logging',
'./helpers'
], function(_, ET, constant, codec, logging, helpers) {
describe('syncml-js/codec', function() {
var handler = new logging.ConsoleHandler();
beforeEach(function () {
logging.level = logging.WARNING;
logging.getLogger().addHandler(handler);
this.addMatchers(helpers.matchers);
});
afterEach(function() {
logging.getLogger().removeHandler(handler);
});
//-------------------------------------------------------------------------
it('throws an exception for unknown codecs', function() {
expect(function() {
codec.Codec.factory('no-such-codec');
}).toThrow('UnknownCodec: unknown or unimplemented codec "no-such-codec"');
});
//-------------------------------------------------------------------------
it('encodes XML', function() {
var encoder = codec.Codec.factory(constant.CODEC_XML);
var xdoc = ET.Element(constant.NODE_SYNCML);
xdoc.set('xmlns', constant.NAMESPACE_SYNCML_1_2);
var xhdr = ET.SubElement(xdoc, 'SyncHdr');
var xver = ET.SubElement(xhdr, 'VerDTD');
xver.set('xmlns', constant.NAMESPACE_METINF);
xver.text = '1.2'
var chk = '<?xml version="1.0" encoding="utf-8"?>\n'
+ '<SyncML xmlns="syncml:syncml1.2">'
+ '<SyncHdr>'
+ '<VerDTD xmlns="syncml:metinf">1.2</VerDTD>'
+ '</SyncHdr>'
+ '</SyncML>';
encoder.encode(xdoc, function(err, ctype, data) {
expect(err).toBeFalsy();
expect(ctype).toEqual('application/vnd.syncml+xml; charset=UTF-8');
expect(data).toEqual(chk);
});
});
//-------------------------------------------------------------------------
it('decodes XML', function() {
var decoder = codec.Codec.factory(constant.CODEC_XML);
decoder.decode(
'application/vnd.syncml+xml',
'<?xml version="1.0" encoding="utf-8"?>'
+ '<root><node1>v1</node1><node2 xmlns="syncml:metinf">v2</node2></root>',
function(err, node) {
expect(err).toBeFalsy();
expect(node.findtext('node1')).toEqual('v1');
expect(node.findtext('node2')).toEqual('v2');
});
});
});
});
//-----------------------------------------------------------------------------
// end of $Id$
//-----------------------------------------------------------------------------
|
#!/bin/bash
# Resolve the repository root: the parent of this script's directory,
# canonicalized (pwd -P follows symlinks).
SEALER_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd -P)"

# run test
echo "starting to test sealer ..."
# Quote the path so a checkout under a directory containing spaces works.
cd "$SEALER_ROOT/test" && go test
|
#!/bin/sh
#
# Make graph of size of usage by FDSNWS per day or month over a whole year.
#
# Begun by Peter L. Evans, September 2015.
# Quick hack of make_year_graph.sh
#
# Input: reqlogstats-*.db SQLite database
# Parameters: network code [optional]
# User id or pattern
# Output: PNG plot - total
# PNG plot - break-out by source.
# text total
#
# Copyright (C) 2015-7 Helmholtz-Zentrum Potsdam - Deutsches GeoForschungsZentrum GFZ
#
# This software is free software and comes with ABSOLUTELY NO WARRANTY.
#
# ----------------------------------------------------------------------
set -u
progname=`basename $0`
dirname=`dirname $0`
today=`date +%F`
start_year=`date +%Y`
start_month=`date +%m`
img_dir='/srv/www/webdc/eida/data'
db_dir="${HOME}/reqlogstats/var"
if [ ! -d ${img_dir} ] ; then
echo "${progname}: Images directory ${img_dir} does not exist. Using local var."
img_dir=var
fi
if [ ! -d ${db_dir} ] ; then
echo "${progname}: SQLite DB directory ${db_dir} does not exist. Using local var."
db_dir=var
fi
show_usage() {
echo "Usage: ${progname} {userpatt} [--dcid {dcid} ] [ {month} [ {year} [ {db file} ]]]"
echo
echo "Create usage images from {db file} for the given date."
echo " ** {userpatt} is UNUSED for FDSNWS summary **"
echo "If {dcid} is not given, all DCIDs are included."
echo "If {month} or {year} are not given, use today's date."
echo "If {db file} is not given, use a default."
}
dcid=
dcid_constr=""
table=
# First positional argument is the user pattern (unused for the FDSNWS
# summary, but still required by the calling convention).
if [ $# -gt 0 ] ; then
    userpatt=$1
    shift
else
    show_usage
    # Bug fix: the script previously printed the usage text but then carried
    # on executing with no arguments consumed, which is clearly unintended.
    exit 1
fi
if [ $# -gt 0 ] ; then
first=$1
if [ "$first" = "--dcid" ] ; then
dcid=$2
dcid_constr="AND Y.dcid = '${dcid}'"
shift 2;
fi
fi
echo "Restricted to dcid=${dcid}; setting constraint: '${dcid_constr}'"
if [ $# -ge 1 ] ; then
start_month=$1 # Should be a two-digit number
fi
if [ $# -ge 2 ] ; then
start_year=$2 # Should be a four-digit number
fi
if [ $# -ge 3 ] ; then
dbfile=$3
else
dbfile="${db_dir}/reqlogstats-${start_year}.db"
fi
echo "Looking in ${dbfile} for ${start_year} month ${start_month}"
if [ ! -s "${dbfile}" ] ; then
echo "Error: ${dbfile} not found or is empty. Bye"
exit 1
fi
tables="ArcStatsSource as Y JOIN ArcStatsVolume as V"
join="WHERE (V.src = Y.id)"
user_constr="" # There is no user info in ArcStatsVolume.
volume_type_patt=fdsnws
volume_constr="AND (V.type = '$volume_type_patt')"
cmd="SELECT start_day, dcid, size/1024.0/1024.0 FROM ${tables} ${join} ${user_constr} ${dcid_constr} ${volume_constr} GROUP BY start_day, dcid ORDER BY start_day, dcid;"
echo ${cmd}
echo ${cmd} \
| sqlite3 ${dbfile} | sed -e 's/|/ /g' \
| python ${dirname}/t2.py > days3.dat
if [ $(wc -l days3.dat | awk '{print $1}') -le 1 ] ; then
echo "Nothing in db with '${dcid_constr}'."
rm days3.dat
exit 0
fi
head -1 days3.dat
tail -5 days3.dat
start_month_name=$(date +%B -d "$start_year-$start_month-01")
xtic_density=14
sed -e "s/\#year\#/${start_year}/g" \
-e "s/\#xtic_density\#/${xtic_density}/g" \
total-user.gnu | gnuplot
if [ -z "${dcid}" ] ; then
out_dir="${img_dir}"
outfile="${out_dir}/total-user-${start_year}.svg"
else
echo "Sorry, can't do it yet. Bye."
exit 22
fi
if [ -s out.svg ] ; then
mkdir -p ${out_dir}
mv out.svg $outfile
echo "Wrote $outfile"
else
echo "No output!"
rm -f out.svg
exit 0
fi
# ----------------------------------------------------------------------
sed -e "s/\#year\#/$start_year/" \
-e "s/\#volume_type_patt\#/$volume_type_patt/" \
sources-user.gnu | gnuplot
if [ -z "${dcid}" ] ; then
out_dir="${img_dir}"
outfile="${out_dir}/sources-user-${start_year}.svg"
txtfile="${out_dir}/total-user-${start_year}.txt"
else
echo "Sorry, can't do it yet. Bye."
exit 22
#out_dir="${img_dir}/${start_year}/${start_month}"
fi
if [ -s out.svg ] ; then
mkdir -p "${out_dir}"
mv out.svg $outfile
echo "Wrote $outfile"
else
rm -f out.svg
echo "No SVG output!"
fi
if [ -s days3.dat ] ; then
mv days3.dat $txtfile
else
echo "No text file output!"
fi
rm -f days3.dat
|
#!/bin/bash
# Bootstrap a devpi server state directory and configure a root/mirror index
# that proxies PyPI. The server is started only on loopback for the setup
# phase and terminated at the end.
# TODO: Error checking for vars
echo "Initializing into ${DEVPISERVER_SERVERDIR}..."
devpi-init --serverdir ${DEVPISERVER_SERVERDIR}
echo "Starting server on 127.0.0.1 to setup mirror..."
devpi-server --host 127.0.0.1 --port 3141 --serverdir ${DEVPISERVER_SERVERDIR} &
DEVPI_PID=$!
echo "Waiting for local server to start..."
# wait-for-it polls the port for up to 10 seconds.
${APP_HOME}/scripts/wait-for-it.sh localhost:3141 -t 10
RESULT=$?
if [ $RESULT == 0 ] ; then
    echo "Server is responsive, setup client..."
    devpi --clientdir ${DEVPICLIENT_CLIENTDIR} use http://127.0.0.1:3141
    # NOTE(review): logs in as root with an empty password (fresh-install
    # default); confirm a real password is set before exposing this server
    # beyond loopback.
    devpi --clientdir ${DEVPICLIENT_CLIENTDIR} login root --password=''
    echo "Deleting existing mirror group..."
    # Delete errors are deliberately suppressed: the index may not exist yet.
    devpi --clientdir ${DEVPICLIENT_CLIENTDIR} index root/mirror --delete -y > /dev/null 2>&1
    echo "Setup the mirror..."
    devpi --clientdir ${DEVPICLIENT_CLIENTDIR} index -c root/mirror bases=root/pypi volatile=True mirror_whitelist="*"
    echo "Config complete..."
else
    echo "Server did not respond within 10 seconds..."
fi
echo "Terminating the local server..."
kill -SIGTERM $DEVPI_PID
# Propagate the wait-for-it result so callers can detect a failed bootstrap.
exit $RESULT
# Marker subclass representing an EU-regulation event; inherits all
# behaviour unchanged from Test::Event.
class Test::EuRegulation < Test::Event
end
|
def roman_to_int(roman_numeral):
    """Convert a Roman numeral string (e.g. 'MCMXCIV') to its integer value.

    Scans the numeral right-to-left: a symbol strictly smaller than the
    symbol immediately to its right is subtracted (subtractive notation,
    e.g. the 'I' in 'IV'); every other symbol is added.
    """
    symbol_values = {
        'I': 1,
        'V': 5,
        'X': 10,
        'L': 50,
        'C': 100,
        'D': 500,
        'M': 1000,
    }
    total = 0
    value_to_the_right = 0
    for symbol in reversed(roman_numeral):
        value = symbol_values[symbol]
        if value < value_to_the_right:
            total -= value
        else:
            total += value
        value_to_the_right = value
    return total
int_value = roman_to_int(roman_numeral) |
var debug = require('debug')('api:main');
var fs = require('fs');
var saml = require('passport-saml/lib/passport-saml/saml');
// Thin convenience wrapper around passport-saml's SAML implementation.
function SamlLib(options) {
  // All protocol work is delegated to the underlying library instance.
  this.saml = new saml.SAML(options);
  return this;
}

// Factory helper so callers can obtain an instance without using `new`.
SamlLib.create = function create(options) {
  return new SamlLib(options);
};
|
import { Component } from '@angular/core';
import { TypedModelComponent } from '../base/TypedModelComponent';
import { AllowedValues } from '../../../model/swe/AllowedValues';
/**
 * Editor component for a SWE `AllowedValues` constraint: lets the user build
 * the list of allowed entries, either single numeric values or `[min, max]`
 * interval pairs. Member names below are bound from the template — do not
 * rename without updating AllowedValuesComponent.html.
 */
@Component({
  selector: 'swe-allowed-values',
  styles: [`list-add-section > .row:first-child {
    margin-bottom: 10px;
  }`],
  templateUrl: './AllowedValuesComponent.html'
})
export class AllowedValuesComponent extends TypedModelComponent<AllowedValues> {
  // Staging value for the "add single value" form control.
  private singleItem = 0;
  // Staging [min, max] pair for the "add interval" form control.
  private pairItem: [number, number] = [0, 0];

  /** Supplies a fresh, empty AllowedValues model when none is provided. */
  protected createModel(): AllowedValues {
    return new AllowedValues();
  }

  /** Removes the entry at `index` from the model's value list. */
  protected removeValue(index: number) {
    this.model.values.splice(index, 1);
  }

  /** Appends the staged single value, then resets the staging field. */
  protected addSingleItem() {
    this.model.values.push(this.singleItem);
    this.singleItem = 0;
  }

  /** Appends the staged [min, max] pair, then resets the staging field. */
  protected addPairItem() {
    this.model.values.push(this.pairItem);
    this.pairItem = [0, 0];
  }
}
|
#!/bin/sh
# Run the cold-start experiment on the Cora dataset, pinned to GPU 1,
# using random train/test splits.
echo "Cora"
echo "===="
echo "Cold start"
CUDA_VISIBLE_DEVICES=1 python cold_start.py --dataset=Cora --random_splits=True
|
#!/bin/bash
# Find the PID(s) of any running "python ... App.py" process and kill them.
# The grep processes themselves never match, since they are not "python".
PS=$(ps ax | sed s/^' '*// | grep python | grep App.py | cut -d' ' -f1)
# Bug fix: previously `kill` was invoked even when nothing matched, which
# printed a kill-usage error; bail out cleanly instead.
if [ -z "$PS" ]; then
    echo "No running App.py process found"
    exit 0
fi
echo "Killing process ID now $PS"
kill $PS
|
#!/bin/bash
# Forcefully drop the project database after killing every open connection.
# Load project settings (defines NOME_BANCO, the database name).
source ./SBProjeto.prop
# Kill all active MySQL sessions so the drop below cannot block on open
# connections: list the processlist, turn every numeric session id into a
# KILL statement, and feed those back into mysql.
# NOTE(review): the root password is hard-coded on the command line here;
# consider moving it to a protected option file (~/.my.cnf) instead.
mysqladmin processlist -u root -psenhaDev#123 $NOME_BANCO | \
awk '$2 ~ /^[0-9]/ {print "KILL "$2";"}' | \
mysql -u root -psenhaDev#123
# Drop the database without prompting for confirmation (-f).
mysqladmin -u root -psenhaDev#123 drop $NOME_BANCO -f
|
#!/bin/bash -e
# Create the given namespace if it does not exist, and harden its default
# service account. Runs in a subshell; failures are tolerated (`|| :`) so an
# already-existing namespace is not treated as an error.
function kubectl::ensure_namespace () {
  local namespace=${1:?}
  (
    # Ensure namespace exists and it uses safe defaults
    taito::executing_start
    kubectl create namespace "${namespace}" &> /dev/null && \
      echo "Namespace ${namespace} created" && \
      kubectl patch serviceaccount default \
        -p "automountServiceAccountToken: false" --namespace "${namespace}" || :
  )
}
# Point kubectl at the cluster/namespace derived from taito environment
# variables, creating (or refreshing) the kubeconfig context first.
function kubectl::use_context () {
  # Fail fast if the required environment variables are unset.
  : "${kubernetes_cluster:?}"
  : "${taito_dout:?}"
  local context="${taito_namespace:-$taito_zone}"
  local namespace="${taito_namespace:-kube-system}"
  # We must always set context as context is not saved in the container image
  # between runs.
  (
    # Prefer an explicit admin user when configured, else the cluster user.
    local user=${kubernetes_user:-$kubernetes_cluster}
    user=${kubernetes_admin:-$user}
    taito::executing_start
    kubectl config set-context "${context}" \
      --namespace="${namespace}" \
      --cluster="${kubernetes_cluster}" \
      --user="${user}" > "${taito_dout}"
    kubectl config use-context "${context}" > "${taito_dout}"
  )
}
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Uncomment a feed source
#sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default
# Add a feed source
#sed -i '$a src-git lienol https://github.com/Lienol/openwrt-package' feeds.conf.default
# Append two extra package feeds (kenzok8 small/jell) to the feed list so
# they are picked up by the subsequent "update feeds" step.
sed -i '$a src-git small https://github.com/kenzok8/small' feeds.conf.default
sed -i '$a src-git jell https://github.com/kenzok8/jell' feeds.conf.default
|
import numpy as np
from sklearn.metrics.pairwise import euclidean_distances
def calculate_pairwise_distances(data: np.ndarray) -> np.ndarray:
    """Return the (n, n) matrix of Euclidean distances between all row pairs.

    Assumes ``data`` is a 2-D array of shape (n_samples, n_features) —
    the same contract sklearn's ``euclidean_distances(data, data)`` had.

    Computed directly from coordinate differences via broadcasting, which is
    numerically more accurate than the ``x^2 + y^2 - 2xy`` expansion used by
    sklearn's implementation (that form can go slightly negative and yield
    non-zero self-distances), and it drops the sklearn dependency entirely.
    Note: builds an (n, n, d) intermediate, so memory is O(n^2 * d).
    """
    diff = data[:, np.newaxis, :] - data[np.newaxis, :, :]
    return np.sqrt(np.einsum('ijk,ijk->ij', diff, diff))
def rock_paper_scissors(player_choice: str, computer_choice: str) -> str:
    """Resolve one round of rock-paper-scissors.

    Returns "It's a tie", "Player wins", or "Computer wins". Any pairing
    that is neither a tie nor a player win (including unrecognized
    choices) counts as a computer win.
    """
    # Maps each choice to the choice it defeats.
    beats = {"rock": "scissors", "scissors": "paper", "paper": "rock"}
    if player_choice == computer_choice:
        return "It's a tie"
    if beats.get(player_choice) == computer_choice:
        return "Player wins"
    return "Computer wins"
package moze_intel.projecte.integration.crafttweaker.actions;
import com.blamejared.crafttweaker.api.actions.IUndoableAction;
import java.util.Map;
import java.util.Map.Entry;
import moze_intel.projecte.api.nss.NormalizedSimpleStack;
import moze_intel.projecte.integration.crafttweaker.mappers.CrTConversionEMCMapper;
import moze_intel.projecte.integration.crafttweaker.mappers.CrTConversionEMCMapper.CrTConversion;
public class CustomConversionAction implements IUndoableAction {
private final CrTConversion conversion;
public CustomConversionAction(NormalizedSimpleStack output, int amount, Map<NormalizedSimpleStack, Integer> ingredients) {
conversion = new CrTConversion(output, amount, ingredients);
}
@Override
public void apply() {
CrTConversionEMCMapper.addConversion(conversion);
}
@Override
public String describe() {
StringBuilder inputString = new StringBuilder();
for (Entry<NormalizedSimpleStack, Integer> entry : conversion.ingredients.entrySet()) {
if (inputString.length() > 0) {
//If we already have elements, prepend a comma
inputString.append(", ");
}
int amount = entry.getValue();
if (amount > 1) {
inputString.append(amount).append(" ");
}
inputString.append(entry.getKey());
}
return "Added custom conversion creating '" + conversion.amount + "' of " + conversion.output + ", from: " + inputString;
}
@Override
public void undo() {
CrTConversionEMCMapper.removeConversion(conversion);
}
@Override
public String describeUndo() {
return "Undoing adding of custom conversion creating '" + conversion.amount + "' of " + conversion.output;
}
} |
class LIS():
    """Longest Increasing Subsequence via the classic O(n^2) dynamic program."""

    def LIS(self, arr):
        """Return the length of the longest strictly increasing subsequence of arr."""
        # best[i] holds the LIS length of the prefix that ends exactly at index i.
        best = [1] * len(arr)
        for i, value in enumerate(arr):
            for j in range(i):
                if value > arr[j]:
                    best[i] = max(best[i], best[j] + 1)
        # Empty input yields 0, matching the original accumulator behaviour.
        return max(best, default=0)

arr = [10, 22, 9, 33, 21, 50, 41, 60]
lis = LIS()
print("Length of lis is", lis.LIS(arr))
#!/bin/bash
set -v
network_setup() {
    # Flush all iptables rules and allow forwarding between interfaces.
    sudo iptables -F
    sudo iptables -P FORWARD ACCEPT
    # Disable every bridge-netfilter toggle so bridged traffic is not
    # run through iptables during the tests.
    for i in $(find /proc/sys/net/bridge/ -type f); do
        echo 0 | sudo tee $i
    done
}
# Periodically trigger a heap snapshot of the functionals test binary
# (SIGUSR2) and archive each dump with a timestamp. Runs forever; intended
# to be launched in the background and killed by the caller.
mem_prof() {
    echo start memory profiling
    while(true); do
        echo trigger memory profiling snapshot
        # Failures are ignored: the process may not be running yet.
        sudo pkill -USR2 functionals 2>/dev/null || true
        sleep 10
        sudo mv /tmp/skydive-memory.prof /tmp/skydive-memory.prof.$( date +%T ) 2>/dev/null || true
    done
}
# Start a disposable single-node Elasticsearch container for the test run,
# exposed on non-default host ports 9201/9301.
es_setup() {
    docker run -d --name elasticsearch -p 9201:9200 -p 9301:9300 -e "discovery.type=single-node" elasticsearch:7.7.1
}
# Stop and remove the Elasticsearch container started by es_setup.
es_cleanup() {
    docker stop elasticsearch
    docker rm elasticsearch
}
# Run the skydive functional test suite and convert its output to junit XML.
# Requires WORKSPACE (output directory); honours BACKEND, COVERAGE, WITH_*.
tests_run() {
    cd ${GOPATH}/src/github.com/skydive-project/skydive
    if [ -z "$WORKSPACE" ]; then
        echo "need to define WORKSPACE before running script"
        exit 1
    fi
    LOGFILE=$WORKSPACE/output.log
    TESTFILE=$WORKSPACE/tests.xml
    # Default to the in-memory backend unless the caller selects another.
    BACKEND=${BACKEND:-memory}
    if [ "$BACKEND" = "elasticsearch" ]; then
        # Disposable ES container, removed when the shell exits.
        es_setup && trap es_cleanup EXIT
    fi
    ARGS="$ARGS -standalone -analyzer.topology.backend $BACKEND -analyzer.flow.backend $BACKEND"
    export ORIENTDB_ROOT_PASSWORD=root
    # Only write a functional cover profile when COVERAGE is not "true"
    # and the architecture is not ppc64le.
    if [ "$COVERAGE" != "true" -a "$(uname -m)" != "ppc64le" ]; then
        export TEST_COVERPROFILE=../functionals-$BACKEND.cover
    fi
    if [ "$WITH_PROF" = "true" ]; then
        # Background memory profiler; killed once the tests finish.
        mem_prof&
        MEMPROFPID=$!
    fi
    make test.functionals.batch \
        GOFLAGS="$GOFLAGS" VERBOSE=true TAGS="$TAGS" GORACE="history_size=7" TIMEOUT=20m \
        WITH_HELM="$WITH_HELM" WITH_EBPF="$WITH_EBPF" WITH_EBPF_DOCKER_BUILDER=true \
        WITH_K8S="$WITH_K8S" WITH_ISTIO="$WITH_ISTIO" \
        WITH_PROF="$WITH_PROF" ARGS="$ARGS" TEST_PATTERN="$TEST_PATTERN" 2>&1 | tee $LOGFILE
    # NOTE(review): because of the pipe into tee, $? here is tee's exit
    # status, not make's; RETCODE is also never used afterwards — confirm
    # whether `set -o pipefail` plus a real check was intended.
    RETCODE=$?
    if [ "$WITH_PROF" = "true" ]; then
        kill $MEMPROFPID 2>/dev/null
    fi
    # Convert go test output to junit XML; fails on test or race failures.
    go run github.com/tebeka/go2xunit -fail -fail-on-race -suite-name-prefix tests \
        -input $LOGFILE -output $TESTFILE
    # Strip ANSI colour escape sequences so the XML stays well-formed.
    sed -i 's/\x1b\[[0-9;]*m//g' $TESTFILE
    if [ -e functionals.cover ]; then
        mv functionals.cover $TEST_COVERPROFILE
    fi
}
|
#!/bin/bash
#Run this with OS and ARCH defined in enviornment....
#OS="linux" ARCH="amd64" ./deploy-minion.sh 2>&1 | logger -t minion &
#Install GPG key if not present
gpg --list-keys 24F6D50F
HASGPG=$?
if [ $HASGPG -gt 0 ]
then
#Key is not present.
echo "Installing Public key"
gpg --import <<EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG v1.4.12 (GNU/Linux)
mQMuBFV7Ar0RCADaT4Uzyq7ZHIkR5/WqbcLTkN9jxj4kp5XKzNQt9nrIbVw8nRt6
r3+2FFMPdLnSCqDpuz5X5pUnUlny+T5fgx0/OCJrz4J3iUgMftxc1TYN80rs5HuM
ClZqovw2T4VOvS+jRqJErzMUcAPIY4EPCNxQTWpcnjzQfrw5aLgAZ80wjZr7gpUf
dC2PgkW3QZtCtkTD8LB59fjeaVnRuWlQ7CXKX+MNxLGHD3BkZxHV7NBoc0TTJiHr
QGwS5/Ghiqbnm2julWmZKShB6s97ZDBfLCD4iSPbOZyKJIYlcGwhp3boqzL+714Q
2n16bZcEsnKI/Hle4tOKjJLk67rM7hM5oEdXAQCKAcvkNuAsTmEBg7PTa3iFfKxE
NDIS6A5r3qLWLISqwQf+NFJMa29AcTRSQNC587qjuxR/u2owBUtdkzyl0fIYeBXO
+LFTm9gJRXiNBsFI0A/qnyyAXHL4Vkf79hz6JW+jnFglvpXE0RebPSPLeWOdn3Bb
Mid8mm1iFagjstITqXy/RdzjFaoeTsl40JlyYiGPU2lvfMKWimVQ97E2Gn00kKrZ
HLvCHjANGY0nMnyUFroVdO9yZ3tM3dOFfL+TV/MnnaokFFOxbd7Gxq27ZcYIs7kO
alnroHHsWCCemidF0TAzJexF1AXAVfMMacxeJD3yPX6SUqPbloDf6WRPfAhjPIjw
WeRb3dhcd+/ct21gP5pG8U1pPJ+/yCGiVKn5MF8cwggA1xLmX1Xx4Z1Ncu+V6YHy
ZHbVAb3vtBZnL+hdYoJxpDoV7ML0SDX7ZsMXQ65eD0NHSCJehcK1jkYDwMvf8mi8
pQL3+veXsGh41uHPl9sFGHpZZCvfvggcDLr5Pa0gQuLOpUiXctUmw60B2Xvcp0js
6R98TKaeIyOJMVp3OTO95JaVZFxpYCJqzs5GFBroMpPYCIWn0vNLp3HOx2R59Y3Y
rfcD727Z0aG1MEnqWmShutTHXG/hm2no/nyDYxSWLq17ZQjhPO5pF6qcoy7zhhrh
6uJrfPLT41D2/HH4XDHKPBYxdyEBWt4EAC0bWcgSBnM8TcfdmfraFC+2DX9ZM7Q+
KbRFU2FqYWwgS2F5YW4gKFNpZ25pbmcga2V5IGZvciBUdXJib0J5dGVzIFB1bHNl
KSA8c2FqYWxAdHVyYm9ieXRlcy5jb20+iIEEExEIACkFAlV7Ar0CGwMFCQlmAYAH
CwkIBwMCAQYVCAIJCgsEFgIDAQIeAQIXgAAKCRC3o80eJPbVD/NbAQCJcBRISrWH
MC04vRPS/XLVTjJhLOApy0uMmfvbEZr6dAD9ESndQ71KPKQ3I/ikKJOEbBx9Kxzl
56OObA0/fiMHJns=
=x/VJ
-----END PGP PUBLIC KEY BLOCK-----
EOF
gpg --list-keys 24F6D50F
HASGPG=$?
#If key is present and gpg is working fine, then HASGPG should be zero
fi
if [ $HASGPG -eq 0 ]
then
echo "We will use GPG for veryfying tar downloads and not sha256sum"
fi
#Check if minion is latest or not...
# Self-update-and-run loop for the TurboBytes Pulse "minion" agent.
# Detects OS/ARCH, then forever: compares the local "current" version
# marker against the published "latest", downloads and verifies (GPG or
# sha256sum fallback) a new tarball when they differ, and (re)starts the
# minion binary.  Relies on the globals OS, ARCH, HASGPG and EXTRAARGS.
function main_function
{
#Some autodetection for OS...
if [ "$OS" = "" ]; then
unamestr=`uname`
if [ "$unamestr" = 'Linux' ]; then
OS='linux'
fi
fi
#Some autodetection for ARCH...
if [ "$ARCH" = "" ]; then
unamestr=`uname -m`
#Matches my laptop
if [ "$unamestr" = 'x86_64' ]; then
ARCH='amd64'
fi
#Match 386
if [ "$unamestr" = 'i686' ]; then
ARCH='386'
fi
#Matches rpi debian
if [ "$unamestr" = 'armv6l' ]; then
ARCH='arm'
fi
#Matches online labs c4
if [ "$unamestr" = 'armv7l' ]; then
ARCH='arm'
fi
fi
if [ "$OS" = "" ]; then
echo "Must provide environment variable OS"
exit 1
fi
if [ "$ARCH" = "" ]; then
echo "Must provide environment variable ARCH"
exit 1
fi
TARFILE="minion.$OS.$ARCH.tar.gz"
SHAFILE="minion.$OS.$ARCH.tar.gz.sha256sum"
GPGFILE="minion.$OS.$ARCH.tar.gz.sig"
BASEURL="https://s3.amazonaws.com/tb-minion/"
echo "$TARFILE $SHAFILE"
#set -o xtrace
while :
do
#Force an upgrade when any piece of local state is missing.
if [ ! -f current ]; then
echo "none" > current
fi
if [ ! -f "$TARFILE" ]; then
echo "none" > current
fi
if [ ! -f minion ]; then
echo "none" > current
fi
curl -so latest "${BASEURL}latest"
diff --brief current latest >/dev/null
comp_value=$?
if [ $comp_value -eq 1 ]
then
#Current did not match latest
echo "need to upgrade..."
curl -so "$TARFILE" "$BASEURL$TARFILE"
curl -so "$SHAFILE" "$BASEURL$SHAFILE"
curl -so "$GPGFILE" "$BASEURL$GPGFILE"
if [ $HASGPG -eq 0 ]
then
#Validate using gpg (detached .sig covers the tarball next to it)
gpg --verify "$GPGFILE"
verify_status=$?
else
#Validate sha256sum as fallback...
sha256sum -c "$SHAFILE" > /dev/null
verify_status=$?
fi
#Capture the status explicitly: "$? after fi" is the status of the
#last command of whichever branch ran, which is fragile to edits.
if [ $verify_status -eq 0 ]
then
echo "Successfully downloaded"
tar -xf "$TARFILE"
cp latest current
fi
else
echo "no need to upgrade..."
fi
./minion -cnc="distdns.turbobytes.com:7777" $EXTRAARGS
sleep 60 #rest for a minute... Avoid crash loop...
done
}
# Run the updater loop forever, tagging all of its output for syslog.
main_function 2>&1 | logger -t minion
##/bin/bash source'd
## Installation functions for a semantic wiki.
##
## Installation is performed in unattended mode. Supports Windows and Debian-based Linux.
##
## Installs the Bitnami Tomcat stack, Jena Fuseki, MediaWiki, and various MediaWiki extensions.
##
set -e
##
# Top-level driver: install the Bitnami stack, then each module, restarting
# services between steps so every module starts from a known service state.
function install_semantic_wiki() {
install_technology_stack
restart_technology_stack
install_module_fuseki
install_module_mediawiki
restart_technology_stack
report "Done."
}
##
# Install the Bitnami Tomcat stack (skipped when already present) and apply
# local git/php/httpd/tomcat configuration on top of it.
function install_technology_stack() {
report "Starting Bitnami Tomcat installation to: ${technology_stack_installation_root_dpn:?} ..."
install_technology_stack_base
install_technology_stack_configuration
}
# Run the Bitnami installer unless the installation root already exists.
# All passwords/usernames come from the configuration globals below.
function install_technology_stack_base() {
! [ -e "${technology_stack_installation_root_dpn:?}" ] || return 0
local installation_mode="$(inferred_bitnami_installation_mode)"
local installer_program_pn="$(inferred_technology_stack_installer_program_pn)"
local -a command=(
"${installer_program_pn:?}"
--mode "${installation_mode:?}"
--prefix "${technology_stack_installation_root_dpn:?}"
--mysql_password "${password:?}"
--phpmyadmin_password "${password:?}"
--mysql_database_name "${product_database_name}"
--mysql_database_username "${product_name_id:?}"
--mysql_database_password "${password:?}"
--tomcat_manager_username "${product_name_id:?}"
--tomcat_manager_password "${password:?}"
--launch_cloud 0
)
mkdir -p "${technology_stack_installation_root_dpn:?}"
case "${installation_mode:?}" in
unattended.DISABLED)
# Disabled branch (note the .DISABLED suffix): background the installer
# and poll for its artifacts instead of waiting on the foreground process.
xx "${command[@]}" &
wait_for_unattended_technology_stack_installation_to_finish
;;
*)
xx "${command[@]}"
;;
esac
}
# Apply each piece of stack-level configuration in dependency order.
function install_technology_stack_configuration() {
install_technology_stack_git_configuration
install_technology_stack_php_configuration
install_technology_stack_httpd_configuration
install_technology_stack_tomcat_configuration
}
# Ensure a global git identity exists (needed later for extension checkouts);
# existing settings are left untouched.
function install_technology_stack_git_configuration() {
xxq git config --global --get user.name ||
xxv git config --global --add user.name "${product_name_tc:?} Administrator"
xxq git config --global --get user.email ||
xxv git config --global --add user.email "${product_admin_mail_address:?}"
}
# Windows only: uncomment the php_fileinfo.dll extension line in php.ini.
function install_technology_stack_php_configuration() {
local f1
case "$(inferred_os_type)" in
msys|windows)
for f1 in "${technology_stack_installation_root_dpn:?}/php/php.ini" ; do
perl -i~ -pe '
s{^(\s*;\s*)(extension\s*=\s*php_fileinfo.dll)}{$2}i;
' "$f1"
done
;;
esac
}
# Give the web server user write access to the Apache document root.
function install_technology_stack_httpd_configuration() {
set_ownership_of_web_content "${technology_stack_installation_root_dpn:?}/apache2/htdocs"
}
# Set the Tomcat JVM heap min/max in the setenv script (Windows service
# flags --JvmMs/--JvmMx, Unix -Xms/-Xmx) and widen the JAR scan skip list.
function install_technology_stack_tomcat_configuration() {
local f1
for f1 in "$(inferred_tomcat_service_setenv_script_fpn)" ; do
case "$(inferred_os_type)" in
msys|windows)
perl -i~ -pe '
chomp; s{\s*$}{\r\n}; # trim trailing space and ensure proper line terminator
s{^\s*((?:set\s+)?JAVA_OPTS\s*=.*)(--JvmMs\s+\d+)}{${1}--JvmMs '"${tomcat_service_jvm_heap_min:?}"'};
s{^\s*((?:set\s+)?JAVA_OPTS\s*=.*)(--JvmMx\s+\d+)}{${1}--JvmMx '"${tomcat_service_jvm_heap_max:?}"'};
' "$f1"
xx "$(inferred_tomcat_service_controller_xpn)" //US//tomcatstackTomcat --JvmMs "${tomcat_service_jvm_heap_min:?}"
xx "$(inferred_tomcat_service_controller_xpn)" //US//tomcatstackTomcat --JvmMx "${tomcat_service_jvm_heap_max:?}"
;;
*)
perl -i~ -pe '
chomp; s{\s*$}{\n}; # trim trailing space and ensure proper line terminator
s{^\s*((?:export\s+)?JAVA_OPTS\s*=.*)(-Xms\d+[mM])}{${1}-Xms'"${tomcat_service_jvm_heap_min:?}"'M};
s{^\s*((?:export\s+)?JAVA_OPTS\s*=.*)(-Xmx\d+[mM])}{${1}-Xmx'"${tomcat_service_jvm_heap_max:?}"'M};
' "${f1%.*}.sh"
;;
esac
done
# Add "*" to jarsToSkip (unless already present) so Tomcat skips annotation
# scanning of every jar, which speeds up startup.
for f1 in "${tomcat_service_installation_root_dpn:?}/conf/catalina.properties" ; do
perl -i~ -pe '
s{\b(tomcat.util.scan.StandardJarScanFilter.jarsToSkip\s*=\s*)}{${1}*,} unless m{=\s*\*,};
' "$f1"
done
}
##
# Poll until the backgrounded Bitnami installer has fully finished:
# services up, uninstaller files written, installer process gone.
function wait_for_unattended_technology_stack_installation_to_finish() {
wait_for_technology_stack_to_start
wait_for_technology_stack_installer_to_create_uninstaller
wait_for_technology_stack_installer_to_stop
}
function wait_for_technology_stack_installer_to_create_uninstaller() {
report "Ensuring that the technology stack uninstaller has been created..."
wait_for_nonempty_file "${technology_stack_uninstaller_fpn}"
wait_for_nonempty_file "${technology_stack_uninstaller_data_fpn}"
}
function wait_for_technology_stack_installer_to_stop() {
report "Ensuring that the technology stack installer has stopped..."
# %% is the most recent background job (the installer); failure is only
# reported, never fatal (trailing ": ").
wait %% || report_exit_code $? || :
}
##
# Start all stack services: Windows service names on Windows, the Bitnami
# ctlscript elsewhere; then block until all pid files appear.
function start_technology_stack() {
report "Ensuring that all ${product_name_tc:?} services have started..."
case "$(inferred_os_type)" in
msys|windows)
local s1
for s1 in MySQL Tomcat Apache ; do
xx net start "tomcatstack${s1}" ||
report_exit_code $?
done
;;
*)
"${technology_stack_cli_fpn:?}" start ||
report_exit_code $?
;;
esac
wait_for_technology_stack_to_start
}
# A service is considered "started" once its pid file is non-empty;
# wait_for_nonempty_file treats an unset pid-file path as a no-op.
function wait_for_technology_stack_to_start() {
wait_for_nonempty_file "${mysql_service_pid_fpn}"
wait_for_nonempty_file "${tomcat_service_pid_fpn}"
wait_for_nonempty_file "${http_service_pid_fpn}"
}
# Stop all services (reverse order of start). The Unix branch retries the
# ctlscript once before merely reporting the failure.
function stop_technology_stack() {
report "Ensuring that all ${product_name_tc:?} services have stopped..."
case "$(inferred_os_type)" in
msys|windows)
local s1
for s1 in Apache Tomcat MySQL ; do
xx net stop "tomcatstack${s1}" ||
report_exit_code $?
done
;;
*)
"${technology_stack_cli_fpn:?}" stop ||
"${technology_stack_cli_fpn:?}" stop ||
report_exit_code $?
;;
esac
}
# Stop-then-start every service, wait for pid files, then print a status
# summary for the log.
function restart_technology_stack() {
report "Restarting all ${product_name_tc:?} services..."
case "$(inferred_os_type)" in
msys|windows)
local s1
for s1 in Apache Tomcat MySQL ; do
xxq net stop "tomcatstack${s1}" || :
done
for s1 in MySQL Tomcat Apache ; do
xxv net start "tomcatstack${s1}"
done
;;
*)
"${technology_stack_cli_fpn:?}" restart ||
report_exit_code $?
;;
esac
wait_for_technology_stack_to_start
report_technology_stack_status
}
# Log the current state of each service (sc query / ctlscript status).
function report_technology_stack_status() {
report "Resulting status of ${product_name_tc:?} services:"
case "$(inferred_os_type)" in
msys|windows)
local s1
for s1 in Apache Tomcat MySQL ; do
xx sc query "tomcatstack${s1}" ||
report_exit_code $?
done
;;
*)
"${technology_stack_cli_fpn:?}" status ||
report_exit_code $?
;;
esac
}
##
# Install Jena Fuseki as a Tomcat webapp. Requires the stack to be present;
# services are bounced so Tomcat only loads fuseki after FUSEKI_BASE is set.
function install_module_fuseki() {
if [ ! -e "${technology_stack_installation_root_dpn:?}" ] ; then
report "Not yet installed: ${technology_stack_installation_root_dpn:?}; aborting..."
return 2
fi
report "Starting Fuseki installation to: ${module_fuseki_installation_root_dpn:?} ..."
install_module_fuseki_base
stop_technology_stack
#^-- prevents tomcat from auto-loading fuseki webapp
install_module_fuseki_configuration
#^-- prepares tomcat to (correctly) load fuseki webapp
start_technology_stack # fuseki webapp will be loaded
}
# Unpack the Fuseki tarball into the module root (idempotent).
function install_module_fuseki_base() {
! [ -e "${module_fuseki_installation_root_dpn:?}" ] || return 0
local tarball_pn="$(inferred_module_fuseki_tarball_pn)"
mkdir -p "${module_fuseki_installation_root_dpn:?}"
xx tar xf "${tarball_pn:?}" --strip-components 1 \
-C "${module_fuseki_installation_root_dpn:?}"
}
function install_module_fuseki_configuration() {
install_module_fuseki_local_configuration
install_module_fuseki_tomcat_configuration
}
# Create the FUSEKI_BASE directory (etc/) owned by the tomcat user.
function install_module_fuseki_local_configuration() {
local d1
for d1 in "${module_fuseki_installation_root_dpn:?}/etc" ; do
mkdir -p "$d1" ; set_ownership_of_webapp_content "$d1"
done
}
# Point Tomcat at FUSEKI_BASE: edit (or append to) the setenv script, set
# the Windows service environment, and deploy fuseki.war into webapps/.
# The perl scripts splice the shell variable ${d1} into the replacement text.
function install_module_fuseki_tomcat_configuration() {
local d1 f1
for d1 in "${module_fuseki_installation_root_dpn:?}/etc" ; do
for f1 in "$(inferred_tomcat_service_setenv_script_fpn)" ; do
case "$(inferred_os_type)" in
msys|windows)
perl -i~ -pe '
chomp; s{\s*$}{\r\n}; # trim trailing space and ensure proper line terminator
if (m#^\s*(?:set\s+)FUSEKI_BASE\s*=#) {
$_ = "" . "set FUSEKI_BASE='"${d1}"'\r\n";
$found_FUSEKI_BASE = 1;
}
if (eof && ! defined($found_FUSEKI_BASE)) {
$_ = $_ . "set FUSEKI_BASE='"${d1}"'\r\n";
$found_FUSEKI_BASE = 1;
}
#^-- no spaces allowed in FUSEKI_BASE
' "$f1"
xx "$(inferred_tomcat_service_controller_xpn)" //US//tomcatstackTomcat ++Environment FUSEKI_BASE=${d1}
;;
*)
perl -i~ -pe '
chomp; s{\s*$}{\n}; # trim trailing space and ensure proper line terminator
if (m#^\s*(?:export\s+)FUSEKI_BASE\s*=#) {
$_ = "" . "export FUSEKI_BASE='"${d1}"'$/";
$found_FUSEKI_BASE = 1;
}
if (eof && ! defined($found_FUSEKI_BASE)) {
$_ = $_ . "export FUSEKI_BASE='"${d1}"'$/";
$found_FUSEKI_BASE = 1;
}
#^-- no spaces allowed in FUSEKI_BASE
' "$f1"
;;
esac
set_ownership_of_webapp_content "$f1"
done;done
# NOTE(review): f2 below is not declared local — confirm no caller relies
# on it; it leaks into the sourcing shell's scope.
for f1 in "${module_fuseki_installation_root_dpn:?}/fuseki.war" ; do
for f2 in "${tomcat_service_installation_root_dpn:?}/webapps/${f1##*/}" ; do
cp "$f1" "$f2" ; set_ownership_of_webapp_content "$f2"
done;done
}
##
# Install MediaWiki under the stack: unpack, run the unattended installer,
# wire it into Apache/Tomcat, then fetch and patch the needed extensions.
function install_module_mediawiki() {
if [ ! -e "${technology_stack_installation_root_dpn:?}" ] ; then
report "Not yet installed: ${technology_stack_installation_root_dpn:?}; aborting..."
return 2
fi
report "Starting MediaWiki installation to: ${module_mediawiki_installation_root_dpn:?} ..."
install_module_mediawiki_base
install_module_mediawiki_configuration
install_module_mediawiki_extensions_needed
}
# Unpack the MediaWiki tarball into htdocs/ (idempotent).
function install_module_mediawiki_base() {
! [ -e "${module_mediawiki_installation_root_dpn:?}/htdocs" ] || return 0
local tarball_pn="$(inferred_module_mediawiki_tarball_pn)"
mkdir -p "${module_mediawiki_installation_root_dpn:?}/htdocs"
xx tar xf "${tarball_pn:?}" --strip-components 1 \
-C "${module_mediawiki_installation_root_dpn:?}/htdocs"
}
function install_module_mediawiki_configuration() {
install_module_mediawiki_php_configuration
install_module_mediawiki_local_configuration
install_module_mediawiki_httpd_configuration
}
function install_module_mediawiki_php_configuration() {
true # TODO: srogers: performance: customize PHP for MediaWiki (use APC, perhaps?)
}
# Run MediaWiki's CLI installer to (re)generate LocalSettings.php, then
# patch in the admin mail address and script path. Runs in a subshell so
# the PATH change and cd do not leak into the caller.
function install_module_mediawiki_local_configuration() {(
local wiki_admin_user_name="${product_name_id:?}"
local wiki_name="${product_name_tc% Wiki} Wiki"
local wiki_script_path="/wiki"
local php_path_fragment=
case "$(inferred_os_type)" in
msys|windows)
php_path_fragment="${technology_stack_installation_root_dpn:?}/php"
php_path_fragment="$(as_unix_pathname "${php_path_fragment:?}")"
PATH="${php_path_fragment:?}:${PATH}"
#^-- FIXME: srogers: test against Windows 10 bash
;;
*)
php_path_fragment="${technology_stack_installation_root_dpn:?}/php/bin"
PATH="${php_path_fragment:?}:${PATH}"
;;
esac
cd "${module_mediawiki_installation_root_dpn:?}/htdocs"
rm -f LocalSettings.php
xx php maintenance/install.php \
--dbtype "mysql" \
--dbserver "localhost" \
--installdbuser "root" \
--installdbpass "${password:?}" \
--dbname "${product_database_name?}" \
--dbuser "${product_name_id:?}" \
--dbpass "${password:?}" \
--server "${product_site_root_url:?}" \
--pass "${password:?}" \
"${wiki_name:?}" \
"${wiki_admin_user_name:?}"
# printf %q shell-quotes the substituted values so they survive being
# spliced into the perl program text.
perl -i~ -pe "
s'\".*\"'\"$(printf %q "${product_admin_mail_address:?}")\"' if m{\\\$wgEmergencyContact\\s*=};
s'\".*\"'\"$(printf %q "${product_admin_mail_address:?}")\"' if m{\\\$wgPasswordSender\\s*=};
s'\".*\"'\"$(printf %q "${wiki_script_path:?}")\"' if m{\\\$wgScriptPath\\s*=};
" LocalSettings.php
set_ownership_of_web_content .
)}
# Generate the Apache conf files from templates, include the prefix conf
# exactly once, and allow /wiki through the Tomcat front-end filter.
function install_module_mediawiki_httpd_configuration() {
local this_package_root_dpn="$(dirname "$(dirname "$0")")"
local f1
for f1 in {htaccess,httpd-app,httpd-prefix}.conf ; do
create_file_from_template \
"${this_package_root_dpn:?}/share/mediawiki/conf/${f1}.in" \
"${module_mediawiki_installation_root_dpn:?}/conf/${f1}"
done
##
local include_directive="Include \"${module_mediawiki_installation_root_dpn:?}/conf/httpd-prefix.conf\""
for f1 in "${http_service_installation_root_dpn:?}/conf/bitnami/bitnami-apps-prefix.conf" ; do
fgrep -q "${include_directive}" "$f1" || echo "${include_directive}" >> "$f1"
done
##
for f1 in "${tomcat_service_installation_root_dpn:?}/conf/tomcat.conf" ; do
perl -i~ -pe 'next if m{(\bserver-status\b)\|wiki\b} ; s{(\bserver-status\b)}{$1|wiki}' "$f1"
done
}
# Read the needed-extensions manifest (name release commit url per line,
# comments/blanks stripped) and install each one under htdocs/extensions.
function install_module_mediawiki_extensions_needed() {
local extensions_root_dpn="${module_mediawiki_installation_root_dpn:?}/htdocs/extensions"
local f1
mkdir -p "${extensions_root_dpn:?}"
for f1 in "$(dirname "$(dirname "$0")")/share/mediawiki-extensions.needed.txt" ; do
xx cat "$f1" | omit_wsac |
while read name release git_commit_id git_repo_url ; do
install_module_mediawiki_extension_under "${extensions_root_dpn:?}" \
"${name:?}" "${release:?}" "${git_commit_id:?}" "${git_repo_url:?}"
done
done
set_ownership_of_web_content "${extensions_root_dpn:?}"
}
# Clone one extension at a pinned commit, branch it locally, and apply any
# bundled patches; an existing non-git copy (bundled extension) is replaced,
# an existing git checkout is left alone.
function install_module_mediawiki_extension_under() { # extensions_root_dpn name release git_commit_id git_repo_url
local extensions_root_dpn="${1:?}" ; shift 1
local name="${1:?}" release="${2:?}" ; shift 2
local git_commit_id="${1:?}" git_repo_url="${2:?}" ; shift 2
local extension_patches_root_dpn="$(realpath "$(dirname "$(dirname "$0")")/share/mediawiki-extension-patches")"
local d1 p1
for d1 in "${extensions_root_dpn:?}" ; do
(
xx cd "$d1"
if [ -d "${name:?}" -a ! -e "${name:?}/.git" ] ; then
xx rm -rf "${name:?}" # prepare to replace a standard extension
fi
! [ -e "${name:?}" ] || return 0
xx git clone "${git_repo_url:?}" "${name:?}"
xx cd "${name:?}"
xx git checkout -b "${release:?}" "${git_commit_id:?}"
xx git checkout -b "${release:?}-local"
capture_git_submodule_updates
for p1 in "${extension_patches_root_dpn:?}/${name:?}/${release:?}"/*.patch ; do
[ -e "$p1" ] || continue
xx patch -p1 -i "$p1"
xx git add -A :/
xx git commit -m "Apply patch: ${p1##*/}."
done
capture_git_submodule_updates
! false ||
xx git log --reverse "${release:?}..${release:?}-local"
)
done
}
# If the checkout uses submodules, initialize them and record the result
# as a (possibly empty) commit on the local branch.
function capture_git_submodule_updates() {
if [ -e ".gitmodules" ] ; then
xx git submodule update --init --recursive
xx git add -A :/
xx git commit -m "Capture submodule updates (if any)." --allow-empty
fi
}
##
# Download (if needed) and verify the Bitnami installer for this platform,
# then print its local pathname on stdout.
function inferred_technology_stack_installer_program_pn() {
local version="${technology_stack_version:?}"
local sha1= # depends on platform
local local_fpn="./Bitnami-Tomcat-${version:?}.installer$(inferred_installer_program_suffix)"
local remote_url="https://downloads.bitnami.com/files/stacks/tomcatstack/${version:?}/bitnami-tomcatstack-${version:?}-"
case "$(inferred_os_type)" in
msys|windows)
case "$(inferred_cpu_type)" in
x86_32|x86_64)
# Windows uses the 32-bit installer on both CPU widths.
sha1="${technology_stack_installer_x86_32_windows_sha1:?}"
remote_url="${remote_url:?}windows-installer.exe"
;;
*)
report "CPU type not supported: $(inferred_cpu_type)"
return 2
;;
esac
;;
linux-gnu)
case "$(inferred_cpu_type)" in
x86_64)
sha1="${technology_stack_installer_x86_64_linux_sha1:?}"
remote_url="${remote_url:?}linux-x64-installer.run"
;;
*)
report "CPU type not supported: $(inferred_cpu_type)"
return 2
;;
esac
;;
*)
assert_cannot_happen
;;
esac
ensure_file_downloaded_to "${local_fpn:?}" "${sha1:?}" "${remote_url:?}"
chmod +x "${local_fpn:?}"
echo "${local_fpn:?}"
}
# Download (if needed) and verify the Fuseki tarball; print its pathname.
function inferred_module_fuseki_tarball_pn() {
local version="${module_fuseki_version:?}"
local sha1="${module_fuseki_tarball_sha1:?}"
local local_fpn="./fuseki-${version:?}.tar.gz"
local remote_url="http://archive.apache.org/dist/jena/binaries/apache-jena-fuseki-${version:?}.tar.gz"
ensure_file_downloaded_to "${local_fpn:?}" "${sha1:?}" "${remote_url:?}"
echo "${local_fpn:?}"
}
# Download (if needed) and verify the MediaWiki tarball; print its pathname.
# Release URLs are grouped by major.minor, so strip the patch level.
function inferred_module_mediawiki_tarball_pn() {
local version="${module_mediawiki_version:?}"
local version_major_minor="$(echo "${version:?}" | perl -lpe 's{^(\d+\.\d+)\D.*$}{$1}')"
local sha1="${module_mediawiki_tarball_sha1:?}"
local local_fpn="./mediawiki-${version:?}.tar.gz"
local remote_url="https://releases.wikimedia.org/mediawiki/${version_major_minor:?}/mediawiki-${version:?}.tar.gz"
ensure_file_downloaded_to "${local_fpn:?}" "${sha1:?}" "${remote_url:?}"
echo "${local_fpn:?}"
}
##
# Each inferred_* function prints a derived configuration value on stdout;
# explicit settings from the .conf file always win over the inferred default.
function inferred_product_site_root_url() {
echo "${product_site_root_url:-http://$(hostname --fqdn)}"
}
function inferred_product_name_tc() {
echo "${product_name_tc:-Semantic Wiki}" # title case
}
function inferred_product_name_lc() {
inferred_product_name_tc |
perl -lpe '$_ = lc' # lower case
}
function inferred_product_name_id() {
inferred_product_name_lc |
perl -lpe 's{\W}{_}g' # identifier syntax
}
# Product name as a pathname component: title case on Windows, lower case
# elsewhere, spaces replaced with dashes in both cases.
function inferred_product_name_pn() {
case "$(inferred_os_type)" in
msys|windows)
inferred_product_name_tc
;;
*)
inferred_product_name_lc
;;
esac |
perl -lpe 's{\s}{-}g' # pathname without spaces
}
# File suffix of the Bitnami installer for this platform, e.g.
# ".x86_64.linux.run" or ".x86_32.exe"; returns 2 for unsupported OSes.
function inferred_installer_program_suffix() {
local rc=0 result=".$(inferred_cpu_type)"
case "$(inferred_os_type)" in
msys|windows)
result="${result}.exe"
;;
linux-gnu)
result="${result}.linux.run"
;;
*)
report "OS type not supported: $(inferred_os_type)"
rc=2
;;
esac
echo "${result:?}"
return ${rc}
}
# "WAMP" on Windows, "LAMP" elsewhere (informational only).
function inferred_XAMP_name() {
case "$(inferred_os_type)" in
msys|windows)
echo "WAMP"
;;
*)
echo "LAMP"
;;
esac
}
# Normalized OS type from $OSTYPE: "msys", "windows", or "linux-gnu".
# Windows 10's native bash reports OSTYPE=linux-gnu, so $OS is consulted
# to detect that case.
function inferred_os_type() {
local rc=0 result="${OSTYPE:-unspecified}"
case "${result:?}" in
linux-gnu)
case "${OS}"xx in
Windows*)
result="windows"
#^-- Windows 10 provides its own bash
;;
esac
;;
msys)
true
;;
unspecified)
report "OS type not specified"
rc=2
;;
esac
echo "${result:?}"
return ${rc}
}
# Normalized CPU type: "x86_32" or "x86_64". On Windows the value comes
# from PROCESSOR_ARCHITECTURE, elsewhere from bash's HOSTTYPE.
function inferred_cpu_type() {
local rc=0 result="${HOSTTYPE:-unspecified}"
case "$(inferred_os_type)" in
msys|windows)
case "${PROCESSOR_ARCHITECTURE:-unspecified}" in
unspecified)
report "Windows processor architecture not specified"
rc=2
;;
*)
result="${PROCESSOR_ARCHITECTURE:?}"
;;
esac
;;
esac
case "${result:?}" in
x86_32|x86|i[0-9]86)
result="x86_32"
;;
x86_64|AMD64|amd64)
result="x86_64"
;;
unspecified)
report "CPU type not specified"
rc=2
;;
esac
echo "${result:?}"
return ${rc}
}
# Configuration file pathname: "<script-basename>.conf" in the current
# directory if present, otherwise next to the script itself.
function inferred_configuration_file_pn() {
local this_script_fbn="$(basename "$0")"
local this_script_fpn="$0"
local result="${this_script_fbn%.*sh}".conf
[ -e "${result:?}" ] || result="${this_script_fpn%.*sh}".conf
echo "${result}"
}
# Installation root for the whole stack: c:/Stacks/<name> on Windows,
# /opt/<name> elsewhere.
function inferred_technology_stack_installation_root_dpn() {
case "$(inferred_os_type)" in
msys|windows)
echo "c:/Stacks/$(inferred_product_name_pn)"
;;
*)
echo "/opt/$(inferred_product_name_pn)"
;;
esac
}
function inferred_module_mediawiki_installation_root_dpn() {
echo "$(inferred_technology_stack_installation_root_dpn)/apps/mediawiki"
}
function inferred_module_fuseki_installation_root_dpn() {
echo "$(inferred_technology_stack_installation_root_dpn)/apps/fuseki"
}
# Pathname of Tomcat's setenv script (.bat on Windows, .sh elsewhere).
function inferred_tomcat_service_setenv_script_fpn() {
local result="${tomcat_service_installation_root_dpn:?}/bin/setenv"
case "$(inferred_os_type)" in
msys|windows)
result="${result}.bat"
;;
*)
result="${result}.sh"
;;
esac
echo "${result:?}"
}
# Windows-only: pathname of the tomcatN service controller executable;
# returns 2 elsewhere (there is no equivalent on Unix).
function inferred_tomcat_service_controller_xpn() {
local result_stem="${tomcat_service_installation_root_dpn:?}/bin/tomcat"
local version_major="${technology_stack_version%%.*}"
local rc=0 result=
case "$(inferred_os_type)" in
msys|windows)
result="${result_stem:?}8"
[ -e "${result:?}" ] ||
result="${result_stem:?}${version_major:?}"
;;
*)
result=
rc=2
;;
esac
echo "${result:?}"
return ${rc}
}
# Bitnami installer mode: interactive GUI on Windows, unattended elsewhere.
function inferred_bitnami_installation_mode() {
case "$(inferred_os_type)" in
msys|windows)
echo "qt" # interactive GUI-based installation
;;
*)
echo "unattended" # non-interactive installation
;;
esac
}
##
# Download a file unless it already exists, then verify its SHA-1.
# A checksum mismatch makes sha1sum fail, which aborts under "set -e".
function ensure_file_downloaded_to() { # file_pn file_sha1 file_remote_url
local file_pn="${1:?}"
local file_sha1="${2:?}"
local file_remote_url="${3:?}"
[ -e "${file_pn:?}" ] ||
xx curl --output "${file_pn:?}" "${file_remote_url:?}"
check_sha1sum_of "${file_pn:?}" "${file_sha1:?}"
}
# Feed "<sha1>  <file>" to "sha1sum --check"; exit status is the verdict.
function check_sha1sum_of() { # file_pn file_sha1
xx echo "${2:?} ${1:?}" | xx sha1sum --check --status -
}
##
# Instantiate a template: copy it while substituting the ${...}-style
# placeholders for the two installation-root variables (printf %q quotes
# the values so they survive being spliced into the perl program).
function create_file_from_template() { # template_file_pn file_pn
local template_file_pn="${1:?}"
local file_pn="${2:?}"
mkdir -p "$(dirname "${file_pn:?}")" ; > "${file_pn:?}"
xx cat "${template_file_pn:?}" | perl -lpe "
s'\\\$\\{technology_stack_installation_root_dpn}'$(printf %q "${technology_stack_installation_root_dpn:?}")';
s'\\\$\\{module_mediawiki_installation_root_dpn}'$(printf %q "${module_mediawiki_installation_root_dpn:?}")';
" > "${file_pn:?}"
}
##
# Web-server-owned content (Apache htdocs etc.).
function set_ownership_of_web_content() { # file_or_directory_pn ...
set_ownership daemon www-data "$@"
}
# Tomcat-owned content (webapps, FUSEKI_BASE etc.).
function set_ownership_of_webapp_content() { # file_or_directory_pn ...
set_ownership tomcat tomcat "$@"
}
# Apply standard permissions and user/group ownership to each given file or
# directory (recursively for directories). Silently a no-op when either the
# user or the group does not exist on this system.
function set_ownership() { # user_name group_name file_or_directory_pn ...
local u1="${1:?}" ; shift
local g1="${1:?}" ; shift
local x1
(is_user "$u1" && is_group "$g1") || return 0
for x1 in "$@" ; do
if [ -d "$x1" ] ; then
xx chmod -R a+rX,ug+w,o-w "$x1"/.
# Use the POSIX "user:group" separator; the historical "user.group"
# form is deprecated and ambiguous for user names containing a dot.
xx chown -R "$u1":"$g1" "$x1"/.
else
xx chmod a+rX,ug+w,o-w "$x1"
xx chown "$u1":"$g1" "$x1"
fi
done
}
# True iff the named user exists (requires getent, i.e. glibc systems).
function is_user() { # user_name
hash getent >/dev/null 2>&1 || return $?
getent passwd "${1:?}" >/dev/null 2>&1
}
# True iff the named group exists (requires getent).
function is_group() { # group_name
hash getent >/dev/null 2>&1 || return $?
getent group "${1:?}" >/dev/null 2>&1
}
##
# Block until the given file exists and is non-empty; an empty argument is
# treated as "nothing to wait for" (some platforms have no pid file).
function wait_for_nonempty_file() { # file_pn
! [ -z "${1}" ] || return 0
while ! xxq [ -s "${1:?}" ] ; do sleep_awhile ; done
}
function sleep_awhile() {
sleep 10
}
##
# Convert "c:/..." style pathnames to "/c/..." for use inside MSYS/Cygwin;
# prefers cygpath when available.
function as_unix_pathname() { # msys_or_windows_pathname
if hash cygpath >/dev/null 2>&- ; then
cygpath -u "${1}"
else
echo "$1" | perl -pe 's{^([a-zA-Z]):} {/$1}'
fi
}
function omit_wsac() {
perl -ne 'next if m{^\s*#|^\s*$} ; print' "$@"
#^-- filter: omit whitespace and comments
}
}
##
# Fatal: reached a branch that the surrounding logic should exclude.
function assert_cannot_happen() {
report "Internal logic error at line ${BASH_LINENO[1]} in file ${BASH_SOURCE[1]}."
exit 2
}
# Fatal unless the given expression evaluates true (in a subshell).
function assert() { # expression
! (eval "$@") || return 0
report "Assertion failed at line ${BASH_LINENO[1]} in file ${BASH_SOURCE[1]}:" "$@"
exit 2
}
##
# xx: trace a command and report a nonzero exit code (alias of xxv).
function xx() { # ...
xxv "$@"
}
# xxq: trace a command but do not report its exit code.
function xxq() { # ...
echo 1>&2 "+" "$@"
"$@" # quietly: no exit code report
}
# xxv: trace a command and report its exit code when it fails.
function xxv() { # ...
echo 1>&2 "+" "$@"
"$@" || report_exit_code $?
}
# Log a message to stderr, preceded by a blank line.
function report() { # ...
echo 1>&2
echo 1>&2 "+" "$@"
}
# Log an exit code to stderr and propagate it.
function report_exit_code() { # xc
local xc="${1:-$?}"
echo 1>&2 "^-- EXIT CODE: ${xc}"
return ${xc}
}
##
# --- default configuration parameters (overridable by the .conf file) ---
product_name_tc= # inferred unless set by .conf file
product_site_root_url= # inferred unless set by .conf file
##
password="not_secure"
product_admin_mail_address="devops@example.com"
##
tomcat_service_jvm_heap_min=512
tomcat_service_jvm_heap_max=1024
##
module_fuseki_version="2.6.0"
module_mediawiki_version="1.29.1"
technology_stack_version="8.5.24-2"
##
##^-- default configuration parameters above
##^
source "$(inferred_configuration_file_pn)" ||
report_exit_code $?
umask 0022
#^-- required by the Bitnami installer
set -e
#^-- required by these scripts
##v
##v-- derived configuration parameters below
##
product_name_tc="$(inferred_product_name_tc)"
product_site_root_url="$(inferred_product_site_root_url)"
##
product_name_id="$(inferred_product_name_id)"
product_name_lc="$(inferred_product_name_lc)"
product_name_pn="$(inferred_product_name_pn)"
module_fuseki_installation_root_dpn="$(inferred_module_fuseki_installation_root_dpn)"
module_mediawiki_installation_root_dpn="$(inferred_module_mediawiki_installation_root_dpn)"
technology_stack_installation_root_dpn="$(inferred_technology_stack_installation_root_dpn)"
##
product_database_name="${product_name_id%_db}_db"
##
http_service_installation_root_dpn="${technology_stack_installation_root_dpn:?}/apache2"
mysql_service_installation_root_dpn="${technology_stack_installation_root_dpn:?}/mysql"
tomcat_service_installation_root_dpn="${technology_stack_installation_root_dpn:?}/apache-tomcat"
##
technology_stack_cli_fpn="${technology_stack_installation_root_dpn}/ctlscript.sh"
technology_stack_uninstaller_fpn="${technology_stack_installation_root_dpn}/uninstall"
technology_stack_uninstaller_data_fpn="${technology_stack_installation_root_dpn}/uninstall.dat"
# Per-platform pid-file locations; an empty value means "no pid file" and
# is skipped by wait_for_nonempty_file.
case "$(inferred_os_type)" in
msys|windows)
http_service_pid_fpn="${http_service_installation_root_dpn}/logs/httpd.pid"
mysql_service_pid_fpn="${mysql_service_installation_root_dpn}/data/$(hostname).pid"
tomcat_service_pid_fpn= # no .pid file provided
;;
*)
http_service_pid_fpn="${http_service_installation_root_dpn}/logs/httpd.pid"
mysql_service_pid_fpn="${mysql_service_installation_root_dpn}/data/mysqld.pid"
tomcat_service_pid_fpn="${tomcat_service_installation_root_dpn}/temp/catalina.pid"
;;
esac
##
# Pin checksums per supported version; "return" at top level is valid here
# because this file is source'd (see the header comment above).
case "${module_fuseki_version:?}" in
2.6.0)
module_fuseki_tarball_sha1="92f27e01268ad47737bafd164474e36238351c86"
;;
3.4.0)
module_fuseki_tarball_sha1="514913b50d27798f3688a45a59f9bf5130b0dff2"
;;
3.5.0)
module_fuseki_tarball_sha1="ee89efb913cbab1840ad15ed426635c8f0529d1f"
;;
*)
report "Fuseki version not supported: ${module_fuseki_version:?}"
return 2
esac
case "${module_mediawiki_version:?}" in
1.29.1)
module_mediawiki_tarball_sha1="4ceacc2b5f883f37ed696fbe5413d547652acdc4"
;;
1.30.0)
module_mediawiki_tarball_sha1="16f4831904dbb7a67de2b78ebb968999d2fb996c"
;;
*)
report "MediaWiki version not supported: ${module_mediawiki_version:?}"
return 2
esac
case "${technology_stack_version:?}" in
8.5.24-2)
technology_stack_installer_x86_64_linux_sha1="4c0177a8d8e489c40594d414953d5ab42c4345e7"
technology_stack_installer_x86_32_windows_sha1="52650ac59499da74feb63e944be18c5d235ac8fa"
;;
*)
report "Bitnami Tomcat stack version not supported: ${technology_stack_version:?}"
return 2
esac
##
|
/**
 * Ambient declaration for the global `Rescale` helper, defined elsewhere
 * (at runtime) — presumably by a Dota 2 custom-game Lua/TS bridge; TODO
 * confirm where the implementation is registered.
 */
declare interface Rescale {
/** Rescales a single NPC unit. */
RescaleUnit(unit: CDOTA_BaseNPC): void,
/** Rescales all buildings on the map. */
RescaleBuildings(): void,
/** Rescales all shops on the map. */
RescaleShops(): void,
}
declare var Rescale: Rescale;
|
<reponame>briefgw/brief_schunklwa4p_simulation
/******************************************************************************
*
* Copyright (c) 2012
*
* SCHUNK GmbH & Co. KG
*
* ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
*
* Project name: Drivers for "Amtec M5 Protocol" Electronics V4
*
* ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
*
* Email:<EMAIL>
*
* ToDo:
*
* ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of SCHUNK GmbH & Co. KG nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License LGPL as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License LGPL for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License LGPL along with this program.
* If not, see <http://www.gnu.org/licenses/>.
*
******************************************************************************/
#ifndef CPROTOCOLMESSAGE_H
// Fixed include guard: the macro defined below previously read
// "CROTOCOLMESSAGE_H" (missing 'P'), so the #ifndef test above never became
// false and double inclusion caused redefinition errors.
#define CPROTOCOLMESSAGE_H
#include "../Util/GlobalDefines.h"
/// One message frame of the "Amtec M5 Protocol" (CAN-style), carrying up to
/// 8 data bytes plus bookkeeping metadata (timestamp, module address, state).
class CProtocolMessage
{
public:
// ---- public data ----------------------------------------------------- ;
unsigned long m_uiMessageId; // Identifier 11-/29-Bit
// 11 bit in low word, 29 bit complete
unsigned char m_ucMessageLength; // number of data bytes (0-8)
unsigned char m_aucMessageData[8]; // Array for up to 8 data bytes
unsigned char m_ucMessageState; // Bit coded information for state
bool m_bRTRFlag; // RTR-Bit: 0=Dataframe, 1=Remoteframe
double m_fTime; // time stamp in sec
int m_iModuleId; // module bus address
// ---- constructors / destructor --------------------------------------- ;
/// default constructor
CProtocolMessage();
/// copy constructor
CProtocolMessage(const CProtocolMessage& rclProtocolMessage);
/// destructor
~CProtocolMessage();
// ---- operators ------------------------------------------------------ ;
// assignment operator
CProtocolMessage& operator=(const CProtocolMessage& rclProtocolMessage);
};
/// Wire representation of a message on the RS232 transport (2-byte id).
typedef struct
{
unsigned char m_aucMessageId[2];
unsigned char m_aucMessageData[8];
unsigned char m_ucMessageLength;
} CRS232Message;
/// Reinterpretation union for the 4 payload bytes of a protocol message:
/// lets callers view the same bytes as chars, shorts, longs, or a float.
typedef union
{
unsigned char aucData[4];
char acData[4];
unsigned short auiData[2];
short aiData[2];
unsigned long uiData;
long iData;
float fData;
} CProtocolData;
#endif
|
### From February 2022
# Write a program that gets the raw output logs that exist at the URL
# https://coderbyte.com/api/challenges/logs/web-logs-raw. The logs there
# are a sample of real web server logs. Each line begins with a date,
# e.g. April 10 11:17:35. Your program should do the following:
#
# Loop through each log item, and find the lines that contain the string
# heroku/router. For each of those, echo the request_id to a new line,
# and then in the fwd key has the value of MASKED, then add a [M] to the
# end of the line with a space before it.
#
# Your final output should look something like the following:
#
# b19a87a1-1bbb-000-00000
# b19a87a1-1bbb-000-111118b
# b2413c-3c67-4180-22222 [M]
# 10f93da3-2753-48a3-33333 [M]
#!/bin/bash
# Download the raw web logs, then print the request_id of every
# heroku/router line, appending " [M]" when the line carries fwd="MASKED".
curl -s https://coderbyte.com/api/challenges/logs/web-logs-raw -O > /dev/null
heroku_router="heroku/router"
fwd_masked='fwd="MASKED"'
masked_bool=0
while read line; do
if [[ $line == *$heroku_router* ]]; then
if [[ $line == *$fwd_masked* ]]; then
masked_bool=1
fi
# On my mac, I use a literal ' ' in the second sed to do the filtering.
# On a linux machine, we can use '\s' instead.
# Strip everything up to "request_id=", then everything after the id token.
temp_line=$(echo "$line" | sed "s/^.*request_id=//g" | sed "s/ .*$//")
if [[ $masked_bool == 1 ]]; then
echo "$temp_line [M]"
masked_bool=0
else
echo "$temp_line"
fi
fi
done < web-logs-raw
rm web-logs-raw
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { MatCardModule } from '@angular/material/card';
import { MatButtonModule } from '@angular/material/button';
import { WordsRoutingModule } from './words-routing.module';
import { WordsComponent } from './words.component';
/**
 * Feature module for the "words" feature: declares WordsComponent and wires
 * in Angular Material card/button modules plus the feature's routing table.
 */
@NgModule({
declarations: [WordsComponent],
imports: [
CommonModule,
MatCardModule,
MatButtonModule,
WordsRoutingModule
]
})
export class WordsModule { }
|
#!/bin/bash
# Copyright (c) 2014-2015, Technische Universitaet Muenchen
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Derive the repo slug (e.g. "TUM-I5/utils") from the Travis build path.
export REPO="$(pwd | sed s,^/home/travis/build/,,g)"
echo -e "Current Repo:$REPO --- Travis Branch:$TRAVIS_BRANCH"
git config --global user.email "rettenbs@in.tum.de"
git config --global user.name "Travis"

# Publish the Doxygen output to the gh-pages branch — master builds only.
if [ "$TRAVIS_BRANCH" == "master" ]; then
    # FIX: abort on the first failure. Without this, a failed clone left us in
    # the ORIGINAL working tree and the subsequent "git rm -rf ." / commit /
    # force-push ran against the wrong directory and branch.
    set -e
    doxygen
    git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/TUM-I5/utils.git gh-pages > /dev/null
    cd gh-pages
    # Replace the entire published tree with the freshly generated docs.
    git rm -rf .
    cp -r ../html/* .
    git add -f .
    git commit -m "Travis build $TRAVIS_BUILD_NUMBER pushed to gh-pages"
    # Quiet push so the GH_TOKEN never appears in the build log.
    git push -fq origin gh-pages > /dev/null
fi
#!/usr/bin/env bash
# Resolve each hostname listed in a file (one name per line) to its IPv4
# address and print the addresses, one per line.
#
# Usage: bash <this-script> <hostlist-file>

# Print usage and bail out.
# FIX: defined BEFORE first use — bash resolves function names at call time,
# and the original only defined usage() at the bottom of the script, so the
# "usage" call inside the file check failed with "command not found".
usage() {
    echo "bash $0 hostlist"
    exit 1
}

hostfile=$1
# The single argument must name an existing host-list file (quoted so paths
# with spaces survive the test).
if [ ! -e "$hostfile" ]; then
    echo "Please check the file of $hostfile."
    usage
fi

# Resolve every listed name; grep extracts the dotted-quad from `host` output.
for name in $(cat "$hostfile"); do
    ip=$(host "$name" | grep -oP "(\d+\.){3}\d+")
    echo "$ip"
done
|
var PDF = require('pdfkit');
var fs = require("fs");
var ticket = require('./ticket.json');
// PDF helpers for rendering a trip-ticket document with pdfkit:
// page setup, the per-page letterhead/table header, and the fixed footer.
var pdf = {
    // Create a portrait document with narrow side margins and the given
    // PDF metadata (info: {Title, Subject, Author}).
    pageSetting : function(options){
        var doc = new PDF({
            layout: 'portrait',
            margins: {
                top: 0,
                bottom: 0,
                left: 10,
                right: 10
            },
            info: options.info
        });
        return doc;
    },
    // Draw the company letterhead, the ticket banner, and the line-item
    // table's column headings. Coordinates are absolute points on the page.
    pageHeader : function(doc,ticket){
        doc.image('../public/assets/img/beautylane-logo.png', 15, 15, {width: 130});
        doc.y = 20;
        doc.font('Courier');
        doc.fontSize(7);
        doc.text("BEAUTY LANE PHILIPPINES, INC.",{align:'center'});
        doc.moveDown(0.2);
        doc.text("10/F Alabang Business Tower, Acacia Ave.",{align:'center'});
        doc.moveDown(0.2);
        doc.text("Madrigal Business Park, Alabang Muntinlupa City 1780 Philippines",{align:'center'});
        doc.moveDown(0.2);
        doc.text("02-771-0771 02-772-0772, 0917-707-0707",{align:'center'});
        doc.moveDown(0.2);
        // Right-aligned ticket banner.
        // NOTE(review): "TI012014", "12-12-14" and "Page 1 of 2" look like
        // hard-coded sample values — presumably they should come from
        // `ticket`; confirm before relying on this output.
        doc.fontSize(14);
        doc.font('Courier-Bold');
        doc.text("TRIPTICKET FORM",380,20,{align:'right'});
        doc.text("TI012014",380,35,{align:'right'});
        doc.moveDown(0);
        doc.fontSize(10);
        doc.text("12-12-14",420,50,{align:'right'});
        doc.text("Page "+1+" of "+2,420,65,{align:'right'});
        doc.moveDown(0);
        // Column headings for the line-item table, all on one row at y=120.
        var y = 120;
        doc.font('Courier-Bold');
        doc.text("ARRIVAL",25, y);
        doc.text("DEPARTURE",75, y);
        doc.text("DR NUMBER",135, y);
        doc.text("CUSTOMER NAME",210, y);
        doc.text("ADDRESS",340, y);
        doc.text("RECEIVED BY",430,y);
        doc.text("SIGNATURE", 520,y);
        doc.moveDown(2);
        return doc;
    },
    // Draw the fixed RELEASING / RETURN signature blocks near the page
    // bottom (y 700-740); blanks are underscore runs for handwritten entries.
    pageFooter : function(doc,ticket){
        doc.font('Courier');
        doc.text("RELEASING:", 25,700);
        doc.text("________________", 100,700);
        doc.text("DATE/TIME", 120,710);
        doc.text("________________",250,700);
        doc.text("RELEASED BY", 260,710);
        doc.text("________________", 410,700);
        doc.text("RELEASED TO", 430,710);
        doc.text("RETURN:", 25,730);
        doc.text("________________", 100,730);
        doc.text("TIME", 140,740);
        doc.text("________________", 250,730);
        doc.text("RECEIVED BY", 260,740);
        doc.text("___________________________", 390,730);
        doc.text("ENTERED INTO SYSTEM BY/DATE", 390,740);
        return doc
    }
};
/**
 * Render a trip ticket to "ticket.pdf".
 *
 * @param ticket  line items ({arrival, departure, drno, customer_name,
 *                address}) plus the metadata fields (sino,
 *                delivery_created_by) used for the PDF info block.
 * @param result  optional node-style callback(err, filename).
 *                FIX: the original accepted this callback but never invoked
 *                it (callers' err/result logged undefined and completion
 *                could never be awaited); it now fires when the output
 *                stream finishes, or with the error if writing fails.
 */
module.exports.print = function(ticket,result){
    var doc = pdf.pageSetting({
        info:{
            Title:ticket.sino,
            Subject:ticket.sino,
            Author:ticket.delivery_created_by
        }});
    var filename = 'ticket'+'.pdf';
    // Keep a handle on the stream so completion/errors can be reported.
    var out = fs.createWriteStream(filename);
    if (typeof result === 'function') {
        out.on('finish', function(){ result(null, filename); });
        out.on('error', function(err){ result(err); });
    }
    doc.pipe(out);
    doc = pdf.pageHeader(doc,ticket);
    doc = pdf.pageFooter(doc,ticket);
    doc.y = 140;
    doc.fontSize(9);
    // One table row per entry; start a fresh page (with header/footer) when
    // the cursor reaches the footer area at y=640.
    // NOTE(review): for..in also visits non-row keys (e.g. "sino") when
    // ticket is a plain object — presumably ticket is an array of rows with
    // extra properties; verify against ticket.json.
    for(var i in ticket){
        var y = doc.y;
        doc.font('Courier');
        doc.text(ticket[i].arrival,25,y,{width:70});
        doc.text(ticket[i].departure,75,y,{width:80});
        doc.text(ticket[i].drno,130,y,{width:70});
        doc.text(ticket[i].customer_name,210,y,{width:100});
        doc.text(ticket[i].address,300,y,{width:150});
        doc.moveDown(0.8);
        if(doc.y >= 640){
            doc.addPage();
            doc = pdf.pageHeader(doc,ticket);
            doc = pdf.pageFooter(doc,ticket);
            doc.y = 140;
        }
    }
    // Finalize the PDF; the stream's 'finish' event triggers the callback.
    doc.end();
};
// Self-test: render the bundled sample ticket.json and log the outcome.
module.exports.print(ticket,function(err,result){
    console.log(err);
    console.log(result);
});
|
<reponame>codefacts/Elastic-Components
package tracker.model.merchandiser;
import tracker.model.BaseModel;
/**
* Created by sohan on 7/18/2017.
*/
/**
 * Key-name constants for a merchandiser location record: latitude,
 * longitude, and reported positional accuracy. The values are the literal
 * field-name strings used wherever this model is (de)serialised.
 */
public interface LocationModel extends BaseModel {
    String lat = "lat";
    String lng = "lng";
    String accuracy = "accuracy";
}
|
<gh_stars>0
// Express router for all user endpoints: registration, login/logout,
// avatar upload, and CRUD by id. Route registration order matters —
// Express matches top-down.
const { Router } = require('express');
const multer = require('multer');
const path = require('path');
const userController = require('./user.controller');

const userRouter = Router();

// MULTER
// Disk storage for uploaded avatars: files land in ./tmp under a
// timestamp-based name that keeps the original file extension.
const storage = multer.diskStorage({
    destination: 'tmp',
    filename: function (req, file, cb) {
        const ext = path.parse(file.originalname).ext;
        cb(null, Date.now() + ext);
    },
});
const upload = multer({ storage: storage });

// POST
// Create & Register
userRouter.post(
    '/register',
    userController.validateCreateUser,
    userController.createUser,
);
// Create avatar: store the file, minify it, then echo the file metadata.
userRouter.post(
    '/avatar',
    upload.single('avatar'),
    userController.minifyImage,
    (req, res, next) => {
        res.status(200).json(req.file);
    },
);
// Login
userRouter.post('/login', userController.validateSignIn, userController.signIn);
// Logout (token required via authorize middleware)
userRouter.post('/logout', userController.authorize, userController.logOut);

// GET
// Current user — registered before '/:id' so "current" is matched here
// rather than being captured as an id.
userRouter.get(
    '/current',
    userController.authorize,
    userController.getCurrentUser,
);
// All users
userRouter.get('/', userController.getUsers);
// User by id
userRouter.get('/:id', userController.validateId, userController.getUserById);
// Verify email (two path segments, so it does not collide with '/:id')
userRouter.get('/verify/:verificationToken', userController.verifyEmail);

// DELETE
userRouter.delete(
    '/:id',
    userController.validateId,
    userController.deleteUserById,
);

// UPDATE
// Update user
userRouter.put(
    '/:id',
    userController.validateId,
    userController.validateUpdateUser,
    userController.updateUserById,
);
// Update avatar: re-upload, minify, persist on the user, then confirm.
userRouter.patch(
    '/avatar',
    upload.single('avatar'),
    userController.minifyImage,
    userController.updateAvatar,
    (req, res, next) => {
        res.status(200).send({ message: 'Avatar successfully updated' });
    },
);

// EXPORT
module.exports = userRouter;
|
/**
 * Tiny collection wrapper around an array of arbitrary items.
 * Supports appending, removing the first matching occurrence, and
 * retrieving the live backing list.
 */
class ItemManager {
    constructor() {
        // Live backing array — getList() returns this exact object, not a copy.
        this.itemList = [];
    }

    /** Append `item` to the end of the list. */
    addItem(item) {
        this.itemList.push(item);
    }

    /** Remove the first occurrence of `item`; a no-op when absent. */
    removeItem(item) {
        const where = this.itemList.indexOf(item);
        if (where < 0) {
            return; // not present — nothing to do
        }
        this.itemList.splice(where, 1);
    }

    /** The live item array (caller mutations are visible to this manager). */
    getList() {
        return this.itemList;
    }
}

export default ItemManager;
<reponame>alterem/smartCityService<filename>scs-web/src/main/java/com/zhcs/controller/GasController.java
package com.zhcs.controller;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.stereotype.Controller;
import com.zhcs.context.PlatformContext;
import com.zhcs.entity.GasEntity;
import com.zhcs.service.GasService;
import com.zhcs.utils.BeanUtil;
import com.zhcs.utils.PageUtils;
import com.zhcs.utils.StringUtil;
import com.zhcs.utils.R;
//*****************************************************************************
/**
 * <p>Title: GasController</p>
 * <p>Description: fuel-consumption record management (list/detail/CRUD)</p>
 * <p>Copyright: Copyright (c) 2017</p>
 * <p>Company: Shenzhen Smart City Steward Information Technology Co., Ltd.</p>
 * @author Liu Xiaodong - Alter
 * @version v1.0 2017-02-23
 */
//*****************************************************************************
@Controller
@RequestMapping("gas")
public class GasController extends AbstractController {

    @Autowired
    private GasService gasService;

    // Serve the fuel-record management page template.
    @RequestMapping("/gas.html")
    public String list(){
        return "gas/gas.html";
    }

    /**
     * Paged list of fuel records, sortable (sidx/order) and optionally
     * filtered by a free-text condition and a starttime/endtime window.
     */
    @ResponseBody
    @RequestMapping("/list")
    @RequiresPermissions("gas:list")
    public R list(String sidx, String order, Integer page, Integer limit,String condition,String starttime,String endtime){
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("sidx", sidx);
        map.put("order", order);
        map.put("offset", (page - 1) * limit);
        map.put("limit", limit);
        map.put("condition", condition);
        map.put("starttime", starttime);
        map.put("endtime", endtime);
        // Query the list data
        // List<GasEntity> gasList = gasService.queryList(map);
        List<GasEntity> gasList = gasService.queryList1(map);
        int total = gasService.queryTotal(map);
        PageUtils pageUtil = new PageUtils(gasList, total, limit, page);
        return R.ok().put("page", pageUtil);
    }

    /**
     * Detail of a single fuel record by id.
     */
    @ResponseBody
    @RequestMapping("/info/{id}")
    @RequiresPermissions("gas:info")
    public R info(@PathVariable("id") Long id){
        GasEntity gas = gasService.queryObject(id);
        return R.ok().put("gas", gas);
    }

    /**
     * Save a new fuel record; creator/updater audit fields are stamped
     * with the current user.
     */
    @ResponseBody
    @RequestMapping("/save")
    @RequiresPermissions("gas:save")
    public R save(@RequestBody GasEntity gas){
        BeanUtil.fillCCUUD(gas, getUserId(), getUserId());
        gasService.save(gas);
        return R.ok();
    }

    /**
     * Update an existing fuel record; the updater audit field is stamped
     * with the current user.
     */
    @ResponseBody
    @RequestMapping("/update")
    @RequiresPermissions("gas:update")
    public R update(@RequestBody GasEntity gas){
        BeanUtil.fillCCUUD(gas, getUserId());
        gasService.update(gas);
        return R.ok();
    }

    /**
     * Delete a fuel record: the platform admin hard-deletes, while any
     * other user only soft-deletes by flipping status to "0".
     */
    @ResponseBody
    @RequestMapping("/delete/{id}")
    @RequiresPermissions("gas:delete")
    public R delete(@PathVariable("id") Long id){
        if(PlatformContext.getGoalbalContext("adminId", String.class).equals(StringUtil.valueOf(getUserId()))){
            gasService.delete(id);
        } else {
            GasEntity gas = new GasEntity();
            gas.setId(id);
            gas.setStatus("0");
            BeanUtil.fillCCUUD(gas, getUserId());
            gasService.update(gas);
        }
        return R.ok();
    }
}
|
# Lint the Lua sources when luacheck is installed; succeed (no-op) otherwise
# so environments without the linter don't fail the build.
# FIX: probe PATH with `command -v` instead of hard-coding two install
# locations — luacheck installed anywhere (e.g. via `luarocks --local`,
# /opt, a user prefix) is now found.
if command -v luacheck > /dev/null 2>&1; then
    luacheck lua_src/* --no-max-string-line-length --no-max-comment-line-length --no-max-line-length || exit 1
else
    echo 'No luacheck found, nothing to test.'
    exit 0
fi
|
<reponame>cjh14813573/springblog<gh_stars>0
import { Component, OnInit } from '@angular/core';
import { CommonResponse } from 'src/app/util/response/commonResponse';
import { WidgetService } from 'src/app/service/widget.service';
@Component({
    selector: 'app-widgets',
    templateUrl: './widgets.component.html',
    styleUrls: ['./widgets.component.css']
})
export class WidgetsComponent implements OnInit {
    // Raw widget payload for the template; set once the service responds.
    widgetsData;

    constructor(
        private widgetsService: WidgetService
    ) { }

    // FIX: the data fetch moved from the constructor into ngOnInit — the
    // Angular style guide keeps constructors side-effect free so the
    // component stays cheap to instantiate (e.g. in tests) and I/O starts
    // only after Angular finishes wiring the component.
    ngOnInit() {
        this.widgetsService.getWidgetData().subscribe((data: CommonResponse) => {
            // status === 1 marks a successful backend response.
            if (data.status === 1) {
                this.widgetsData = data.responseBody;
            }
        });
    }
}
|
#!/bin/bash
# Traffic_log.sh: script which setups all traffic record to pcap
#
# Copyright (c) 2021 Denis Svirepov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: Denis Svirepov svirepovden@gmail.com
# tshark -i any -b filesize:100000 -w /var/log/traffic/traffic.pcap
# (the line above documents the capture command the installed service runs)

# Install the capture helper and register it as a systemd service.
# FIX: abort on any failed step — enabling the service after a failed copy
# would leave systemd pointing at missing or stale files.
set -e
cp traffic_log.sh /bin/
chmod 755 /bin/traffic_log.sh
cp traffic_log.service /lib/systemd/system/
chmod 644 /lib/systemd/system/traffic_log.service
systemctl daemon-reload
systemctl enable --now traffic_log.service
|
# Skeleton resource classes for the covered-rate-centers API; every body is
# an intentional placeholder awaiting implementation.

class BaseResource:
    """Base class stub for API resources."""
    pass  # Placeholder for the base resource class

class CoveredRateCentersData:
    """Data-holder stub for covered rate center records."""
    pass  # Placeholder for the covered rate centers data class

class RateCenter:
    """Stub describing a single rate center."""
    pass  # Placeholder for the rate center class

# Path under which the covered-rate-centers collection is exposed.
XPATH_COVERED_RATE_CENTERS = "/coveredratecenters"

class CoveredRateCenters(BaseResource, CoveredRateCentersData):
    """Covered rate centers"""
    # Resource path used by the (future) transport layer.
    _xpath = XPATH_COVERED_RATE_CENTERS
    def __init__(self):
        # Initialize any necessary attributes or data structures
        pass
    def get_covered_rate_centers(self):
        # Implement method to retrieve covered rate centers data
        pass
    def update_covered_rate_center(self, rate_center_id, new_data):
        # Implement method to update a specific covered rate center
        pass
    def delete_covered_rate_center(self, rate_center_id):
        # Implement method to delete a specific covered rate center
        pass
    def add_covered_rate_center(self, new_data):
        # Implement method to add a new covered rate center
        pass
<gh_stars>1-10
// 11404. 플로이드
// 2019.05.22
// 플로이드 와샬 알고리즘, 그래프 이론
#include<iostream>
#include<algorithm>
using namespace std;
// BOJ 11404 "Floyd": all-pairs cheapest travel costs over n cities (n <= 100).
// d[u][v] holds the cheapest known cost u -> v; 0 means "no path yet"
// (edge costs are positive, so 0 never collides with a real distance).
int d[101][101];

int main()
{
    int n, m;
    cin >> n >> m;

    // Read the m directed edges; duplicate edges keep only the cheapest cost.
    for (int e = 0; e < m; e++)
    {
        int u, v, w;
        cin >> u >> v >> w;
        if (!d[u][v])
            d[u][v] = w;
        else
            d[u][v] = min(d[u][v], w);
    }

    // Floyd-Warshall: relax every pair through each intermediate city k.
    for (int k = 1; k <= n; k++)
    {
        for (int u = 1; u <= n; u++)
        {
            if (d[u][k] == 0)
                continue; // u cannot reach k, so k is useless as a via-point
            for (int v = 1; v <= n; v++)
            {
                if (d[k][v] == 0 || u == v)
                    continue; // no k->v leg, or trivial self-pair
                // Adopt the detour through k when it is the first known path
                // or cheaper than the current best.
                if (d[u][v] == 0 || d[u][v] > d[u][k] + d[k][v])
                    d[u][v] = d[u][k] + d[k][v];
            }
        }
    }

    // Emit the n x n cost matrix (0 where unreachable).
    for (int i = 1; i <= n; i++)
    {
        for (int j = 1; j <= n; j++)
            cout << d[i][j] << " ";
        cout << endl;
    }
    return 0;
}
|
#!/bin/sh
# Usage: name-of-this-file program arg1 arg2 argN
#
# Opens the given command in a new terminal window, then drops into an
# interactive bash. Trick: the script re-executes ITSELF inside
# x-terminal-emulator with no arguments; the command travels via the
# EXECUTEMEPLOX environment variable (a temp file when there are multiple
# arguments, so spaces survive the hand-off).
if [ -n "$1" ]; then
    # if there are arguments, put the contents in a file since arguments with spaces could be messed up
    if [ $# -gt 1 ]; then
        export IAMTEMPFILE=1
        export EXECUTEMEPLOX=$(mktemp)
        echo "$@" > "$EXECUTEMEPLOX"
    else
        unset IAMTEMPFILE
        export EXECUTEMEPLOX="$1"
    fi
    # Re-run this same script inside a fresh terminal; that second run takes
    # the else-branch below because it receives no positional arguments.
    x-terminal-emulator -e "$0"
else
    if [ -n "$EXECUTEMEPLOX" ]; then
        if [ -n "$IAMTEMPFILE" ]; then
            unset IAMTEMPFILE
            # note: make sure the executed program does not change $EXECUTEMEPLOX
            . "$EXECUTEMEPLOX"
            # and remove the file with the command
            rm -f "$EXECUTEMEPLOX"
        else
            # no temp file, just execute the command
            "$EXECUTEMEPLOX"
        fi
    fi
    # and replace the current shell with bash
    exec bash
fi
|
#! /bin/bash
# Source every helper under ../lib (relative to this script's real location),
# then run the Dockerfile check / image build / test / cleanup pipeline those
# helpers define.
DIR=$(dirname "$(readlink -f "$0")")
LIBDIR=$(dirname "${DIR}")/lib

# FIX: quote "$f" (and the glob's directory part) — unquoted, a helper path
# containing spaces is word-split and `.` receives multiple bogus arguments.
for f in "${LIBDIR}"/*.sh; do
    . "$f"
done

checkDockerfile
buildImage
runTests 'checkPHPVersion' '5.3'
doCleanup
endup
|
#!/bin/sh
#
# Make a clean build of Fabber
#
# Usage: build.sh [debug|release]

# Default to a debug build when no type is given.
# FIX: quote "$1" — with an unquoted empty argument, [ -z $1 ] collapses to
# [ -z ] and arguments containing spaces break the test entirely.
if [ -z "$1" ]
then
    TYPE=debug
else
    TYPE=$1
fi

ORIGDIR=$PWD
# $(...) instead of legacy backticks; quote in case of spaces in the path.
scriptdir=$(dirname "$0")

# Wipe and recreate the out-of-source build directory for this build type.
rm -rf "$scriptdir/../build_$TYPE"
mkdir "$scriptdir/../build_$TYPE"
cd "$scriptdir/../build_$TYPE"

cmake .. -DCMAKE_BUILD_TYPE=$TYPE
make

cd "$ORIGDIR"
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.tdb.base.block;
import java.util.Iterator ;
import java.util.function.BiConsumer;
import org.apache.jena.atlas.lib.Cache ;
import org.apache.jena.atlas.lib.CacheFactory ;
import org.slf4j.Logger ;
import org.slf4j.LoggerFactory ;
/** Caching block manager - this is an LRU cache */
public class BlockMgrCache extends BlockMgrSync
{
    // Actually, this is two caches: one for read blocks and one for write blocks.
    // The overridden public operations are sync'ed.
    // As sync is on "this", it also covers all the other operations via BlockMgrSync
    private static Logger log = LoggerFactory.getLogger(BlockMgrCache.class) ;
    // Read cache : always present.
    private final Cache<Long, Block> readCache ;

    // Delayed dirty writes. May be present, may not.
    private final Cache<Long, Block> writeCache ;

    public static boolean globalLogging = false ;   // Also enable the logging level.
    private boolean logging = false ;               // Also enable the logging level.
    // ---- stats
    long cacheReadHits = 0 ;
    long cacheMisses = 0 ;
    long cacheWriteHits = 0 ;

    /**
     * Wrap {@code blockMgr} with read/write caches; when both slot counts
     * are negative the manager is returned uncached.
     */
    static BlockMgr create(int readSlots, int writeSlots, final BlockMgr blockMgr)
    {
        if ( readSlots < 0 && writeSlots < 0 )
            return blockMgr ;
        return new BlockMgrCache(readSlots, writeSlots, blockMgr) ;
    }

    private BlockMgrCache(int readSlots, int writeSlots, final BlockMgr blockMgr)
    {
        super(blockMgr) ;
        // Caches are related so we can't use a Getter for cache management.
        if ( readSlots < -1 )
            readCache = CacheFactory.createNullCache() ;
        else
            readCache = CacheFactory.createCache(readSlots) ;
        if ( writeSlots <= 0 )
            writeCache = null ;
        else
        {
            writeCache = CacheFactory.createCache(writeSlots) ;
            // When the LRU write cache evicts a dirty block, it must be
            // flushed to the underlying manager or the write would be lost.
            writeCache.setDropHandler(new BiConsumer<Long, Block>(){
                @Override
                public void accept(Long id, Block block)
                {
                    // We're inside a synchronized operation at this point.
                    log("Cache spill: write block: %d", id) ;
                    if (block == null)
                    {
                        log.warn("Write cache: " + id + " dropping an entry that isn't there") ;
                        return ;
                    }
                    // Force the block to be written
                    // by sending it to the wrapped BlockMgr
                    BlockMgrCache.super.write(block) ;
                }
            }) ;
        }
    }

    // Pool?
//    @Override
//    public ByteBuffer allocateBuffer(int id)
//    {
//        super.allocateBuffer(id) ;
//    }

    /** Fetch a block for reading: read cache, then write cache, then the
     *  underlying manager (caching the result for future reads). */
    @Override
    synchronized
    public Block getRead(long id)
    {
        // A Block may be in the read cache or the write cache.
        // It can be just in the write cache because the read cache is finite.
        Block blk = readCache.getIfPresent(id) ;
        if ( blk != null )
        {
            cacheReadHits++ ;
            log("Hit(r->r) : %d", id) ;
            return blk ;
        }
        // A requested block may be in the other cache.
        // Writable blocks are readable.
        // readable blocks are not writeable (see below).
        if ( writeCache != null )
            // Might still be in the dirty blocks.
            // Leave in write cache
            blk = writeCache.getIfPresent(id) ;
        if ( blk != null )
        {
            cacheWriteHits++ ;
            log("Hit(r->w) : %d",id) ;
            return blk ;
        }
        cacheMisses++ ;
        log("Miss/r: %d", id) ;
        blk = super.getRead(id) ;
        readCache.put(id, blk) ;
        return blk ;
    }

    @Override
    synchronized
    public Block getReadIterator(long id)
    {
        // And don't pass down "iterator" calls.
        return getRead(id) ;
    }

    /** Fetch a block for writing: write cache first, then promote a cached
     *  read block, then fall through to the underlying manager. */
    @Override
    synchronized
    public Block getWrite(long _id)
    {
        Long id = _id;
        Block blk = null ;
        if ( writeCache != null )
            blk = writeCache.getIfPresent(id) ;
        if ( blk != null )
        {
            cacheWriteHits++ ;
            log("Hit(w->w) : %d", id) ;
            return blk ;
        }
        // blk is null.
        // A requested block may be in the other cache. Promote it.
        if ( readCache.containsKey(id) )
        {
            blk = readCache.getIfPresent(id) ;
            cacheReadHits++ ;
            log("Hit(w->r) : %d", id) ;
            blk = promote(blk) ;
            return blk ;
        }
        // Did not find.
        cacheMisses++ ;
        log("Miss/w: %d", id) ;
        // Pass operation to wrapper.
        blk = super.getWrite(id);
        if ( writeCache != null )
            writeCache.put(id, blk) ;
        return blk ;
    }

    /** Promote a read block to a writable one: expel it from the read cache,
     *  promote in the underlying manager, and track the result as dirty. */
    @Override
    synchronized
    public Block promote(Block block)
    {
        Long id = block.getId() ;
        readCache.remove(id) ;
        Block block2 = super.promote(block) ;
        if ( writeCache != null )
            writeCache.put(id, block2) ;
        // FIX: return the promoted block, not the original. The write cache
        // tracks block2; if the underlying manager returns a different object
        // on promote, handing callers the stale 'block' would let them mutate
        // an object the cache (and its eventual write-back) no longer sees.
        return block2 ;
    }

    @Override
    synchronized
    public void write(Block block)
    {
        writeCache(block) ;
        super.write(block) ;
    }

    @Override
    synchronized
    public void overwrite(Block block)
    {
        Long id = block.getId() ;
        // It can be a read block (by the transaction), now being written for real (enacting a transaction).
        super.overwrite(block) ;
        // Keep read cache up-to-date.
        // Must at least expel the read block (which is not the overwrite block).
        readCache.put(id, block) ;
    }

    /** Record a written block in the write cache (when one is configured). */
    private void writeCache(Block block)
    {
        Long id = block.getId() ;
        log("WriteCache : %d", id) ;
        // Should not be in the read cache due to a getWrite earlier.
        if ( readCache.containsKey(id) )
            log.warn("write: Block in the read cache") ;
        if ( writeCache != null )
        {
            writeCache.put(id, block) ;
            return ;
        }
    }

    /** Release a block: drop it from both caches and free it underneath. */
    @Override
    synchronized
    public void free(Block block)
    {
        Long id = block.getId() ;
        log("Free : %d", id) ;
        if ( readCache.containsKey(id) )
        {
            log.warn("Freeing block from read cache") ;
            readCache.remove(id) ;
        }
        if ( writeCache != null )
            writeCache.remove(id) ;
        super.free(block) ;
    }

    @Override
    synchronized
    public void sync()
    {
        _sync(false) ;
    }

    @Override
    synchronized
    public void syncForce()
    {
        _sync(true) ;
    }

    @Override
    synchronized
    public void close()
    {
        if ( writeCache != null )
            log("close ("+writeCache.size()+" blocks)") ;
        syncFlush() ;
        super.close() ;
    }

    @Override
    public String toString()
    {
        return "Cache:"+super.blockMgr.toString() ;
    }

    /** Formatted debug logging, gated on the per-instance/global flags. */
    private void log(String fmt, Object... args)
    {
        if ( ! logging && ! globalLogging ) return ;
        String msg = String.format(fmt, args) ;
        if ( getLabel() != null )
            msg = getLabel()+" : "+msg ;
        log.debug(msg) ;
    }

    /** Flush dirty blocks, then sync the underlying manager (force = fsync). */
    private void _sync(boolean force)
    {
        if ( true )
        {
            // Always emit the hit/miss statistics line on sync.
            String x = "" ;
            if ( getLabel() != null )
                x = getLabel()+" : ";
            log("%sH=%d, M=%d, W=%d", x, cacheReadHits, cacheMisses, cacheWriteHits) ;
        }
        if ( writeCache != null )
            log("sync (%d blocks)", writeCache.size()) ;
        else
            log("sync") ;
        boolean somethingWritten = syncFlush() ;
        if ( force )
        {
            log("syncForce underlying BlockMgr") ;
            super.syncForce() ;
        }
        else if ( somethingWritten )
        {
            log("sync underlying BlockMgr") ;
            super.sync() ;
        }
        else
            log("Empty sync") ;
    }

    /** Push every dirty block down to the wrapped manager; returns whether
     *  anything was written (and hence an underlying sync was issued). */
    private boolean syncFlush()
    {
        if ( writeCache == null ) return false ;
        boolean didSync = false ;
        log("Flush (write cache)") ;
        long N = writeCache.size() ;
        Long[] ids = new Long[(int)N] ;
        // Single writer (sync is a write operation MRSW)
        // Iterating is safe.
        Iterator<Long> iter = writeCache.keys() ;
        if ( iter.hasNext() )
            didSync = true ;
        // Need to get all then delete else concurrent modification exception.
        for ( int i = 0 ; iter.hasNext() ; i++ )
            ids[i] = iter.next() ;
        for ( int i = 0 ; i < N ; i++ )
        {
            Long id = ids[i] ;
            expelEntry(id) ;
        }
        if ( didSync )
            super.sync() ;
        return didSync ;
    }

    // Write out when flushed.
    // Do not call from drop handler.
    private void expelEntry(Long id)
    {
        Block block = writeCache.getIfPresent(id) ;
        if ( block == null )
        {
            log.warn("Write cache: "+id+" expelling entry that isn't there") ;
            return ;
        }
        log("Expel (write cache): %d", id) ;
        // This pushes the block to the BlockMgr being cached.
        super.write(block) ;
        writeCache.remove(id) ;
        // Move it into the readCache because it's often read after writing
        // and the read cache is often larger.
        readCache.put(id, block) ;
    }
}
|
package com.netshell.test.java;
import com.netshell.test.java.jmustache.JMustacheCollectionSizeTest;
/**
* Created by ASHEKHA
* on 4/30/2017.
*/
/**
 * Entry point that exercises a single {@code Test} implementation;
 * currently runs the JMustache collection-size experiment.
 */
public class JavaFeatureTest {
    public static void main(String[] args) throws Exception {
        // Swap in a different Test implementation here to run other experiments.
        Test test = new JMustacheCollectionSizeTest();
        test.execute();
    }
}
|
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Orbbec 3D
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
#ifndef MOCK_DEVICE_H
#define MOCK_DEVICE_H

#include <memory>
#include "astra_device.hpp"
#include "astra_sensor_info.hpp"

using astra::devices::device_status;
using astra::devices::device_status_value;

namespace orbbec { namespace mocks {

    // Test double for astra::devices::device: overrides the lifecycle hooks
    // so device behaviour can be exercised without real Orbbec hardware.
    class mock_device : public astra::devices::device
    {
    public:
        mock_device();
        virtual ~mock_device();

    private:
        // Lifecycle hooks invoked by the base device class.
        virtual device_status on_initialize() override;
        virtual device_status on_connect() override;
        virtual device_status on_disconnect() override;
    };
}}

#endif /* MOCK_DEVICE_H */
|
#!/bin/bash
# File: 591-dns-bind-openpgpkey.sh
# Title: Create an OPENPGPKEY resource record
# Description:
#   Interactively builds an OPENPGPKEY zone-database fragment for ISC Bind9
#   by exporting a local GnuPG key in DANE format and $INCLUDE-ing it into
#   the zone's primary database file under the build root.
#
DEFAULT_ZONE_NAME="example.test"

echo "Create a zone database containing OPENPGPKEY records for ISC Bind9"
echo

# Provides flex_ckdir/flex_chown/flex_chmod and the *_DIRSPEC path settings.
source ./maintainer-dns-isc.sh

# An absolute BUILDROOT means writing straight into the live filesystem;
# otherwise everything is staged under the relative build tree.
if [ "${BUILDROOT:0:1}" == '/' ]; then
    FILE_SETTING_PERFORM=true
    echo "Absolute build"
else
    FILE_SETTING_PERFORM=false
    readonly FILE_SETTINGS_FILESPEC="${BUILDROOT}/file-zones-named${INSTANCE_NAMED_CONF_FILEPART_SUFFIX}.sh"
    mkdir -p "$BUILDROOT"
    mkdir -p "${BUILDROOT}${CHROOT_DIR}$ETC_DIRSPEC"
    mkdir -p "${BUILDROOT}${CHROOT_DIR}$VAR_DIRSPEC"
    mkdir -p "${BUILDROOT}${CHROOT_DIR}$VAR_LIB_DIRSPEC"
fi
echo

# Ensure both the shared and instance-specific Bind directories exist.
flex_ckdir "$ETC_NAMED_DIRSPEC"
flex_ckdir "$VAR_LIB_NAMED_DIRSPEC"
flex_ckdir "$INSTANCE_ETC_NAMED_DIRSPEC"
flex_ckdir "$INSTANCE_VAR_LIB_NAMED_DIRSPEC"

# Ask the user for the zone name (in form of a domain name)
if [ -n "$DEFAULT_ZONE_NAME" ]; then
    read_opt="-i${DEFAULT_ZONE_NAME}"
fi
read -rp "Enter in name of domain: " -e ${read_opt}
ZONE_NAME="$REPLY"
REQUESTED_DOMAIN_NAME="$REPLY"
# ZONE_NAME_LEN="${#REPLY}"
# if [ "${ZONE_NAME:${ZONE_NAME_LEN}}" != '.' ]; then
#   ZONE_NAME+='.'
# fi

# Derive the primary-zone config and database file locations
# (pz.<zone> under etc, db.<zone> under var/lib).
echo "Primary (or Master)"
ZONE_TYPE_FILETYPE="pz"
ZONE_TYPE_NAME="primary"
ZONE_TYPE_NAME_C="Primary"

ZONE_CONF_FILENAME="${ZONE_TYPE_FILETYPE}.${ZONE_NAME}"
ZONE_CONF_DIRSPEC="${ETC_NAMED_DIRSPEC}"
ZONE_CONF_FILESPEC="${ETC_NAMED_DIRSPEC}/${ZONE_CONF_FILENAME}"
INSTANCE_ZONE_CONF_DIRSPEC="${INSTANCE_ETC_NAMED_DIRSPEC}"
flex_ckdir "$INSTANCE_ZONE_CONF_DIRSPEC"
INSTANCE_ZONE_CONF_FILESPEC="${INSTANCE_ZONE_CONF_DIRSPEC}/${ZONE_CONF_FILENAME}"

ZONE_DB_FILENAME="db.${ZONE_NAME}"
ZONE_DB_DIRSPEC="${VAR_LIB_NAMED_DIRSPEC}"
ZONE_DB_FILESPEC="${ZONE_DB_DIRSPEC}/${ZONE_DB_FILENAME}"
INSTANCE_ZONE_DB_DIRSPEC="${INSTANCE_VAR_LIB_NAMED_DIRSPEC}/${ZONE_TYPE_NAME}"
flex_ckdir "$INSTANCE_ZONE_DB_DIRSPEC"
INSTANCE_ZONE_DB_FILESPEC="${INSTANCE_ZONE_DB_DIRSPEC}/${ZONE_DB_FILENAME}"
INSTANCE_ZONE_KEYS_DIRSPEC="${INSTANCE_VAR_LIB_NAMED_DIRSPEC}/keys"

# Separate fragment file for the OPENPGPKEY records (db.openpgp.<zone>).
ZONE_DB_OPENPGPKEY_FILENAME="db.openpgp.${ZONE_NAME}"
ZONE_DB_OPENPGPKEY_DIRSPEC="${VAR_LIB_NAMED_DIRSPEC}"
ZONE_DB_OPENPGPKEY_FILESPEC="${ZONE_DB_OPENPGPKEY_DIRSPEC}/${ZONE_DB_OPENPGPKEY_FILENAME}"
INSTANCE_ZONE_DB_OPENPGPKEY_DIRSPEC="${INSTANCE_VAR_LIB_NAMED_DIRSPEC}/$ZONE_TYPE_NAME"
INSTANCE_ZONE_DB_OPENPGPKEY_FILESPEC="${INSTANCE_ZONE_DB_OPENPGPKEY_DIRSPEC}/$ZONE_DB_OPENPGPKEY_FILENAME"

# check that this host is different than the requested domain's name server
# - obtain this host info
THIS_HOSTNAME="$(hostname -f)"
THIS_DOMAIN="$(hostname -d)"

# Grab an email address to create OPENPGPKEY DNS records
read -rp "Enter email address: "
EMAIL_ADDR="$REPLY"
EMAIL_LOCALPART="$(echo "$EMAIL_ADDR"|awk -F@ '{print $1}')"
# Do sha1sum on localpart of email, cut the first 26-char in
# NOTE(review): the command actually uses sha256sum and keeps the full
# digest — confirm which hash/truncation the OPENPGPKEY owner name needs.
EMAIL_SHA1SUM="$(echo -n $EMAIL_LOCALPART | sha256sum | awk '{print $1}')"

# - obtain requested domain's name server (strip a trailing dot if present)
OPENPGPKEY_DOMAIN_NS="$(dig +short $REQUESTED_DOMAIN_NAME SOA | awk '{print $1}')"
if [ "${OPENPGPKEY_DOMAIN_NS: -1}" != '.' ]; then
    OPENPGPKEY_DOMAIN_NS="${OPENPGPKEY_DOMAIN_NS:0:-1}"
fi

# Create the zone configuration extension file so other scripts
# can pile on more statements and its value settings
echo "Creating ${BUILDROOT}${CHROOT_DIR}$INSTANCE_ZONE_DB_OPENPGPKEY_FILESPEC ..."
# Export the key in DANE (OPENPGPKEY RR) format; stderr is captured so a
# "no key" warning can be detected below.
gpg --export-options export-dane --export "$EMAIL_ADDR" \
    2>/tmp/opengpgkey.cmd \
    > "${BUILDROOT}${CHROOT_DIR}$INSTANCE_ZONE_DB_OPENPGPKEY_FILESPEC"
retsts=$?
WARN_FOUND="$(grep -c -i "warning:" /tmp/opengpgkey.cmd)"
if [ $retsts -ne 0 ] || [ $WARN_FOUND -ne 0 ]; then
    echo "No PGP found for $EMAIL_ADDR"
    echo "Aborted."
    exit $retsts
fi
flex_chown "${USER_NAME}:$GROUP_NAME" "$INSTANCE_ZONE_DB_OPENPGPKEY_FILESPEC"
flex_chmod "0640" "$INSTANCE_ZONE_DB_OPENPGPKEY_FILESPEC"
echo

# Splice the fragment into the zone's main database via $INCLUDE.
echo "\$ORIGIN ${REQUESTED_DOMAIN_NAME}" >> ${BUILDROOT}${CHROOT_DIR}$INSTANCE_ZONE_DB_FILESPEC
echo "\$INCLUDE \"${INSTANCE_ZONE_DB_OPENPGPKEY_FILESPEC}\"" >> ${BUILDROOT}${CHROOT_DIR}$INSTANCE_ZONE_DB_FILESPEC
echo "Done."
|
<gh_stars>0
import abc
from hms_workflow_platform.core.services.builder import Builder
from hms_workflow_platform.core.common.utils.redis_manager import *
from datetime import datetime
from pytz import timezone
from hms_workflow_platform.core.common.utils.logger.logger import Logger
class BaseService(metaclass=abc.ABCMeta):
    """Abstract base for workflow services: wires up the Redis manager,
    logging, and lazily-built query/template objects via the Builder."""

    def __init__(self):
        self._redis_m = Redis_manager()
        self.logger = Logger().get_logger()
        # Populated by prepareQuery()/prepareTemplate(); None until then.
        self._query = None
        self._template = None

    def prepareQuery(self, his, site):
        # Build the HIS/site-specific query object for this service.
        _builder = Builder(self)
        self._query = _builder.getQuery(his, site)

    def prepareTemplate(self, his):
        # Build the HIS-specific template object for this service.
        _builder = Builder(self)
        self._template = _builder.getTemplate(his)

    def done_list(self, data_list):
        """Return the keys (suffixed "ZZZ") not yet present in Redis; each
        newly-seen key is stamped with the current Bangkok time and a
        60-second expiry.

        NOTE(review): the "ZZZ" suffix appears to be a done-marker
        convention shared with the producers of these keys — confirm.
        """
        data_not_exists = []
        for key in data_list:
            if self._redis_m.r_exists(key + "ZZZ") == 0:
                data_not_exists.append(key + "ZZZ")
                date_now = datetime.now(timezone('Asia/Bangkok'))
                date_str = date_now.strftime('%Y-%m-%d %H:%M:%S')
                self._redis_m.r_setex(key + "ZZZ", 60, date_str)
        return data_not_exists
|
<reponame>CodingExpertNeeraj/full-stack-development<gh_stars>1-10
/**
 * Demo driver: builds a PujaOrganization for a fixed date and attendee
 * count, then sends the invitations.
 */
public class NeerajOrganizer {
    public static void main(String[] args) {
        // "26/07/2022" is the event date and 200 presumably the invitee
        // count — confirm against PujaOrganization's constructor contract.
        PujaOrganization myPuja = new PujaOrganization("26/07/2022", 200);
        myPuja.pujaInvite();
    }
}
#!/bin/bash
# Compute-node bootstrap: free up the Azure resource disk, NFS-mount /home
# and the shared scratch space from the head node, and grant the HPC user
# passwordless sudo.
#
# Args: $1 = head-node hostname/IP, $2 = HPC user name.
# fail on any error
set -e
HEADNODE=$1
USER=$2

# Repoint the waagent resource-disk mount so /mnt/resource is free to hold
# the NFS scratch share exported by the head node.
sed -i 's/^ResourceDisk.MountPoint=\/mnt\/resource$/ResourceDisk.MountPoint=\/mnt\/local_resource/g' /etc/waagent.conf
umount /mnt/resource
mkdir -p /mnt/resource/scratch

# Persist the two NFS mounts from the head node.
cat << EOF >> /etc/fstab
$HEADNODE:/home /home nfs defaults 0 0
$HEADNODE:/mnt/resource/scratch /mnt/resource/scratch nfs defaults 0 0
EOF

# Retry until the package repo is reachable — the node may boot before
# networking is fully up.
until yum install -y -q nfs-utils
do
    sleep 10
done

# Allow NFS-backed home directories under SELinux, then mount everything.
setsebool -P use_nfs_home_dirs 1
mount -a

# Don't require password for HPC user sudo
echo "$USER ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
|
#!/bin/bash
# Configure autofs so LDAP users' home directories are auto-mounted over
# NFS from home-directories.nfs-server.local on first access.
echo '##########################################################################'
echo '### About to run setup-automounting-of-home-directories.sh script ########'
echo '##########################################################################'
yum install -y autofs
# Route everything under /home/ldapusers through the wildcard map below.
echo "/home/ldapusers /etc/autofs/nfs/automount-home-directories.conf" >> /etc/auto.master
mkdir /home/ldapusers
mkdir -p /etc/autofs/nfs
# Wildcard entry: any username maps to the matching export on the NFS server.
echo "* -fstype=nfs,rw home-directories.nfs-server.local:/nfs/home-directories/&" > /etc/autofs/nfs/automount-home-directories.conf
systemctl start autofs
# Run evaluator/eval.py once per entry in the given directory.
# Usage: <script> <type> <dir>
type=$1
dir=$2

# FIX: iterate the glob instead of parsing `ls` output — `for x in $(ls ..)`
# word-splits and breaks on names with spaces. The glob yields the
# directory-prefixed path directly, which also removes the original's
# special case for a trailing slash on $dir.
# (The original trailing "wait" was a no-op: nothing ran in the background.)
for step in "$dir"/*; do
    [ -e "$step" ] || continue   # empty directory: glob stays literal, skip
    python3 evaluator/eval.py --step "$(pwd)/$step" --type "$type"
done
<reponame>liornoy/web-server<filename>web-server/Client.cpp
#include "Client.h"

// Implementation of web_server::Client: one connected socket plus FIFO
// queues of pending requests and responses awaiting transmission.
namespace web_server {

    Client::Client(SOCKET socketID, enum RecvMod recvMod){
        this->recvMod = recvMod;
        this->socketID = socketID;
        // Record construction time as the initial "last receive" stamp so
        // idle-timeout checks have a baseline.
        time(&this->lastRecvTime);
    }

    // getLastRecvTime returns the lastRecvTime member.
    time_t Client::getLastRecvTime() {
        return lastRecvTime;
    }

    // sendResponses sends all the responses that are ready to be sent back.
    // returns true if an error occurred.
    bool Client::sendResponses(){
        bool err = false;
        while (!readyResponses.empty() && err == false) {
            // NOTE(review): the response is popped before send() is known to
            // succeed, so a failed send drops that response permanently —
            // confirm this best-effort behaviour is intended.
            Response response = readyResponses.front();
            readyResponses.pop();
            const char* msg = response.toString();
            int bytesSent = send(socketID, msg, (int)strlen(msg), 0);
            if (SOCKET_ERROR == bytesSent) {
                err = true;
            }
        }
        return err;
    }

    // hasPendingRequests returns true if there are requests pending.
    bool Client::hasPendingRequests() {
        return pendingRequests.size() > 0;
    }

    // hasReadyResponses returns true if there are responses ready.
    bool Client::hasReadyResponses() {
        return readyResponses.size() > 0;
    }

    // getRecvMod returns the recvMod member.
    enum RecvMod Client::getRecvMod() {
        return recvMod;
    }

    // getPendingRequests returns the pendingRequests member (by value).
    queue<Request> Client::getPendingRequests() {
        return pendingRequests;
    }

    // getReadyResponses returns the readyResponses member (by value).
    queue<Response> Client::getReadyResponses() {
        return readyResponses;
    }

    // Destructor closes the client's socket.
    Client::~Client() {
        closesocket(socketID);
    }

    // getSocketID returns the socketID member.
    SOCKET Client::getSocketID() {
        return socketID;
    }
}
|
<reponame>lucasca2/lucas
import React from 'react';
import { Wrapper, Content, Logo, BackgroundImage, TopHeader, BottomHeader } from "./styles";
import Lucas from './lucas';
import Menu from './components/Menu';
import Title from './components/Title';
import TitleAndSubtitle from 'components/TitleAndSubtitle';
// Landing-page header: layered hero illustration with the logo/menu bar
// across the top and the page title plus "about me" teaser at the bottom.
const Header: React.FC = () => {
    return (
        <Wrapper>
            <Content>
                <BackgroundImage>
                    <Lucas />
                </BackgroundImage>
                <TopHeader>
                    <Logo />
                    <Menu />
                </TopHeader>
                <BottomHeader>
                    <Title />
                    {/* Portuguese copy: "About me" / "Full Stack developer
                        and UI/UX designer, from Tubarão - SC" */}
                    <TitleAndSubtitle
                        title={"Sobre mim"}
                        subtitle={`Desenvolvedor Full Stack e\nDesigner UI/UX, de Tubarão - SC`}
                        links={[{ title: "Mais sobre mim →" }]}
                    />
                </BottomHeader>
            </Content>
        </Wrapper>
    )
};

export default Header;
|
#!/bin/bash
# Launch CIFAR-10 training for resnet32 on GPU inside the torch16 conda
# environment, saving checkpoints under <script-dir>/<arch>_<platform>/<timestamp>.
source activate torch16

# FIX: $(...) instead of legacy backticks, and quoted expansions so the
# script still works when its path contains spaces.
currentDir=$(cd "$(dirname "$0")"; pwd)
currtime=$(date +%Y%m%d%H%M%S)

arch="resnet32"
platform="gpu"
# Timestamped directory keeps successive runs from clobbering each other.
save_dir="${currentDir}/${arch}_${platform}/${currtime}"
name="cifar10-${arch}"

echo "save dir:${save_dir}"
echo "running ${name}"

python3 -u train.py --arch=${arch} \
    --platform=${platform} \
    --amp="no" \
    --workers=32 \
    --data="~/Datasets/CIFAR10/" \
    --save-dir="${save_dir}"
"""
Create a 'Person' class with 3 fields (name, age, and city) and 2 methods (get_info and set_info)
"""
class Person:
    """A person described by a name, an age, and a home city."""

    def __init__(self, name, age, city):
        self.name = name
        self.age = age
        self.city = city

    def get_info(self):
        """Print a one-line summary of this person's details."""
        summary = f"Name: {self.name}, Age: {self.age}, City: {self.city}."
        print(summary)

    def set_info(self, name, age, city):
        """Replace all three fields at once."""
        self.name, self.age, self.city = name, age, city
if __name__ == '__main__':
    # Smoke test: build a sample person and print their details.
    person1 = Person('John', 30, 'New York')
    person1.get_info()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.